comment
stringlengths
1
45k
method_body
stringlengths
23
281k
target_code
stringlengths
0
5.16k
method_body_after
stringlengths
12
281k
context_before
stringlengths
8
543k
context_after
stringlengths
8
543k
This would be more readable with a `switch` statement
private void initializeCryptoClients() { if (localKeyCryptographyClient != null) { return; } if (key.get().getKeyType().equals(RSA) || key.get().getKeyType().equals(RSA_HSM)) { localKeyCryptographyClient = new RsaKeyCryptographyClient(key.get(), cryptographyServiceClient); } else if (key.get().getKeyType().equals(EC) || key.get().getKeyType().equals(EC_HSM)) { localKeyCryptographyClient = new EcKeyCryptographyClient(key.get(), cryptographyServiceClient); } else if (key.get().getKeyType().equals(OCT)) { localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key.get(), cryptographyServiceClient); } else { throw logger.logExceptionAsError(new IllegalArgumentException(String.format( "The Json Web Key Type: %s is not supported.", key.get().getKeyType().toString()))); } }
"The Json Web Key Type: %s is not supported.", key.get().getKeyType().toString())));
private void initializeCryptoClients() { if (localKeyCryptographyClient != null) { return; } if (key.getKeyType().equals(RSA) || key.getKeyType().equals(RSA_HSM)) { localKeyCryptographyClient = new RsaKeyCryptographyClient(key, cryptographyServiceClient); } else if (key.getKeyType().equals(EC) || key.getKeyType().equals(EC_HSM)) { localKeyCryptographyClient = new EcKeyCryptographyClient(key, cryptographyServiceClient); } else if (key.getKeyType().equals(OCT)) { localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key, cryptographyServiceClient); } else { throw logger.logExceptionAsError(new IllegalArgumentException(String.format( "The Json Web Key Type: %s is not supported.", key.getKeyType().toString()))); } }
class CryptographyAsyncClient { static final String KEY_VAULT_SCOPE = "https: static final String SECRETS_COLLECTION = "secrets"; AtomicReference<JsonWebKey> key; private final CryptographyService service; private CryptographyServiceClient cryptographyServiceClient; private LocalKeyCryptographyClient localKeyCryptographyClient; private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class); private String keyCollection; private final String keyId; /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param key the key to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) { Objects.requireNonNull(key, "The key vault key is required."); JsonWebKey jsonWebKey = key.getKey(); Objects.requireNonNull(jsonWebKey, "The Json web key is required."); if (!jsonWebKey.isValid()) { throw new IllegalArgumentException("Json Web Key is not valid"); } if (jsonWebKey.getKeyOps() == null) { throw new IllegalArgumentException("Json Web Key's key operations property is not configured"); } if (key.getKeyType() == null) { throw new IllegalArgumentException("Json Web Key's key type property is not configured"); } this.key = new AtomicReference<>(jsonWebKey); this.keyId = key.getId(); service = RestProxy.create(CryptographyService.class, pipeline); if (!Strings.isNullOrEmpty(key.getId())) { unpackAndValidateId(key.getId()); cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service); } else { cryptographyServiceClient = null; } initializeCryptoClients(); } /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param keyId THe Azure Key vault key identifier to use for cryptography operations. 
* @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) { unpackAndValidateId(keyId); this.keyId = keyId; service = RestProxy.create(CryptographyService.class, pipeline); cryptographyServiceClient = new CryptographyServiceClient(keyId, service); this.key = null; } Mono<String> getKeyId() { return Mono.defer(() -> Mono.just(keyId)); } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse} * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link KeyVaultKey key}. * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<KeyVaultKey>> getKeyWithResponse() { try { return withContext(context -> getKeyWithResponse(context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey} * * @return A {@link Mono} containing the requested {@link KeyVaultKey key}. 
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<KeyVaultKey> getKey() { try { return getKeyWithResponse().flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) { return cryptographyServiceClient.getKey(context); } Mono<JsonWebKey> getSecretKey() { try { return withContext(context -> cryptographyServiceClient.getSecretKey(context)).flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. * The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public * portion of the key is used for encryption. This operation requires the keys/encrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the * specified {@code plaintext}. Possible values for assymetric keys include: * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when * a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. 
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult * contains the encrypted content. * @throws ResourceNotFoundException if the key cannot be found for encryption. * @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code plainText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) { try { return withContext(context -> encrypt(algorithm, plaintext, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.encrypt(algorithm, plaintext, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.ENCRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Encrypt Operation is missing " + "permission/not supported for key with id %s", key.get().getId())))); } return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key.get()); }); } /** * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a * single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to * be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the * keys/decrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the * specified encrypted content. 
Possible values for assymetric keys include: * {@link EncryptionAlgorithm * EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @return A {@link Mono} containing the decrypted blob. * @throws ResourceNotFoundException if the key cannot be found for decryption. * @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) { try { return withContext(context -> decrypt(algorithm, cipherText, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.decrypt(algorithm, cipherText, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.DECRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for " + "key with id %s", key.get().getId())))); } return localKeyCryptographyClient.decryptAsync(algorithm, 
cipherText, context, key.get()); }); } /** * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the * signature from the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Sings the digest. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code digest} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) { try { return withContext(context -> sign(algorithm, digest, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.sign(algorithm, digest, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", key.get().getId())))); } return localKeyCryptographyClient.signAsync(algorithm, digest, context, key.get()); }); } /** * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric * keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation * requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @param signature The signature to be verified. 
* @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) { try { return withContext(context -> verify(algorithm, digest, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verify(algorithm, digest, signature, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Verify Operation is not allowed for " + "key with id %s", key.get().getId())))); } return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key.get()); }); } /** * Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified * key content. Possible values include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Wraps the key content. 
Subscribes to the call asynchronously and prints out the wrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param key The key content to be wrapped * @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult * key} contains the wrapped key result. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code key} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) { try { return withContext(context -> wrapKey(algorithm, key, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(key, "Key content to be wrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.wrapKey(algorithm, key, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for " + "key with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key.get()); }); } /** * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is * the reverse of the wrap operation. * The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey * permission. 
* * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the * specified encrypted key content. Possible values for asymmetric keys include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * Possible values for symmetric keys include: {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param encryptedKey The encrypted key content to unwrap. * @return A {@link Mono} containing a the unwrapped key content. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) { try { return withContext(context -> unwrapKey(algorithm, encryptedKey, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed " + "for key with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key.get()); }); } /** * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric * and symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest. * Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData * * @param algorithm The algorithm to use for signing. * @param data The content from which signature is to be created. 
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code data} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) { try { return withContext(context -> signData(algorithm, data, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.signData(algorithm, data, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key.get()); }); } /** * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric * keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires * the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the raw data. 
Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData * * @param algorithm The algorithm to use for signing. * @param data The raw content against which signature is to be verified. * @param signature The signature to be verified. * @return The {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) { try { return withContext(context -> verifyData(algorithm, data, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verifyData(algorithm, data, signature, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format( "Verify Operation is not allowed for key with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key.get()); }); } private void unpackAndValidateId(String keyId) { if (CoreUtils.isNullOrEmpty(keyId)) { throw 
logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid")); } try { URL url = new URL(keyId); String[] tokens = url.getPath().split("/"); String endpoint = url.getProtocol() + ": String keyName = (tokens.length >= 3 ? tokens[2] : null); String version = (tokens.length >= 4 ? tokens[3] : null); this.keyCollection = (tokens.length >= 2 ? tokens[1] : null); if (Strings.isNullOrEmpty(endpoint)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid")); } else if (Strings.isNullOrEmpty(keyName)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid")); } else if (Strings.isNullOrEmpty(version)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid")); } } catch (MalformedURLException e) { throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e)); } } private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) { return operations.contains(keyOperation); } private Mono<Boolean> ensureValidKeyAvailable() { boolean keyAvailable = !(this.key == null && keyCollection != null); if (!keyAvailable) { if (keyCollection.equals(SECRETS_COLLECTION)) { return getSecretKey().flatMap(jwk -> { this.key = new AtomicReference<>(jwk); initializeCryptoClients(); return Mono.just(this.key.get().isValid()); }); } else { return getKey().flatMap(kvKey -> { this.key = new AtomicReference<>(kvKey.getKey()); initializeCryptoClients(); return Mono.just(key.get().isValid()); }); } } else { return Mono.defer(() -> Mono.just(true)); } } CryptographyServiceClient getCryptographyServiceClient() { return cryptographyServiceClient; } void setCryptographyServiceClient(CryptographyServiceClient serviceClient) { this.cryptographyServiceClient = serviceClient; } }
class CryptographyAsyncClient { static final String KEY_VAULT_SCOPE = "https: static final String SECRETS_COLLECTION = "secrets"; JsonWebKey key; private final CryptographyService service; private CryptographyServiceClient cryptographyServiceClient; private LocalKeyCryptographyClient localKeyCryptographyClient; private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class); private String keyCollection; private final String keyId; /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param key the key to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) { Objects.requireNonNull(key, "The key vault key is required."); JsonWebKey jsonWebKey = key.getKey(); Objects.requireNonNull(jsonWebKey, "The Json web key is required."); if (!jsonWebKey.isValid()) { throw new IllegalArgumentException("Json Web Key is not valid"); } if (jsonWebKey.getKeyOps() == null) { throw new IllegalArgumentException("Json Web Key's key operations property is not configured"); } if (key.getKeyType() == null) { throw new IllegalArgumentException("Json Web Key's key type property is not configured"); } this.key = jsonWebKey; this.keyId = key.getId(); service = RestProxy.create(CryptographyService.class, pipeline); if (!Strings.isNullOrEmpty(key.getId())) { unpackAndValidateId(key.getId()); cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service); } else { cryptographyServiceClient = null; } initializeCryptoClients(); } /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param keyId THe Azure Key vault key identifier to use for cryptography operations. 
* @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) { unpackAndValidateId(keyId); this.keyId = keyId; service = RestProxy.create(CryptographyService.class, pipeline); cryptographyServiceClient = new CryptographyServiceClient(keyId, service); this.key = null; } Mono<String> getKeyId() { return Mono.defer(() -> Mono.just(keyId)); } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse} * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link KeyVaultKey key}. * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<KeyVaultKey>> getKeyWithResponse() { try { return withContext(context -> getKeyWithResponse(context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey} * * @return A {@link Mono} containing the requested {@link KeyVaultKey key}. 
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<KeyVaultKey> getKey() { try { return getKeyWithResponse().flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) { return cryptographyServiceClient.getKey(context); } Mono<JsonWebKey> getSecretKey() { try { return withContext(context -> cryptographyServiceClient.getSecretKey(context)).flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. * The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public * portion of the key is used for encryption. This operation requires the keys/encrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the * specified {@code plaintext}. Possible values for assymetric keys include: * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when * a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. 
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult * contains the encrypted content. * @throws ResourceNotFoundException if the key cannot be found for encryption. * @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code plainText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) { try { return withContext(context -> encrypt(algorithm, plaintext, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.encrypt(algorithm, plaintext, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.ENCRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Encrypt Operation is missing " + "permission/not supported for key with id %s", key.getId())))); } return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key); }); } /** * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a * single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to * be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the * keys/decrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the * specified encrypted content. 
Possible values for assymetric keys include: * {@link EncryptionAlgorithm * EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @return A {@link Mono} containing the decrypted blob. * @throws ResourceNotFoundException if the key cannot be found for decryption. * @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) { try { return withContext(context -> decrypt(algorithm, cipherText, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.decrypt(algorithm, cipherText, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.DECRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for " + "key with id %s", key.getId())))); } return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, context, 
key); }); } /** * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the * signature from the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Sings the digest. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code digest} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) { try { return withContext(context -> sign(algorithm, digest, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.sign(algorithm, digest, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", key.getId())))); } return localKeyCryptographyClient.signAsync(algorithm, digest, context, key); }); } /** * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric * keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation * requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @param signature The signature to be verified. 
* @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) { try { return withContext(context -> verify(algorithm, digest, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verify(algorithm, digest, signature, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Verify Operation is not allowed for " + "key with id %s", key.getId())))); } return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key); }); } /** * Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified * key content. Possible values include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Wraps the key content. 
Subscribes to the call asynchronously and prints out the wrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param key The key content to be wrapped * @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult * key} contains the wrapped key result. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code key} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) { try { return withContext(context -> wrapKey(algorithm, key, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(key, "Key content to be wrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.wrapKey(algorithm, key, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for " + "key with id %s", this.key.getId())))); } return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key); }); } /** * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is * the reverse of the wrap operation. * The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey * permission. 
* * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the * specified encrypted key content. Possible values for asymmetric keys include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * Possible values for symmetric keys include: {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param encryptedKey The encrypted key content to unwrap. * @return A {@link Mono} containing a the unwrapped key content. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) { try { return withContext(context -> unwrapKey(algorithm, encryptedKey, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.UNWRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed " + "for key with id %s", this.key.getId())))); } return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key); }); } /** * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric * and symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest. * Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData * * @param algorithm The algorithm to use for signing. * @param data The content from which signature is to be created. 
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code data} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) { try { return withContext(context -> signData(algorithm, data, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.signData(algorithm, data, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", this.key.getId())))); } return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key); }); } /** * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric * keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires * the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the raw data. 
Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData * * @param algorithm The algorithm to use for signing. * @param data The raw content against which signature is to be verified. * @param signature The signature to be verified. * @return The {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) { try { return withContext(context -> verifyData(algorithm, data, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verifyData(algorithm, data, signature, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format( "Verify Operation is not allowed for key with id %s", this.key.getId())))); } return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key); }); } private void unpackAndValidateId(String keyId) { if (CoreUtils.isNullOrEmpty(keyId)) { throw logger.logExceptionAsError(new 
IllegalArgumentException("Key Id is invalid")); } try { URL url = new URL(keyId); String[] tokens = url.getPath().split("/"); String endpoint = url.getProtocol() + ": String keyName = (tokens.length >= 3 ? tokens[2] : null); String version = (tokens.length >= 4 ? tokens[3] : null); this.keyCollection = (tokens.length >= 2 ? tokens[1] : null); if (Strings.isNullOrEmpty(endpoint)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid")); } else if (Strings.isNullOrEmpty(keyName)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid")); } else if (Strings.isNullOrEmpty(version)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid")); } } catch (MalformedURLException e) { throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e)); } } private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) { return operations.contains(keyOperation); } private Mono<Boolean> ensureValidKeyAvailable() { boolean keyNotAvailable = (this.key == null && keyCollection != null); if (keyNotAvailable) { if (keyCollection.equals(SECRETS_COLLECTION)) { return getSecretKey().map(jwk -> { this.key = (jwk); initializeCryptoClients(); return this.key.isValid(); }); } else { return getKey().map(kvKey -> { this.key = (kvKey.getKey()); initializeCryptoClients(); return key.isValid(); }); } } else { return Mono.defer(() -> Mono.just(true)); } } CryptographyServiceClient getCryptographyServiceClient() { return cryptographyServiceClient; } void setCryptographyServiceClient(CryptographyServiceClient serviceClient) { this.cryptographyServiceClient = serviceClient; } }
yup, thanks for the catch. Updated.
Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed " + "for key with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key.get()); }); }
if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) {
Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.UNWRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed " + "for key with id %s", this.key.getId())))); } return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key); }); }
/**
 * Asynchronous client performing cryptographic operations (encrypt/decrypt, sign/verify,
 * wrap/unwrap) with an Azure Key Vault key. Operations run locally when the key material
 * ({@link JsonWebKey}) is available and permits the requested operation; otherwise they are
 * delegated to the Key Vault service through {@link CryptographyServiceClient}.
 *
 * NOTE(review): several string literals and Javadoc fragments below appear truncated by text
 * extraction (e.g. KEY_VAULT_SCOPE, the endpoint concatenation in unpackAndValidateId, and
 * various bare link/codesnippet tags). Restore them from the original source before compiling.
 */
class CryptographyAsyncClient {
    // NOTE(review): literal appears truncated by extraction — presumably the AAD scope URL for Key Vault.
    static final String KEY_VAULT_SCOPE = "https:
    static final String SECRETS_COLLECTION = "secrets";

    // Key material; wrapped in an AtomicReference because it may be populated lazily
    // (see ensureValidKeyAvailable) after construction from a key identifier alone.
    AtomicReference<JsonWebKey> key;
    private final CryptographyService service;
    private CryptographyServiceClient cryptographyServiceClient;
    // Local implementation selected by key type (RSA/EC/OCT); null until initializeCryptoClients runs.
    private LocalKeyCryptographyClient localKeyCryptographyClient;
    private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class);
    // Collection segment parsed from the key identifier path (e.g. "keys" or "secrets").
    private String keyCollection;
    private final String keyId;

    /**
     * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests
     *
     * @param key the key to use for cryptography operations.
     * @param pipeline HttpPipeline that the HTTP requests and responses flow through.
     * @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
     */
    CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) {
        Objects.requireNonNull(key, "The key vault key is required.");
        JsonWebKey jsonWebKey = key.getKey();
        Objects.requireNonNull(jsonWebKey, "The Json web key is required.");
        if (!jsonWebKey.isValid()) {
            throw new IllegalArgumentException("Json Web Key is not valid");
        }
        if (jsonWebKey.getKeyOps() == null) {
            throw new IllegalArgumentException("Json Web Key's key operations property is not configured");
        }
        if (key.getKeyType() == null) {
            throw new IllegalArgumentException("Json Web Key's key type property is not configured");
        }
        this.key = new AtomicReference<>(jsonWebKey);
        this.keyId = key.getId();
        service = RestProxy.create(CryptographyService.class, pipeline);
        // A service-backed fallback client is only possible when the key carries its vault identifier.
        if (!Strings.isNullOrEmpty(key.getId())) {
            unpackAndValidateId(key.getId());
            cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service);
        } else {
            cryptographyServiceClient = null;
        }
        initializeCryptoClients();
    }

    /**
     * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests
     *
     * @param keyId The Azure Key vault key identifier to use for cryptography operations.
     * @param pipeline HttpPipeline that the HTTP requests and responses flow through.
     * @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
     */
    CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) {
        unpackAndValidateId(keyId);
        this.keyId = keyId;
        service = RestProxy.create(CryptographyService.class, pipeline);
        cryptographyServiceClient = new CryptographyServiceClient(keyId, service);
        // Key material is fetched lazily on first use (see ensureValidKeyAvailable).
        this.key = null;
    }

    // Selects the local crypto implementation that matches the key type. Idempotent:
    // returns immediately when a local client has already been chosen.
    // TODO(review): this if/else chain would read better as a switch over the key type.
    private void initializeCryptoClients() {
        if (localKeyCryptographyClient != null) {
            return;
        }
        if (key.get().getKeyType().equals(RSA) || key.get().getKeyType().equals(RSA_HSM)) {
            localKeyCryptographyClient = new RsaKeyCryptographyClient(key.get(), cryptographyServiceClient);
        } else if (key.get().getKeyType().equals(EC) || key.get().getKeyType().equals(EC_HSM)) {
            localKeyCryptographyClient = new EcKeyCryptographyClient(key.get(), cryptographyServiceClient);
        } else if (key.get().getKeyType().equals(OCT)) {
            localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key.get(), cryptographyServiceClient);
        } else {
            throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
                "The Json Web Key Type: %s is not supported.", key.get().getKeyType().toString())));
        }
    }

    // Returns the configured key identifier.
    Mono<String> getKeyId() {
        return Mono.defer(() -> Mono.just(keyId));
    }

    /**
     * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
     * the {@code keys/get} permission.
     *
     * <p><strong>Code Samples</strong></p>
     * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
     * details when a response has been received.</p>
     * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse}
     *
     * @return A {@link Mono} containing a {@link Response} whose {@link Response
     * {@link KeyVaultKey key}.
     * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<KeyVaultKey>> getKeyWithResponse() {
        try {
            return withContext(context -> getKeyWithResponse(context));
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    /**
     * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
     * the {@code keys/get} permission.
     *
     * <p><strong>Code Samples</strong></p>
     * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
     * details when a response has been received.</p>
     * {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey}
     *
     * @return A {@link Mono} containing the requested {@link KeyVaultKey key}.
     * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<KeyVaultKey> getKey() {
        try {
            return getKeyWithResponse().flatMap(FluxUtil::toMono);
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    // Service-call variant carrying the caller's Context through the pipeline.
    Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) {
        return cryptographyServiceClient.getKey(context);
    }

    // Fetches the key material from the secrets collection (used when the identifier points at a secret).
    Mono<JsonWebKey> getSecretKey() {
        try {
            return withContext(context -> cryptographyServiceClient.getSecretKey(context)).flatMap(FluxUtil::toMono);
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    /**
     * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a
     * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
     * The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public
     * portion of the key is used for encryption. This operation requires the keys/encrypt permission.
     *
     * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the
     * specified {@code plaintext}. Possible values for asymmetric keys include:
     * {@link EncryptionAlgorithm
     * {@link EncryptionAlgorithm
     *
     * Possible values for symmetric keys include: {@link EncryptionAlgorithm
     * {@link EncryptionAlgorithm
     * {@link EncryptionAlgorithm
     * {@link EncryptionAlgorithm
     *
     * <p><strong>Code Samples</strong></p>
     * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
     * a response has been received.</p>
     * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
     *
     * @param algorithm The algorithm to be used for encryption.
     * @param plaintext The content to be encrypted.
     * @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
     * contains the encrypted content.
     * @throws ResourceNotFoundException if the key cannot be found for encryption.
     * @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key.
     * @throws NullPointerException if {@code algorithm} or {@code plainText} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) {
        try {
            return withContext(context -> encrypt(algorithm, plaintext, context));
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    // Internal variant: chooses between local crypto (when key material is present and permits
    // ENCRYPT) and the remote service.
    Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) {
        Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
        Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null.");
        return ensureValidKeyAvailable().flatMap(available -> {
            if (!available) {
                return cryptographyServiceClient.encrypt(algorithm, plaintext, context);
            }
            if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.ENCRYPT)) {
                return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Encrypt Operation is missing "
                    + "permission/not supported for key with id %s", key.get().getId()))));
            }
            return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key.get());
        });
    }

    /**
     * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
     * single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to
     * be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the
     * keys/decrypt permission.
     *
     * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the
     * specified encrypted content. Possible values for asymmetric keys include:
     * {@link EncryptionAlgorithm
     * EncryptionAlgorithm
     *
     * Possible values for symmetric keys include: {@link EncryptionAlgorithm
     * {@link EncryptionAlgorithm
     * {@link EncryptionAlgorithm
     * EncryptionAlgorithm
     *
     * <p><strong>Code Samples</strong></p>
     * <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
     * details when a response has been received.</p>
     * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
     *
     * @param algorithm The algorithm to be used for decryption.
     * @param cipherText The content to be decrypted.
     * @return A {@link Mono} containing the decrypted blob.
     * @throws ResourceNotFoundException if the key cannot be found for decryption.
     * @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key.
     * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) {
        try {
            return withContext(context -> decrypt(algorithm, cipherText, context));
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    // Internal variant: local decrypt when key material permits DECRYPT, otherwise remote.
    Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) {
        Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
        Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null.");
        return ensureValidKeyAvailable().flatMap(available -> {
            if (!available) {
                return cryptographyServiceClient.decrypt(algorithm, cipherText, context);
            }
            if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.DECRYPT)) {
                return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for "
                    + "key with id %s", key.get().getId()))));
            }
            return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, context, key.get());
        });
    }

    /**
     * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and
     * symmetric keys. This operation requires the keys/sign permission.
     *
     * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the
     * signature from the digest. Possible values include:
     * {@link SignatureAlgorithm
     * {@link SignatureAlgorithm
     * {@link SignatureAlgorithm
     * {@link SignatureAlgorithm
     * {@link SignatureAlgorithm
     *
     * <p><strong>Code Samples</strong></p>
     * <p>Signs the digest. Subscribes to the call asynchronously and prints out the signature details when a response
     * has been received.</p>
     * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign
     *
     * @param algorithm The algorithm to use for signing.
     * @param digest The content from which signature is to be created.
     * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
     * the created signature.
     * @throws ResourceNotFoundException if the key cannot be found for signing.
     * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key.
     * @throws NullPointerException if {@code algorithm} or {@code digest} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) {
        try {
            return withContext(context -> sign(algorithm, digest, context));
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    // Internal variant: local signing when key material permits SIGN, otherwise remote.
    Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) {
        Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
        Objects.requireNonNull(digest, "Digest content to be signed cannot be null.");
        return ensureValidKeyAvailable().flatMap(available -> {
            if (!available) {
                return cryptographyServiceClient.sign(algorithm, digest, context);
            }
            if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) {
                return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key "
                    + "with id %s", key.get().getId()))));
            }
            return localKeyCryptographyClient.signAsync(algorithm, digest, context, key.get());
        });
    }

    /**
     * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric
     * keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation
     * requires the keys/verify permission.
     *
     * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
     * signature. Possible values include:
     * {@link SignatureAlgorithm
     * ES512}, {@link SignatureAlgorithm
     * SignatureAlgorithm
     * {@link SignatureAlgorithm
     *
     * <p><strong>Code Samples</strong></p>
     * <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the
     * verification details when a response has been received.</p>
     * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify
     *
     * @param algorithm The algorithm to use for signing.
     * @param digest The content from which signature is to be created.
     * @param signature The signature to be verified.
     * @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result.
     * @throws ResourceNotFoundException if the key cannot be found for verifying.
     * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key.
     * @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) {
        try {
            return withContext(context -> verify(algorithm, digest, signature, context));
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    // Internal variant: local verification when key material permits VERIFY, otherwise remote.
    Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) {
        Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
        Objects.requireNonNull(digest, "Digest content cannot be null.");
        Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
        return ensureValidKeyAvailable().flatMap(available -> {
            if (!available) {
                return cryptographyServiceClient.verify(algorithm, digest, signature, context);
            }
            if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) {
                return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Verify Operation is not allowed for "
                    + "key with id %s", key.get().getId()))));
            }
            return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key.get());
        });
    }

    /**
     * Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both
     * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission.
     *
     * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified
     * key content. Possible values include:
     * {@link KeyWrapAlgorithm
     * KeyWrapAlgorithm
     *
     * <p><strong>Code Samples</strong></p>
     * <p>Wraps the key content. Subscribes to the call asynchronously and prints out the wrapped key details when a
     * response has been received.</p>
     * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey
     *
     * @param algorithm The encryption algorithm to use for wrapping the key.
     * @param key The key content to be wrapped
     * @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult
     * key} contains the wrapped key result.
     * @throws ResourceNotFoundException if the key cannot be found for wrap operation.
     * @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key.
     * @throws NullPointerException if {@code algorithm} or {@code key} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) {
        try {
            return withContext(context -> wrapKey(algorithm, key, context));
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    // Internal variant: local wrap when key material permits WRAP_KEY, otherwise remote.
    // Note: the parameter shadows the field; the field is accessed via this.key.
    Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) {
        Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null.");
        Objects.requireNonNull(key, "Key content to be wrapped cannot be null.");
        return ensureValidKeyAvailable().flatMap(available -> {
            if (!available) {
                return cryptographyServiceClient.wrapKey(algorithm, key, context);
            }
            if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) {
                return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for "
                    + "key with id %s", this.key.get().getId()))));
            }
            return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key.get());
        });
    }

    /**
     * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is
     * the reverse of the wrap operation.
     * The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey
     * permission.
     *
     * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the
     * specified encrypted key content. Possible values for asymmetric keys include:
     * {@link KeyWrapAlgorithm
     * KeyWrapAlgorithm
     * Possible values for symmetric keys include: {@link KeyWrapAlgorithm
     * KeyWrapAlgorithm
     *
     * <p><strong>Code Samples</strong></p>
     * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a
     * response has been received.</p>
     * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey
     *
     * @param algorithm The encryption algorithm to use for wrapping the key.
     * @param encryptedKey The encrypted key content to unwrap.
     * @return A {@link Mono} containing a the unwrapped key content.
     * @throws ResourceNotFoundException if the key cannot be found for wrap operation.
     * @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key.
     * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) {
        try {
            return withContext(context -> unwrapKey(algorithm, encryptedKey, context));
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    /**
     * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric
     * and symmetric keys. This operation requires the keys/sign permission.
     *
     * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest.
     * Possible values include:
     * {@link SignatureAlgorithm
     * ES512}, {@link SignatureAlgorithm
     * {@link SignatureAlgorithm
     * SignatureAlgorithm
     * {@link SignatureAlgorithm
     *
     * <p><strong>Code Samples</strong></p>
     * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response
     * has been received.</p>
     * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData
     *
     * @param algorithm The algorithm to use for signing.
     * @param data The content from which signature is to be created.
     * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
     * the created signature.
     * @throws ResourceNotFoundException if the key cannot be found for signing.
     * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key.
     * @throws NullPointerException if {@code algorithm} or {@code data} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) {
        try {
            return withContext(context -> signData(algorithm, data, context));
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    // Internal variant: local signing over raw data when key material permits SIGN, otherwise remote.
    Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) {
        Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
        Objects.requireNonNull(data, "Data to be signed cannot be null.");
        return ensureValidKeyAvailable().flatMap(available -> {
            if (!available) {
                return cryptographyServiceClient.signData(algorithm, data, context);
            }
            if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) {
                return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key "
                    + "with id %s", this.key.get().getId()))));
            }
            return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key.get());
        });
    }

    /**
     * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric
     * keys and asymmetric keys.
     * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires
     * the keys/verify permission.
     *
     * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
     * signature. Possible values include:
     * {@link SignatureAlgorithm
     * ES512}, {@link SignatureAlgorithm
     * SignatureAlgorithm
     * {@link SignatureAlgorithm
     *
     * <p><strong>Code Samples</strong></p>
     * <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the
     * verification details when a response has been received.</p>
     * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData
     *
     * @param algorithm The algorithm to use for signing.
     * @param data The raw content against which signature is to be verified.
     * @param signature The signature to be verified.
     * @return The {@link Boolean} indicating the signature verification result.
     * @throws ResourceNotFoundException if the key cannot be found for verifying.
     * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key.
     * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) {
        try {
            return withContext(context -> verifyData(algorithm, data, signature, context));
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    // Internal variant: local verification over raw data when key material permits VERIFY, otherwise remote.
    Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) {
        Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
        Objects.requireNonNull(data, "Data cannot be null.");
        Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
        return ensureValidKeyAvailable().flatMap(available -> {
            if (!available) {
                return cryptographyServiceClient.verifyData(algorithm, data, signature, context);
            }
            if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) {
                return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                    "Verify Operation is not allowed for key with id %s", this.key.get().getId()))));
            }
            return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key.get());
        });
    }

    // Parses the key identifier URL into endpoint, collection, name and version, storing the
    // collection for later use and rejecting malformed or incomplete identifiers.
    private void unpackAndValidateId(String keyId) {
        if (CoreUtils.isNullOrEmpty(keyId)) {
            throw logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid"));
        }
        try {
            URL url = new URL(keyId);
            String[] tokens = url.getPath().split("/");
            // NOTE(review): concatenated literal appears truncated by extraction — presumably
            // url.getProtocol() + "://" + url.getHost(); confirm against the original source.
            String endpoint = url.getProtocol() + ":
            String keyName = (tokens.length >= 3 ? tokens[2] : null);
            String version = (tokens.length >= 4 ? tokens[3] : null);
            this.keyCollection = (tokens.length >= 2 ? tokens[1] : null);
            if (Strings.isNullOrEmpty(endpoint)) {
                throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid"));
            } else if (Strings.isNullOrEmpty(keyName)) {
                throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid"));
            } else if (Strings.isNullOrEmpty(version)) {
                throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid"));
            }
        } catch (MalformedURLException e) {
            throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e));
        }
    }

    // True when the key's operations list grants the requested operation.
    private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) {
        return operations.contains(keyOperation);
    }

    // Lazily fetches key material (from the secrets or keys collection) on first use. Emits true
    // when local key material is valid and local crypto can be used, false to fall back to the service.
    private Mono<Boolean> ensureValidKeyAvailable() {
        boolean keyAvailable = !(this.key == null && keyCollection != null);
        if (!keyAvailable) {
            if (keyCollection.equals(SECRETS_COLLECTION)) {
                return getSecretKey().flatMap(jwk -> {
                    this.key = new AtomicReference<>(jwk);
                    initializeCryptoClients();
                    return Mono.just(this.key.get().isValid());
                });
            } else {
                return getKey().flatMap(kvKey -> {
                    this.key = new AtomicReference<>(kvKey.getKey());
                    initializeCryptoClients();
                    return Mono.just(key.get().isValid());
                });
            }
        } else {
            return Mono.defer(() -> Mono.just(true));
        }
    }

    // Test hooks for swapping the service-backed client.
    CryptographyServiceClient getCryptographyServiceClient() {
        return cryptographyServiceClient;
    }

    void setCryptographyServiceClient(CryptographyServiceClient serviceClient) {
        this.cryptographyServiceClient = serviceClient;
    }
}
// NOTE(review): collapsed extraction — this span holds the class header (plain JsonWebKey field variant),
// the KeyVaultKey constructor (validates the JWK, its key-ops and key-type, then builds the service client
// only when the key has an id), the keyId constructor (service-only; this.key stays null until
// ensureValidKeyAvailable fetches it), initializeCryptoClients, and getKeyId.
// NOTE(review): initializeCryptoClients dispatches on the key type with an equals-chain. A `switch` would
// read better, but KeyType here appears to be an expandable string constant (compared via equals, not ==),
// not a Java enum — a switch would have to go through toString(); confirm the exact string values before
// restructuring. Also note the KeyVaultKey constructor calls initializeCryptoClients() but the keyId
// constructor does not — presumably deferred until the key is fetched; verify against callers.
class CryptographyAsyncClient { static final String KEY_VAULT_SCOPE = "https: static final String SECRETS_COLLECTION = "secrets"; JsonWebKey key; private final CryptographyService service; private CryptographyServiceClient cryptographyServiceClient; private LocalKeyCryptographyClient localKeyCryptographyClient; private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class); private String keyCollection; private final String keyId; /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param key the key to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) { Objects.requireNonNull(key, "The key vault key is required."); JsonWebKey jsonWebKey = key.getKey(); Objects.requireNonNull(jsonWebKey, "The Json web key is required."); if (!jsonWebKey.isValid()) { throw new IllegalArgumentException("Json Web Key is not valid"); } if (jsonWebKey.getKeyOps() == null) { throw new IllegalArgumentException("Json Web Key's key operations property is not configured"); } if (key.getKeyType() == null) { throw new IllegalArgumentException("Json Web Key's key type property is not configured"); } this.key = jsonWebKey; this.keyId = key.getId(); service = RestProxy.create(CryptographyService.class, pipeline); if (!Strings.isNullOrEmpty(key.getId())) { unpackAndValidateId(key.getId()); cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service); } else { cryptographyServiceClient = null; } initializeCryptoClients(); } /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param keyId The Azure Key vault key identifier to use for cryptography operations.
* @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) { unpackAndValidateId(keyId); this.keyId = keyId; service = RestProxy.create(CryptographyService.class, pipeline); cryptographyServiceClient = new CryptographyServiceClient(keyId, service); this.key = null; } private void initializeCryptoClients() { if (localKeyCryptographyClient != null) { return; } if (key.getKeyType().equals(RSA) || key.getKeyType().equals(RSA_HSM)) { localKeyCryptographyClient = new RsaKeyCryptographyClient(key, cryptographyServiceClient); } else if (key.getKeyType().equals(EC) || key.getKeyType().equals(EC_HSM)) { localKeyCryptographyClient = new EcKeyCryptographyClient(key, cryptographyServiceClient); } else if (key.getKeyType().equals(OCT)) { localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key, cryptographyServiceClient); } else { throw logger.logExceptionAsError(new IllegalArgumentException(String.format( "The Json Web Key Type: %s is not supported.", key.getKeyType().toString()))); } } Mono<String> getKeyId() { return Mono.defer(() -> Mono.just(keyId)); } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse} * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link KeyVaultKey key}. * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<Response<KeyVaultKey>> getKeyWithResponse() {
    try {
        // Defer to the Context-accepting overload, propagating the reactor Context.
        return withContext(this::getKeyWithResponse);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}

/**
 * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
 * the {@code keys/get} permission.
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
 * details when a response has been received.</p>
 * {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey}
 *
 * @return A {@link Mono} containing the requested {@link KeyVaultKey key}.
 * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
Mono<KeyVaultKey> getKey() {
    try {
        // Unwrap the Response<KeyVaultKey> to just the key value.
        return getKeyWithResponse().flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}

// Context-aware variant; delegates straight to the REST-backed service client.
Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) {
    return cryptographyServiceClient.getKey(context);
}

// Fetches the JsonWebKey from the secrets collection (used when the key id points at a secret).
Mono<JsonWebKey> getSecretKey() {
    try {
        return withContext(context -> cryptographyServiceClient.getSecretKey(context)).flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}

/**
 * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a
 * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
 * The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public
 * portion of the key is used for encryption. This operation requires the keys/encrypt permission.
 *
 * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the
 * specified {@code plaintext}.
Possible values for asymmetric keys include: * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when * a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. * @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult * contains the encrypted content. * @throws ResourceNotFoundException if the key cannot be found for encryption. * @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code plaintext} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) {
    try {
        // Propagate the reactor Context into the Context-accepting overload.
        return withContext(context -> encrypt(algorithm, plaintext, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}

/*
 * Context-aware encrypt. Falls back to the service when no valid local key is cached; when a local key
 * exists it must carry the ENCRYPT key operation, otherwise an UnsupportedOperationException is emitted.
 */
Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) {
    Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null.");
    Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null.");
    return ensureValidKeyAvailable().flatMap(available -> {
        if (!available) {
            // No local key material — perform the operation service-side.
            return cryptographyServiceClient.encrypt(algorithm, plaintext, context);
        }
        if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.ENCRYPT)) {
            // Message aligned with the sibling operations ("... is not allowed for key with id %s").
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Encrypt Operation is not allowed for key with id %s", key.getId()))));
        }
        return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key);
    });
}

/**
 * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
 * single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to
 * be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the
 * keys/decrypt permission.
 *
 * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the
 * specified encrypted content. Possible values for asymmetric keys include:
 * {@link EncryptionAlgorithm * EncryptionAlgorithm
 *
 * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm
 * {@link EncryptionAlgorithm * EncryptionAlgorithm
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Decrypts the encrypted content.
* NOTE(review): collapsed extraction — this span holds the Context-delegating pairs for decrypt, sign,
* verify, wrapKey, signData, plus the public unwrapKey entry point. Each follows the same shape: null-check
* the arguments, then via ensureValidKeyAvailable() either call the service client (no local key) or check
* the matching KeyOperation before delegating to localKeyCryptographyClient. unwrapKey's Context overload is
* not visible in this chunk — presumably defined elsewhere in the class; confirm.
Subscribes to the call asynchronously and prints out the decrypted content * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @return A {@link Mono} containing the decrypted blob. * @throws ResourceNotFoundException if the key cannot be found for decryption. * @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) { try { return withContext(context -> decrypt(algorithm, cipherText, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.decrypt(algorithm, cipherText, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.DECRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for " + "key with id %s", key.getId())))); } return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, context, key); }); } /** * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission.
* * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the * signature from the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Signs the digest. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code digest} is null.
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) { try { return withContext(context -> sign(algorithm, digest, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.sign(algorithm, digest, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", key.getId())))); } return localKeyCryptographyClient.signAsync(algorithm, digest, context, key); }); } /** * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric * keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation * requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @param signature The signature to be verified.
* @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) { try { return withContext(context -> verify(algorithm, digest, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verify(algorithm, digest, signature, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Verify Operation is not allowed for " + "key with id %s", key.getId())))); } return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key); }); } /** * Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified * key content. Possible values include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Wraps the key content.
Subscribes to the call asynchronously and prints out the wrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param key The key content to be wrapped * @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult * key} contains the wrapped key result. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code key} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) { try { return withContext(context -> wrapKey(algorithm, key, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(key, "Key content to be wrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.wrapKey(algorithm, key, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for " + "key with id %s", this.key.getId())))); } return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key); }); } /** * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is * the reverse of the wrap operation. * The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey * permission.
* * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the * specified encrypted key content. Possible values for asymmetric keys include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * Possible values for symmetric keys include: {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param encryptedKey The encrypted key content to unwrap. * @return A {@link Mono} containing the unwrapped key content. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) { try { return withContext(context -> unwrapKey(algorithm, encryptedKey, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric * and symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest. * Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Signs the raw data.
Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData * * @param algorithm The algorithm to use for signing. * @param data The content from which signature is to be created. * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code data} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) { try { return withContext(context -> signData(algorithm, data, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.signData(algorithm, data, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", this.key.getId())))); } return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key); }); } /** * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric * keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires * the keys/verify permission.
* * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData * * @param algorithm The algorithm to use for signing. * @param data The raw content against which signature is to be verified. * @param signature The signature to be verified. * @return The {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null. 
* NOTE(review): collapsed extraction — this span holds the NEWER plain-JsonWebKey variant of
* verifyData(algorithm, data, signature[, context]), unpackAndValidateId (URL-parses the key id into
* endpoint/name/version and the collection segment, rejecting blanks), checkKeyPermissions, and
* ensureValidKeyAvailable. The latter fetches the key from the secrets or keys collection only when
* this.key == null and keyCollection != null, then wires up the local crypto client via
* initializeCryptoClients() and maps to the key's validity. The `url.getProtocol() + ":` fragment is a
* truncated string literal from the extraction — TODO confirm against the original file.
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) { try { return withContext(context -> verifyData(algorithm, data, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verifyData(algorithm, data, signature, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format( "Verify Operation is not allowed for key with id %s", this.key.getId())))); } return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key); }); } private void unpackAndValidateId(String keyId) { if (CoreUtils.isNullOrEmpty(keyId)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid")); } try { URL url = new URL(keyId); String[] tokens = url.getPath().split("/"); String endpoint = url.getProtocol() + ": String keyName = (tokens.length >= 3 ? tokens[2] : null); String version = (tokens.length >= 4 ? tokens[3] : null); this.keyCollection = (tokens.length >= 2 ?
tokens[1] : null); if (Strings.isNullOrEmpty(endpoint)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid")); } else if (Strings.isNullOrEmpty(keyName)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid")); } else if (Strings.isNullOrEmpty(version)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid")); } } catch (MalformedURLException e) { throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e)); } } private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) { return operations.contains(keyOperation); } private Mono<Boolean> ensureValidKeyAvailable() { boolean keyNotAvailable = (this.key == null && keyCollection != null); if (keyNotAvailable) { if (keyCollection.equals(SECRETS_COLLECTION)) { return getSecretKey().map(jwk -> { this.key = (jwk); initializeCryptoClients(); return this.key.isValid(); }); } else { return getKey().map(kvKey -> { this.key = (kvKey.getKey()); initializeCryptoClients(); return key.isValid(); }); } } else { return Mono.defer(() -> Mono.just(true)); } } CryptographyServiceClient getCryptographyServiceClient() { return cryptographyServiceClient; } void setCryptographyServiceClient(CryptographyServiceClient serviceClient) { this.cryptographyServiceClient = serviceClient; } }
Updated.
// NOTE(review): the remainder of this chunk is review-diff residue, not live code: a before-snapshot of
// ensureValidKeyAvailable (double-negative `keyAvailable` + AtomicReference), the single target line, the
// after-snapshot (positive `keyNotAvailable`, Mono.map, plain field), and a duplicate of the older class
// header. These duplicate definitions already present above — presumably this file is a concatenated
// dataset row rather than compilable source; confirm before attempting to build it. Kept verbatim.
private Mono<Boolean> ensureValidKeyAvailable() { boolean keyAvailable = !(this.key == null && keyCollection != null); if (!keyAvailable) { if (keyCollection.equals(SECRETS_COLLECTION)) { return getSecretKey().flatMap(jwk -> { this.key = new AtomicReference<>(jwk); initializeCryptoClients(); return Mono.just(this.key.get().isValid()); }); } else { return getKey().flatMap(kvKey -> { this.key = new AtomicReference<>(kvKey.getKey()); initializeCryptoClients(); return Mono.just(key.get().isValid()); }); } } else { return Mono.defer(() -> Mono.just(true)); } }
boolean keyAvailable = !(this.key == null && keyCollection != null);
private Mono<Boolean> ensureValidKeyAvailable() { boolean keyNotAvailable = (this.key == null && keyCollection != null); if (keyNotAvailable) { if (keyCollection.equals(SECRETS_COLLECTION)) { return getSecretKey().map(jwk -> { this.key = (jwk); initializeCryptoClients(); return this.key.isValid(); }); } else { return getKey().map(kvKey -> { this.key = (kvKey.getKey()); initializeCryptoClients(); return key.isValid(); }); } } else { return Mono.defer(() -> Mono.just(true)); } }
class CryptographyAsyncClient { static final String KEY_VAULT_SCOPE = "https: static final String SECRETS_COLLECTION = "secrets"; AtomicReference<JsonWebKey> key; private final CryptographyService service; private CryptographyServiceClient cryptographyServiceClient; private LocalKeyCryptographyClient localKeyCryptographyClient; private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class); private String keyCollection; private final String keyId; /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param key the key to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) { Objects.requireNonNull(key, "The key vault key is required."); JsonWebKey jsonWebKey = key.getKey(); Objects.requireNonNull(jsonWebKey, "The Json web key is required."); if (!jsonWebKey.isValid()) { throw new IllegalArgumentException("Json Web Key is not valid"); } if (jsonWebKey.getKeyOps() == null) { throw new IllegalArgumentException("Json Web Key's key operations property is not configured"); } if (key.getKeyType() == null) { throw new IllegalArgumentException("Json Web Key's key type property is not configured"); } this.key = new AtomicReference<>(jsonWebKey); this.keyId = key.getId(); service = RestProxy.create(CryptographyService.class, pipeline); if (!Strings.isNullOrEmpty(key.getId())) { unpackAndValidateId(key.getId()); cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service); } else { cryptographyServiceClient = null; } initializeCryptoClients(); } /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param keyId THe Azure Key vault key identifier to use for cryptography operations.
* @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) { unpackAndValidateId(keyId); this.keyId = keyId; service = RestProxy.create(CryptographyService.class, pipeline); cryptographyServiceClient = new CryptographyServiceClient(keyId, service); this.key = null; } private void initializeCryptoClients() { if (localKeyCryptographyClient != null) { return; } if (key.get().getKeyType().equals(RSA) || key.get().getKeyType().equals(RSA_HSM)) { localKeyCryptographyClient = new RsaKeyCryptographyClient(key.get(), cryptographyServiceClient); } else if (key.get().getKeyType().equals(EC) || key.get().getKeyType().equals(EC_HSM)) { localKeyCryptographyClient = new EcKeyCryptographyClient(key.get(), cryptographyServiceClient); } else if (key.get().getKeyType().equals(OCT)) { localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key.get(), cryptographyServiceClient); } else { throw logger.logExceptionAsError(new IllegalArgumentException(String.format( "The Json Web Key Type: %s is not supported.", key.get().getKeyType().toString()))); } } Mono<String> getKeyId() { return Mono.defer(() -> Mono.just(keyId)); } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse} * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link KeyVaultKey key}. 
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<KeyVaultKey>> getKeyWithResponse() { try { return withContext(context -> getKeyWithResponse(context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey} * * @return A {@link Mono} containing the requested {@link KeyVaultKey key}. * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<KeyVaultKey> getKey() { try { return getKeyWithResponse().flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) { return cryptographyServiceClient.getKey(context); } Mono<JsonWebKey> getSecretKey() { try { return withContext(context -> cryptographyServiceClient.getSecretKey(context)).flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. * The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public * portion of the key is used for encryption. This operation requires the keys/encrypt permission. 
* * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the * specified {@code plaintext}. Possible values for assymetric keys include: * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when * a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. * @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult * contains the encrypted content. * @throws ResourceNotFoundException if the key cannot be found for encryption. * @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code plainText} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) { try { return withContext(context -> encrypt(algorithm, plaintext, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.encrypt(algorithm, plaintext, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.ENCRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Encrypt Operation is missing " + "permission/not supported for key with id %s", key.get().getId())))); } return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key.get()); }); } /** * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a * single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to * be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the * keys/decrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the * specified encrypted content. Possible values for assymetric keys include: * {@link EncryptionAlgorithm * EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Decrypts the encrypted content. 
Subscribes to the call asynchronously and prints out the decrypted content * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @return A {@link Mono} containing the decrypted blob. * @throws ResourceNotFoundException if the key cannot be found for decryption. * @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) { try { return withContext(context -> decrypt(algorithm, cipherText, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.decrypt(algorithm, cipherText, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.DECRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for " + "key with id %s", key.get().getId())))); } return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, context, key.get()); }); } /** * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission. 
* * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the * signature from the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Sings the digest. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code digest} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) { try { return withContext(context -> sign(algorithm, digest, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.sign(algorithm, digest, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", key.get().getId())))); } return localKeyCryptographyClient.signAsync(algorithm, digest, context, key.get()); }); } /** * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric * keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation * requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @param signature The signature to be verified. 
* @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) { try { return withContext(context -> verify(algorithm, digest, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verify(algorithm, digest, signature, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Verify Operation is not allowed for " + "key with id %s", key.get().getId())))); } return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key.get()); }); } /** * Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified * key content. Possible values include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Wraps the key content. 
Subscribes to the call asynchronously and prints out the wrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param key The key content to be wrapped * @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult * key} contains the wrapped key result. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code key} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) { try { return withContext(context -> wrapKey(algorithm, key, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(key, "Key content to be wrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.wrapKey(algorithm, key, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for " + "key with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key.get()); }); } /** * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is * the reverse of the wrap operation. * The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey * permission. 
* * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the * specified encrypted key content. Possible values for asymmetric keys include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * Possible values for symmetric keys include: {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param encryptedKey The encrypted key content to unwrap. * @return A {@link Mono} containing a the unwrapped key content. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) { try { return withContext(context -> unwrapKey(algorithm, encryptedKey, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed " + "for key with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key.get()); }); } /** * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric * and symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest. * Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData * * @param algorithm The algorithm to use for signing. * @param data The content from which signature is to be created. 
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code data} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) { try { return withContext(context -> signData(algorithm, data, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.signData(algorithm, data, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key.get()); }); } /** * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric * keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires * the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the raw data. 
Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData * * @param algorithm The algorithm to use for signing. * @param data The raw content against which signature is to be verified. * @param signature The signature to be verified. * @return The {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) { try { return withContext(context -> verifyData(algorithm, data, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verifyData(algorithm, data, signature, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format( "Verify Operation is not allowed for key with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key.get()); }); } private void unpackAndValidateId(String keyId) { if (CoreUtils.isNullOrEmpty(keyId)) { throw 
logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid")); } try { URL url = new URL(keyId); String[] tokens = url.getPath().split("/"); String endpoint = url.getProtocol() + ": String keyName = (tokens.length >= 3 ? tokens[2] : null); String version = (tokens.length >= 4 ? tokens[3] : null); this.keyCollection = (tokens.length >= 2 ? tokens[1] : null); if (Strings.isNullOrEmpty(endpoint)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid")); } else if (Strings.isNullOrEmpty(keyName)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid")); } else if (Strings.isNullOrEmpty(version)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid")); } } catch (MalformedURLException e) { throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e)); } } private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) { return operations.contains(keyOperation); } CryptographyServiceClient getCryptographyServiceClient() { return cryptographyServiceClient; } void setCryptographyServiceClient(CryptographyServiceClient serviceClient) { this.cryptographyServiceClient = serviceClient; } }
class CryptographyAsyncClient { static final String KEY_VAULT_SCOPE = "https: static final String SECRETS_COLLECTION = "secrets"; JsonWebKey key; private final CryptographyService service; private CryptographyServiceClient cryptographyServiceClient; private LocalKeyCryptographyClient localKeyCryptographyClient; private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class); private String keyCollection; private final String keyId; /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param key the key to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) { Objects.requireNonNull(key, "The key vault key is required."); JsonWebKey jsonWebKey = key.getKey(); Objects.requireNonNull(jsonWebKey, "The Json web key is required."); if (!jsonWebKey.isValid()) { throw new IllegalArgumentException("Json Web Key is not valid"); } if (jsonWebKey.getKeyOps() == null) { throw new IllegalArgumentException("Json Web Key's key operations property is not configured"); } if (key.getKeyType() == null) { throw new IllegalArgumentException("Json Web Key's key type property is not configured"); } this.key = jsonWebKey; this.keyId = key.getId(); service = RestProxy.create(CryptographyService.class, pipeline); if (!Strings.isNullOrEmpty(key.getId())) { unpackAndValidateId(key.getId()); cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service); } else { cryptographyServiceClient = null; } initializeCryptoClients(); } /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param keyId THe Azure Key vault key identifier to use for cryptography operations. 
* @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) { unpackAndValidateId(keyId); this.keyId = keyId; service = RestProxy.create(CryptographyService.class, pipeline); cryptographyServiceClient = new CryptographyServiceClient(keyId, service); this.key = null; } private void initializeCryptoClients() { if (localKeyCryptographyClient != null) { return; } if (key.getKeyType().equals(RSA) || key.getKeyType().equals(RSA_HSM)) { localKeyCryptographyClient = new RsaKeyCryptographyClient(key, cryptographyServiceClient); } else if (key.getKeyType().equals(EC) || key.getKeyType().equals(EC_HSM)) { localKeyCryptographyClient = new EcKeyCryptographyClient(key, cryptographyServiceClient); } else if (key.getKeyType().equals(OCT)) { localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key, cryptographyServiceClient); } else { throw logger.logExceptionAsError(new IllegalArgumentException(String.format( "The Json Web Key Type: %s is not supported.", key.getKeyType().toString()))); } } Mono<String> getKeyId() { return Mono.defer(() -> Mono.just(keyId)); } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse} * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link KeyVaultKey key}. * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<KeyVaultKey>> getKeyWithResponse() { try { return withContext(context -> getKeyWithResponse(context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey} * * @return A {@link Mono} containing the requested {@link KeyVaultKey key}. * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<KeyVaultKey> getKey() { try { return getKeyWithResponse().flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) { return cryptographyServiceClient.getKey(context); } Mono<JsonWebKey> getSecretKey() { try { return withContext(context -> cryptographyServiceClient.getSecretKey(context)).flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. * The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public * portion of the key is used for encryption. This operation requires the keys/encrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the * specified {@code plaintext}. 
Possible values for assymetric keys include: * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when * a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. * @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult * contains the encrypted content. * @throws ResourceNotFoundException if the key cannot be found for encryption. * @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code plainText} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) { try { return withContext(context -> encrypt(algorithm, plaintext, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.encrypt(algorithm, plaintext, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.ENCRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Encrypt Operation is missing " + "permission/not supported for key with id %s", key.getId())))); } return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key); }); } /** * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a * single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to * be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the * keys/decrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the * specified encrypted content. Possible values for assymetric keys include: * {@link EncryptionAlgorithm * EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Decrypts the encrypted content. 
Subscribes to the call asynchronously and prints out the decrypted content * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @return A {@link Mono} containing the decrypted blob. * @throws ResourceNotFoundException if the key cannot be found for decryption. * @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) { try { return withContext(context -> decrypt(algorithm, cipherText, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.decrypt(algorithm, cipherText, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.DECRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for " + "key with id %s", key.getId())))); } return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, context, key); }); } /** * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission. 
* * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the * signature from the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Sings the digest. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code digest} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) { try { return withContext(context -> sign(algorithm, digest, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.sign(algorithm, digest, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", key.getId())))); } return localKeyCryptographyClient.signAsync(algorithm, digest, context, key); }); } /** * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric * keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation * requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @param signature The signature to be verified. 
* @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) { try { return withContext(context -> verify(algorithm, digest, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verify(algorithm, digest, signature, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Verify Operation is not allowed for " + "key with id %s", key.getId())))); } return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key); }); } /** * Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified * key content. Possible values include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Wraps the key content. 
Subscribes to the call asynchronously and prints out the wrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param key The key content to be wrapped * @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult * key} contains the wrapped key result. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code key} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) { try { return withContext(context -> wrapKey(algorithm, key, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(key, "Key content to be wrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.wrapKey(algorithm, key, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for " + "key with id %s", this.key.getId())))); } return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key); }); } /** * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is * the reverse of the wrap operation. * The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey * permission. 
* * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the * specified encrypted key content. Possible values for asymmetric keys include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * Possible values for symmetric keys include: {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param encryptedKey The encrypted key content to unwrap. * @return A {@link Mono} containing a the unwrapped key content. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) { try { return withContext(context -> unwrapKey(algorithm, encryptedKey, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.UNWRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed " + "for key with id %s", this.key.getId())))); } return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key); }); } /** * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric * and symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest. * Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData * * @param algorithm The algorithm to use for signing. * @param data The content from which signature is to be created. 
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code data} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) { try { return withContext(context -> signData(algorithm, data, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.signData(algorithm, data, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", this.key.getId())))); } return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key); }); } /** * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric * keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires * the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the raw data. 
Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData * * @param algorithm The algorithm to use for signing. * @param data The raw content against which signature is to be verified. * @param signature The signature to be verified. * @return The {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) { try { return withContext(context -> verifyData(algorithm, data, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verifyData(algorithm, data, signature, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format( "Verify Operation is not allowed for key with id %s", this.key.getId())))); } return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key); }); } private void unpackAndValidateId(String keyId) { if (CoreUtils.isNullOrEmpty(keyId)) { throw logger.logExceptionAsError(new 
IllegalArgumentException("Key Id is invalid")); } try { URL url = new URL(keyId); String[] tokens = url.getPath().split("/"); String endpoint = url.getProtocol() + ": String keyName = (tokens.length >= 3 ? tokens[2] : null); String version = (tokens.length >= 4 ? tokens[3] : null); this.keyCollection = (tokens.length >= 2 ? tokens[1] : null); if (Strings.isNullOrEmpty(endpoint)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid")); } else if (Strings.isNullOrEmpty(keyName)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid")); } else if (Strings.isNullOrEmpty(version)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid")); } } catch (MalformedURLException e) { throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e)); } } private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) { return operations.contains(keyOperation); } CryptographyServiceClient getCryptographyServiceClient() { return cryptographyServiceClient; } void setCryptographyServiceClient(CryptographyServiceClient serviceClient) { this.cryptographyServiceClient = serviceClient; } }
Expandable String enum, cannot do that, as discussed.
private void initializeCryptoClients() { if (localKeyCryptographyClient != null) { return; } if (key.get().getKeyType().equals(RSA) || key.get().getKeyType().equals(RSA_HSM)) { localKeyCryptographyClient = new RsaKeyCryptographyClient(key.get(), cryptographyServiceClient); } else if (key.get().getKeyType().equals(EC) || key.get().getKeyType().equals(EC_HSM)) { localKeyCryptographyClient = new EcKeyCryptographyClient(key.get(), cryptographyServiceClient); } else if (key.get().getKeyType().equals(OCT)) { localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key.get(), cryptographyServiceClient); } else { throw logger.logExceptionAsError(new IllegalArgumentException(String.format( "The Json Web Key Type: %s is not supported.", key.get().getKeyType().toString()))); } }
"The Json Web Key Type: %s is not supported.", key.get().getKeyType().toString())));
private void initializeCryptoClients() { if (localKeyCryptographyClient != null) { return; } if (key.getKeyType().equals(RSA) || key.getKeyType().equals(RSA_HSM)) { localKeyCryptographyClient = new RsaKeyCryptographyClient(key, cryptographyServiceClient); } else if (key.getKeyType().equals(EC) || key.getKeyType().equals(EC_HSM)) { localKeyCryptographyClient = new EcKeyCryptographyClient(key, cryptographyServiceClient); } else if (key.getKeyType().equals(OCT)) { localKeyCryptographyClient = new SymmetricKeyCryptographyClient(key, cryptographyServiceClient); } else { throw logger.logExceptionAsError(new IllegalArgumentException(String.format( "The Json Web Key Type: %s is not supported.", key.getKeyType().toString()))); } }
class CryptographyAsyncClient { static final String KEY_VAULT_SCOPE = "https: static final String SECRETS_COLLECTION = "secrets"; AtomicReference<JsonWebKey> key; private final CryptographyService service; private CryptographyServiceClient cryptographyServiceClient; private LocalKeyCryptographyClient localKeyCryptographyClient; private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class); private String keyCollection; private final String keyId; /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param key the key to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) { Objects.requireNonNull(key, "The key vault key is required."); JsonWebKey jsonWebKey = key.getKey(); Objects.requireNonNull(jsonWebKey, "The Json web key is required."); if (!jsonWebKey.isValid()) { throw new IllegalArgumentException("Json Web Key is not valid"); } if (jsonWebKey.getKeyOps() == null) { throw new IllegalArgumentException("Json Web Key's key operations property is not configured"); } if (key.getKeyType() == null) { throw new IllegalArgumentException("Json Web Key's key type property is not configured"); } this.key = new AtomicReference<>(jsonWebKey); this.keyId = key.getId(); service = RestProxy.create(CryptographyService.class, pipeline); if (!Strings.isNullOrEmpty(key.getId())) { unpackAndValidateId(key.getId()); cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service); } else { cryptographyServiceClient = null; } initializeCryptoClients(); } /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param keyId THe Azure Key vault key identifier to use for cryptography operations. 
* @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) { unpackAndValidateId(keyId); this.keyId = keyId; service = RestProxy.create(CryptographyService.class, pipeline); cryptographyServiceClient = new CryptographyServiceClient(keyId, service); this.key = null; } Mono<String> getKeyId() { return Mono.defer(() -> Mono.just(keyId)); } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse} * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link KeyVaultKey key}. * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<KeyVaultKey>> getKeyWithResponse() { try { return withContext(context -> getKeyWithResponse(context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey} * * @return A {@link Mono} containing the requested {@link KeyVaultKey key}. 
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<KeyVaultKey> getKey() { try { return getKeyWithResponse().flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) { return cryptographyServiceClient.getKey(context); } Mono<JsonWebKey> getSecretKey() { try { return withContext(context -> cryptographyServiceClient.getSecretKey(context)).flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. * The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public * portion of the key is used for encryption. This operation requires the keys/encrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the * specified {@code plaintext}. Possible values for assymetric keys include: * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when * a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. 
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult * contains the encrypted content. * @throws ResourceNotFoundException if the key cannot be found for encryption. * @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code plainText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) { try { return withContext(context -> encrypt(algorithm, plaintext, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.encrypt(algorithm, plaintext, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.ENCRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Encrypt Operation is missing " + "permission/not supported for key with id %s", key.get().getId())))); } return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key.get()); }); } /** * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a * single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to * be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the * keys/decrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the * specified encrypted content. 
Possible values for assymetric keys include: * {@link EncryptionAlgorithm * EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @return A {@link Mono} containing the decrypted blob. * @throws ResourceNotFoundException if the key cannot be found for decryption. * @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) { try { return withContext(context -> decrypt(algorithm, cipherText, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.decrypt(algorithm, cipherText, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.DECRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for " + "key with id %s", key.get().getId())))); } return localKeyCryptographyClient.decryptAsync(algorithm, 
cipherText, context, key.get()); }); } /** * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the * signature from the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Sings the digest. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code digest} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) { try { return withContext(context -> sign(algorithm, digest, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.sign(algorithm, digest, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", key.get().getId())))); } return localKeyCryptographyClient.signAsync(algorithm, digest, context, key.get()); }); } /** * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric * keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation * requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @param signature The signature to be verified. 
* @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) { try { return withContext(context -> verify(algorithm, digest, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verify(algorithm, digest, signature, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Verify Operation is not allowed for " + "key with id %s", key.get().getId())))); } return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key.get()); }); } /** * Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified * key content. Possible values include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Wraps the key content. 
Subscribes to the call asynchronously and prints out the wrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param key The key content to be wrapped * @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult * key} contains the wrapped key result. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code key} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) { try { return withContext(context -> wrapKey(algorithm, key, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(key, "Key content to be wrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.wrapKey(algorithm, key, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for " + "key with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key.get()); }); } /** * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is * the reverse of the wrap operation. * The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey * permission. 
* * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the * specified encrypted key content. Possible values for asymmetric keys include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * Possible values for symmetric keys include: {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param encryptedKey The encrypted key content to unwrap. * @return A {@link Mono} containing a the unwrapped key content. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) { try { return withContext(context -> unwrapKey(algorithm, encryptedKey, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed " + "for key with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key.get()); }); } /** * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric * and symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest. * Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData * * @param algorithm The algorithm to use for signing. * @param data The content from which signature is to be created. 
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code data} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) { try { return withContext(context -> signData(algorithm, data, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.signData(algorithm, data, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key.get()); }); } /** * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric * keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires * the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the raw data. 
Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData * * @param algorithm The algorithm to use for signing. * @param data The raw content against which signature is to be verified. * @param signature The signature to be verified. * @return The {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) { try { return withContext(context -> verifyData(algorithm, data, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verifyData(algorithm, data, signature, context); } if (!checkKeyPermissions(this.key.get().getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format( "Verify Operation is not allowed for key with id %s", this.key.get().getId())))); } return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key.get()); }); } private void unpackAndValidateId(String keyId) { if (CoreUtils.isNullOrEmpty(keyId)) { throw 
logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid")); } try { URL url = new URL(keyId); String[] tokens = url.getPath().split("/"); String endpoint = url.getProtocol() + ": String keyName = (tokens.length >= 3 ? tokens[2] : null); String version = (tokens.length >= 4 ? tokens[3] : null); this.keyCollection = (tokens.length >= 2 ? tokens[1] : null); if (Strings.isNullOrEmpty(endpoint)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid")); } else if (Strings.isNullOrEmpty(keyName)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid")); } else if (Strings.isNullOrEmpty(version)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid")); } } catch (MalformedURLException e) { throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e)); } } private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) { return operations.contains(keyOperation); } private Mono<Boolean> ensureValidKeyAvailable() { boolean keyAvailable = !(this.key == null && keyCollection != null); if (!keyAvailable) { if (keyCollection.equals(SECRETS_COLLECTION)) { return getSecretKey().flatMap(jwk -> { this.key = new AtomicReference<>(jwk); initializeCryptoClients(); return Mono.just(this.key.get().isValid()); }); } else { return getKey().flatMap(kvKey -> { this.key = new AtomicReference<>(kvKey.getKey()); initializeCryptoClients(); return Mono.just(key.get().isValid()); }); } } else { return Mono.defer(() -> Mono.just(true)); } } CryptographyServiceClient getCryptographyServiceClient() { return cryptographyServiceClient; } void setCryptographyServiceClient(CryptographyServiceClient serviceClient) { this.cryptographyServiceClient = serviceClient; } }
class CryptographyAsyncClient { static final String KEY_VAULT_SCOPE = "https: static final String SECRETS_COLLECTION = "secrets"; JsonWebKey key; private final CryptographyService service; private CryptographyServiceClient cryptographyServiceClient; private LocalKeyCryptographyClient localKeyCryptographyClient; private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class); private String keyCollection; private final String keyId; /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param key the key to use for cryptography operations. * @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(KeyVaultKey key, HttpPipeline pipeline, CryptographyServiceVersion version) { Objects.requireNonNull(key, "The key vault key is required."); JsonWebKey jsonWebKey = key.getKey(); Objects.requireNonNull(jsonWebKey, "The Json web key is required."); if (!jsonWebKey.isValid()) { throw new IllegalArgumentException("Json Web Key is not valid"); } if (jsonWebKey.getKeyOps() == null) { throw new IllegalArgumentException("Json Web Key's key operations property is not configured"); } if (key.getKeyType() == null) { throw new IllegalArgumentException("Json Web Key's key type property is not configured"); } this.key = jsonWebKey; this.keyId = key.getId(); service = RestProxy.create(CryptographyService.class, pipeline); if (!Strings.isNullOrEmpty(key.getId())) { unpackAndValidateId(key.getId()); cryptographyServiceClient = new CryptographyServiceClient(key.getId(), service); } else { cryptographyServiceClient = null; } initializeCryptoClients(); } /** * Creates a CryptographyAsyncClient that uses {@code pipeline} to service requests * * @param keyId THe Azure Key vault key identifier to use for cryptography operations. 
* @param pipeline HttpPipeline that the HTTP requests and responses flow through. * @param version {@link CryptographyServiceVersion} of the service to be used when making requests. */ CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) { unpackAndValidateId(keyId); this.keyId = keyId; service = RestProxy.create(CryptographyService.class, pipeline); cryptographyServiceClient = new CryptographyServiceClient(keyId, service); this.key = null; } Mono<String> getKeyId() { return Mono.defer(() -> Mono.just(keyId)); } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.async.cryptographyclient.getKeyWithResponse} * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link KeyVaultKey key}. * @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<KeyVaultKey>> getKeyWithResponse() { try { return withContext(context -> getKeyWithResponse(context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Gets the public part of the configured key. The get key operation is applicable to all key types and it requires * the {@code keys/get} permission. * * <p><strong>Code Samples</strong></p> * <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.cryptographyclient.getKey} * * @return A {@link Mono} containing the requested {@link KeyVaultKey key}. 
* @throws ResourceNotFoundException when the configured key doesn't exist in the key vault. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<KeyVaultKey> getKey() { try { return getKeyWithResponse().flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) { return cryptographyServiceClient.getKey(context); } Mono<JsonWebKey> getSecretKey() { try { return withContext(context -> cryptographyServiceClient.getSecretKey(context)).flatMap(FluxUtil::toMono); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports a * single block of data, the size of which is dependent on the target key and the encryption algorithm to be used. * The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys public * portion of the key is used for encryption. This operation requires the keys/encrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the * specified {@code plaintext}. Possible values for assymetric keys include: * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when * a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt * * @param algorithm The algorithm to be used for encryption. * @param plaintext The content to be encrypted. 
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult * contains the encrypted content. * @throws ResourceNotFoundException if the key cannot be found for encryption. * @throws UnsupportedOperationException if the encrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code plainText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) { try { return withContext(context -> encrypt(algorithm, plaintext, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(plaintext, "Plain text content to be encrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.encrypt(algorithm, plaintext, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.ENCRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Encrypt Operation is missing " + "permission/not supported for key with id %s", key.getId())))); } return localKeyCryptographyClient.encryptAsync(algorithm, plaintext, context, key); }); } /** * Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a * single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm to * be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires the * keys/decrypt permission. * * <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting the * specified encrypted content. 
Possible values for assymetric keys include: * {@link EncryptionAlgorithm * EncryptionAlgorithm * * Possible values for symmetric keys include: {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * {@link EncryptionAlgorithm * EncryptionAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content * details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt * * @param algorithm The algorithm to be used for decryption. * @param cipherText The content to be decrypted. * @return A {@link Mono} containing the decrypted blob. * @throws ResourceNotFoundException if the key cannot be found for decryption. * @throws UnsupportedOperationException if the decrypt operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code cipherText} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) { try { return withContext(context -> decrypt(algorithm, cipherText, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText, Context context) { Objects.requireNonNull(algorithm, "Encryption algorithm cannot be null."); Objects.requireNonNull(cipherText, "Cipher text content to be decrypted cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.decrypt(algorithm, cipherText, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.DECRYPT)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Decrypt Operation is not allowed for " + "key with id %s", key.getId())))); } return localKeyCryptographyClient.decryptAsync(algorithm, cipherText, context, 
key); }); } /** * Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and * symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the * signature from the digest. Possible values include: * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Sings the digest. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code digest} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) { try { return withContext(context -> sign(algorithm, digest, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.sign(algorithm, digest, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", key.getId())))); } return localKeyCryptographyClient.signAsync(algorithm, digest, context, key); }); } /** * Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric * keys. In case of asymmetric keys public portion of the key is used to verify the signature . This operation * requires the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify * * @param algorithm The algorithm to use for signing. * @param digest The content from which signature is to be created. * @param signature The signature to be verified. 
* @return A {@link Mono} containing a {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code digest} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) { try { return withContext(context -> verify(algorithm, digest, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(digest, "Digest content cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verify(algorithm, digest, signature, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Verify Operation is not allowed for " + "key with id %s", key.getId())))); } return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key); }); } /** * Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both * symmetric and asymmetric keys. This operation requires the keys/wrapKey permission. * * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified * key content. Possible values include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Wraps the key content. 
Subscribes to the call asynchronously and prints out the wrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param key The key content to be wrapped * @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult * key} contains the wrapped key result. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the wrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code key} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) { try { return withContext(context -> wrapKey(algorithm, key, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(key, "Key content to be wrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.wrapKey(algorithm, key, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.WRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Wrap Key Operation is not allowed for " + "key with id %s", this.key.getId())))); } return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key); }); } /** * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation is * the reverse of the wrap operation. * The unwrap operation supports asymmetric and symmetric keys to unwrap. This operation requires the keys/unwrapKey * permission. 
* * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the * specified encrypted key content. Possible values for asymmetric keys include: * {@link KeyWrapAlgorithm * KeyWrapAlgorithm * Possible values for symmetric keys include: {@link KeyWrapAlgorithm * KeyWrapAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when a * response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey * * @param algorithm The encryption algorithm to use for wrapping the key. * @param encryptedKey The encrypted key content to unwrap. * @return A {@link Mono} containing a the unwrapped key content. * @throws ResourceNotFoundException if the key cannot be found for wrap operation. * @throws UnsupportedOperationException if the unwrap operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code encryptedKey} is null. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) { try { return withContext(context -> unwrapKey(algorithm, encryptedKey, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) { Objects.requireNonNull(algorithm, "Key Wrap algorithm cannot be null."); Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.UNWRAP_KEY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Unwrap Key Operation is not allowed " + "for key with id %s", this.key.getId())))); } return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key); }); } /** * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric * and symmetric keys. This operation requires the keys/sign permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest. * Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a response * has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData * * @param algorithm The algorithm to use for signing. * @param data The content from which signature is to be created. 
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult * the created signature. * @throws ResourceNotFoundException if the key cannot be found for signing. * @throws UnsupportedOperationException if the sign operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm} or {@code data} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) { try { return withContext(context -> signData(algorithm, data, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data to be signed cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.signData(algorithm, data, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format("Sign Operation is not allowed for key " + "with id %s", this.key.getId())))); } return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key); }); } /** * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric * keys and asymmetric keys. * In case of asymmetric keys public portion of the key is used to verify the signature . This operation requires * the keys/verify permission. * * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the * signature. Possible values include: * {@link SignatureAlgorithm * ES512}, {@link SignatureAlgorithm * SignatureAlgorithm * {@link SignatureAlgorithm * * <p><strong>Code Samples</strong></p> * <p>Verifies the signature against the raw data. 
Subscribes to the call asynchronously and prints out the * verification details when a response has been received.</p> * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData * * @param algorithm The algorithm to use for signing. * @param data The raw content against which signature is to be verified. * @param signature The signature to be verified. * @return The {@link Boolean} indicating the signature verification result. * @throws ResourceNotFoundException if the key cannot be found for verifying. * @throws UnsupportedOperationException if the verify operation is not supported or configured on the key. * @throws NullPointerException if {@code algorithm}, {@code data} or {@code signature} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) { try { return withContext(context -> verifyData(algorithm, data, signature, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) { Objects.requireNonNull(algorithm, "Signature algorithm cannot be null."); Objects.requireNonNull(data, "Data cannot be null."); Objects.requireNonNull(signature, "Signature to be verified cannot be null."); return ensureValidKeyAvailable().flatMap(available -> { if (!available) { return cryptographyServiceClient.verifyData(algorithm, data, signature, context); } if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) { return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format( "Verify Operation is not allowed for key with id %s", this.key.getId())))); } return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key); }); } private void unpackAndValidateId(String keyId) { if (CoreUtils.isNullOrEmpty(keyId)) { throw logger.logExceptionAsError(new 
IllegalArgumentException("Key Id is invalid")); } try { URL url = new URL(keyId); String[] tokens = url.getPath().split("/"); String endpoint = url.getProtocol() + ": String keyName = (tokens.length >= 3 ? tokens[2] : null); String version = (tokens.length >= 4 ? tokens[3] : null); this.keyCollection = (tokens.length >= 2 ? tokens[1] : null); if (Strings.isNullOrEmpty(endpoint)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key endpoint in key id is invalid")); } else if (Strings.isNullOrEmpty(keyName)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key name in key id is invalid")); } else if (Strings.isNullOrEmpty(version)) { throw logger.logExceptionAsError(new IllegalArgumentException("Key version in key id is invalid")); } } catch (MalformedURLException e) { throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed", e)); } } private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) { return operations.contains(keyOperation); } private Mono<Boolean> ensureValidKeyAvailable() { boolean keyNotAvailable = (this.key == null && keyCollection != null); if (keyNotAvailable) { if (keyCollection.equals(SECRETS_COLLECTION)) { return getSecretKey().map(jwk -> { this.key = (jwk); initializeCryptoClients(); return this.key.isValid(); }); } else { return getKey().map(kvKey -> { this.key = (kvKey.getKey()); initializeCryptoClients(); return key.isValid(); }); } } else { return Mono.defer(() -> Mono.just(true)); } } CryptographyServiceClient getCryptographyServiceClient() { return cryptographyServiceClient; } void setCryptographyServiceClient(CryptographyServiceClient serviceClient) { this.cryptographyServiceClient = serviceClient; } }
Would be great to have tests that validate all the properties that are in CBS channel.
public Mono<OffsetDateTime> authorize(String tokenAudience, String scopes) { return cbsChannelMono.flatMap(channel -> credential.getToken(new TokenRequestContext().addScopes(scopes)) .flatMap(accessToken -> { final Message request = Proton.message(); final Map<String, Object> properties = new HashMap<>(); properties.put(PUT_TOKEN_OPERATION, PUT_TOKEN_OPERATION_VALUE); properties.put(PUT_TOKEN_EXPIRY, Date.from(accessToken.getExpiresAt().toInstant())); properties.put(PUT_TOKEN_TYPE, authorizationType.getTokenType()); properties.put(PUT_TOKEN_AUDIENCE, tokenAudience); final ApplicationProperties applicationProperties = new ApplicationProperties(properties); request.setApplicationProperties(applicationProperties); request.setBody(new AmqpValue(accessToken.getToken())); return channel.sendWithAck(request).thenReturn(accessToken.getExpiresAt()); })); }
properties.put(PUT_TOKEN_EXPIRY, Date.from(accessToken.getExpiresAt().toInstant()));
public Mono<OffsetDateTime> authorize(String tokenAudience, String scopes) { return cbsChannelMono.flatMap(channel -> credential.getToken(new TokenRequestContext().addScopes(scopes)) .flatMap(accessToken -> { final Message request = Proton.message(); final Map<String, Object> properties = new HashMap<>(); properties.put(PUT_TOKEN_OPERATION, PUT_TOKEN_OPERATION_VALUE); properties.put(PUT_TOKEN_EXPIRY, Date.from(accessToken.getExpiresAt().toInstant())); properties.put(PUT_TOKEN_TYPE, authorizationType.getTokenType()); properties.put(PUT_TOKEN_AUDIENCE, tokenAudience); final ApplicationProperties applicationProperties = new ApplicationProperties(properties); request.setApplicationProperties(applicationProperties); request.setBody(new AmqpValue(accessToken.getToken())); return channel.sendWithAck(request).thenReturn(accessToken.getExpiresAt()); })); }
class ClaimsBasedSecurityChannel implements ClaimsBasedSecurityNode { static final String PUT_TOKEN_TYPE = "type"; static final String PUT_TOKEN_AUDIENCE = "name"; static final String PUT_TOKEN_EXPIRY = "expiration"; private static final String PUT_TOKEN_OPERATION = "operation"; private static final String PUT_TOKEN_OPERATION_VALUE = "put-token"; private final TokenCredential credential; private final Mono<RequestResponseChannel> cbsChannelMono; private final CbsAuthorizationType authorizationType; private final AmqpRetryOptions retryOptions; public ClaimsBasedSecurityChannel(Mono<RequestResponseChannel> responseChannelMono, TokenCredential tokenCredential, CbsAuthorizationType authorizationType, AmqpRetryOptions retryOptions) { this.authorizationType = Objects.requireNonNull(authorizationType, "'authorizationType' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.credential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null."); this.cbsChannelMono = Objects.requireNonNull(responseChannelMono, "'responseChannelMono' cannot be null."); } @Override @Override public void close() { final RequestResponseChannel channel = cbsChannelMono.block(retryOptions.getTryTimeout()); if (channel != null) { channel.dispose(); } } }
class ClaimsBasedSecurityChannel implements ClaimsBasedSecurityNode { static final String PUT_TOKEN_TYPE = "type"; static final String PUT_TOKEN_AUDIENCE = "name"; static final String PUT_TOKEN_EXPIRY = "expiration"; private static final String PUT_TOKEN_OPERATION = "operation"; private static final String PUT_TOKEN_OPERATION_VALUE = "put-token"; private final TokenCredential credential; private final Mono<RequestResponseChannel> cbsChannelMono; private final CbsAuthorizationType authorizationType; private final AmqpRetryOptions retryOptions; public ClaimsBasedSecurityChannel(Mono<RequestResponseChannel> responseChannelMono, TokenCredential tokenCredential, CbsAuthorizationType authorizationType, AmqpRetryOptions retryOptions) { this.authorizationType = Objects.requireNonNull(authorizationType, "'authorizationType' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.credential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null."); this.cbsChannelMono = Objects.requireNonNull(responseChannelMono, "'responseChannelMono' cannot be null."); } @Override @Override public void close() { final RequestResponseChannel channel = cbsChannelMono.block(retryOptions.getTryTimeout()); if (channel != null) { channel.dispose(); } } }
I'll add one.
public Mono<OffsetDateTime> authorize(String tokenAudience, String scopes) { return cbsChannelMono.flatMap(channel -> credential.getToken(new TokenRequestContext().addScopes(scopes)) .flatMap(accessToken -> { final Message request = Proton.message(); final Map<String, Object> properties = new HashMap<>(); properties.put(PUT_TOKEN_OPERATION, PUT_TOKEN_OPERATION_VALUE); properties.put(PUT_TOKEN_EXPIRY, Date.from(accessToken.getExpiresAt().toInstant())); properties.put(PUT_TOKEN_TYPE, authorizationType.getTokenType()); properties.put(PUT_TOKEN_AUDIENCE, tokenAudience); final ApplicationProperties applicationProperties = new ApplicationProperties(properties); request.setApplicationProperties(applicationProperties); request.setBody(new AmqpValue(accessToken.getToken())); return channel.sendWithAck(request).thenReturn(accessToken.getExpiresAt()); })); }
properties.put(PUT_TOKEN_EXPIRY, Date.from(accessToken.getExpiresAt().toInstant()));
public Mono<OffsetDateTime> authorize(String tokenAudience, String scopes) { return cbsChannelMono.flatMap(channel -> credential.getToken(new TokenRequestContext().addScopes(scopes)) .flatMap(accessToken -> { final Message request = Proton.message(); final Map<String, Object> properties = new HashMap<>(); properties.put(PUT_TOKEN_OPERATION, PUT_TOKEN_OPERATION_VALUE); properties.put(PUT_TOKEN_EXPIRY, Date.from(accessToken.getExpiresAt().toInstant())); properties.put(PUT_TOKEN_TYPE, authorizationType.getTokenType()); properties.put(PUT_TOKEN_AUDIENCE, tokenAudience); final ApplicationProperties applicationProperties = new ApplicationProperties(properties); request.setApplicationProperties(applicationProperties); request.setBody(new AmqpValue(accessToken.getToken())); return channel.sendWithAck(request).thenReturn(accessToken.getExpiresAt()); })); }
// NOTE(review): this snippet contains two consecutive @Override annotations with no member between
// them — `@Override @Override public void close()` does not compile (duplicate annotation), and the
// overridden authorize(...) method appears to have been elided during extraction. Kept verbatim.
class ClaimsBasedSecurityChannel implements ClaimsBasedSecurityNode { static final String PUT_TOKEN_TYPE = "type"; static final String PUT_TOKEN_AUDIENCE = "name"; static final String PUT_TOKEN_EXPIRY = "expiration"; private static final String PUT_TOKEN_OPERATION = "operation"; private static final String PUT_TOKEN_OPERATION_VALUE = "put-token"; private final TokenCredential credential; private final Mono<RequestResponseChannel> cbsChannelMono; private final CbsAuthorizationType authorizationType; private final AmqpRetryOptions retryOptions; public ClaimsBasedSecurityChannel(Mono<RequestResponseChannel> responseChannelMono, TokenCredential tokenCredential, CbsAuthorizationType authorizationType, AmqpRetryOptions retryOptions) { this.authorizationType = Objects.requireNonNull(authorizationType, "'authorizationType' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.credential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null."); this.cbsChannelMono = Objects.requireNonNull(responseChannelMono, "'responseChannelMono' cannot be null."); } @Override @Override public void close() { final RequestResponseChannel channel = cbsChannelMono.block(retryOptions.getTryTimeout()); if (channel != null) { channel.dispose(); } } }
// NOTE(review): duplicate of the preceding context dump. As written it does not compile: two
// consecutive @Override annotations with no member between them suggest the authorize(...)
// override was elided when this snippet was extracted. Kept byte-identical below.
class ClaimsBasedSecurityChannel implements ClaimsBasedSecurityNode { static final String PUT_TOKEN_TYPE = "type"; static final String PUT_TOKEN_AUDIENCE = "name"; static final String PUT_TOKEN_EXPIRY = "expiration"; private static final String PUT_TOKEN_OPERATION = "operation"; private static final String PUT_TOKEN_OPERATION_VALUE = "put-token"; private final TokenCredential credential; private final Mono<RequestResponseChannel> cbsChannelMono; private final CbsAuthorizationType authorizationType; private final AmqpRetryOptions retryOptions; public ClaimsBasedSecurityChannel(Mono<RequestResponseChannel> responseChannelMono, TokenCredential tokenCredential, CbsAuthorizationType authorizationType, AmqpRetryOptions retryOptions) { this.authorizationType = Objects.requireNonNull(authorizationType, "'authorizationType' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.credential = Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null."); this.cbsChannelMono = Objects.requireNonNull(responseChannelMono, "'responseChannelMono' cannot be null."); } @Override @Override public void close() { final RequestResponseChannel channel = cbsChannelMono.block(retryOptions.getTryTimeout()); if (channel != null) { channel.dispose(); } } }
`configuredLogLevel` is only ever set here in the constructor. I wonder if it is worth a micro-optimisation: determine once, in the constructor, which log levels are enabled and pre-compute the boolean conditions, to simplify the `isTraceEnabled()` etc. methods?
public DefaultLogger(String className) { String classPath; try { classPath = Class.forName(className).getCanonicalName(); } catch (ClassNotFoundException e) { classPath = className; } this.classPath = classPath; this.configuredLogLevel = LogLevel.fromString(Configuration.getGlobalConfiguration().get(Configuration.PROPERTY_AZURE_LOG_LEVEL)) .getLogLevel(); }
.getLogLevel();
/**
 * Creates a logger for {@code className}: resolves the canonical class path (falling back to the
 * raw name when the class cannot be loaded) and pre-computes, once, whether each log level is
 * enabled under the globally configured AZURE_LOG_LEVEL.
 */
public DefaultLogger(String className) {
    String classPath;
    try {
        classPath = Class.forName(className).getCanonicalName();
    } catch (ClassNotFoundException e) {
        // Class is not on the classpath; log under the caller-supplied name instead.
        classPath = className;
    }
    this.classPath = classPath;
    int configuredLogLevel =
        LogLevel.fromString(Configuration.getGlobalConfiguration().get(Configuration.PROPERTY_AZURE_LOG_LEVEL))
            .getLogLevel();
    // NOTE(review): trace uses '>' while every other level uses '>=' — this disables trace when the
    // configured level equals VERBOSE. Confirm that asymmetry is intentional.
    isTraceEnabled = LogLevel.VERBOSE.getLogLevel() > configuredLogLevel;
    isDebugEnabled = LogLevel.VERBOSE.getLogLevel() >= configuredLogLevel;
    isInfoEnabled = LogLevel.INFORMATIONAL.getLogLevel() >= configuredLogLevel;
    isWarnEnabled = LogLevel.WARNING.getLogLevel() >= configuredLogLevel;
    isErrorEnabled = LogLevel.ERROR.getLogLevel() >= configuredLogLevel;
}
class name passes in. */
class name passes in. */
Yeah, I had initially done it that way, with booleans for each of the log levels, but the performance improvement was not noticeable compared to the current approach. Moreover, the `is*Enabled` methods in DefaultLogger are already much faster than their Logback counterparts (557,664,530 ops/sec vs 501,866,764 ops/sec), so I didn't want to micro-optimize. However, if you think evaluating it once in the constructor will improve overall readability, I am fine with making this change.
/**
 * Creates a logger for {@code className}, resolving the canonical class path when the class can
 * be loaded and falling back to the supplied name otherwise. Reads the configured log level once
 * from the global AZURE_LOG_LEVEL configuration.
 */
public DefaultLogger(String className) {
    String classPath;
    try {
        classPath = Class.forName(className).getCanonicalName();
    } catch (ClassNotFoundException e) {
        // Class not loadable from here; use the raw name the caller passed in.
        classPath = className;
    }
    this.classPath = classPath;
    this.configuredLogLevel =
        LogLevel.fromString(Configuration.getGlobalConfiguration().get(Configuration.PROPERTY_AZURE_LOG_LEVEL))
            .getLogLevel();
}
.getLogLevel();
/**
 * Creates a logger for {@code className}: resolves the canonical class path (falling back to the
 * raw name when the class cannot be loaded) and pre-computes, once, whether each log level is
 * enabled under the globally configured AZURE_LOG_LEVEL.
 */
public DefaultLogger(String className) {
    String classPath;
    try {
        classPath = Class.forName(className).getCanonicalName();
    } catch (ClassNotFoundException e) {
        // Class is not on the classpath; log under the caller-supplied name instead.
        classPath = className;
    }
    this.classPath = classPath;
    int configuredLogLevel =
        LogLevel.fromString(Configuration.getGlobalConfiguration().get(Configuration.PROPERTY_AZURE_LOG_LEVEL))
            .getLogLevel();
    // NOTE(review): trace uses '>' while every other level uses '>=' — this disables trace when the
    // configured level equals VERBOSE. Confirm that asymmetry is intentional.
    isTraceEnabled = LogLevel.VERBOSE.getLogLevel() > configuredLogLevel;
    isDebugEnabled = LogLevel.VERBOSE.getLogLevel() >= configuredLogLevel;
    isInfoEnabled = LogLevel.INFORMATIONAL.getLogLevel() >= configuredLogLevel;
    isWarnEnabled = LogLevel.WARNING.getLogLevel() >= configuredLogLevel;
    isErrorEnabled = LogLevel.ERROR.getLogLevel() >= configuredLogLevel;
}
class name passes in. */
class name passes in. */
Does this mean we'll emit one span for the first try only? There should be one span per send() call. If there is more than one try, the span should wrap all of them: the duration should be the duration of the whole send() call, and the result should be the outcome of all tries (i.e. eventually sent or failed).
/**
 * Sends the batch to the associated Event Hub.
 *
 * Fixes relative to the previous version: the tracing span is started BEFORE entering the retry
 * pipeline, so one span wraps every try and its duration/result reflect the whole send() call
 * rather than a single attempt; the redundant doOnError (which ended the span a second time —
 * doOnEach already receives error signals) is removed; routine per-call logs are demoted to
 * verbose.
 *
 * @param batch The batch to send to the service.
 * @return A {@link Mono} that completes when the batch is pushed to the service.
 */
public Mono<Void> send(EventDataBatch batch) {
    if (batch == null) {
        return monoError(logger, new NullPointerException("'batch' cannot be null."));
    } else if (batch.getEvents().isEmpty()) {
        logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY);
        return Mono.empty();
    }

    if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) {
        logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId());
    } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) {
        logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(),
            batch.getPartitionKey());
    } else {
        logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount());
    }

    final String partitionKey = batch.getPartitionKey();
    final boolean isTracingEnabled = tracerProvider.isEnabled();
    final AtomicReference<Context> parentContext = isTracingEnabled
        ? new AtomicReference<>(Context.NONE)
        : null;
    Context sharedContext = null;
    final List<Message> messages = new ArrayList<>();

    for (int i = 0; i < batch.getEvents().size(); i++) {
        final EventData event = batch.getEvents().get(i);
        if (isTracingEnabled) {
            parentContext.set(event.getContext());
            // The first event's context seeds the shared span builder; each event is linked to it.
            if (i == 0) {
                sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get());
            }
            tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext()));
        }

        final Message message = messageSerializer.serialize(event);
        if (!CoreUtils.isNullOrEmpty(partitionKey)) {
            // Stamp the partition key on each message so the service routes them together.
            final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null
                ? new MessageAnnotations(new HashMap<>())
                : message.getMessageAnnotations();
            messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
            message.setMessageAnnotations(messageAnnotations);
        }
        messages.add(message);
    }

    // Start the span up-front so a single span covers all tries inside withRetry. Entity path and
    // host name come from the client's own fields rather than the (per-try) send link.
    if (isTracingEnabled) {
        final Context finalSharedContext = sharedContext == null
            ? Context.NONE
            : sharedContext
                .addData(ENTITY_PATH_KEY, eventHubName)
                .addData(HOST_NAME_KEY, fullyQualifiedNamespace);
        parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND));
    }

    // doOnEach observes both completion and error signals, so success and failure each end the
    // span here; no separate doOnError is needed.
    return withRetry(getSendLink(batch.getPartitionId())
        .flatMap(link -> messages.size() == 1
            ? link.send(messages.get(0))
            : link.send(messages)),
        retryOptions.getTryTimeout(), retryPolicy)
        .doOnEach(signal -> {
            if (isTracingEnabled) {
                tracerProvider.endSpan(parentContext.get(), signal);
            }
        });
}
if (isTracingEnabled && !parentContext.get().getData(HOST_NAME_KEY).isPresent()) {
/**
 * Sends the batch to the associated Event Hub. The tracing span is started before the retry
 * pipeline so that a single span wraps every try of the send() call.
 *
 * @param batch The batch to send to the service.
 * @return A {@link Mono} that completes when the batch is pushed to the service.
 */
public Mono<Void> send(EventDataBatch batch) {
    if (batch == null) {
        return monoError(logger, new NullPointerException("'batch' cannot be null."));
    } else if (batch.getEvents().isEmpty()) {
        logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY);
        return Mono.empty();
    }
    // Routine per-call diagnostics; verbose keeps default log output quiet.
    if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) {
        logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId());
    } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) {
        logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(),
            batch.getPartitionKey());
    } else {
        logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount());
    }
    final String partitionKey = batch.getPartitionKey();
    final boolean isTracingEnabled = tracerProvider.isEnabled();
    final AtomicReference<Context> parentContext = isTracingEnabled ? new AtomicReference<>(Context.NONE) : null;
    Context sharedContext = null;
    final List<Message> messages = new ArrayList<>();
    for (int i = 0; i < batch.getEvents().size(); i++) {
        final EventData event = batch.getEvents().get(i);
        if (isTracingEnabled) {
            parentContext.set(event.getContext());
            // The first event's context seeds the shared span builder; every event is linked to it.
            if (i == 0) {
                sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get());
            }
            tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext()));
        }
        final Message message = messageSerializer.serialize(event);
        if (!CoreUtils.isNullOrEmpty(partitionKey)) {
            // Stamp the partition key on each message's annotations so the service routes them together.
            final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null
                ? new MessageAnnotations(new HashMap<>())
                : message.getMessageAnnotations();
            messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
            message.setMessageAnnotations(messageAnnotations);
        }
        messages.add(message);
    }
    if (isTracingEnabled) {
        // Start the span up-front so its duration and outcome cover all tries of the send.
        final Context finalSharedContext = sharedContext == null
            ? Context.NONE
            : sharedContext.addData(ENTITY_PATH_KEY, eventHubName).addData(HOST_NAME_KEY, fullyQualifiedNamespace);
        parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND));
    }
    // doOnEach observes both completion and error signals, so success and failure each end the span here.
    return withRetry(getSendLink(batch.getPartitionId())
        .flatMap(link -> messages.size() == 1 ? link.send(messages.get(0)) : link.send(messages)),
        retryOptions.getTryTimeout(), retryPolicy)
        .doOnEach(signal -> {
            if (isTracingEnabled) {
                tracerProvider.endSpan(parentContext.get(), signal);
            }
        });
}
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? 
eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? 
options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? 
eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? 
options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
>does it mean we'll send one span for the first try only? yes, it will only start the first send span. > it should wrap all of them does this mean we want to add links between all the send spans but start only a single span?
public Mono<Void> send(EventDataBatch batch) { if (batch == null) { return monoError(logger, new NullPointerException("'batch' cannot be null.")); } else if (batch.getEvents().isEmpty()) { logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY); return Mono.empty(); } if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) { logger.info("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId()); } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) { logger.info("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(), batch.getPartitionKey()); } else { logger.info("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount()); } final String partitionKey = batch.getPartitionKey(); final boolean isTracingEnabled = tracerProvider.isEnabled(); final AtomicReference<Context> parentContext = isTracingEnabled ? new AtomicReference<>(Context.NONE) : null; Context sharedContext = null; final List<Message> messages = new ArrayList<>(); for (int i = 0; i < batch.getEvents().size(); i++) { final EventData event = batch.getEvents().get(i); if (isTracingEnabled) { parentContext.set(event.getContext()); if (i == 0) { sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get()); } tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext())); } final Message message = messageSerializer.serialize(event); if (!CoreUtils.isNullOrEmpty(partitionKey)) { final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null ? new MessageAnnotations(new HashMap<>()) : message.getMessageAnnotations(); messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey); message.setMessageAnnotations(messageAnnotations); } messages.add(message); } final Context finalSharedContext = sharedContext != null ? 
sharedContext : Context.NONE; return withRetry( getSendLink(batch.getPartitionId()).flatMap(link -> { if (isTracingEnabled && !parentContext.get().getData(HOST_NAME_KEY).isPresent()) { Context entityContext = finalSharedContext.addData(ENTITY_PATH_KEY, link.getEntityPath()); parentContext.set(tracerProvider.startSpan( entityContext.addData(HOST_NAME_KEY, link.getHostname()), ProcessKind.SEND)); } return messages.size() == 1 ? link.send(messages.get(0)) : link.send(messages); }) .doOnEach(signal -> { if (isTracingEnabled) { tracerProvider.endSpan(parentContext.get(), signal); } }) .doOnError(error -> { if (isTracingEnabled) { tracerProvider.endSpan(parentContext.get(), Signal.error(error)); } }), retryOptions.getTryTimeout(), retryPolicy); }
if (isTracingEnabled && !parentContext.get().getData(HOST_NAME_KEY).isPresent()) {
public Mono<Void> send(EventDataBatch batch) { if (batch == null) { return monoError(logger, new NullPointerException("'batch' cannot be null.")); } else if (batch.getEvents().isEmpty()) { logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY); return Mono.empty(); } if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) { logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId()); } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) { logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(), batch.getPartitionKey()); } else { logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount()); } final String partitionKey = batch.getPartitionKey(); final boolean isTracingEnabled = tracerProvider.isEnabled(); final AtomicReference<Context> parentContext = isTracingEnabled ? new AtomicReference<>(Context.NONE) : null; Context sharedContext = null; final List<Message> messages = new ArrayList<>(); for (int i = 0; i < batch.getEvents().size(); i++) { final EventData event = batch.getEvents().get(i); if (isTracingEnabled) { parentContext.set(event.getContext()); if (i == 0) { sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get()); } tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext())); } final Message message = messageSerializer.serialize(event); if (!CoreUtils.isNullOrEmpty(partitionKey)) { final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null ? new MessageAnnotations(new HashMap<>()) : message.getMessageAnnotations(); messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey); message.setMessageAnnotations(messageAnnotations); } messages.add(message); } if (isTracingEnabled) { final Context finalSharedContext = sharedContext == null ? 
Context.NONE : sharedContext.addData(ENTITY_PATH_KEY, eventHubName).addData(HOST_NAME_KEY, fullyQualifiedNamespace); parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND)); } return withRetry(getSendLink(batch.getPartitionId()) .flatMap(link -> messages.size() == 1 ? link.send(messages.get(0)) : link.send(messages)), retryOptions.getTryTimeout(), retryPolicy) .doOnEach(signal -> { if (isTracingEnabled) { tracerProvider.endSpan(parentContext.get(), signal); } }); }
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? 
eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? 
options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? 
eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? 
options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
We still create a span per message and link all of those per-message spans to the single 'send' span.
/**
 * Sends the batch of events to the associated Event Hub.
 *
 * @param batch The batch to send to the service. An empty batch is a no-op that completes immediately.
 * @return A {@link Mono} that completes when the batch is pushed to the service.
 * @throws NullPointerException if {@code batch} is {@code null} (surfaced through the returned {@link Mono}).
 */
public Mono<Void> send(EventDataBatch batch) {
    if (batch == null) {
        return monoError(logger, new NullPointerException("'batch' cannot be null."));
    } else if (batch.getEvents().isEmpty()) {
        logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY);
        return Mono.empty();
    }

    // verbose, not info: this fires on every batch and would otherwise flood INFO-level logs.
    if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) {
        logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId());
    } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) {
        logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(),
            batch.getPartitionKey());
    } else {
        logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount());
    }

    final String partitionKey = batch.getPartitionKey();
    final boolean isTracingEnabled = tracerProvider.isEnabled();
    final AtomicReference<Context> parentContext = isTracingEnabled
        ? new AtomicReference<>(Context.NONE)
        : null;

    Context sharedContext = null;
    final List<Message> messages = new ArrayList<>();

    for (int i = 0; i < batch.getEvents().size(); i++) {
        final EventData event = batch.getEvents().get(i);
        if (isTracingEnabled) {
            parentContext.set(event.getContext());
            if (i == 0) {
                // All per-event spans are linked to one shared 'send' span built from the first event.
                sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get());
            }
            tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext()));
        }
        final Message message = messageSerializer.serialize(event);

        if (!CoreUtils.isNullOrEmpty(partitionKey)) {
            final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null
                ? new MessageAnnotations(new HashMap<>())
                : message.getMessageAnnotations();
            messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
            message.setMessageAnnotations(messageAnnotations);
        }
        messages.add(message);
    }

    if (isTracingEnabled) {
        // Start the single 'send' span once, BEFORE withRetry. Starting it inside the retried
        // flatMap (as before) created a fresh span on every retry attempt that was never ended.
        // Entity path and host name are already known from this client's state.
        final Context finalSharedContext = sharedContext == null
            ? Context.NONE
            : sharedContext
                .addData(ENTITY_PATH_KEY, eventHubName)
                .addData(HOST_NAME_KEY, fullyQualifiedNamespace);
        parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND));
    }

    // doOnEach observes completion AND error signals, so it ends the span exactly once; the
    // previous additional doOnError ended the span a second time on failure.
    return withRetry(getSendLink(batch.getPartitionId())
        .flatMap(link -> messages.size() == 1
            ? link.send(messages.get(0))
            : link.send(messages)),
        retryOptions.getTryTimeout(), retryPolicy)
        .doOnEach(signal -> {
            if (isTracingEnabled) {
                tracerProvider.endSpan(parentContext.get(), signal);
            }
        });
}
if (isTracingEnabled && !parentContext.get().getData(HOST_NAME_KEY).isPresent()) {
/**
 * Sends the batch of events to the associated Event Hub.
 *
 * @param batch The batch to send to the service. An empty batch completes immediately without a send.
 * @return A {@link Mono} that completes when the batch is pushed to the service.
 * @throws NullPointerException if {@code batch} is {@code null} (surfaced through the returned {@link Mono}).
 */
public Mono<Void> send(EventDataBatch batch) {
    if (batch == null) {
        return monoError(logger, new NullPointerException("'batch' cannot be null."));
    } else if (batch.getEvents().isEmpty()) {
        logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY);
        return Mono.empty();
    }
    // verbose: these fire for every batch, so keep them out of INFO-level logs.
    if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) {
        logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId());
    } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) {
        logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(), batch.getPartitionKey());
    } else {
        logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount());
    }
    final String partitionKey = batch.getPartitionKey();
    final boolean isTracingEnabled = tracerProvider.isEnabled();
    // AtomicReference so the lambdas below can observe the span context started after the loop.
    final AtomicReference<Context> parentContext = isTracingEnabled ? new AtomicReference<>(Context.NONE) : null;
    Context sharedContext = null;
    final List<Message> messages = new ArrayList<>();
    for (int i = 0; i < batch.getEvents().size(); i++) {
        final EventData event = batch.getEvents().get(i);
        if (isTracingEnabled) {
            parentContext.set(event.getContext());
            if (i == 0) {
                // One shared 'send' span; each event's span context is linked to it below.
                sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get());
            }
            tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext()));
        }
        final Message message = messageSerializer.serialize(event);
        if (!CoreUtils.isNullOrEmpty(partitionKey)) {
            // Stamp the partition key as an AMQP message annotation so the service routes it.
            final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null ? new MessageAnnotations(new HashMap<>()) : message.getMessageAnnotations();
            messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
            message.setMessageAnnotations(messageAnnotations);
        }
        messages.add(message);
    }
    if (isTracingEnabled) {
        // Start the single 'send' span once, before withRetry, so retries share one span
        // instead of starting a new (leaked) span per attempt.
        final Context finalSharedContext = sharedContext == null ? Context.NONE : sharedContext.addData(ENTITY_PATH_KEY, eventHubName).addData(HOST_NAME_KEY, fullyQualifiedNamespace);
        parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND));
    }
    // doOnEach observes both completion and error signals, ending the span exactly once.
    return withRetry(getSendLink(batch.getPartitionId()).flatMap(link -> messages.size() == 1 ? link.send(messages.get(0)) : link.send(messages)), retryOptions.getTryTimeout(), retryPolicy).doOnEach(signal -> {
        if (isTracingEnabled) {
            tracerProvider.endSpan(parentContext.get(), signal);
        }
    });
}
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? 
eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? 
options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? 
eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? 
options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
is it called on each try? will it end the span after first retry?
public Mono<Void> send(EventDataBatch batch) { if (batch == null) { return monoError(logger, new NullPointerException("'batch' cannot be null.")); } else if (batch.getEvents().isEmpty()) { logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY); return Mono.empty(); } if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) { logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId()); } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) { logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(), batch.getPartitionKey()); } else { logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount()); } final String partitionKey = batch.getPartitionKey(); final boolean isTracingEnabled = tracerProvider.isEnabled(); final AtomicReference<Context> parentContext = isTracingEnabled ? new AtomicReference<>(Context.NONE) : null; Context sharedContext = null; final List<Message> messages = new ArrayList<>(); for (int i = 0; i < batch.getEvents().size(); i++) { final EventData event = batch.getEvents().get(i); if (isTracingEnabled) { parentContext.set(event.getContext()); if (i == 0) { sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get()); } tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext())); } final Message message = messageSerializer.serialize(event); if (!CoreUtils.isNullOrEmpty(partitionKey)) { final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null ? new MessageAnnotations(new HashMap<>()) : message.getMessageAnnotations(); messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey); message.setMessageAnnotations(messageAnnotations); } messages.add(message); } if (isTracingEnabled) { final Context finalSharedContext = sharedContext == null ? 
Context.NONE : sharedContext.addData(ENTITY_PATH_KEY, eventHubName).addData(HOST_NAME_KEY, fullyQualifiedNamespace); parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND)); } return withRetry(getSendLink(batch.getPartitionId()) .flatMap(link -> messages.size() == 1 ? link.send(messages.get(0)) : link.send(messages)), retryOptions.getTryTimeout(), retryPolicy) .doOnEach(signal -> { if (isTracingEnabled) { tracerProvider.endSpan(parentContext.get(), signal); } }); }
.doOnEach(signal -> {
public Mono<Void> send(EventDataBatch batch) { if (batch == null) { return monoError(logger, new NullPointerException("'batch' cannot be null.")); } else if (batch.getEvents().isEmpty()) { logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY); return Mono.empty(); } if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) { logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId()); } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) { logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(), batch.getPartitionKey()); } else { logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount()); } final String partitionKey = batch.getPartitionKey(); final boolean isTracingEnabled = tracerProvider.isEnabled(); final AtomicReference<Context> parentContext = isTracingEnabled ? new AtomicReference<>(Context.NONE) : null; Context sharedContext = null; final List<Message> messages = new ArrayList<>(); for (int i = 0; i < batch.getEvents().size(); i++) { final EventData event = batch.getEvents().get(i); if (isTracingEnabled) { parentContext.set(event.getContext()); if (i == 0) { sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get()); } tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext())); } final Message message = messageSerializer.serialize(event); if (!CoreUtils.isNullOrEmpty(partitionKey)) { final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null ? new MessageAnnotations(new HashMap<>()) : message.getMessageAnnotations(); messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey); message.setMessageAnnotations(messageAnnotations); } messages.add(message); } if (isTracingEnabled) { final Context finalSharedContext = sharedContext == null ? 
Context.NONE : sharedContext.addData(ENTITY_PATH_KEY, eventHubName).addData(HOST_NAME_KEY, fullyQualifiedNamespace); parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND)); } return withRetry(getSendLink(batch.getPartitionId()) .flatMap(link -> messages.size() == 1 ? link.send(messages.get(0)) : link.send(messages)), retryOptions.getTryTimeout(), retryPolicy) .doOnEach(signal -> { if (isTracingEnabled) { tracerProvider.endSpan(parentContext.get(), signal); } }); }
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? 
eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? 
options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? 
eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? 
options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
does it mean we no longer set error if all retries has failed?
private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); }
final String partitionKey = options.getPartitionKey();
private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); }
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ public Mono<Void> send(EventDataBatch batch) { if (batch == null) { return monoError(logger, new NullPointerException("'batch' cannot be null.")); } else if (batch.getEvents().isEmpty()) { logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY); return Mono.empty(); } if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) { logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId()); } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) { logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(), batch.getPartitionKey()); } else { logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount()); } final String partitionKey = batch.getPartitionKey(); final boolean isTracingEnabled = tracerProvider.isEnabled(); final AtomicReference<Context> parentContext = isTracingEnabled ? new AtomicReference<>(Context.NONE) : null; Context sharedContext = null; final List<Message> messages = new ArrayList<>(); for (int i = 0; i < batch.getEvents().size(); i++) { final EventData event = batch.getEvents().get(i); if (isTracingEnabled) { parentContext.set(event.getContext()); if (i == 0) { sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get()); } tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext())); } final Message message = messageSerializer.serialize(event); if (!CoreUtils.isNullOrEmpty(partitionKey)) { final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null ? 
new MessageAnnotations(new HashMap<>()) : message.getMessageAnnotations(); messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey); message.setMessageAnnotations(messageAnnotations); } messages.add(message); } if (isTracingEnabled) { final Context finalSharedContext = sharedContext == null ? Context.NONE : sharedContext.addData(ENTITY_PATH_KEY, eventHubName).addData(HOST_NAME_KEY, fullyQualifiedNamespace); parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND)); } return withRetry(getSendLink(batch.getPartitionId()) .flatMap(link -> messages.size() == 1 ? link.send(messages.get(0)) : link.send(messages)), retryOptions.getTryTimeout(), retryPolicy) .doOnEach(signal -> { if (isTracingEnabled) { tracerProvider.endSpan(parentContext.get(), signal); } }); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. 
Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { 
existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
class EventHubProducerAsyncClient implements Closeable {
    private static final int MAX_PARTITION_KEY_LENGTH = 128;
    private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s";

    private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions();
    private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions();

    private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class);
    private final AtomicBoolean isDisposed = new AtomicBoolean();
    private final String fullyQualifiedNamespace;
    private final String eventHubName;
    private final EventHubConnectionProcessor connectionProcessor;
    private final AmqpRetryOptions retryOptions;
    private final AmqpRetryPolicy retryPolicy;
    private final TracerProvider tracerProvider;
    private final MessageSerializer messageSerializer;
    // When the connection is shared with other clients, close() must not dispose it.
    private final boolean isSharedConnection;

    /**
     * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition
     * when a partition id is set in {@link CreateBatchOptions}, or load balance the messages amongst available
     * partitions otherwise.
     *
     * @param fullyQualifiedNamespace Fully qualified Event Hubs namespace.
     * @param eventHubName Name of the Event Hub to send events to.
     * @param connectionProcessor Provides the active AMQP connection.
     * @param retryOptions Retry options for send operations.
     * @param tracerProvider Tracing implementation; no-ops when tracing is not enabled.
     * @param messageSerializer Serializes {@link EventData} into AMQP messages.
     * @param isSharedConnection {@code true} if the underlying connection is shared with other clients.
     * @throws NullPointerException if any required argument is null.
     */
    EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName,
        EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider,
        MessageSerializer messageSerializer, boolean isSharedConnection) {
        this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
            "'fullyQualifiedNamespace' cannot be null.");
        this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
        this.connectionProcessor = Objects.requireNonNull(connectionProcessor,
            "'connectionProcessor' cannot be null.");
        this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null.");
        this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
        this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");

        this.retryPolicy = getRetryPolicy(retryOptions);
        this.isSharedConnection = isSharedConnection;
    }

    /**
     * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to
     * {@code {yournamespace}.servicebus.windows.net}.
     *
     * @return The fully qualified Event Hubs namespace that the connection is associated with.
     */
    public String getFullyQualifiedNamespace() {
        return fullyQualifiedNamespace;
    }

    /**
     * Gets the Event Hub name this client interacts with.
     *
     * @return The Event Hub name this client interacts with.
     */
    public String getEventHubName() {
        return eventHubName;
    }

    /**
     * Retrieves information about an Event Hub, including the number of partitions present and their identifiers.
     *
     * @return The set of information for the Event Hub that this client is associated with.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<EventHubProperties> getEventHubProperties() {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode())
            .flatMap(EventHubManagementNode::getEventHubProperties);
    }

    /**
     * Retrieves the identifiers for the partitions of an Event Hub.
     *
     * @return A Flux of identifiers for the partitions of an Event Hub.
     */
    public Flux<String> getPartitionIds() {
        return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds()));
    }

    /**
     * Retrieves information about a specific partition for an Event Hub, including elements that describe the
     * available events in the partition event stream.
     *
     * @param partitionId The unique identifier of a partition associated with the Event Hub.
     * @return The set of information for the requested partition under the Event Hub this client is associated with.
     * @throws NullPointerException if {@code partitionId} is null.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PartitionProperties> getPartitionProperties(String partitionId) {
        return connectionProcessor.flatMap(connection -> connection.getManagementNode())
            .flatMap(node -> node.getPartitionProperties(partitionId));
    }

    /**
     * Creates an {@link EventDataBatch} that can fit as many events as the transport allows.
     *
     * @return A new {@link EventDataBatch} that can fit as many events as the transport allows.
     */
    public Mono<EventDataBatch> createBatch() {
        return createBatch(DEFAULT_BATCH_OPTIONS);
    }

    /**
     * Creates an {@link EventDataBatch} configured with the options specified.
     *
     * @param options A set of options used to configure the {@link EventDataBatch}.
     * @return A new {@link EventDataBatch} that can fit as many events as the transport allows.
     * @throws NullPointerException if {@code options} is null.
     */
    public Mono<EventDataBatch> createBatch(CreateBatchOptions options) {
        if (options == null) {
            return monoError(logger, new NullPointerException("'options' cannot be null."));
        }

        final String partitionKey = options.getPartitionKey();
        final String partitionId = options.getPartitionId();
        final int batchMaxSize = options.getMaximumSizeInBytes();

        // Partition key and partition id are mutually exclusive routing mechanisms.
        if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) {
            return monoError(logger, new IllegalArgumentException(String.format(Locale.US,
                "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. "
                    + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'",
                partitionKey, partitionId)));
        } else if (!CoreUtils.isNullOrEmpty(partitionKey)
            && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) {
            return monoError(logger, new IllegalArgumentException(String.format(Locale.US,
                "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey,
                MAX_PARTITION_KEY_LENGTH)));
        }

        return getSendLink(partitionId)
            .flatMap(link -> link.getLinkSize()
                .flatMap(size -> {
                    // The negotiated link size bounds how large a batch may be.
                    final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES;

                    if (batchMaxSize > maximumLinkSize) {
                        return monoError(logger, new IllegalArgumentException(String.format(Locale.US,
                            "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).",
                            batchMaxSize, maximumLinkSize)));
                    }

                    final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize;

                    return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey,
                        link::getErrorContext, tracerProvider));
                }));
    }

    /**
     * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size
     * allowed, an exception will be triggered and the send will fail.
     *
     * @param event Event to send to the service.
     * @return A {@link Mono} that completes when the event is pushed to the service.
     */
    Mono<Void> send(EventData event) {
        if (event == null) {
            return monoError(logger, new NullPointerException("'event' cannot be null."));
        }

        return send(Flux.just(event));
    }

    /**
     * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds
     * the maximum size allowed, an exception will be triggered and the send will fail.
     *
     * @param event Event to send to the service.
     * @param options The set of options to consider when sending this event.
     * @return A {@link Mono} that completes when the event is pushed to the service.
     */
    Mono<Void> send(EventData event, SendOptions options) {
        if (event == null) {
            return monoError(logger, new NullPointerException("'event' cannot be null."));
        } else if (options == null) {
            return monoError(logger, new NullPointerException("'options' cannot be null."));
        }

        return send(Flux.just(event), options);
    }

    /**
     * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
     * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
     * size is the max amount allowed on the link.
     *
     * @param events Events to send to the service.
     * @return A {@link Mono} that completes when all events are pushed to the service.
     */
    Mono<Void> send(Iterable<EventData> events) {
        if (events == null) {
            return monoError(logger, new NullPointerException("'events' cannot be null."));
        }

        return send(Flux.fromIterable(events));
    }

    /**
     * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
     * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
     * size is the max amount allowed on the link.
     *
     * @param events Events to send to the service.
     * @param options The set of options to consider when sending this batch.
     * @return A {@link Mono} that completes when all events are pushed to the service.
     */
    Mono<Void> send(Iterable<EventData> events, SendOptions options) {
        if (events == null) {
            return monoError(logger, new NullPointerException("'events' cannot be null."));
        } else if (options == null) {
            return monoError(logger, new NullPointerException("'options' cannot be null."));
        }

        return send(Flux.fromIterable(events), options);
    }

    /**
     * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
     * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
     * size is the max amount allowed on the link.
     *
     * @param events Events to send to the service.
     * @return A {@link Mono} that completes when all events are pushed to the service.
     */
    Mono<Void> send(Flux<EventData> events) {
        if (events == null) {
            return monoError(logger, new NullPointerException("'events' cannot be null."));
        }

        return send(events, DEFAULT_SEND_OPTIONS);
    }

    /**
     * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
     * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
     * size is the max amount allowed on the link.
     *
     * @param events Events to send to the service.
     * @param options The set of options to consider when sending this batch.
     * @return A {@link Mono} that completes when all events are pushed to the service.
     */
    Mono<Void> send(Flux<EventData> events, SendOptions options) {
        if (events == null) {
            return monoError(logger, new NullPointerException("'events' cannot be null."));
        } else if (options == null) {
            return monoError(logger, new NullPointerException("'options' cannot be null."));
        }

        return sendInternal(events, options);
    }

    /**
     * Sends the batch to the associated Event Hub.
     *
     * @param batch The batch to send to the service.
     * @return A {@link Mono} that completes when the batch is pushed to the service.
     * @throws NullPointerException if {@code batch} is {@code null}.
     * @see EventHubProducerAsyncClient#createBatch()
     * @see EventHubProducerAsyncClient#createBatch(CreateBatchOptions)
     */
    public Mono<Void> send(EventDataBatch batch) {
        if (batch == null) {
            return monoError(logger, new NullPointerException("'batch' cannot be null."));
        } else if (batch.getEvents().isEmpty()) {
            logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY);
            return Mono.empty();
        }

        if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) {
            logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(),
                batch.getPartitionId());
        } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) {
            logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(),
                batch.getPartitionKey());
        } else {
            logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.",
                batch.getCount());
        }

        final String partitionKey = batch.getPartitionKey();
        final boolean isTracingEnabled = tracerProvider.isEnabled();
        // Holds the span context across the lambda boundaries below; only allocated when tracing.
        final AtomicReference<Context> parentContext = isTracingEnabled
            ? new AtomicReference<>(Context.NONE)
            : null;

        Context sharedContext = null;
        final List<Message> messages = new ArrayList<>();

        for (int i = 0; i < batch.getEvents().size(); i++) {
            final EventData event = batch.getEvents().get(i);

            if (isTracingEnabled) {
                parentContext.set(event.getContext());
                if (i == 0) {
                    // The first event's context seeds the shared span for the whole batch.
                    sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get());
                }
                tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext()));
            }

            final Message message = messageSerializer.serialize(event);

            if (!CoreUtils.isNullOrEmpty(partitionKey)) {
                // Stamp the partition key onto the AMQP message annotations so the service routes it.
                final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null
                    ? new MessageAnnotations(new HashMap<>())
                    : message.getMessageAnnotations();

                messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
                message.setMessageAnnotations(messageAnnotations);
            }

            messages.add(message);
        }

        if (isTracingEnabled) {
            final Context finalSharedContext = sharedContext == null
                ? Context.NONE
                : sharedContext.addData(ENTITY_PATH_KEY, eventHubName)
                    .addData(HOST_NAME_KEY, fullyQualifiedNamespace);
            parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND));
        }

        // The span ends only after withRetry completes; the signal carries success or error.
        return withRetry(getSendLink(batch.getPartitionId())
            .flatMap(link -> messages.size() == 1
                ? link.send(messages.get(0))
                : link.send(messages)),
            retryOptions.getTryTimeout(), retryPolicy)
            .doOnEach(signal -> {
                if (isTracingEnabled) {
                    tracerProvider.endSpan(parentContext.get(), signal);
                }
            });
    }

    /**
     * Packs the stream of events into at most one {@link EventDataBatch} sized to the link and sends it.
     * FIX: this overload was referenced by {@link #send(Flux, SendOptions)} but missing from this class body.
     *
     * @param events Events to pack and send.
     * @param options Send options; partition key and partition id are mutually exclusive.
     * @return A {@link Mono} that completes when the events are pushed to the service.
     */
    private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) {
        final String partitionKey = options.getPartitionKey();
        final String partitionId = options.getPartitionId();

        if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) {
            return monoError(logger, new IllegalArgumentException(String.format(Locale.US,
                "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the"
                    + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId)));
        }

        return getSendLink(options.getPartitionId())
            .flatMap(link -> link.getLinkSize()
                .flatMap(size -> {
                    final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES;
                    final CreateBatchOptions batchOptions = new CreateBatchOptions()
                        .setPartitionKey(options.getPartitionKey())
                        .setPartitionId(options.getPartitionId())
                        .setMaximumSizeInBytes(batchSize);
                    // Collect into a single batch (maxNumberOfBatches = 1); overflow raises AmqpException.
                    return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext,
                        tracerProvider));
                })
                .flatMap(list -> sendInternal(Flux.fromIterable(list))));
    }

    /**
     * Sends each batch in turn, logging any failure.
     *
     * @param eventBatches Batches to send.
     * @return A {@link Mono} that completes when every batch has been pushed to the service.
     */
    private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) {
        return eventBatches
            .flatMap(this::send)
            .then()
            .doOnError(error -> {
                logger.error(Messages.ERROR_SENDING_BATCH, error);
            });
    }

    /**
     * Resolves the AMQP entity path for a partition; the Event Hub itself when no partition is given.
     */
    private String getEntityPath(String partitionId) {
        return CoreUtils.isNullOrEmpty(partitionId)
            ? eventHubName
            : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId);
    }

    /**
     * Obtains (or creates) the send link for the given partition. The link name matches the entity path.
     */
    private Mono<AmqpSendLink> getSendLink(String partitionId) {
        final String entityPath = getEntityPath(partitionId);

        return connectionProcessor
            .flatMap(connection -> connection.createSendLink(entityPath, entityPath, retryOptions));
    }

    /**
     * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying
     * connection is also closed.
     */
    @Override
    public void close() {
        // Idempotent: only the first call performs the disposal.
        if (isDisposed.getAndSet(true)) {
            return;
        }

        if (!isSharedConnection) {
            connectionProcessor.dispose();
        }
    }

    /**
     * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null}
     * then it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code
     * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with
     * {@link AmqpErrorCondition#LINK_PAYLOAD_SIZE_EXCEEDED}.
     */
    private static class EventDataCollector
        implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> {
        private final String partitionKey;
        private final String partitionId;
        private final int maxMessageSize;
        private final Integer maxNumberOfBatches;
        private final ErrorContextProvider contextProvider;
        private final TracerProvider tracerProvider;

        // Batch currently being filled; replaced when it can no longer fit an event.
        private volatile EventDataBatch currentBatch;

        EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches,
            ErrorContextProvider contextProvider, TracerProvider tracerProvider) {
            this.maxNumberOfBatches = maxNumberOfBatches;
            this.maxMessageSize = options.getMaximumSizeInBytes() > 0
                ? options.getMaximumSizeInBytes()
                : MAX_MESSAGE_LENGTH_BYTES;
            this.partitionKey = options.getPartitionKey();
            this.partitionId = options.getPartitionId();
            this.contextProvider = contextProvider;
            this.tracerProvider = tracerProvider;

            currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider,
                tracerProvider);
        }

        @Override
        public Supplier<List<EventDataBatch>> supplier() {
            return ArrayList::new;
        }

        @Override
        public BiConsumer<List<EventDataBatch>, EventData> accumulator() {
            return (list, event) -> {
                EventDataBatch batch = currentBatch;
                if (batch.tryAdd(event)) {
                    return;
                }

                // Batch is full; fail if the batch cap has been reached.
                if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) {
                    final String message = String.format(Locale.US,
                        Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches);

                    throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message,
                        contextProvider.getErrorContext());
                }

                currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey,
                    contextProvider, tracerProvider);
                currentBatch.tryAdd(event);
                list.add(batch);
            };
        }

        @Override
        public BinaryOperator<List<EventDataBatch>> combiner() {
            return (existing, another) -> {
                existing.addAll(another);
                return existing;
            };
        }

        @Override
        public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() {
            return list -> {
                // Flush the last, possibly partial, batch.
                EventDataBatch batch = currentBatch;
                currentBatch = null;

                if (batch != null) {
                    list.add(batch);
                }

                return list;
            };
        }

        @Override
        public Set<Characteristics> characteristics() {
            return Collections.emptySet();
        }
    }
}
No — with the updated code, it now does `doOnEach` after `withRetry` has completed. Once `withRetry` completes with either an error or a success, it emits a signal, and `endSpan` uses the signal type to record the error or success.
/**
 * Sends the batch to the associated Event Hub: serializes each event, stamps the partition key
 * (if any) onto the AMQP message annotations, and, when tracing is enabled, opens a shared SEND
 * span that is closed only when the retried send signals success or error.
 *
 * NOTE(review): the operator order matters here — endSpan runs in doOnEach AFTER withRetry has
 * completed, so the span reflects the final outcome, not an intermediate attempt.
 */
public Mono<Void> send(EventDataBatch batch) {
    // Null batch is a programming error; an empty batch is a no-op with a warning.
    if (batch == null) {
        return monoError(logger, new NullPointerException("'batch' cannot be null."));
    } else if (batch.getEvents().isEmpty()) {
        logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY);
        return Mono.empty();
    }

    // Log which routing mode applies: explicit partition, partition key, or round-robin.
    if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) {
        logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(),
            batch.getPartitionId());
    } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) {
        logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(),
            batch.getPartitionKey());
    } else {
        logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.",
            batch.getCount());
    }

    final String partitionKey = batch.getPartitionKey();
    final boolean isTracingEnabled = tracerProvider.isEnabled();
    // AtomicReference carries the span context into the doOnEach lambda; null when tracing is off.
    final AtomicReference<Context> parentContext = isTracingEnabled
        ? new AtomicReference<>(Context.NONE)
        : null;

    Context sharedContext = null;
    final List<Message> messages = new ArrayList<>();

    for (int i = 0; i < batch.getEvents().size(); i++) {
        final EventData event = batch.getEvents().get(i);

        if (isTracingEnabled) {
            parentContext.set(event.getContext());
            // The first event's context seeds the shared span builder for the whole batch.
            if (i == 0) {
                sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get());
            }
            tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext()));
        }

        final Message message = messageSerializer.serialize(event);

        if (!CoreUtils.isNullOrEmpty(partitionKey)) {
            // Attach the partition key as an AMQP message annotation so the service routes it.
            final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null
                ? new MessageAnnotations(new HashMap<>())
                : message.getMessageAnnotations();

            messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
            message.setMessageAnnotations(messageAnnotations);
        }

        messages.add(message);
    }

    if (isTracingEnabled) {
        final Context finalSharedContext = sharedContext == null
            ? Context.NONE
            : sharedContext.addData(ENTITY_PATH_KEY, eventHubName)
                .addData(HOST_NAME_KEY, fullyQualifiedNamespace);
        parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND));
    }

    // Single-message sends use the scalar overload; the span ends with the retried send's signal.
    return withRetry(getSendLink(batch.getPartitionId())
        .flatMap(link -> messages.size() == 1
            ? link.send(messages.get(0))
            : link.send(messages)),
        retryOptions.getTryTimeout(), retryPolicy)
        .doOnEach(signal -> {
            if (isTracingEnabled) {
                tracerProvider.endSpan(parentContext.get(), signal);
            }
        });
}
.doOnEach(signal -> {
/**
 * Pushes a prepared batch to the Event Hub. Each event is serialized to an AMQP message; when a
 * partition key is present it is written into the message annotations. With tracing enabled, a
 * shared SEND span is started before the send and ended by the completion signal of the retried
 * send, so it records the final success or error.
 */
public Mono<Void> send(EventDataBatch batch) {
    if (batch == null) {
        return monoError(logger, new NullPointerException("'batch' cannot be null."));
    }
    if (batch.getEvents().isEmpty()) {
        logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY);
        return Mono.empty();
    }

    // Report the routing mode in effect for this batch.
    if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) {
        logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(),
            batch.getPartitionId());
    } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) {
        logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(),
            batch.getPartitionKey());
    } else {
        logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.",
            batch.getCount());
    }

    final String routingKey = batch.getPartitionKey();
    final boolean tracingOn = tracerProvider.isEnabled();
    // Mutable holder so the span context survives into the doOnEach lambda below.
    final AtomicReference<Context> spanContext = tracingOn ? new AtomicReference<>(Context.NONE) : null;

    Context batchSpanBuilder = null;
    final List<Message> serialized = new ArrayList<>();
    final List<EventData> batchEvents = batch.getEvents();

    int index = 0;
    for (EventData eventData : batchEvents) {
        if (tracingOn) {
            spanContext.set(eventData.getContext());
            if (index == 0) {
                // Seed the shared span builder from the first event's context.
                batchSpanBuilder = tracerProvider.getSharedSpanBuilder(spanContext.get());
            }
            tracerProvider.addSpanLinks(batchSpanBuilder.addData(SPAN_CONTEXT_KEY, eventData.getContext()));
        }

        final Message amqpMessage = messageSerializer.serialize(eventData);

        if (!CoreUtils.isNullOrEmpty(routingKey)) {
            // Ensure annotations exist, then record the partition key for service-side routing.
            MessageAnnotations annotations = amqpMessage.getMessageAnnotations();
            if (annotations == null) {
                annotations = new MessageAnnotations(new HashMap<>());
            }
            annotations.getValue().put(AmqpConstants.PARTITION_KEY, routingKey);
            amqpMessage.setMessageAnnotations(annotations);
        }

        serialized.add(amqpMessage);
        index++;
    }

    if (tracingOn) {
        final Context sendContext;
        if (batchSpanBuilder == null) {
            sendContext = Context.NONE;
        } else {
            sendContext = batchSpanBuilder.addData(ENTITY_PATH_KEY, eventHubName)
                .addData(HOST_NAME_KEY, fullyQualifiedNamespace);
        }
        spanContext.set(tracerProvider.startSpan(sendContext, ProcessKind.SEND));
    }

    final Mono<Void> sendOperation = getSendLink(batch.getPartitionId())
        .flatMap(link -> serialized.size() == 1
            ? link.send(serialized.get(0))
            : link.send(serialized));

    // End the span only once the retried send signals its final outcome.
    return withRetry(sendOperation, retryOptions.getTryTimeout(), retryPolicy)
        .doOnEach(signal -> {
            if (tracingOn) {
                tracerProvider.endSpan(spanContext.get(), signal);
            }
        });
}
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? 
eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? 
options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? 
eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? 
options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
same as [above](https://github.com/Azure/azure-sdk-for-java/pull/7704/files#r376009331)
private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); }
final String partitionKey = options.getPartitionKey();
private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) { final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the" + " other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } return getSendLink(options.getPartitionId()) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; final CreateBatchOptions batchOptions = new CreateBatchOptions() .setPartitionKey(options.getPartitionKey()) .setPartitionId(options.getPartitionId()) .setMaximumSizeInBytes(batchSize); return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext, tracerProvider)); }) .flatMap(list -> sendInternal(Flux.fromIterable(list)))); }
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ public Mono<Void> send(EventDataBatch batch) { if (batch == null) { return monoError(logger, new NullPointerException("'batch' cannot be null.")); } else if (batch.getEvents().isEmpty()) { logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY); return Mono.empty(); } if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) { logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId()); } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) { logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(), batch.getPartitionKey()); } else { logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount()); } final String partitionKey = batch.getPartitionKey(); final boolean isTracingEnabled = tracerProvider.isEnabled(); final AtomicReference<Context> parentContext = isTracingEnabled ? new AtomicReference<>(Context.NONE) : null; Context sharedContext = null; final List<Message> messages = new ArrayList<>(); for (int i = 0; i < batch.getEvents().size(); i++) { final EventData event = batch.getEvents().get(i); if (isTracingEnabled) { parentContext.set(event.getContext()); if (i == 0) { sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get()); } tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext())); } final Message message = messageSerializer.serialize(event); if (!CoreUtils.isNullOrEmpty(partitionKey)) { final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null ? 
new MessageAnnotations(new HashMap<>()) : message.getMessageAnnotations(); messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey); message.setMessageAnnotations(messageAnnotations); } messages.add(message); } if (isTracingEnabled) { final Context finalSharedContext = sharedContext == null ? Context.NONE : sharedContext.addData(ENTITY_PATH_KEY, eventHubName).addData(HOST_NAME_KEY, fullyQualifiedNamespace); parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND)); } return withRetry(getSendLink(batch.getPartitionId()) .flatMap(link -> messages.size() == 1 ? link.send(messages.get(0)) : link.send(messages)), retryOptions.getTryTimeout(), retryPolicy) .doOnEach(signal -> { if (isTracingEnabled) { tracerProvider.endSpan(parentContext.get(), signal); } }); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. 
Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { 
existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
class EventHubProducerAsyncClient implements Closeable { private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s"; private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions(); private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions(); private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final String fullyQualifiedNamespace; private final String eventHubName; private final EventHubConnectionProcessor connectionProcessor; private final AmqpRetryOptions retryOptions; private final AmqpRetryPolicy retryPolicy; private final TracerProvider tracerProvider; private final MessageSerializer messageSerializer; private final boolean isSharedConnection; /** * Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition * when {@link CreateBatchOptions * load balance the messages amongst available partitions. 
*/ EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName, EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider, MessageSerializer messageSerializer, boolean isSharedConnection) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); this.connectionProcessor = Objects.requireNonNull(connectionProcessor, "'connectionProcessor' cannot be null."); this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null."); this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null."); this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null."); this.retryPolicy = getRetryPolicy(retryOptions); this.isSharedConnection = isSharedConnection; } /** * Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to * {@code {yournamespace}.servicebus.windows.net}. * * @return The fully qualified Event Hubs namespace that the connection is associated with. */ public String getFullyQualifiedNamespace() { return fullyQualifiedNamespace; } /** * Gets the Event Hub name this client interacts with. * * @return The Event Hub name this client interacts with. */ public String getEventHubName() { return eventHubName; } /** * Retrieves information about an Event Hub, including the number of partitions present and their identifiers. * * @return The set of information for the Event Hub that this client is associated with. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<EventHubProperties> getEventHubProperties() { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(EventHubManagementNode::getEventHubProperties); } /** * Retrieves the identifiers for the partitions of an Event Hub. * * @return A Flux of identifiers for the partitions of an Event Hub. */ public Flux<String> getPartitionIds() { return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds())); } /** * Retrieves information about a specific partition for an Event Hub, including elements that describe the available * events in the partition event stream. * * @param partitionId The unique identifier of a partition associated with the Event Hub. * @return The set of information for the requested partition under the Event Hub this client is associated with. * @throws NullPointerException if {@code partitionId} is null. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PartitionProperties> getPartitionProperties(String partitionId) { return connectionProcessor.flatMap(connection -> connection.getManagementNode()) .flatMap(node -> node.getPartitionProperties(partitionId)); } /** * Creates an {@link EventDataBatch} that can fit as many events as the transport allows. * * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. */ public Mono<EventDataBatch> createBatch() { return createBatch(DEFAULT_BATCH_OPTIONS); } /** * Creates an {@link EventDataBatch} configured with the options specified. * * @param options A set of options used to configure the {@link EventDataBatch}. * @return A new {@link EventDataBatch} that can fit as many events as the transport allows. * @throws NullPointerException if {@code options} is null. 
*/ public Mono<EventDataBatch> createBatch(CreateBatchOptions options) { if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } final String partitionKey = options.getPartitionKey(); final String partitionId = options.getPartitionId(); final int batchMaxSize = options.getMaximumSizeInBytes(); if (!CoreUtils.isNullOrEmpty(partitionKey) && !CoreUtils.isNullOrEmpty(partitionId)) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. " + "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'", partitionKey, partitionId))); } else if (!CoreUtils.isNullOrEmpty(partitionKey) && partitionKey.length() > MAX_PARTITION_KEY_LENGTH) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey, MAX_PARTITION_KEY_LENGTH))); } return getSendLink(partitionId) .flatMap(link -> link.getLinkSize() .flatMap(size -> { final int maximumLinkSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES; if (batchMaxSize > maximumLinkSize) { return monoError(logger, new IllegalArgumentException(String.format(Locale.US, "BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).", batchMaxSize, maximumLinkSize))); } final int batchSize = batchMaxSize > 0 ? batchMaxSize : maximumLinkSize; return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext, tracerProvider)); })); } /** * Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size * allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. 
* @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } return send(Flux.just(event)); } /** * Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds * the maximum size allowed, an exception will be triggered and the send will fail. * * <p> * For more information regarding the maximum event size allowed, see * <a href="https: * Limits</a>. * </p> * * @param event Event to send to the service. * @param options The set of options to consider when sending this event. * @return A {@link Mono} that completes when the event is pushed to the service. */ Mono<Void> send(EventData event, SendOptions options) { if (event == null) { return monoError(logger, new NullPointerException("'event' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.just(event), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(Flux.fromIterable(events)); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. 
* * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Iterable<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return send(Flux.fromIterable(events), options); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } return send(events, DEFAULT_SEND_OPTIONS); } /** * Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the * maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message * size is the max amount allowed on the link. * * @param events Events to send to the service. * @param options The set of options to consider when sending this batch. * @return A {@link Mono} that completes when all events are pushed to the service. */ Mono<Void> send(Flux<EventData> events, SendOptions options) { if (events == null) { return monoError(logger, new NullPointerException("'events' cannot be null.")); } else if (options == null) { return monoError(logger, new NullPointerException("'options' cannot be null.")); } return sendInternal(events, options); } /** * Sends the batch to the associated Event Hub. 
* * @param batch The batch to send to the service. * @return A {@link Mono} that completes when the batch is pushed to the service. * @throws NullPointerException if {@code batch} is {@code null}. * @see EventHubProducerAsyncClient * @see EventHubProducerAsyncClient */ public Mono<Void> send(EventDataBatch batch) { if (batch == null) { return monoError(logger, new NullPointerException("'batch' cannot be null.")); } else if (batch.getEvents().isEmpty()) { logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY); return Mono.empty(); } if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) { logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId()); } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) { logger.verbose("Sending batch with size[{}] with partitionKey[{}].", batch.getCount(), batch.getPartitionKey()); } else { logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount()); } final String partitionKey = batch.getPartitionKey(); final boolean isTracingEnabled = tracerProvider.isEnabled(); final AtomicReference<Context> parentContext = isTracingEnabled ? new AtomicReference<>(Context.NONE) : null; Context sharedContext = null; final List<Message> messages = new ArrayList<>(); for (int i = 0; i < batch.getEvents().size(); i++) { final EventData event = batch.getEvents().get(i); if (isTracingEnabled) { parentContext.set(event.getContext()); if (i == 0) { sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get()); } tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext())); } final Message message = messageSerializer.serialize(event); if (!CoreUtils.isNullOrEmpty(partitionKey)) { final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null ? 
new MessageAnnotations(new HashMap<>()) : message.getMessageAnnotations(); messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey); message.setMessageAnnotations(messageAnnotations); } messages.add(message); } if (isTracingEnabled) { final Context finalSharedContext = sharedContext == null ? Context.NONE : sharedContext.addData(ENTITY_PATH_KEY, eventHubName).addData(HOST_NAME_KEY, fullyQualifiedNamespace); parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND)); } return withRetry(getSendLink(batch.getPartitionId()) .flatMap(link -> messages.size() == 1 ? link.send(messages.get(0)) : link.send(messages)), retryOptions.getTryTimeout(), retryPolicy) .doOnEach(signal -> { if (isTracingEnabled) { tracerProvider.endSpan(parentContext.get(), signal); } }); } private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) { return eventBatches .flatMap(this::send) .then() .doOnError(error -> { logger.error(Messages.ERROR_SENDING_BATCH, error); }); } private String getEntityPath(String partitionId) { return CoreUtils.isNullOrEmpty(partitionId) ? eventHubName : String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId); } private Mono<AmqpSendLink> getSendLink(String partitionId) { final String entityPath = getEntityPath(partitionId); final String linkName = getEntityPath(partitionId); return connectionProcessor .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions)); } /** * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying * connection is also closed. */ @Override public void close() { if (isDisposed.getAndSet(true)) { return; } if (!isSharedConnection) { connectionProcessor.dispose(); } } /** * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then * it'll collect as many batches as possible. 
Otherwise, if there are more events than can fit into {@code * maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link * AmqpErrorCondition */ private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>, List<EventDataBatch>> { private final String partitionKey; private final String partitionId; private final int maxMessageSize; private final Integer maxNumberOfBatches; private final ErrorContextProvider contextProvider; private final TracerProvider tracerProvider; private volatile EventDataBatch currentBatch; EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider, TracerProvider tracerProvider) { this.maxNumberOfBatches = maxNumberOfBatches; this.maxMessageSize = options.getMaximumSizeInBytes() > 0 ? options.getMaximumSizeInBytes() : MAX_MESSAGE_LENGTH_BYTES; this.partitionKey = options.getPartitionKey(); this.partitionId = options.getPartitionId(); this.contextProvider = contextProvider; this.tracerProvider = tracerProvider; currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); } @Override public Supplier<List<EventDataBatch>> supplier() { return ArrayList::new; } @Override public BiConsumer<List<EventDataBatch>, EventData> accumulator() { return (list, event) -> { EventDataBatch batch = currentBatch; if (batch.tryAdd(event)) { return; } if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) { final String message = String.format(Locale.US, Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches); throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message, contextProvider.getErrorContext()); } currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider, tracerProvider); currentBatch.tryAdd(event); list.add(batch); }; } @Override public BinaryOperator<List<EventDataBatch>> combiner() { return (existing, another) -> { 
existing.addAll(another); return existing; }; } @Override public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() { return list -> { EventDataBatch batch = currentBatch; currentBatch = null; if (batch != null) { list.add(batch); } return list; }; } @Override public Set<Characteristics> characteristics() { return Collections.emptySet(); } } }
Can you add a comment that this is to accommodate the root?
public ShareFileAsyncClient getFileClient(String fileName) { String filePath = directoryPath + "/" + fileName; if (directoryPath.isEmpty()) { filePath = fileName; } return new ShareFileAsyncClient(azureFileStorageClient, shareName, filePath, null, accountName, serviceVersion); }
filePath = fileName;
public ShareFileAsyncClient getFileClient(String fileName) { String filePath = directoryPath + "/" + fileName; if (directoryPath.isEmpty()) { filePath = fileName; } return new ShareFileAsyncClient(azureFileStorageClient, shareName, filePath, null, accountName, serviceVersion); }
class ShareDirectoryAsyncClient { private final ClientLogger logger = new ClientLogger(ShareDirectoryAsyncClient.class); private final AzureFileStorageImpl azureFileStorageClient; private final String shareName; private final String directoryPath; private final String snapshot; private final String accountName; private final ShareServiceVersion serviceVersion; /** * Creates a ShareDirectoryAsyncClient that sends requests to the storage directory at {@link * AzureFileStorageImpl * {@code client}. * * @param azureFileStorageClient Client that interacts with the service interfaces * @param shareName Name of the share * @param directoryPath Name of the directory * @param snapshot The snapshot of the share */ ShareDirectoryAsyncClient(AzureFileStorageImpl azureFileStorageClient, String shareName, String directoryPath, String snapshot, String accountName, ShareServiceVersion serviceVersion) { Objects.requireNonNull(shareName, "'shareName' cannot be null."); Objects.requireNonNull(directoryPath); this.shareName = shareName; this.directoryPath = directoryPath; this.snapshot = snapshot; this.azureFileStorageClient = azureFileStorageClient; this.accountName = accountName; this.serviceVersion = serviceVersion; } /** * Get the url of the storage directory client. * * @return the URL of the storage directory client */ public String getDirectoryUrl() { StringBuilder directoryUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/") .append(shareName).append("/").append(directoryPath); if (snapshot != null) { directoryUrlString.append("?snapshot=").append(snapshot); } return directoryUrlString.toString(); } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public ShareServiceVersion getServiceVersion() { return serviceVersion; } /** * Constructs a ShareFileAsyncClient that interacts with the specified file. * * <p>If the file doesn't exist in this directory {@link ShareFileAsyncClient
class ShareDirectoryAsyncClient { private final ClientLogger logger = new ClientLogger(ShareDirectoryAsyncClient.class); private final AzureFileStorageImpl azureFileStorageClient; private final String shareName; private final String directoryPath; private final String snapshot; private final String accountName; private final ShareServiceVersion serviceVersion; /** * Creates a ShareDirectoryAsyncClient that sends requests to the storage directory at {@link * AzureFileStorageImpl * {@code client}. * * @param azureFileStorageClient Client that interacts with the service interfaces * @param shareName Name of the share * @param directoryPath Name of the directory * @param snapshot The snapshot of the share */ ShareDirectoryAsyncClient(AzureFileStorageImpl azureFileStorageClient, String shareName, String directoryPath, String snapshot, String accountName, ShareServiceVersion serviceVersion) { Objects.requireNonNull(shareName, "'shareName' cannot be null."); Objects.requireNonNull(directoryPath); this.shareName = shareName; this.directoryPath = directoryPath; this.snapshot = snapshot; this.azureFileStorageClient = azureFileStorageClient; this.accountName = accountName; this.serviceVersion = serviceVersion; } /** * Get the url of the storage directory client. * * @return the URL of the storage directory client */ public String getDirectoryUrl() { StringBuilder directoryUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/") .append(shareName).append("/").append(directoryPath); if (snapshot != null) { directoryUrlString.append("?snapshot=").append(snapshot); } return directoryUrlString.toString(); } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public ShareServiceVersion getServiceVersion() { return serviceVersion; } /** * Constructs a ShareFileAsyncClient that interacts with the specified file. * * <p>If the file doesn't exist in this directory {@link ShareFileAsyncClient
done
public ShareFileAsyncClient getFileClient(String fileName) { String filePath = directoryPath + "/" + fileName; if (directoryPath.isEmpty()) { filePath = fileName; } return new ShareFileAsyncClient(azureFileStorageClient, shareName, filePath, null, accountName, serviceVersion); }
filePath = fileName;
public ShareFileAsyncClient getFileClient(String fileName) { String filePath = directoryPath + "/" + fileName; if (directoryPath.isEmpty()) { filePath = fileName; } return new ShareFileAsyncClient(azureFileStorageClient, shareName, filePath, null, accountName, serviceVersion); }
class ShareDirectoryAsyncClient { private final ClientLogger logger = new ClientLogger(ShareDirectoryAsyncClient.class); private final AzureFileStorageImpl azureFileStorageClient; private final String shareName; private final String directoryPath; private final String snapshot; private final String accountName; private final ShareServiceVersion serviceVersion; /** * Creates a ShareDirectoryAsyncClient that sends requests to the storage directory at {@link * AzureFileStorageImpl * {@code client}. * * @param azureFileStorageClient Client that interacts with the service interfaces * @param shareName Name of the share * @param directoryPath Name of the directory * @param snapshot The snapshot of the share */ ShareDirectoryAsyncClient(AzureFileStorageImpl azureFileStorageClient, String shareName, String directoryPath, String snapshot, String accountName, ShareServiceVersion serviceVersion) { Objects.requireNonNull(shareName, "'shareName' cannot be null."); Objects.requireNonNull(directoryPath); this.shareName = shareName; this.directoryPath = directoryPath; this.snapshot = snapshot; this.azureFileStorageClient = azureFileStorageClient; this.accountName = accountName; this.serviceVersion = serviceVersion; } /** * Get the url of the storage directory client. * * @return the URL of the storage directory client */ public String getDirectoryUrl() { StringBuilder directoryUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/") .append(shareName).append("/").append(directoryPath); if (snapshot != null) { directoryUrlString.append("?snapshot=").append(snapshot); } return directoryUrlString.toString(); } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public ShareServiceVersion getServiceVersion() { return serviceVersion; } /** * Constructs a ShareFileAsyncClient that interacts with the specified file. * * <p>If the file doesn't exist in this directory {@link ShareFileAsyncClient
class ShareDirectoryAsyncClient { private final ClientLogger logger = new ClientLogger(ShareDirectoryAsyncClient.class); private final AzureFileStorageImpl azureFileStorageClient; private final String shareName; private final String directoryPath; private final String snapshot; private final String accountName; private final ShareServiceVersion serviceVersion; /** * Creates a ShareDirectoryAsyncClient that sends requests to the storage directory at {@link * AzureFileStorageImpl * {@code client}. * * @param azureFileStorageClient Client that interacts with the service interfaces * @param shareName Name of the share * @param directoryPath Name of the directory * @param snapshot The snapshot of the share */ ShareDirectoryAsyncClient(AzureFileStorageImpl azureFileStorageClient, String shareName, String directoryPath, String snapshot, String accountName, ShareServiceVersion serviceVersion) { Objects.requireNonNull(shareName, "'shareName' cannot be null."); Objects.requireNonNull(directoryPath); this.shareName = shareName; this.directoryPath = directoryPath; this.snapshot = snapshot; this.azureFileStorageClient = azureFileStorageClient; this.accountName = accountName; this.serviceVersion = serviceVersion; } /** * Get the url of the storage directory client. * * @return the URL of the storage directory client */ public String getDirectoryUrl() { StringBuilder directoryUrlString = new StringBuilder(azureFileStorageClient.getUrl()).append("/") .append(shareName).append("/").append(directoryPath); if (snapshot != null) { directoryUrlString.append("?snapshot=").append(snapshot); } return directoryUrlString.toString(); } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public ShareServiceVersion getServiceVersion() { return serviceVersion; } /** * Constructs a ShareFileAsyncClient that interacts with the specified file. * * <p>If the file doesn't exist in this directory {@link ShareFileAsyncClient
Should log these and others
public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw new IllegalArgumentException(); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); }
throw new IllegalArgumentException();
public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String s, String... strings) { if (strings == null) { strings = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(s.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(strings) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = pathString.split(parentFileSystem.getSeparator())[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end)); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean startsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. 
*/ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? 
null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. 
* * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
Should use splitToElements
public Path getRoot() { String firstElement = pathString.split(parentFileSystem.getSeparator())[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; }
String firstElement = pathString.split(parentFileSystem.getSeparator())[0];
public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String s, String... strings) { if (strings == null) { strings = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(s.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(strings) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw new IllegalArgumentException(); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end)); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean startsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. 
*/ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? 
null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return 
this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. 
*/ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. 
* * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
Just use `.equals()`. It's clearer and safer than reference comparison with `!=`, and evidently equivalent here.
/**
 * Tests whether this path starts with the given path, element by element.
 * <p>
 * The other path must belong to the same file system, and every one of its
 * elements (including any root component) must match the corresponding leading
 * element of this path.
 *
 * @param path the candidate prefix path
 * @return true if this path starts with {@code path}, false otherwise
 */
public boolean startsWith(Path path) {
    /*
    Compare file systems with equals rather than reference identity (!=). There can
    only be one instance of a file system with a given id, so the two are evidently
    equivalent here, but equals is clearer and safer.
    */
    if (!path.getFileSystem().equals(this.parentFileSystem)) {
        return false;
    }
    // Same file system implies the other path is an AzurePath, so the cast is safe.
    String[] thisPathElements = this.splitToElements();
    String[] otherPathElements = ((AzurePath) path).splitToElements();
    // A longer path can never be a prefix of a shorter one.
    if (otherPathElements.length > thisPathElements.length) {
        return false;
    }
    for (int i = 0; i < otherPathElements.length; i++) {
        if (!otherPathElements[i].equals(thisPathElements[i])) {
            return false;
        }
    }
    return true;
}
if (path.getFileSystem() != this.parentFileSystem) {
/**
 * Determines whether this path begins with the supplied path.
 * <p>
 * Both paths must be associated with the same file system, and each element of
 * the supplied path (root component included) must equal the element at the same
 * position in this path.
 *
 * @param path the path to test as a prefix of this path
 * @return true when {@code path} is a prefix of this path; false otherwise
 */
public boolean startsWith(Path path) {
    // Paths on different file systems can never share a prefix.
    if (!path.getFileSystem().equals(this.parentFileSystem)) {
        return false;
    }
    // Matching file systems guarantees the other path is an AzurePath.
    String[] elements = this.splitToElements();
    String[] prefix = ((AzurePath) path).splitToElements();
    // A candidate prefix longer than this path cannot match.
    if (prefix.length > elements.length) {
        return false;
    }
    int index = 0;
    while (index < prefix.length) {
        if (!elements[index].equals(prefix[index])) {
            return false;
        }
        index++;
    }
    return true;
}
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String s, String... strings) { if (strings == null) { strings = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(s.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(strings) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = pathString.split(parentFileSystem.getSeparator())[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw new IllegalArgumentException(); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end)); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? 
null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. 
*/ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. 
* * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
What about a path with ".."? In other words, should we normalize first?
public boolean startsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; }
if (!otherPathElements[i].equals(thisPathElements[i])) {
public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String s, String... strings) { if (strings == null) { strings = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(s.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(strings) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = pathString.split(parentFileSystem.getSeparator())[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw new IllegalArgumentException(); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end)); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? 
null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. 
*/ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. 
* * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
Can remove the todo as the root validity check is gone.
public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); }
Path root = this.getRoot();
public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String s, String... strings) { if (strings == null) { strings = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(s.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(strings) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = pathString.split(parentFileSystem.getSeparator())[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw new IllegalArgumentException(); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end)); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. 
* * {@inheritDoc} */ @Override /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. 
* * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. 
* * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
Add a comment here to explain that this is the special case where we split after removing the root of a path that is just the root.
private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; }
if (arr.length == 1 && arr[0].isEmpty()) {
private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String s, String... strings) { if (strings == null) { strings = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(s.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(strings) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = pathString.split(parentFileSystem.getSeparator())[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw new IllegalArgumentException(); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end)); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. 
* * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. 
* * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } }
I think it makes sense to normalize before comparing
public boolean startsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; }
if (!otherPathElements[i].equals(thisPathElements[i])) {
public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String s, String... strings) { if (strings == null) { strings = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(s.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(strings) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = pathString.split(parentFileSystem.getSeparator())[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw new IllegalArgumentException(); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end)); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? 
null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. 
*/ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. 
* * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
It looks like the default system (which I'm generally trying to follow) doesn't normalize and will return false for "foo/bar" starts with "foo/.", so I'll leave this as is.
public boolean startsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; }
if (!otherPathElements[i].equals(thisPathElements[i])) {
public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String s, String... strings) { if (strings == null) { strings = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(s.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(strings) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = pathString.split(parentFileSystem.getSeparator())[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw new IllegalArgumentException(); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end)); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? 
null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException(); } /** * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. 
*/ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. 
* * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
Any reason to use a reactive stream here only to block it? Instead could a `Stream` be used? Another option, based on the logic here, we could have a local instance of the `String[]` and just access and return the last element in the list.
public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } }
return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block());
public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { if (more == null) { more = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(first.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(more) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. 
* * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return 
this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. 
*/ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. 
* * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
Given there is numerous calls to this method should the split pathString be stored as a field on the instance?
private String[] splitToElements() { return this.splitToElements(this.pathString); }
return this.splitToElements(this.pathString);
private String[] splitToElements() { return this.splitToElements(this.pathString); }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { if (more == null) { more = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(first.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(more) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. 
* * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
/**
 * An implementation of {@link Path} backed by an {@link AzureFileSystem}.
 * <p>
 * The path is stored as a single separator-joined string with no leading, trailing, or duplicate separators;
 * elements are re-derived by splitting on the file system separator as needed. The first element may be a root
 * component of the form "&lt;fileStoreName&gt;:", which identifies the Azure Storage Container the file is stored in.
 */
class AzurePath implements Path {
    private final ClientLogger logger = new ClientLogger(AzurePath.class);

    // Marks the end of a root component, e.g. "myContainer:". May only appear as the last character of the first
    // element of a path.
    private static final String ROOT_DIR_SUFFIX = ":";

    private final AzureFileSystem parentFileSystem;
    private final String pathString;

    /**
     * Creates an AzurePath by joining the given strings on the file system separator.
     *
     * @param parentFileSystem the file system this path belongs to.
     * @param first the first, or only, portion of the path.
     * @param more further portions of the path; may be null or empty.
     * @throws InvalidPathException if the ROOT_DIR_SUFFIX appears anywhere other than as the last character of the
     * first element.
     */
    AzurePath(AzureFileSystem parentFileSystem, String first, String... more) {
        this.parentFileSystem = parentFileSystem;
        /*
        Break all strings into their respective elements and remove empty elements. This has the effect of stripping
        any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join.
         */
        List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator())));
        if (more != null) {
            for (String next : more) {
                elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator())));
            }
        }
        elements.removeIf(String::isEmpty);

        this.pathString = String.join(this.parentFileSystem.getSeparator(), elements);

        for (int i = 0; i < elements.size(); i++) {
            String element = elements.get(i);
            /*
            If there is a root component, it must be the first element. A root component takes the format of
            "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the
            last character of the first element.
             */
            if (i == 0) {
                if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) {
                    throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may"
                        + " only be used as the last character in the root component of a path"));
                }
            } else if (element.contains(ROOT_DIR_SUFFIX)) {
                throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an "
                    + "invalid character except to identify the root element of this path if there is one."));
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public FileSystem getFileSystem() {
        return this.parentFileSystem;
    }

    /**
     * A path is considered absolute in this file system if it contains a root component.
     *
     * {@inheritDoc}
     */
    @Override
    public boolean isAbsolute() {
        return this.getRoot() != null;
    }

    /**
     * The root component of this path also identifies the Azure Storage Container in which the file is stored. This
     * method will not validate that the root component corresponds to an actual file store/container in this
     * file system. It will simply return the root component of the path if one is present and syntactically valid.
     *
     * {@inheritDoc}
     */
    @Override
    public Path getRoot() {
        String firstElement = this.splitToElements()[0];
        if (firstElement.endsWith(ROOT_DIR_SUFFIX)) {
            return this.parentFileSystem.getPath(firstElement);
        }
        return null;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Path getFileName() {
        if (this.withoutRoot().isEmpty()) {
            return null;
        } else {
            // The file name is the last element of the path.
            List<String> elements = Arrays.asList(this.splitToElements());
            return this.parentFileSystem.getPath(elements.get(elements.size() - 1));
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Path getParent() {
        /*
        If this path only has one element, there is no parent. Note the root is included in the parent, so we don't
        use getNameCount here.
         */
        if (this.splitToElements().length == 1) {
            return null;
        }

        return this.parentFileSystem.getPath(
            this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator())));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int getNameCount() {
        // The root component, if present, is not a name element and is excluded from the count.
        return this.splitToElements(this.withoutRoot()).length;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Path getName(int i) {
        if (i < 0 || i >= this.getNameCount()) {
            throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i)));
        }
        return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Path subpath(int begin, int end) {
        if (begin < 0 || begin >= this.getNameCount()
            || end <= begin || end > this.getNameCount()) {
            throw Utility.logError(logger,
                new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin,
                    end)));
        }

        // Take the half-open element range [begin, end) and rejoin it on the separator.
        String[] subnames = Stream.of(this.splitToElements(this.withoutRoot()))
            .skip(begin)
            .limit(end - begin)
            .toArray(String[]::new);

        return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames));
    }

    /**
     * In this implementation, a root component starts with another root component if the two root components are
     * equivalent strings. In other words, if the files are stored in the same container.
     *
     * {@inheritDoc}
     */
    @Override
    public boolean startsWith(Path path) {
        if (!path.getFileSystem().equals(this.parentFileSystem)) {
            return false;
        }

        // Compare element-by-element from the head; the other path must be a prefix of this one.
        String[] thisPathElements = this.splitToElements();
        String[] otherPathElements = ((AzurePath) path).splitToElements();
        if (otherPathElements.length > thisPathElements.length) {
            return false;
        }
        for (int i = 0; i < otherPathElements.length; i++) {
            if (!otherPathElements[i].equals(thisPathElements[i])) {
                return false;
            }
        }

        return true;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean startsWith(String s) {
        return this.startsWith(this.parentFileSystem.getPath(s));
    }

    /**
     * In this implementation, a root component ends with another root component if the two root components are
     * equivalent strings. In other words, if the files are stored in the same container.
     *
     * {@inheritDoc}
     */
    @Override
    public boolean endsWith(Path path) {
        /*
        There can only be one instance of a file system with a given id, so comparing object identity is equivalent
        to checking ids here.
         */
        if (path.getFileSystem() != this.parentFileSystem) {
            return false;
        }

        String[] thisPathElements = this.splitToElements();
        String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator());
        if (otherPathElements.length > thisPathElements.length) {
            return false;
        }
        // An absolute other path can only be a suffix if it matches this path in its entirety.
        if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) {
            return false;
        }
        // Compare element-by-element from the tail; the other path must be a suffix of this one.
        for (int i = 1; i <= otherPathElements.length; i++) {
            if (!otherPathElements[otherPathElements.length - i]
                .equals(thisPathElements[thisPathElements.length - i])) {
                return false;
            }
        }
        return true;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean endsWith(String s) {
        return this.endsWith(this.parentFileSystem.getPath(s));
    }

    /**
     * This file system follows the standard practice mentioned in the original docs.
     *
     * {@inheritDoc}
     */
    @Override
    public Path normalize() {
        Deque<String> stack = new ArrayDeque<>();
        String[] pathElements = this.splitToElements();
        Path root = this.getRoot();
        String rootStr = root == null ? null : root.toString();

        for (String element : pathElements) {
            if (element.equals(".")) {
                // "." is a no-op and is simply dropped.
                continue;
            } else if (element.equals("..")) {
                if (rootStr != null) {
                    // Absolute path: ".." cannot pop past the root.
                    if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) {
                        continue;
                    } else {
                        stack.removeLast();
                    }
                } else {
                    // Relative path: leading ".."s are preserved; otherwise ".." pops the previous element.
                    if (stack.isEmpty()) {
                        stack.addLast(element);
                    } else if (stack.peek().equals("..")) {
                        stack.addLast(element);
                    } else {
                        stack.removeLast();
                    }
                }
            } else {
                stack.addLast(element);
            }
        }

        return this.parentFileSystem.getPath("", stack.toArray(new String[0]));
    }

    /**
     * If the other path has a root component, it is considered absolute, and it is returned.
     *
     * {@inheritDoc}
     */
    @Override
    public Path resolve(Path path) {
        if (path.isAbsolute()) {
            return path;
        }
        if (path.getNameCount() == 0) {
            return this;
        }
        return this.parentFileSystem.getPath(this.toString(), path.toString());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Path resolve(String s) {
        return this.resolve(this.parentFileSystem.getPath(s));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Path resolveSibling(Path path) {
        if (path.isAbsolute()) {
            return path;
        }

        Path parent = this.getParent();
        return parent == null ? path : parent.resolve(path);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Path resolveSibling(String s) {
        return this.resolveSibling(this.parentFileSystem.getPath(s));
    }

    /**
     * If both paths have a root component, it is still possible to relativize one against the other.
     *
     * {@inheritDoc}
     */
    @Override
    public Path relativize(Path path) {
        if (path.getRoot() == null ^ this.getRoot() == null) {
            throw Utility.logError(logger,
                new IllegalArgumentException("Both paths must be absolute or neither can be"));
        }

        AzurePath thisNormalized = (AzurePath) this.normalize();
        Path otherNormalized = path.normalize();

        Deque<String> deque = new ArrayDeque<>(
            Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator())));

        // Drop the common prefix of the two normalized paths.
        int i = 0;
        String[] thisElements = thisNormalized.splitToElements();
        while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) {
            deque.removeFirst();
            i++;
        }
        // For each remaining element of this path, a ".." is needed to climb out of it.
        while (i < thisElements.length) {
            deque.addFirst("..");
            i++;
        }

        return this.parentFileSystem.getPath("", deque.toArray(new String[0]));
    }

    /**
     * No authority component is defined for the {@code URI} returned by this method. This implementation offers the
     * same equivalence guarantee as the default provider.
     *
     * {@inheritDoc}
     */
    @Override
    public URI toUri() {
        try {
            return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(),
                null, null);
        } catch (URISyntaxException e) {
            throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e));
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Path toAbsolutePath() {
        if (this.isAbsolute()) {
            return this;
        }
        return this.parentFileSystem.getDefaultDirectory().resolve(this);
    }

    /**
     * Unsupported.
     * <p>
     * {@inheritDoc}
     */
    @Override
    public Path toRealPath(LinkOption... linkOptions) throws IOException {
        throw new UnsupportedOperationException("Symbolic links are not supported.");
    }

    /**
     * Unsupported.
     * <p>
     * {@inheritDoc}
     */
    @Override
    public File toFile() {
        throw new UnsupportedOperationException();
    }

    /**
     * Unsupported.
     * <p>
     * {@inheritDoc}
     */
    @Override
    public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier...
        modifiers) throws IOException {
        throw new UnsupportedOperationException("WatchEvents are not supported.");
    }

    /**
     * Unsupported.
     * <p>
     * {@inheritDoc}
     */
    @Override
    public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException {
        throw new UnsupportedOperationException("WatchEvents are not supported.");
    }

    /**
     * Returns an iterator over the name elements of this path, excluding the root component.
     *
     * {@inheritDoc}
     */
    @Override
    public Iterator<Path> iterator() {
        return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot()))
            .map(s -> this.parentFileSystem.getPath(s))
            .toArray(Path[]::new))
            .iterator();
    }

    /**
     * This result of this method is identical to a string comparison on the underlying path strings.
     *
     * {@inheritDoc}
     */
    @Override
    public int compareTo(Path path) {
        if (!(path instanceof AzurePath)) {
            throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath."));
        }

        return this.pathString.compareTo(((AzurePath) path).pathString);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        return this.pathString;
    }

    /**
     * A path is considered equal to another path if it is associated with the same file system instance and if the
     * path strings are equivalent.
     *
     * {@inheritDoc}
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        AzurePath paths = (AzurePath) o;
        return Objects.equals(parentFileSystem, paths.parentFileSystem)
            && Objects.equals(pathString, paths.pathString);
    }

    @Override
    public int hashCode() {
        return Objects.hash(parentFileSystem, pathString);
    }

    // Returns whether fileStoreName names one of the file stores (containers) of the parent file system.
    boolean validRoot(String fileStoreName) {
        return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false)
            .map(FileStore::name)
            .anyMatch(fileStoreName::equals);
    }

    // Returns this path's string form with the root component (and its trailing separator) stripped, if present.
    private String withoutRoot() {
        Path root = this.getRoot();
        String str = this.pathString;
        if (root != null) {
            str = this.pathString.substring(root.toString().length());
        }
        if (str.startsWith(this.parentFileSystem.getSeparator())) {
            str = str.substring(1);
        }

        return str;
    }

    // Splits str on the file system separator, returning an empty array rather than {""} for an empty input.
    private String[] splitToElements(String str) {
        String[] arr = str.split(this.parentFileSystem.getSeparator());
        /*
        This is a special case where we split after removing the root from a path that is just the root. Or otherwise
        have an empty path.
         */
        if (arr.length == 1 && arr[0].isEmpty()) {
            return new String[0];
        }
        return arr;
    }
}
I was wondering about that as I wrote it. The consequence is that these objects would roughly double their memory usage, right? We would store the pathString as one object and then all of its components again separately. I didn't feel I had enough knowledge of customer scenarios to know whether they are CPU-bound or memory-bound, so I was going to defer that decision until we hear one way or the other from customers, unless you feel strongly about it. Does that make sense?
/**
 * Splits this path's full string form, including the root component if present, into its constituent elements.
 *
 * @return the elements of this path.
 */
private String[] splitToElements() {
    String[] elements = this.splitToElements(this.pathString);
    return elements;
}
return this.splitToElements(this.pathString);
// Convenience overload: splits the entire path string (root component included) into its elements.
private String[] splitToElements() {
    return splitToElements(pathString);
}
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { if (more == null) { more = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(first.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(more) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. 
* * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. 
* * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
I think I addressed the Stream concern above and the String[] concern below :)
/**
 * Returns the file name of this path, i.e. the element farthest from the root, or {@code null} if this path
 * consists only of a root component (or is empty).
 *
 * @return a path representing the name of the file or directory, or {@code null} if this path has zero elements
 */
public Path getFileName() {
    if (this.withoutRoot().isEmpty()) {
        // Only a root component (or an empty path): there is no file name.
        return null;
    } else {
        // Take the last element directly from the split array. Building a reactive pipeline and blocking on it
        // (Flux.fromArray(...).last().block()) was pure overhead for an in-memory array and is a blocking
        // anti-pattern besides.
        String[] elements = this.splitToElements();
        return this.parentFileSystem.getPath(elements[elements.length - 1]);
    }
}
return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block());
/**
 * {@inheritDoc}
 */
public Path getFileName() {
    // A path consisting solely of a root component (or nothing at all) has no file name.
    if (this.withoutRoot().isEmpty()) {
        return null;
    }
    // The file name is simply the final element of the path string.
    String[] parts = this.splitToElements();
    return this.parentFileSystem.getPath(parts[parts.length - 1]);
}
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { if (more == null) { more = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(first.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(more) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. 
* * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return 
this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. 
*/ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. * * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. 
* * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. * * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. 
* * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. 
* * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. * * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements() { return this.splitToElements(this.pathString); } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
That makes sense to me
/** Splits this path's full string representation (root component included, if any) into its element array. */
private String[] splitToElements() { return this.splitToElements(this.pathString); }
return this.splitToElements(this.pathString);
/**
 * Convenience overload that splits the entire path string of this path into its elements.
 * The root component, when present, is included as the first element.
 */
private String[] splitToElements() { return this.splitToElements(this.pathString); }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { if (more == null) { more = new String[0]; } this.parentFileSystem = parentFileSystem; Flux<String> elementFlux = Flux.fromArray(first.split(this.parentFileSystem.getSeparator())) .concatWith(Flux.fromArray(more) .flatMap(str -> Flux.fromArray(str.split(this.parentFileSystem.getSeparator())))) .filter(str -> !str.isEmpty()); this.pathString = String.join(this.parentFileSystem.getSeparator(), elementFlux.toIterable()); elementFlux.skip(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an invalid character except to identify the root element of this path if there is one."))) : Mono.just(str)).blockLast(); elementFlux.take(1) .flatMap(str -> str.contains(ROOT_DIR_SUFFIX) && str.indexOf(ROOT_DIR_SUFFIX) < str.length() - 1 ? Mono.error(Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path"))) : Mono.just(str)).blockLast(); } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. * * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. 
* * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { return this.parentFileSystem.getPath(Flux.fromArray(this.splitToElements()).last().block()); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. */ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } Iterable<String> subnames = Flux.fromArray(this.splitToElements(this.withoutRoot())) .skip(begin) .take(end - begin) .toIterable(); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. 
* * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class)); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class)); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Flux.fromArray(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toIterable() .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores()) .map(FileStore::name) .hasElement(fileStoreName) .block(); return validRootName != null && validRootName; } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
class AzurePath implements Path { private final ClientLogger logger = new ClientLogger(AzurePath.class); private static final String ROOT_DIR_SUFFIX = ":"; private final AzureFileSystem parentFileSystem; private final String pathString; AzurePath(AzureFileSystem parentFileSystem, String first, String... more) { this.parentFileSystem = parentFileSystem; /* Break all strings into their respective elements and remove empty elements. This has the effect of stripping any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join. */ List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator()))); if (more != null) { for (String next : more) { elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator()))); } } elements.removeIf(String::isEmpty); this.pathString = String.join(this.parentFileSystem.getSeparator(), elements); for (int i = 0; i < elements.size(); i++) { String element = elements.get(i); /* If there is a root component, it must be the first element. A root component takes the format of "<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last character of the first element. */ if (i == 0) { if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may" + " only be used as the last character in the root component of a path")); } } else if (element.contains(ROOT_DIR_SUFFIX)) { throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an " + "invalid character except to identify the root element of this path if there is one.")); } } } /** * {@inheritDoc} */ @Override public FileSystem getFileSystem() { return this.parentFileSystem; } /** * A path is considered absolute in this file system if it contains a root component. 
* * {@inheritDoc} */ @Override public boolean isAbsolute() { return this.getRoot() != null; } /** * The root component of this path also identifies the Azure Storage Container in which the file is stored. This * method will not validate that the root component corresponds to an actual file store/container in this * file system. It will simply return the root component of the path if one is present and syntactically valid. * * {@inheritDoc} */ @Override public Path getRoot() { String firstElement = this.splitToElements()[0]; if (firstElement.endsWith(ROOT_DIR_SUFFIX)) { return this.parentFileSystem.getPath(firstElement); } return null; } /** * {@inheritDoc} */ @Override public Path getFileName() { if (this.withoutRoot().isEmpty()) { return null; } else { List<String> elements = Arrays.asList(this.splitToElements()); return this.parentFileSystem.getPath(elements.get(elements.size() - 1)); } } /** * {@inheritDoc} */ @Override public Path getParent() { /* If this path only has one element, there is no parent. Note the root is included in the parent, so we don't use getNameCount here. 
*/ if (this.splitToElements().length == 1) { return null; } return this.parentFileSystem.getPath( this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator()))); } /** * {@inheritDoc} */ @Override public int getNameCount() { return this.splitToElements(this.withoutRoot()).length; } /** * {@inheritDoc} */ @Override public Path getName(int i) { if (i < 0 || i >= this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i))); } return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]); } /** * {@inheritDoc} */ @Override public Path subpath(int begin, int end) { if (begin < 0 || begin >= this.getNameCount() || end <= begin || end > this.getNameCount()) { throw Utility.logError(logger, new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end))); } String[] subnames = Stream.of(this.splitToElements(this.withoutRoot())) .skip(begin) .limit(end - begin) .toArray(String[]::new); return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames)); } /** * In this implementation, a root component starts with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. 
* * {@inheritDoc} */ @Override public boolean startsWith(Path path) { if (!path.getFileSystem().equals(this.parentFileSystem)) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).splitToElements(); if (otherPathElements.length > thisPathElements.length) { return false; } for (int i = 0; i < otherPathElements.length; i++) { if (!otherPathElements[i].equals(thisPathElements[i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean startsWith(String s) { return this.startsWith(this.parentFileSystem.getPath(s)); } /** * In this implementation, a root component ends with another root component if the two root components are * equivalent strings. In other words, if the files are stored in the same container. * * {@inheritDoc} */ @Override public boolean endsWith(Path path) { /* There can only be one instance of a file system with a given id, so comparing object identity is equivalent to checking ids here. */ if (path.getFileSystem() != this.parentFileSystem) { return false; } String[] thisPathElements = this.splitToElements(); String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator()); if (otherPathElements.length > thisPathElements.length) { return false; } if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) { return false; } for (int i = 1; i <= otherPathElements.length; i++) { if (!otherPathElements[otherPathElements.length - i] .equals(thisPathElements[thisPathElements.length - i])) { return false; } } return true; } /** * {@inheritDoc} */ @Override public boolean endsWith(String s) { return this.endsWith(this.parentFileSystem.getPath(s)); } /** * This file system follows the standard practice mentioned in the original docs. 
* * {@inheritDoc} */ @Override public Path normalize() { Deque<String> stack = new ArrayDeque<>(); String[] pathElements = this.splitToElements(); Path root = this.getRoot(); String rootStr = root == null ? null : root.toString(); for (String element : pathElements) { if (element.equals(".")) { continue; } else if (element.equals("..")) { if (rootStr != null) { if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) { continue; } else { stack.removeLast(); } } else { if (stack.isEmpty()) { stack.addLast(element); } else if (stack.peek().equals("..")) { stack.addLast(element); } else { stack.removeLast(); } } } else { stack.addLast(element); } } return this.parentFileSystem.getPath("", stack.toArray(new String[0])); } /** * If the other path has a root component, it is considered absolute, and it is returned. * * {@inheritDoc} */ @Override public Path resolve(Path path) { if (path.isAbsolute()) { return path; } if (path.getNameCount() == 0) { return this; } return this.parentFileSystem.getPath(this.toString(), path.toString()); } /** * {@inheritDoc} */ @Override public Path resolve(String s) { return this.resolve(this.parentFileSystem.getPath(s)); } /** * {@inheritDoc} */ @Override public Path resolveSibling(Path path) { if (path.isAbsolute()) { return path; } Path parent = this.getParent(); return parent == null ? path : parent.resolve(path); } /** * {@inheritDoc} */ @Override public Path resolveSibling(String s) { return this.resolveSibling(this.parentFileSystem.getPath(s)); } /** * If both paths have a root component, it is still to relativize one against the other. 
* * {@inheritDoc} */ @Override public Path relativize(Path path) { if (path.getRoot() == null ^ this.getRoot() == null) { throw Utility.logError(logger, new IllegalArgumentException("Both paths must be absolute or neither can be")); } AzurePath thisNormalized = (AzurePath) this.normalize(); Path otherNormalized = path.normalize(); Deque<String> deque = new ArrayDeque<>( Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator()))); int i = 0; String[] thisElements = thisNormalized.splitToElements(); while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) { deque.removeFirst(); i++; } while (i < thisElements.length) { deque.addFirst(".."); i++; } return this.parentFileSystem.getPath("", deque.toArray(new String[0])); } /** * No authority component is defined for the {@code URI} returned by this method. This implementation offers the * same equivalence guarantee as the default provider. * * {@inheritDoc} */ @Override public URI toUri() { try { return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(), null, null); } catch (URISyntaxException e) { throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e)); } } /** * {@inheritDoc} */ @Override public Path toAbsolutePath() { if (this.isAbsolute()) { return this; } return this.parentFileSystem.getDefaultDirectory().resolve(this); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public Path toRealPath(LinkOption... linkOptions) throws IOException { throw new UnsupportedOperationException("Symbolic links are not supported."); } /** * {@inheritDoc} */ @Override public File toFile() { throw new UnsupportedOperationException(); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... 
modifiers) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported. * <p> * {@inheritDoc} */ @Override public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException { throw new UnsupportedOperationException("WatchEvents are not supported."); } /** * Unsupported * <p> * {@inheritDoc} */ @Override public Iterator<Path> iterator() { return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot())) .map(s -> this.parentFileSystem.getPath(s)) .toArray(Path[]::new)) .iterator(); } /** * This result of this method is identical to a string comparison on the underlying path strings. * * {@inheritDoc} */ @Override public int compareTo(Path path) { if (!(path instanceof AzurePath)) { throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath.")); } return this.pathString.compareTo(((AzurePath) path).pathString); } /** * {@inheritDoc} */ @Override public String toString() { return this.pathString; } /** * A path is considered equal to another path if it is associated with the same file system instance and if the * path strings are equivalent. 
* * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } AzurePath paths = (AzurePath) o; return Objects.equals(parentFileSystem, paths.parentFileSystem) && Objects.equals(pathString, paths.pathString); } @Override public int hashCode() { return Objects.hash(parentFileSystem, pathString); } boolean validRoot(String fileStoreName) { return StreamSupport.stream(parentFileSystem.getFileStores().spliterator(), false) .map(FileStore::name) .anyMatch(fileStoreName::equals); } private String withoutRoot() { Path root = this.getRoot(); String str = this.pathString; if (root != null) { str = this.pathString.substring(root.toString().length()); } if (str.startsWith(this.parentFileSystem.getSeparator())) { str = str.substring(1); } return str; } private String[] splitToElements(String str) { String[] arr = str.split(this.parentFileSystem.getSeparator()); /* This is a special case where we split after removing the root from a path that is just the root. Or otherwise have an empty path. */ if (arr.length == 1 && arr[0].isEmpty()) { return new String[0]; } return arr; } }
```suggestion .map(ByteBuffer::wrap) ```
public BufferedHttpResponse(HttpResponse innerHttpResponse) { super(innerHttpResponse.getRequest()); this.innerHttpResponse = innerHttpResponse; this.cachedBody = FluxUtil.collectBytesInByteBufferStream(innerHttpResponse.getBody()) .map(bytes -> ByteBuffer.wrap(bytes)) .flux() .cache(); }
.map(bytes -> ByteBuffer.wrap(bytes))
public BufferedHttpResponse(HttpResponse innerHttpResponse) { super(innerHttpResponse.getRequest()); this.innerHttpResponse = innerHttpResponse; this.cachedBody = FluxUtil.collectBytesInByteBufferStream(innerHttpResponse.getBody()) .map(ByteBuffer::wrap) .flux() .cache(); }
class BufferedHttpResponse extends HttpResponse { private final HttpResponse innerHttpResponse; private final Flux<ByteBuffer> cachedBody; /** * Creates a buffered HTTP response. * * @param innerHttpResponse The HTTP response to buffer */ @Override public int getStatusCode() { return innerHttpResponse.getStatusCode(); } @Override public String getHeaderValue(String name) { return innerHttpResponse.getHeaderValue(name); } @Override public HttpHeaders getHeaders() { return innerHttpResponse.getHeaders(); } @Override public Flux<ByteBuffer> getBody() { return cachedBody; } @Override public Mono<byte[]> getBodyAsByteArray() { return cachedBody.next().map(byteBuffer -> byteBuffer.array()); } @Override public Mono<String> getBodyAsString() { return getBodyAsByteArray() .map(bytes -> bytes == null ? null : new String(bytes, StandardCharsets.UTF_8)); } @Override public Mono<String> getBodyAsString(Charset charset) { return getBodyAsByteArray() .map(bytes -> bytes == null ? null : new String(bytes, charset)); } @Override public BufferedHttpResponse buffer() { return this; } }
class BufferedHttpResponse extends HttpResponse { private final HttpResponse innerHttpResponse; private final Flux<ByteBuffer> cachedBody; /** * Creates a buffered HTTP response. * * @param innerHttpResponse The HTTP response to buffer */ @Override public int getStatusCode() { return innerHttpResponse.getStatusCode(); } @Override public String getHeaderValue(String name) { return innerHttpResponse.getHeaderValue(name); } @Override public HttpHeaders getHeaders() { return innerHttpResponse.getHeaders(); } @Override public Flux<ByteBuffer> getBody() { return cachedBody; } @Override public Mono<byte[]> getBodyAsByteArray() { return cachedBody.next().map(ByteBuffer::array); } @Override public Mono<String> getBodyAsString() { return getBodyAsByteArray() .map(bytes -> bytes == null ? null : new String(bytes, StandardCharsets.UTF_8)); } @Override public Mono<String> getBodyAsString(Charset charset) { return getBodyAsByteArray() .map(bytes -> bytes == null ? null : new String(bytes, charset)); } @Override public BufferedHttpResponse buffer() { return this; } }
```suggestion return cachedBody.next().map(ByteBuffer::array); ```
public Mono<byte[]> getBodyAsByteArray() { return cachedBody.next().map(byteBuffer -> byteBuffer.array()); }
return cachedBody.next().map(byteBuffer -> byteBuffer.array());
public Mono<byte[]> getBodyAsByteArray() { return cachedBody.next().map(ByteBuffer::array); }
class BufferedHttpResponse extends HttpResponse { private final HttpResponse innerHttpResponse; private final Flux<ByteBuffer> cachedBody; /** * Creates a buffered HTTP response. * * @param innerHttpResponse The HTTP response to buffer */ public BufferedHttpResponse(HttpResponse innerHttpResponse) { super(innerHttpResponse.getRequest()); this.innerHttpResponse = innerHttpResponse; this.cachedBody = FluxUtil.collectBytesInByteBufferStream(innerHttpResponse.getBody()) .map(bytes -> ByteBuffer.wrap(bytes)) .flux() .cache(); } @Override public int getStatusCode() { return innerHttpResponse.getStatusCode(); } @Override public String getHeaderValue(String name) { return innerHttpResponse.getHeaderValue(name); } @Override public HttpHeaders getHeaders() { return innerHttpResponse.getHeaders(); } @Override public Flux<ByteBuffer> getBody() { return cachedBody; } @Override @Override public Mono<String> getBodyAsString() { return getBodyAsByteArray() .map(bytes -> bytes == null ? null : new String(bytes, StandardCharsets.UTF_8)); } @Override public Mono<String> getBodyAsString(Charset charset) { return getBodyAsByteArray() .map(bytes -> bytes == null ? null : new String(bytes, charset)); } @Override public BufferedHttpResponse buffer() { return this; } }
class BufferedHttpResponse extends HttpResponse { private final HttpResponse innerHttpResponse; private final Flux<ByteBuffer> cachedBody; /** * Creates a buffered HTTP response. * * @param innerHttpResponse The HTTP response to buffer */ public BufferedHttpResponse(HttpResponse innerHttpResponse) { super(innerHttpResponse.getRequest()); this.innerHttpResponse = innerHttpResponse; this.cachedBody = FluxUtil.collectBytesInByteBufferStream(innerHttpResponse.getBody()) .map(ByteBuffer::wrap) .flux() .cache(); } @Override public int getStatusCode() { return innerHttpResponse.getStatusCode(); } @Override public String getHeaderValue(String name) { return innerHttpResponse.getHeaderValue(name); } @Override public HttpHeaders getHeaders() { return innerHttpResponse.getHeaders(); } @Override public Flux<ByteBuffer> getBody() { return cachedBody; } @Override @Override public Mono<String> getBodyAsString() { return getBodyAsByteArray() .map(bytes -> bytes == null ? null : new String(bytes, StandardCharsets.UTF_8)); } @Override public Mono<String> getBodyAsString(Charset charset) { return getBodyAsByteArray() .map(bytes -> bytes == null ? null : new String(bytes, charset)); } @Override public BufferedHttpResponse buffer() { return this; } }
Do we need to handle Flux still?
private static Type extractEntityTypeFromReturnType(HttpResponseDecodeData decodeData) { Type token = decodeData.getReturnType(); if (token != null) { if (TypeUtil.isTypeOrSubTypeOf(token, Mono.class)) { token = TypeUtil.getTypeArgument(token); } if (TypeUtil.isTypeOrSubTypeOf(token, Response.class)) { token = TypeUtil.getRestResponseBodyType(token); } } return token; }
token = TypeUtil.getRestResponseBodyType(token);
private static Type extractEntityTypeFromReturnType(HttpResponseDecodeData decodeData) { Type token = decodeData.getReturnType(); if (token != null) { if (TypeUtil.isTypeOrSubTypeOf(token, Mono.class)) { token = TypeUtil.getTypeArgument(token); } if (TypeUtil.isTypeOrSubTypeOf(token, Response.class)) { token = TypeUtil.getRestResponseBodyType(token); } } return token; }
class instead. wireResponseType = TypeUtil.createParameterizedType(ItemPage.class, resultType); } else { wireResponseType = wireType; }
class instead. wireResponseType = TypeUtil.createParameterizedType(ItemPage.class, resultType); } else { wireResponseType = wireType; }
Not in this path. We handle "Flux<ByteBuffer>" in different code path.
private static Type extractEntityTypeFromReturnType(HttpResponseDecodeData decodeData) { Type token = decodeData.getReturnType(); if (token != null) { if (TypeUtil.isTypeOrSubTypeOf(token, Mono.class)) { token = TypeUtil.getTypeArgument(token); } if (TypeUtil.isTypeOrSubTypeOf(token, Response.class)) { token = TypeUtil.getRestResponseBodyType(token); } } return token; }
token = TypeUtil.getRestResponseBodyType(token);
private static Type extractEntityTypeFromReturnType(HttpResponseDecodeData decodeData) { Type token = decodeData.getReturnType(); if (token != null) { if (TypeUtil.isTypeOrSubTypeOf(token, Mono.class)) { token = TypeUtil.getTypeArgument(token); } if (TypeUtil.isTypeOrSubTypeOf(token, Response.class)) { token = TypeUtil.getRestResponseBodyType(token); } } return token; }
class instead. wireResponseType = TypeUtil.createParameterizedType(ItemPage.class, resultType); } else { wireResponseType = wireType; }
class instead. wireResponseType = TypeUtil.createParameterizedType(ItemPage.class, resultType); } else { wireResponseType = wireType; }
Ah, perfect!
private static Type extractEntityTypeFromReturnType(HttpResponseDecodeData decodeData) { Type token = decodeData.getReturnType(); if (token != null) { if (TypeUtil.isTypeOrSubTypeOf(token, Mono.class)) { token = TypeUtil.getTypeArgument(token); } if (TypeUtil.isTypeOrSubTypeOf(token, Response.class)) { token = TypeUtil.getRestResponseBodyType(token); } } return token; }
token = TypeUtil.getRestResponseBodyType(token);
private static Type extractEntityTypeFromReturnType(HttpResponseDecodeData decodeData) { Type token = decodeData.getReturnType(); if (token != null) { if (TypeUtil.isTypeOrSubTypeOf(token, Mono.class)) { token = TypeUtil.getTypeArgument(token); } if (TypeUtil.isTypeOrSubTypeOf(token, Response.class)) { token = TypeUtil.getRestResponseBodyType(token); } } return token; }
class instead. wireResponseType = TypeUtil.createParameterizedType(ItemPage.class, resultType); } else { wireResponseType = wireType; }
class instead. wireResponseType = TypeUtil.createParameterizedType(ItemPage.class, resultType); } else { wireResponseType = wireType; }
We don't need this `.single()` anymore, right ? Here and every other method in this class.
static public Mono<Database> createDatabaseIfNotExists(AsyncDocumentClient client, String databaseName) { return client.readDatabase("/dbs/" + databaseName, null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { Database d = new Database(); d.setId(databaseName); return client.createDatabase(d, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); }
).map(ResourceResponse::getResource).single();
static public Mono<Database> createDatabaseIfNotExists(AsyncDocumentClient client, String databaseName) { return client.readDatabase("/dbs/" + databaseName, null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { Database d = new Database(); d.setId(databaseName); return client.createDatabase(d, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); }
class Helpers { static public String createDocumentCollectionUri(String databaseName, String collectionName) { return String.format("/dbs/%s/colls/%s", databaseName, collectionName); } static public String createDatabaseUri(String databaseName) { return String.format("/dbs/%s", databaseName); } static public Mono<DocumentCollection> createCollectionIfNotExists(AsyncDocumentClient client, String databaseName, String collectionName) { return client.readCollection(createDocumentCollectionUri(databaseName, collectionName), null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { DocumentCollection collection = new DocumentCollection(); collection.setId(collectionName); return client.createCollection(createDatabaseUri(databaseName), collection, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); } static public Mono<DocumentCollection> createCollectionIfNotExists(AsyncDocumentClient client, String databaseName, DocumentCollection collection) { return client.readCollection(createDocumentCollectionUri(databaseName, collection.getId()), null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { return client.createCollection(createDatabaseUri(databaseName), collection, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); } }
class Helpers { static public String createDocumentCollectionUri(String databaseName, String collectionName) { return String.format("/dbs/%s/colls/%s", databaseName, collectionName); } static public String createDatabaseUri(String databaseName) { return String.format("/dbs/%s", databaseName); } static public Mono<DocumentCollection> createCollectionIfNotExists(AsyncDocumentClient client, String databaseName, String collectionName) { return client.readCollection(createDocumentCollectionUri(databaseName, collectionName), null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { DocumentCollection collection = new DocumentCollection(); collection.setId(collectionName); return client.createCollection(createDatabaseUri(databaseName), collection, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); } static public Mono<DocumentCollection> createCollectionIfNotExists(AsyncDocumentClient client, String databaseName, DocumentCollection collection) { return client.readCollection(createDocumentCollectionUri(databaseName, collection.getId()), null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { return client.createCollection(createDatabaseUri(databaseName), collection, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); } }
Same here, unnecessary conversion to `.single()`
public void transformObservableToCompletableFuture() throws Exception { Mono<ResourceResponse<DocumentCollection>> createCollectionObservable = client .createCollection(getDatabaseLink(), collectionDefinition, null); CompletableFuture<ResourceResponse<DocumentCollection>> future = createCollectionObservable.single().toFuture(); ResourceResponse<DocumentCollection> rrd = future.get(); assertThat(rrd.getRequestCharge(), greaterThan((double) 0)); System.out.println(rrd.getRequestCharge()); }
CompletableFuture<ResourceResponse<DocumentCollection>> future = createCollectionObservable.single().toFuture();
public void transformObservableToCompletableFuture() throws Exception { Mono<ResourceResponse<DocumentCollection>> createCollectionObservable = client .createCollection(getDatabaseLink(), collectionDefinition, null); CompletableFuture<ResourceResponse<DocumentCollection>> future = createCollectionObservable.single().toFuture(); ResourceResponse<DocumentCollection> rrd = future.get(); assertThat(rrd.getRequestCharge(), greaterThan((double) 0)); System.out.println(rrd.getRequestCharge()); }
class CollectionCRUDAsyncAPITest extends DocumentClientTest { private final static int TIMEOUT = 120000; private Database createdDatabase; private AsyncDocumentClient client; private DocumentCollection collectionDefinition; @BeforeClass(groups = "samples", timeOut = TIMEOUT) public void before_CollectionCRUDAsyncAPITest() { ConnectionPolicy connectionPolicy = new ConnectionPolicy().setConnectionMode(ConnectionMode.DIRECT); this.clientBuilder() .withServiceEndpoint(TestConfigurations.HOST) .withMasterKeyOrResourceToken(TestConfigurations.MASTER_KEY) .withConnectionPolicy(connectionPolicy) .withConsistencyLevel(ConsistencyLevel.SESSION); this.client = this.clientBuilder().build(); createdDatabase = Utils.createDatabaseForTest(client); } @BeforeMethod(groups = "samples", timeOut = TIMEOUT) public void before() { collectionDefinition = new DocumentCollection(); collectionDefinition.setId(UUID.randomUUID().toString()); PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition(); ArrayList<String> paths = new ArrayList<String>(); paths.add("/mypk"); partitionKeyDef.setPaths(paths); collectionDefinition.setPartitionKey(partitionKeyDef); } @AfterClass(groups = "samples", timeOut = TIMEOUT) public void shutdown() { Utils.safeClean(client, createdDatabase); Utils.safeClose(client); } /** * CREATE a document collection using async api. * If you want a single partition collection with 10,000 RU/s throughput, * the only way to do so is to create a single partition collection with lower * throughput (400) and then increase the throughput. 
*/ @Test(groups = "samples", timeOut = TIMEOUT) public void createCollection_SinglePartition_Async() throws Exception { RequestOptions singlePartitionRequestOptions = new RequestOptions(); singlePartitionRequestOptions.setOfferThroughput(400); Mono<ResourceResponse<DocumentCollection>> createCollectionObservable = client .createCollection(getDatabaseLink(), collectionDefinition, singlePartitionRequestOptions); final CountDownLatch countDownLatch = new CountDownLatch(1); createCollectionObservable.single() .subscribe(collectionResourceResponse -> { System.out.println(collectionResourceResponse.getActivityId()); countDownLatch.countDown(); }, error -> { System.err.println( "an error occurred while creating the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); }); countDownLatch.await(); } /** * CREATE a document collection using async api. * This test uses java8 lambda expression. * See testCreateCollection_Async_withoutLambda for usage without lambda * expressions. * Set the throughput to be > 10,000 RU/s * to create a multi partition collection. 
*/ @Test(groups = "samples", timeOut = TIMEOUT) public void createCollection_MultiPartition_Async() throws Exception { RequestOptions multiPartitionRequestOptions = new RequestOptions(); multiPartitionRequestOptions.setOfferThroughput(20000); Mono<ResourceResponse<DocumentCollection>> createCollectionObservable = client.createCollection( getDatabaseLink(), getMultiPartitionCollectionDefinition(), multiPartitionRequestOptions); final CountDownLatch countDownLatch = new CountDownLatch(1); createCollectionObservable.single() .subscribe(collectionResourceResponse -> { System.out.println(collectionResourceResponse.getActivityId()); countDownLatch.countDown(); }, error -> { System.err.println( "an error occurred while creating the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); }); countDownLatch.await(); } /** * CREATE a document Collection using async api, without java8 lambda expressions */ @Test(groups = "samples", timeOut = TIMEOUT) public void createCollection_Async_withoutLambda() throws Exception { Mono<ResourceResponse<DocumentCollection>> createCollectionObservable = client .createCollection(getDatabaseLink(), collectionDefinition, null); final CountDownLatch countDownLatch = new CountDownLatch(1); Consumer<ResourceResponse<DocumentCollection>> onCollectionCreationAction = new Consumer<ResourceResponse<DocumentCollection>>() { @Override public void accept(ResourceResponse<DocumentCollection> resourceResponse) { System.out.println(resourceResponse.getActivityId()); countDownLatch.countDown(); } }; Consumer<Throwable> onError = new Consumer<Throwable>() { @Override public void accept(Throwable error) { System.err.println( "an error occurred while creating the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); } }; createCollectionObservable.single() .subscribe(onCollectionCreationAction, onError); countDownLatch.await(); } /** * CREATE a collection in a blocking manner */ @Test(groups = "samples", timeOut 
= TIMEOUT) public void createCollection_toBlocking() { Mono<ResourceResponse<DocumentCollection>> createCollectionObservable = client .createCollection(getDatabaseLink(), collectionDefinition, null); createCollectionObservable.single().block(); } /** * Attempt to create a Collection which already exists * - First create a Collection * - Using the async api generate an async collection creation observable * - Converts the Observable to blocking using Observable.toBlocking() api * - Catch already exist failure (409) */ @Test(groups = "samples", timeOut = TIMEOUT) public void createCollection_toBlocking_CollectionAlreadyExists_Fails() { client.createCollection(getDatabaseLink(), collectionDefinition, null).single().block(); Mono<ResourceResponse<DocumentCollection>> collectionForTestObservable = client .createCollection(getDatabaseLink(), collectionDefinition, null); try { collectionForTestObservable.single() .block(); assertThat("Should not reach here", false); } catch (CosmosClientException e) { assertThat("Collection already exists.", e.getStatusCode(), equalTo(409)); } } /** * You can convert a Flux to a CompletableFuture. 
*/ @Test(groups = "samples", timeOut = TIMEOUT) /** * READ a Collection in an Async manner */ @Test(groups = "samples", timeOut = TIMEOUT) public void createAndReadCollection() throws Exception { DocumentCollection documentCollection = client .createCollection(getDatabaseLink(), collectionDefinition, null).single().block() .getResource(); Mono<ResourceResponse<DocumentCollection>> readCollectionObservable = client .readCollection(getCollectionLink(documentCollection), null); final CountDownLatch countDownLatch = new CountDownLatch(1); readCollectionObservable.single() .subscribe(collectionResourceResponse -> { System.out.println(collectionResourceResponse.getActivityId()); countDownLatch.countDown(); }, error -> { System.err.println( "an error occurred while reading the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); }); countDownLatch.await(); } /** * DELETE a Collection in an Async manner */ @Test(groups = "samples", timeOut = TIMEOUT) public void createAndDeleteCollection() throws Exception { DocumentCollection documentCollection = client .createCollection(getDatabaseLink(), collectionDefinition, null).single().block() .getResource(); Mono<ResourceResponse<DocumentCollection>> deleteCollectionObservable = client .deleteCollection(getCollectionLink(documentCollection), null); final CountDownLatch countDownLatch = new CountDownLatch(1); deleteCollectionObservable.single() .subscribe(collectionResourceResponse -> { System.out.println(collectionResourceResponse.getActivityId()); countDownLatch.countDown(); }, error -> { System.err.println( "an error occurred while deleting the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); }); countDownLatch.await(); } /** * Query a Collection in an Async manner */ @Test(groups = "samples", timeOut = TIMEOUT) public void collectionCreateAndQuery() throws Exception { DocumentCollection collection = client .createCollection(getDatabaseLink(), collectionDefinition, 
null).single().block() .getResource(); Flux<FeedResponse<DocumentCollection>> queryCollectionObservable = client.queryCollections( getDatabaseLink(), String.format("SELECT * FROM r where r.id = '%s'", collection.getId()), null); final CountDownLatch countDownLatch = new CountDownLatch(1); queryCollectionObservable.collectList().subscribe(collectionFeedResponseList -> { assertThat(collectionFeedResponseList.size(), equalTo(1)); FeedResponse<DocumentCollection> collectionFeedResponse = collectionFeedResponseList.get(0); assertThat(collectionFeedResponse.getResults().size(), equalTo(1)); DocumentCollection foundCollection = collectionFeedResponse.getResults().get(0); assertThat(foundCollection.getId(), equalTo(collection.getId())); System.out.println(collectionFeedResponse.getActivityId()); countDownLatch.countDown(); }, error -> { System.err.println("an error occurred while querying the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); }); countDownLatch.await(); } private String getDatabaseLink() { return "dbs/" + createdDatabase.getId(); } private String getCollectionLink(DocumentCollection collection) { return "dbs/" + createdDatabase.getId() + "/colls/" + collection.getId(); } private DocumentCollection getMultiPartitionCollectionDefinition() { DocumentCollection collectionDefinition = new DocumentCollection(); collectionDefinition.setId(UUID.randomUUID().toString()); PartitionKeyDefinition partitionKeyDefinition = new PartitionKeyDefinition(); List<String> paths = new ArrayList<>(); paths.add("/city"); partitionKeyDefinition.setPaths(paths); collectionDefinition.setPartitionKey(partitionKeyDefinition); IndexingPolicy indexingPolicy = new IndexingPolicy(); List<IncludedPath> includedPaths = new ArrayList<>(); IncludedPath includedPath = new IncludedPath(); includedPath.setPath("/*"); Collection<Index> indexes = new ArrayList<>(); Index stringIndex = Index.range(DataType.STRING); BridgeInternal.setProperty(stringIndex, "getPrecision", 
-1); indexes.add(stringIndex); Index numberIndex = Index.range(DataType.NUMBER); BridgeInternal.setProperty(numberIndex, "getPrecision", -1); indexes.add(numberIndex); includedPath.setIndexes(indexes); includedPaths.add(includedPath); indexingPolicy.setIncludedPaths(includedPaths); collectionDefinition.setIndexingPolicy(indexingPolicy); return collectionDefinition; } }
class CollectionCRUDAsyncAPITest extends DocumentClientTest { private final static int TIMEOUT = 120000; private Database createdDatabase; private AsyncDocumentClient client; private DocumentCollection collectionDefinition; @BeforeClass(groups = "samples", timeOut = TIMEOUT) public void before_CollectionCRUDAsyncAPITest() { ConnectionPolicy connectionPolicy = new ConnectionPolicy().setConnectionMode(ConnectionMode.DIRECT); this.clientBuilder() .withServiceEndpoint(TestConfigurations.HOST) .withMasterKeyOrResourceToken(TestConfigurations.MASTER_KEY) .withConnectionPolicy(connectionPolicy) .withConsistencyLevel(ConsistencyLevel.SESSION); this.client = this.clientBuilder().build(); createdDatabase = Utils.createDatabaseForTest(client); } @BeforeMethod(groups = "samples", timeOut = TIMEOUT) public void before() { collectionDefinition = new DocumentCollection(); collectionDefinition.setId(UUID.randomUUID().toString()); PartitionKeyDefinition partitionKeyDef = new PartitionKeyDefinition(); ArrayList<String> paths = new ArrayList<String>(); paths.add("/mypk"); partitionKeyDef.setPaths(paths); collectionDefinition.setPartitionKey(partitionKeyDef); } @AfterClass(groups = "samples", timeOut = TIMEOUT) public void shutdown() { Utils.safeClean(client, createdDatabase); Utils.safeClose(client); } /** * CREATE a document collection using async api. * If you want a single partition collection with 10,000 RU/s throughput, * the only way to do so is to create a single partition collection with lower * throughput (400) and then increase the throughput. 
*/ @Test(groups = "samples", timeOut = TIMEOUT) public void createCollection_SinglePartition_Async() throws Exception { RequestOptions singlePartitionRequestOptions = new RequestOptions(); singlePartitionRequestOptions.setOfferThroughput(400); Mono<ResourceResponse<DocumentCollection>> createCollectionObservable = client .createCollection(getDatabaseLink(), collectionDefinition, singlePartitionRequestOptions); final CountDownLatch countDownLatch = new CountDownLatch(1); createCollectionObservable.single() .subscribe(collectionResourceResponse -> { System.out.println(collectionResourceResponse.getActivityId()); countDownLatch.countDown(); }, error -> { System.err.println( "an error occurred while creating the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); }); countDownLatch.await(); } /** * CREATE a document collection using async api. * This test uses java8 lambda expression. * See testCreateCollection_Async_withoutLambda for usage without lambda * expressions. * Set the throughput to be > 10,000 RU/s * to create a multi partition collection. 
*/ @Test(groups = "samples", timeOut = TIMEOUT) public void createCollection_MultiPartition_Async() throws Exception { RequestOptions multiPartitionRequestOptions = new RequestOptions(); multiPartitionRequestOptions.setOfferThroughput(20000); Mono<ResourceResponse<DocumentCollection>> createCollectionObservable = client.createCollection( getDatabaseLink(), getMultiPartitionCollectionDefinition(), multiPartitionRequestOptions); final CountDownLatch countDownLatch = new CountDownLatch(1); createCollectionObservable.single() .subscribe(collectionResourceResponse -> { System.out.println(collectionResourceResponse.getActivityId()); countDownLatch.countDown(); }, error -> { System.err.println( "an error occurred while creating the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); }); countDownLatch.await(); } /** * CREATE a document Collection using async api, without java8 lambda expressions */ @Test(groups = "samples", timeOut = TIMEOUT) public void createCollection_Async_withoutLambda() throws Exception { Mono<ResourceResponse<DocumentCollection>> createCollectionObservable = client .createCollection(getDatabaseLink(), collectionDefinition, null); final CountDownLatch countDownLatch = new CountDownLatch(1); Consumer<ResourceResponse<DocumentCollection>> onCollectionCreationAction = new Consumer<ResourceResponse<DocumentCollection>>() { @Override public void accept(ResourceResponse<DocumentCollection> resourceResponse) { System.out.println(resourceResponse.getActivityId()); countDownLatch.countDown(); } }; Consumer<Throwable> onError = new Consumer<Throwable>() { @Override public void accept(Throwable error) { System.err.println( "an error occurred while creating the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); } }; createCollectionObservable.single() .subscribe(onCollectionCreationAction, onError); countDownLatch.await(); } /** * CREATE a collection in a blocking manner */ @Test(groups = "samples", timeOut 
= TIMEOUT) public void createCollection_toBlocking() { Mono<ResourceResponse<DocumentCollection>> createCollectionObservable = client .createCollection(getDatabaseLink(), collectionDefinition, null); createCollectionObservable.single().block(); } /** * Attempt to create a Collection which already exists * - First create a Collection * - Using the async api generate an async collection creation observable * - Converts the Observable to blocking using Observable.toBlocking() api * - Catch already exist failure (409) */ @Test(groups = "samples", timeOut = TIMEOUT) public void createCollection_toBlocking_CollectionAlreadyExists_Fails() { client.createCollection(getDatabaseLink(), collectionDefinition, null).single().block(); Mono<ResourceResponse<DocumentCollection>> collectionForTestObservable = client .createCollection(getDatabaseLink(), collectionDefinition, null); try { collectionForTestObservable.single() .block(); assertThat("Should not reach here", false); } catch (CosmosClientException e) { assertThat("Collection already exists.", e.getStatusCode(), equalTo(409)); } } /** * You can convert a Flux to a CompletableFuture. 
*/ @Test(groups = "samples", timeOut = TIMEOUT) /** * READ a Collection in an Async manner */ @Test(groups = "samples", timeOut = TIMEOUT) public void createAndReadCollection() throws Exception { DocumentCollection documentCollection = client .createCollection(getDatabaseLink(), collectionDefinition, null).single().block() .getResource(); Mono<ResourceResponse<DocumentCollection>> readCollectionObservable = client .readCollection(getCollectionLink(documentCollection), null); final CountDownLatch countDownLatch = new CountDownLatch(1); readCollectionObservable.single() .subscribe(collectionResourceResponse -> { System.out.println(collectionResourceResponse.getActivityId()); countDownLatch.countDown(); }, error -> { System.err.println( "an error occurred while reading the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); }); countDownLatch.await(); } /** * DELETE a Collection in an Async manner */ @Test(groups = "samples", timeOut = TIMEOUT) public void createAndDeleteCollection() throws Exception { DocumentCollection documentCollection = client .createCollection(getDatabaseLink(), collectionDefinition, null).single().block() .getResource(); Mono<ResourceResponse<DocumentCollection>> deleteCollectionObservable = client .deleteCollection(getCollectionLink(documentCollection), null); final CountDownLatch countDownLatch = new CountDownLatch(1); deleteCollectionObservable.single() .subscribe(collectionResourceResponse -> { System.out.println(collectionResourceResponse.getActivityId()); countDownLatch.countDown(); }, error -> { System.err.println( "an error occurred while deleting the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); }); countDownLatch.await(); } /** * Query a Collection in an Async manner */ @Test(groups = "samples", timeOut = TIMEOUT) public void collectionCreateAndQuery() throws Exception { DocumentCollection collection = client .createCollection(getDatabaseLink(), collectionDefinition, 
null).single().block() .getResource(); Flux<FeedResponse<DocumentCollection>> queryCollectionObservable = client.queryCollections( getDatabaseLink(), String.format("SELECT * FROM r where r.id = '%s'", collection.getId()), null); final CountDownLatch countDownLatch = new CountDownLatch(1); queryCollectionObservable.collectList().subscribe(collectionFeedResponseList -> { assertThat(collectionFeedResponseList.size(), equalTo(1)); FeedResponse<DocumentCollection> collectionFeedResponse = collectionFeedResponseList.get(0); assertThat(collectionFeedResponse.getResults().size(), equalTo(1)); DocumentCollection foundCollection = collectionFeedResponse.getResults().get(0); assertThat(foundCollection.getId(), equalTo(collection.getId())); System.out.println(collectionFeedResponse.getActivityId()); countDownLatch.countDown(); }, error -> { System.err.println("an error occurred while querying the collection: actual cause: " + error.getMessage()); countDownLatch.countDown(); }); countDownLatch.await(); } private String getDatabaseLink() { return "dbs/" + createdDatabase.getId(); } private String getCollectionLink(DocumentCollection collection) { return "dbs/" + createdDatabase.getId() + "/colls/" + collection.getId(); } private DocumentCollection getMultiPartitionCollectionDefinition() { DocumentCollection collectionDefinition = new DocumentCollection(); collectionDefinition.setId(UUID.randomUUID().toString()); PartitionKeyDefinition partitionKeyDefinition = new PartitionKeyDefinition(); List<String> paths = new ArrayList<>(); paths.add("/city"); partitionKeyDefinition.setPaths(paths); collectionDefinition.setPartitionKey(partitionKeyDefinition); IndexingPolicy indexingPolicy = new IndexingPolicy(); List<IncludedPath> includedPaths = new ArrayList<>(); IncludedPath includedPath = new IncludedPath(); includedPath.setPath("/*"); Collection<Index> indexes = new ArrayList<>(); Index stringIndex = Index.range(DataType.STRING); BridgeInternal.setProperty(stringIndex, "getPrecision", 
-1); indexes.add(stringIndex); Index numberIndex = Index.range(DataType.NUMBER); BridgeInternal.setProperty(numberIndex, "getPrecision", -1); indexes.add(numberIndex); includedPath.setIndexes(indexes); includedPaths.add(includedPath); indexingPolicy.setIncludedPaths(includedPaths); collectionDefinition.setIndexingPolicy(indexingPolicy); return collectionDefinition; } }
Please remove unnecessary `.single()`
private Mono<DocumentCollection> readCollectionAsync(String collectionLink, DocumentClientRetryPolicy retryPolicyInstance, Map<String, Object> properties) { String path = Utils.joinPath(collectionLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( OperationType.Read, ResourceType.DocumentCollection, path, new HashMap<>()); request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); String resourceName = request.getResourceAddress(); String authorizationToken = tokenProvider.getUserAuthorizationToken( resourceName, request.getResourceType(), HttpConstants.HttpMethods.GET, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, properties); try { authorizationToken = URLEncoder.encode(authorizationToken, "UTF-8"); } catch (UnsupportedEncodingException e) { return Mono.error(new IllegalStateException("Failed to encode authtoken.", e)); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorizationToken); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } Mono<RxDocumentServiceResponse> responseObs = this.storeModel.processMessage(request); return responseObs.map(response -> BridgeInternal.toResourceResponse(response, DocumentCollection.class) .getResource()).single(); }
.getResource()).single();
private Mono<DocumentCollection> readCollectionAsync(String collectionLink, DocumentClientRetryPolicy retryPolicyInstance, Map<String, Object> properties) { String path = Utils.joinPath(collectionLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( OperationType.Read, ResourceType.DocumentCollection, path, new HashMap<>()); request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); String resourceName = request.getResourceAddress(); String authorizationToken = tokenProvider.getUserAuthorizationToken( resourceName, request.getResourceType(), HttpConstants.HttpMethods.GET, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, properties); try { authorizationToken = URLEncoder.encode(authorizationToken, "UTF-8"); } catch (UnsupportedEncodingException e) { return Mono.error(new IllegalStateException("Failed to encode authtoken.", e)); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorizationToken); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } Mono<RxDocumentServiceResponse> responseObs = this.storeModel.processMessage(request); return responseObs.map(response -> BridgeInternal.toResourceResponse(response, DocumentCollection.class) .getResource()).single(); }
class RxClientCollectionCache extends RxCollectionCache { private RxStoreModel storeModel; private final IAuthorizationTokenProvider tokenProvider; private final IRetryPolicyFactory retryPolicy; private final ISessionContainer sessionContainer; public RxClientCollectionCache(ISessionContainer sessionContainer, RxStoreModel storeModel, IAuthorizationTokenProvider tokenProvider, IRetryPolicyFactory retryPolicy) { this.storeModel = storeModel; this.tokenProvider = tokenProvider; this.retryPolicy = retryPolicy; this.sessionContainer = sessionContainer; } protected Mono<DocumentCollection> getByRidAsync(String collectionRid, Map<String, Object> properties) { DocumentClientRetryPolicy retryPolicyInstance = new ClearingSessionContainerClientRetryPolicy(this.sessionContainer, this.retryPolicy.getRequestPolicy()); return ObservableHelper.inlineIfPossible( () -> this.readCollectionAsync(PathsHelper.generatePath(ResourceType.DocumentCollection, collectionRid, false), retryPolicyInstance, properties) , retryPolicyInstance); } protected Mono<DocumentCollection> getByNameAsync(String resourceAddress, Map<String, Object> properties) { DocumentClientRetryPolicy retryPolicyInstance = new ClearingSessionContainerClientRetryPolicy(this.sessionContainer, this.retryPolicy.getRequestPolicy()); return ObservableHelper.inlineIfPossible( () -> this.readCollectionAsync(resourceAddress, retryPolicyInstance, properties), retryPolicyInstance); } }
class RxClientCollectionCache extends RxCollectionCache { private RxStoreModel storeModel; private final IAuthorizationTokenProvider tokenProvider; private final IRetryPolicyFactory retryPolicy; private final ISessionContainer sessionContainer; public RxClientCollectionCache(ISessionContainer sessionContainer, RxStoreModel storeModel, IAuthorizationTokenProvider tokenProvider, IRetryPolicyFactory retryPolicy) { this.storeModel = storeModel; this.tokenProvider = tokenProvider; this.retryPolicy = retryPolicy; this.sessionContainer = sessionContainer; } protected Mono<DocumentCollection> getByRidAsync(String collectionRid, Map<String, Object> properties) { DocumentClientRetryPolicy retryPolicyInstance = new ClearingSessionContainerClientRetryPolicy(this.sessionContainer, this.retryPolicy.getRequestPolicy()); return ObservableHelper.inlineIfPossible( () -> this.readCollectionAsync(PathsHelper.generatePath(ResourceType.DocumentCollection, collectionRid, false), retryPolicyInstance, properties) , retryPolicyInstance); } protected Mono<DocumentCollection> getByNameAsync(String resourceAddress, Map<String, Object> properties) { DocumentClientRetryPolicy retryPolicyInstance = new ClearingSessionContainerClientRetryPolicy(this.sessionContainer, this.retryPolicy.getRequestPolicy()); return ObservableHelper.inlineIfPossible( () -> this.readCollectionAsync(resourceAddress, retryPolicyInstance, properties), retryPolicyInstance); } }
as discussed offline. they are intentionally kept as they will do correctness validation to ensure Mono is not empty.
static public Mono<Database> createDatabaseIfNotExists(AsyncDocumentClient client, String databaseName) { return client.readDatabase("/dbs/" + databaseName, null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { Database d = new Database(); d.setId(databaseName); return client.createDatabase(d, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); }
).map(ResourceResponse::getResource).single();
static public Mono<Database> createDatabaseIfNotExists(AsyncDocumentClient client, String databaseName) { return client.readDatabase("/dbs/" + databaseName, null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { Database d = new Database(); d.setId(databaseName); return client.createDatabase(d, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); }
class Helpers { static public String createDocumentCollectionUri(String databaseName, String collectionName) { return String.format("/dbs/%s/colls/%s", databaseName, collectionName); } static public String createDatabaseUri(String databaseName) { return String.format("/dbs/%s", databaseName); } static public Mono<DocumentCollection> createCollectionIfNotExists(AsyncDocumentClient client, String databaseName, String collectionName) { return client.readCollection(createDocumentCollectionUri(databaseName, collectionName), null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { DocumentCollection collection = new DocumentCollection(); collection.setId(collectionName); return client.createCollection(createDatabaseUri(databaseName), collection, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); } static public Mono<DocumentCollection> createCollectionIfNotExists(AsyncDocumentClient client, String databaseName, DocumentCollection collection) { return client.readCollection(createDocumentCollectionUri(databaseName, collection.getId()), null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { return client.createCollection(createDatabaseUri(databaseName), collection, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); } }
class Helpers { static public String createDocumentCollectionUri(String databaseName, String collectionName) { return String.format("/dbs/%s/colls/%s", databaseName, collectionName); } static public String createDatabaseUri(String databaseName) { return String.format("/dbs/%s", databaseName); } static public Mono<DocumentCollection> createCollectionIfNotExists(AsyncDocumentClient client, String databaseName, String collectionName) { return client.readCollection(createDocumentCollectionUri(databaseName, collectionName), null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { DocumentCollection collection = new DocumentCollection(); collection.setId(collectionName); return client.createCollection(createDatabaseUri(databaseName), collection, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); } static public Mono<DocumentCollection> createCollectionIfNotExists(AsyncDocumentClient client, String databaseName, DocumentCollection collection) { return client.readCollection(createDocumentCollectionUri(databaseName, collection.getId()), null) .onErrorResume( e -> { if (e instanceof CosmosClientException) { CosmosClientException dce = (CosmosClientException) e; if (dce.getStatusCode() == 404) { return client.createCollection(createDatabaseUri(databaseName), collection, null); } } return Mono.error(e); } ).map(ResourceResponse::getResource).single(); } }
as discussed offline. they are intentionally kept as they will do correctness validation to ensure Mono is not empty.
private Mono<DocumentCollection> readCollectionAsync(String collectionLink, DocumentClientRetryPolicy retryPolicyInstance, Map<String, Object> properties) { String path = Utils.joinPath(collectionLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( OperationType.Read, ResourceType.DocumentCollection, path, new HashMap<>()); request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); String resourceName = request.getResourceAddress(); String authorizationToken = tokenProvider.getUserAuthorizationToken( resourceName, request.getResourceType(), HttpConstants.HttpMethods.GET, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, properties); try { authorizationToken = URLEncoder.encode(authorizationToken, "UTF-8"); } catch (UnsupportedEncodingException e) { return Mono.error(new IllegalStateException("Failed to encode authtoken.", e)); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorizationToken); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } Mono<RxDocumentServiceResponse> responseObs = this.storeModel.processMessage(request); return responseObs.map(response -> BridgeInternal.toResourceResponse(response, DocumentCollection.class) .getResource()).single(); }
.getResource()).single();
private Mono<DocumentCollection> readCollectionAsync(String collectionLink, DocumentClientRetryPolicy retryPolicyInstance, Map<String, Object> properties) { String path = Utils.joinPath(collectionLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( OperationType.Read, ResourceType.DocumentCollection, path, new HashMap<>()); request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); String resourceName = request.getResourceAddress(); String authorizationToken = tokenProvider.getUserAuthorizationToken( resourceName, request.getResourceType(), HttpConstants.HttpMethods.GET, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, properties); try { authorizationToken = URLEncoder.encode(authorizationToken, "UTF-8"); } catch (UnsupportedEncodingException e) { return Mono.error(new IllegalStateException("Failed to encode authtoken.", e)); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorizationToken); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } Mono<RxDocumentServiceResponse> responseObs = this.storeModel.processMessage(request); return responseObs.map(response -> BridgeInternal.toResourceResponse(response, DocumentCollection.class) .getResource()).single(); }
class RxClientCollectionCache extends RxCollectionCache { private RxStoreModel storeModel; private final IAuthorizationTokenProvider tokenProvider; private final IRetryPolicyFactory retryPolicy; private final ISessionContainer sessionContainer; public RxClientCollectionCache(ISessionContainer sessionContainer, RxStoreModel storeModel, IAuthorizationTokenProvider tokenProvider, IRetryPolicyFactory retryPolicy) { this.storeModel = storeModel; this.tokenProvider = tokenProvider; this.retryPolicy = retryPolicy; this.sessionContainer = sessionContainer; } protected Mono<DocumentCollection> getByRidAsync(String collectionRid, Map<String, Object> properties) { DocumentClientRetryPolicy retryPolicyInstance = new ClearingSessionContainerClientRetryPolicy(this.sessionContainer, this.retryPolicy.getRequestPolicy()); return ObservableHelper.inlineIfPossible( () -> this.readCollectionAsync(PathsHelper.generatePath(ResourceType.DocumentCollection, collectionRid, false), retryPolicyInstance, properties) , retryPolicyInstance); } protected Mono<DocumentCollection> getByNameAsync(String resourceAddress, Map<String, Object> properties) { DocumentClientRetryPolicy retryPolicyInstance = new ClearingSessionContainerClientRetryPolicy(this.sessionContainer, this.retryPolicy.getRequestPolicy()); return ObservableHelper.inlineIfPossible( () -> this.readCollectionAsync(resourceAddress, retryPolicyInstance, properties), retryPolicyInstance); } }
class RxClientCollectionCache extends RxCollectionCache { private RxStoreModel storeModel; private final IAuthorizationTokenProvider tokenProvider; private final IRetryPolicyFactory retryPolicy; private final ISessionContainer sessionContainer; public RxClientCollectionCache(ISessionContainer sessionContainer, RxStoreModel storeModel, IAuthorizationTokenProvider tokenProvider, IRetryPolicyFactory retryPolicy) { this.storeModel = storeModel; this.tokenProvider = tokenProvider; this.retryPolicy = retryPolicy; this.sessionContainer = sessionContainer; } protected Mono<DocumentCollection> getByRidAsync(String collectionRid, Map<String, Object> properties) { DocumentClientRetryPolicy retryPolicyInstance = new ClearingSessionContainerClientRetryPolicy(this.sessionContainer, this.retryPolicy.getRequestPolicy()); return ObservableHelper.inlineIfPossible( () -> this.readCollectionAsync(PathsHelper.generatePath(ResourceType.DocumentCollection, collectionRid, false), retryPolicyInstance, properties) , retryPolicyInstance); } protected Mono<DocumentCollection> getByNameAsync(String resourceAddress, Map<String, Object> properties) { DocumentClientRetryPolicy retryPolicyInstance = new ClearingSessionContainerClientRetryPolicy(this.sessionContainer, this.retryPolicy.getRequestPolicy()); return ObservableHelper.inlineIfPossible( () -> this.readCollectionAsync(resourceAddress, retryPolicyInstance, properties), retryPolicyInstance); } }
please also change Send span (line 81 to have Client kind)
public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; Builder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, Builder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(Span.Kind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(Span.Kind.PRODUCER).startSpan(); context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return startScopedSpan(spanName, context); default: return Context.NONE; } }
span = spanBuilder.setSpanKind(Span.Kind.PRODUCER).startSpan();
public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; Builder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, Builder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(Span.Kind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(Span.Kind.PRODUCER).startSpan(); context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return startScopedSpan(spanName, context); default: return Context.NONE; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private static final Tracer TRACER = OpenTelemetry.getTracerFactory().get("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String COMPONENT = "component"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Builder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, "az.tracing.namespace", null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, AttributeValue.stringAttributeValue(tracingNamespace)); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span.setStatus(HttpTraceUtil.parseResponseStatus(responseCode, throwable)); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, AttributeValue.stringAttributeValue(value)); } else { logger.warning("Failed to 
find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span.setStatus(AmqpTraceUtil.parseStatusMessage(statusMessage, throwable)); } span.end(); } @Override public void addLink(Context context) { final Builder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, Builder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { Builder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(Span.Kind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", TRACER.withSpan(span)); } /** * Creates a {@link Builder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private static Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { Builder spanBuilder = TRACER.spanBuilder(spanName).setParent(spanContext); spanBuilder.setSpanKind(Span.Kind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the * current span. */ private static Context setContextData(Span span) { SpanContext spanContext = span.getContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. 
* @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); span.setAttribute(COMPONENT, AttributeValue.stringAttributeValue(parseComponentValue(spanName))); span.setAttribute( MESSAGE_BUS_DESTINATION, AttributeValue.stringAttributeValue(getOrDefault(context, ENTITY_PATH_KEY, "", String.class))); span.setAttribute( PEER_ENDPOINT, AttributeValue.stringAttributeValue(getOrDefault(context, HOST_NAME_KEY, "", String.class))); } /** * Extracts the component name from the given span name. * * @param spanName The spanName containing the component name i.e spanName = "EventHubs.send" * @return The component name contained in the context i.e "eventhubs" */ private static String parseComponentValue(String spanName) { if (spanName != null && !spanName.isEmpty()) { int componentNameEndIndex = spanName.lastIndexOf("."); if (componentNameEndIndex != -1) { return spanName.substring(0, componentNameEndIndex); } } return ""; } /** * Returns a {@link Builder} to create and start a new child {@link Span} with parent being * the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.Builder} to create and start a new {@code Span}. */ private Builder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = TRACER.getCurrentSpan(); } return TRACER.spanBuilder(spanNameKey).setParent(parentSpan); } /** * Returns the value of the specified key from the context. 
* * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.warning("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private static final Tracer TRACER = OpenTelemetry.getTracerFactory().get("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String COMPONENT = "component"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Builder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, "az.tracing.namespace", null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, AttributeValue.stringAttributeValue(tracingNamespace)); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span.setStatus(HttpTraceUtil.parseResponseStatus(responseCode, throwable)); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, AttributeValue.stringAttributeValue(value)); } else { logger.warning("Failed to 
find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span.setStatus(AmqpTraceUtil.parseStatusMessage(statusMessage, throwable)); } span.end(); } @Override public void addLink(Context context) { final Builder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, Builder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { Builder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(Span.Kind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", TRACER.withSpan(span)); } /** * Creates a {@link Builder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private static Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { Builder spanBuilder = TRACER.spanBuilder(spanName).setParent(spanContext); spanBuilder.setSpanKind(Span.Kind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the * current span. */ private static Context setContextData(Span span) { SpanContext spanContext = span.getContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. 
* @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); span.setAttribute(COMPONENT, AttributeValue.stringAttributeValue(parseComponentValue(spanName))); span.setAttribute( MESSAGE_BUS_DESTINATION, AttributeValue.stringAttributeValue(getOrDefault(context, ENTITY_PATH_KEY, "", String.class))); span.setAttribute( PEER_ENDPOINT, AttributeValue.stringAttributeValue(getOrDefault(context, HOST_NAME_KEY, "", String.class))); } /** * Extracts the component name from the given span name. * * @param spanName The spanName containing the component name i.e spanName = "EventHubs.send" * @return The component name contained in the context i.e "eventhubs" */ private static String parseComponentValue(String spanName) { if (spanName != null && !spanName.isEmpty()) { int componentNameEndIndex = spanName.lastIndexOf("."); if (componentNameEndIndex != -1) { return spanName.substring(0, componentNameEndIndex); } } return ""; } /** * Returns a {@link Builder} to create and start a new child {@link Span} with parent being * the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.Builder} to create and start a new {@code Span}. */ private Builder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = TRACER.getCurrentSpan(); } return TRACER.spanBuilder(spanNameKey).setParent(parentSpan); } /** * Returns the value of the specified key from the context. 
* * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.warning("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
If `options.getHttpPipeline()` is not null, the client provided in `TokenRequestContext` is overwritten. Can you please add comments on why this is done? Also, it would be better to check if `options.getHttpPipeline()` is not null first instead of setting up the pipeline which might get overwritten immediately after.
public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } else { HttpPipeline pipeline = setupPipeline(request.getHttpClient(), request.getLogOptions()); applicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } if (options.getHttpPipeline() != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } }
return Mono.error(e);
public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); if (options.getHttpPipeline() != null) { publicClientApplicationBuilder = publicClientApplicationBuilder .httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } if (options.getProxyOptions() != null) { publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * Asynchronously acquire a token from Active Directory with a client secret. 
* * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient, HttpLogOptions logOptions) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = logOptions != null ? logOptions : new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); HttpClient client = httpClient != null ? httpClient : HttpClient.createDefault(); return new HttpPipelineBuilder().httpClient(client) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. * * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } else { HttpPipeline pipeline = setupPipeline(request.getHttpClient(), request.getLogOptions()); applicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } if (options.getHttpPipeline() != null) { 
applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } return applicationBuilder.build(); }).flatMap(application -> Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. * * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } else { HttpPipeline pipeline = setupPipeline(request.getHttpClient(), request.getLogOptions()); applicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } if (options.getHttpPipeline() != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (IOException 
e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. * * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; private HttpPipelineAdapter httpPipelineAdapter; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } HttpPipeline httpPipeline = options.getHttpPipeline(); if (httpPipeline != null) { httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else { HttpClient httpClient = options.getHttpClient(); if (httpClient != null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient)); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { 
publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } else { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault())); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * Asynchronously acquire a token from Active Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); return new HttpPipelineBuilder().httpClient(httpClient) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. 
* * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } return applicationBuilder.build(); }).flatMap(application -> Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. 
* * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
This line can be removed if you pass the default client as suggested above.
/**
 * Assembles the {@link HttpPipeline} used to route MSAL traffic.
 *
 * The pipeline carries a retry policy (framed by the before/after retry
 * provider hooks) and an HTTP logging policy. When no client is supplied,
 * the platform default {@link HttpClient} is used instead.
 *
 * @param httpClient the client to send requests with, or {@code null} to use the default client
 * @return the configured pipeline
 */
private HttpPipeline setupPipeline(HttpClient httpClient) {
    List<HttpPipelinePolicy> pipelinePolicies = new ArrayList<>();
    HttpPolicyProviders.addBeforeRetryPolicies(pipelinePolicies);
    pipelinePolicies.add(new RetryPolicy());
    HttpPolicyProviders.addAfterRetryPolicies(pipelinePolicies);
    pipelinePolicies.add(new HttpLoggingPolicy(new HttpLogOptions()));
    HttpClient effectiveClient;
    if (httpClient != null) {
        effectiveClient = httpClient;
    } else {
        effectiveClient = HttpClient.createDefault();
    }
    return new HttpPipelineBuilder()
        .httpClient(effectiveClient)
        .policies(pipelinePolicies.toArray(new HttpPipelinePolicy[0]))
        .build();
}
HttpClient client = httpClient != null ? httpClient : HttpClient.createDefault();
/**
 * Assembles the {@link HttpPipeline} used to route MSAL traffic through the
 * supplied {@link HttpClient}.
 *
 * The pipeline carries a retry policy (framed by the before/after retry
 * provider hooks) and an HTTP logging policy.
 *
 * @param httpClient the client to send requests with
 * @return the configured pipeline
 */
private HttpPipeline setupPipeline(HttpClient httpClient) {
    List<HttpPipelinePolicy> pipelinePolicies = new ArrayList<>();
    HttpPolicyProviders.addBeforeRetryPolicies(pipelinePolicies);
    pipelinePolicies.add(new RetryPolicy());
    HttpPolicyProviders.addAfterRetryPolicies(pipelinePolicies);
    pipelinePolicies.add(new HttpLoggingPolicy(new HttpLogOptions()));
    return new HttpPipelineBuilder()
        .httpClient(httpClient)
        .policies(pipelinePolicies.toArray(new HttpPipelinePolicy[0]))
        .build();
}
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; private HttpPipelineAdapter httpPipelineAdapter; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } HttpPipeline httpPipeline = options.getHttpPipeline(); if (httpPipeline != null) { httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else { HttpClient httpClient = options.getHttpClient(); if (httpClient != null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient)); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { 
publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } else { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient)); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * Asynchronously acquire a token from Active Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. 
* * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } return applicationBuilder.build(); }).flatMap(application -> Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. 
* * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; private HttpPipelineAdapter httpPipelineAdapter; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } HttpPipeline httpPipeline = options.getHttpPipeline(); if (httpPipeline != null) { httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else { HttpClient httpClient = options.getHttpClient(); if (httpClient != null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient)); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { 
publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } else { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault())); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * Asynchronously acquire a token from Active Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. 
* * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } return applicationBuilder.build(); }).flatMap(application -> Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. 
* * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
Should we use the same pipeline that we create for the publicClientApplication in the constructor, so we don't have to create a new pipeline for every call that authenticates through the confidential client?
public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (options.getHttpPipeline() != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else { if (options.getHttpClient() != null) { HttpPipeline pipeline = setupPipeline(options.getHttpClient()); applicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } }
}
public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } if (options.getHttpPipeline() != null) { publicClientApplicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else { if (options.getHttpClient() != null) { HttpPipeline pipeline = setupPipeline(options.getHttpClient()); publicClientApplicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } else if (options.getProxyOptions() != null) { publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * Asynchronously acquire a token from Active 
Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); HttpClient client = httpClient != null ? httpClient : HttpClient.createDefault(); return new HttpPipelineBuilder().httpClient(client) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. * * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (options.getHttpPipeline() != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else { if (options.getHttpClient() != null) { HttpPipeline pipeline = setupPipeline(options.getHttpClient()); applicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } else if (options.getProxyOptions() != null) { 
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } } return applicationBuilder.build(); }).flatMap(application -> Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. * * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (options.getHttpPipeline() != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else { if (options.getHttpClient() != null) { HttpPipeline pipeline = setupPipeline(options.getHttpClient()); applicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } 
catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. * * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; private HttpPipelineAdapter httpPipelineAdapter; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } HttpPipeline httpPipeline = options.getHttpPipeline(); if (httpPipeline != null) { httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else { HttpClient httpClient = options.getHttpClient(); if (httpClient != null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient)); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { 
publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } else { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault())); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * Asynchronously acquire a token from Active Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); return new HttpPipelineBuilder().httpClient(httpClient) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. 
* * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } return applicationBuilder.build(); }).flatMap(application -> Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. 
* * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
yeah, pipeline construction is now moved to the constructor.
public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (options.getHttpPipeline() != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else { if (options.getHttpClient() != null) { HttpPipeline pipeline = setupPipeline(options.getHttpClient()); applicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } }
}
public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } if (options.getHttpPipeline() != null) { publicClientApplicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else { if (options.getHttpClient() != null) { HttpPipeline pipeline = setupPipeline(options.getHttpClient()); publicClientApplicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } else if (options.getProxyOptions() != null) { publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * Asynchronously acquire a token from Active 
Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); HttpClient client = httpClient != null ? httpClient : HttpClient.createDefault(); return new HttpPipelineBuilder().httpClient(client) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. * * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (options.getHttpPipeline() != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else { if (options.getHttpClient() != null) { HttpPipeline pipeline = setupPipeline(options.getHttpClient()); applicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } else if (options.getProxyOptions() != null) { 
applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } } return applicationBuilder.build(); }).flatMap(application -> Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. * * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (options.getHttpPipeline() != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else { if (options.getHttpClient() != null) { HttpPipeline pipeline = setupPipeline(options.getHttpClient()); applicationBuilder.httpClient(new HttpPipelineAdapter(pipeline)); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } 
catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. * * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; private HttpPipelineAdapter httpPipelineAdapter; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } HttpPipeline httpPipeline = options.getHttpPipeline(); if (httpPipeline != null) { httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else { HttpClient httpClient = options.getHttpClient(); if (httpClient != null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient)); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { 
publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } else { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault())); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * Asynchronously acquire a token from Active Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); return new HttpPipelineBuilder().httpClient(httpClient) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. 
* * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } return applicationBuilder.build(); }).flatMap(application -> Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. 
* * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
I believe this should be `applicationBuilder.httpClient(new HttpPipelineAdapter(httpPipeline));` rather than `applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline()));` because `httpPipeline` could have been set directly from the options or constructed in the ctor, so it is possible that `options.getHttpPipeline()` is null even when `httpPipeline` is not. Also, the client creates a new `HttpPipelineAdapter` instance for each MSAL client it creates. Does each MSAL client need its own adapter? If not, I would suggest caching the `HttpPipelineAdapter` itself rather than just the `HttpPipeline`.
public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (httpPipeline != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } }
applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline()));
public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; private HttpPipeline httpPipeline; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; httpPipeline = options.getHttpPipeline(); if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } if (httpPipeline != null) { publicClientApplicationBuilder.httpClient(new HttpPipelineAdapter(httpPipeline)); } else { if (options.getHttpClient() != null) { httpPipeline = setupPipeline(options.getHttpClient()); publicClientApplicationBuilder.httpClient(new HttpPipelineAdapter(httpPipeline)); } else if (options.getProxyOptions() != null) { publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * 
Asynchronously acquire a token from Active Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); HttpClient client = httpClient != null ? httpClient : HttpClient.createDefault(); return new HttpPipelineBuilder().httpClient(client) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. * * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (httpPipeline != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } return applicationBuilder.build(); }).flatMap(application -> 
Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. * * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (httpPipeline != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; private HttpPipelineAdapter httpPipelineAdapter; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } HttpPipeline httpPipeline = options.getHttpPipeline(); if (httpPipeline != null) { httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else { HttpClient httpClient = options.getHttpClient(); if (httpClient != null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient)); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { 
publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } else { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault())); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * Asynchronously acquire a token from Active Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); return new HttpPipelineBuilder().httpClient(httpClient) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. 
* * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } return applicationBuilder.build(); }).flatMap(application -> Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. 
* * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
Added the logic to cache the pipeline adapter. Thanks for the catch on the HTTP pipeline — it was an oversight on my part.
public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (httpPipeline != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } }
applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline()));
public Mono<AccessToken> authenticateWithClientSecret(String clientSecret, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromSecret(clientSecret)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (MalformedURLException e) { return Mono.error(e); } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; private HttpPipeline httpPipeline; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; httpPipeline = options.getHttpPipeline(); if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } if (httpPipeline != null) { publicClientApplicationBuilder.httpClient(new HttpPipelineAdapter(httpPipeline)); } else { if (options.getHttpClient() != null) { httpPipeline = setupPipeline(options.getHttpClient()); publicClientApplicationBuilder.httpClient(new HttpPipelineAdapter(httpPipeline)); } else if (options.getProxyOptions() != null) { publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * 
Asynchronously acquire a token from Active Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); HttpClient client = httpClient != null ? httpClient : HttpClient.createDefault(); return new HttpPipelineBuilder().httpClient(client) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. * * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (httpPipeline != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } return applicationBuilder.build(); }).flatMap(application -> 
Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. * * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (httpPipeline != null) { applicationBuilder.httpClient(new HttpPipelineAdapter(options.getHttpPipeline())); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private final ClientLogger logger = new ClientLogger(IdentityClient.class); private final IdentityClientOptions options; private final PublicClientApplication publicClientApplication; private final String tenantId; private final String clientId; private HttpPipelineAdapter httpPipelineAdapter; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param options the options configuring the client. */ IdentityClient(String tenantId, String clientId, IdentityClientOptions options) { if (tenantId == null) { tenantId = "common"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.options = options; if (clientId == null) { this.publicClientApplication = null; } else { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/organizations/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw logger.logExceptionAsWarning(new IllegalStateException(e)); } HttpPipeline httpPipeline = options.getHttpPipeline(); if (httpPipeline != null) { httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else { HttpClient httpClient = options.getHttpClient(); if (httpClient != null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient)); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { 
publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } else { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault())); publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } } this.publicClientApplication = publicClientApplicationBuilder.build(); } } /** * Asynchronously acquire a token from Active Directory with a client secret. * * @param clientSecret the client secret of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); return new HttpPipelineBuilder().httpClient(httpClient) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a PKCS12 certificate. 
* * @param pfxCertificatePath the path to the PKCS12 certificate of the application * @param pfxCertificatePassword the password protecting the PFX certificate * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPfxCertificate(String pfxCertificatePath, String pfxCertificatePassword, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return Mono.fromCallable(() -> { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( new FileInputStream(pfxCertificatePath), pfxCertificatePassword)) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } return applicationBuilder.build(); }).flatMap(application -> Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())).build()))) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } /** * Asynchronously acquire a token from Active Directory with a PEM certificate. 
* * @param pemCertificatePath the path to the PEM certificate of the application * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithPemCertificate(String pemCertificatePath, TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; try { byte[] pemCertificateBytes = Files.readAllBytes(Paths.get(pemCertificatePath)); ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, ClientCredentialFactory.createFromCertificate( CertificateUtil.privateKeyFromPem(pemCertificateBytes), CertificateUtil.publicKeyFromPem(pemCertificateBytes))) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())) .map(ar -> new AccessToken(ar.accessToken(), OffsetDateTime.ofInstant(ar.expiresOnDate().toInstant(), ZoneOffset.UTC))); } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return Mono.fromFuture(publicClientApplication.acquireToken( UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUserRefreshToken(TokenRequestContext request, MsalToken msalToken) { SilentParameters parameters; if (msalToken.getAccount() != null) { parameters = SilentParameters.builder(new HashSet<>(request.getScopes()), msalToken.getAccount()).build(); } else { parameters = SilentParameters.builder(new HashSet<>(request.getScopes())).build(); } return Mono.defer(() -> { try { return Mono.fromFuture(publicClientApplication.acquireTokenSilently(parameters)).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } }); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. 
* * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return Mono.fromFuture(() -> { DeviceCodeFlowParameters parameters = DeviceCodeFlowParameters.builder(new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept(new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))).build(); return publicClientApplication.acquireToken(parameters); }).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { return Mono.fromFuture(() -> publicClientApplication.acquireToken( AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .build())) .map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, int port) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId; return AuthorizationCodeListener.create(port) .flatMap(server -> { URI redirectUri; String browserUri; try { redirectUri = new URI(String.format("http: browserUri = String.format("%s/oauth2/v2.0/authorize?response_type=code&response_mode=query&prompt" + "=select_account&client_id=%s&redirect_uri=%s&state=%s&scope=%s", authorityUrl, clientId, redirectUri.toString(), UUID.randomUUID(), String.join(" ", request.getScopes())); } catch (URISyntaxException e) { return server.dispose().then(Mono.error(e)); } return server.listen() .mergeWith(Mono.<String>fromRunnable(() -> { try { openUrl(browserUri); } catch (IOException e) { throw logger.logExceptionAsError(new IllegalStateException(e)); } }).subscribeOn(Schedulers.newSingle("browser"))) .next() .flatMap(code -> authenticateWithAuthorizationCode(request, code, redirectUri)) .onErrorResume(t -> server.dispose().then(Mono.error(t))) .flatMap(msalToken -> server.dispose().then(Mono.just(msalToken))); }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. 
* * @param msiEndpoint the endpoint to acquire token from * @param msiSecret the secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String msiEndpoint, String msiSecret, TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); try { payload.append("resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); payload.append("&api-version="); payload.append(URLEncoder.encode("2017-09-01", "UTF-8")); if (clientId != null) { payload.append("&clientid="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } try { URL url = new URL(String.format("%s?%s", msiEndpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (msiSecret != null) { connection.setRequestProperty("Secret", msiSecret); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return Mono.just(SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON)); } catch (IOException e) { return Mono.error(e); } finally { if (connection != null) { connection.disconnect(); } } } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); payload.append("&resource="); payload.append(URLEncoder.encode(resource, "UTF-8")); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, "UTF-8")); } } catch (IOException exception) { return Mono.error(exception); } return checkIMDSAvailable().flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("http: payload.toString())); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.<MSIToken>deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw logger.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode = connection.getResponseCode(); if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw logger.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw logger.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable() { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", "UTF-8")); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("http: payload.toString())); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { logger.error("Browser could not be opened - please open {} in a 
browser on this device.", url); } } }
Consider using the `validatePrimaryLanguage` for testing the results, rather than converting them to lists and then doing the comparison.
public void detectSingleTextLanguage() { DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages(Collections.singletonList(primaryLanguage), Collections.singletonList(response))) .verifyComplete(); }
.assertNext(response -> validateDetectedLanguages(Collections.singletonList(primaryLanguage),
public void detectSingleTextLanguage() { DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(primaryLanguage, response)) .verifyComplete(); }
class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase { private TextAnalyticsAsyncClient client; @BeforeAll static void beforeAll() { StepVerifier.setDefaultTimeout(Duration.ofSeconds(30)); } @AfterAll static void afterAll() { StepVerifier.resetDefaultTimeout(); } @Override protected void beforeTest() { client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .pipeline(httpPipeline) .buildAsyncClient()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void detectLanguagesBatchInputShowStatistics() { detectLanguageShowStatisticsRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .assertNext(response -> validateDetectLanguage(true, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test Detect batch input languages. */ @Test public void detectLanguagesBatchInput() { detectLanguageRunner((inputs) -> StepVerifier.create(client.detectBatchLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Test Detect batch languages for List of String input with country Hint. */ @Test public void detectLanguagesBatchListCountryHint() { detectLanguagesCountryHintRunner((inputs, countryHint) -> StepVerifier.create(client.detectLanguageWithResponse(inputs, countryHint)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test Detect batch languages for List of String input. 
*/ @Test public void detectLanguagesBatchStringInput() { detectLanguageStringInputRunner((inputs) -> StepVerifier.create(client.detectLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages. */ @Test /** * Verifies that an TextAnalyticsException is thrown for a text input with invalid country hint. */ @Test public void detectLanguageInvalidCountryHint() { StepVerifier.create(client.detectLanguageWithResponse("Este es un document escrito en Español.", "en")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that TextAnalyticsException is thrown for a empty text input. */ @Test public void detectLanguageEmptyText() { StepVerifier.create(client.detectLanguage("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed. */ @Test public void detectLanguageFaultyText() { DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0); StepVerifier.create(client.detectLanguage("!@ .assertNext(response -> validateDetectedLanguages(Collections.singletonList(primaryLanguage), Collections.singletonList(response))) .verifyComplete(); } /** * Verifies that a Bad request exception is returned for input documents with same ids. 
*/ @Test public void detectLanguageDuplicateIdInput() { detectLanguageDuplicateIdRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass()))); } @Test public void recognizeEntitiesForTextInput() { final CategorizedEntity categorizedEntity1 = new CategorizedEntity("Seattle", "Location", null, 26, 7, 0.0); final CategorizedEntity categorizedEntity2 = new CategorizedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0); StepVerifier.create(client.recognizeEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateCategorizedEntity(categorizedEntity1, response)) .assertNext(response -> validateCategorizedEntity(categorizedEntity2, response)) .verifyComplete(); } @Test public void recognizeEntitiesForEmptyText() { StepVerifier.create(client.recognizeEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeEntitiesForFaultyText() { StepVerifier.create(client.recognizeEntities("!@ .verifyComplete(); } @Test public void recognizeEntitiesBatchInputSingleError() { recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> StepVerifier.create(client.recognizeBatchEntities(inputs)) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE))); } @Test public void recognizeEntitiesForBatchInput() { recognizeBatchCategorizedEntityRunner((inputs) -> StepVerifier.create(client.recognizeBatchEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForBatchInputShowStatistics() { recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) -> 
StepVerifier.create(client.recognizeBatchEntitiesWithResponse(inputs, options)) .assertNext(response -> validateCategorizedEntity(true, getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeEntitiesForBatchStringInput() { recognizeCategorizedEntityStringInputRunner((inputs) -> StepVerifier.create(client.recognizeEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForListLanguageHint() { recognizeCatgeorizedEntitiesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeEntitiesWithResponse(inputs, language)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForTextInput() { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 0.0, 7, 26); final LinkedEntity linkedEntity = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch), "en", "Seattle", "https: StepVerifier.create(client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateLinkedEntity(linkedEntity, response)) .verifyComplete(); } @Test public void recognizeLinkedEntitiesForEmptyText() { StepVerifier.create(client.recognizeLinkedEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeLinkedEntitiesForFaultyText() { StepVerifier.create(client.recognizeLinkedEntities("!@ .verifyComplete(); } @Test public void recognizeLinkedEntitiesForBatchInput() { recognizeBatchLinkedEntityRunner((inputs) -> StepVerifier.create(client.recognizeBatchLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), 
response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchInputShowStatistics() { recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchLinkedEntitiesWithResponse(inputs, options)) .assertNext(response -> validateLinkedEntity(true, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchStringInput() { recognizeLinkedStringInputRunner((inputs) -> StepVerifier.create(client.recognizeLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForListLanguageHint() { recognizeLinkedLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeLinkedEntitiesWithResponse(inputs, language)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForTextInput() { PiiEntity piiEntity = new PiiEntity("859-98-0987", "U.S. 
Social Security Number (SSN)", "", 28, 11, 0.0); StepVerifier.create(client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.")) .assertNext(response -> validatePiiEntity(piiEntity, response)) .verifyComplete(); } @Test public void recognizePiiEntitiesForEmptyText() { StepVerifier.create(client.recognizePiiEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizePiiEntitiesForFaultyText() { StepVerifier.create(client.recognizePiiEntities("!@ .verifyComplete(); } @Test public void recognizePiiEntitiesForBatchInput() { recognizeBatchPiiRunner((inputs) -> StepVerifier.create(client.recognizeBatchPiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchInputShowStatistics() { recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchPiiEntitiesWithResponse(inputs, options)) .assertNext(response -> validatePiiEntity(true, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchStringInput() { recognizePiiStringInputRunner((inputs) -> StepVerifier.create(client.recognizePiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForListLanguageHint() { recognizePiiLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizePiiEntitiesWithResponse(inputs, language)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForTextInput() { StepVerifier.create(client.extractKeyPhrases("Bonjour tout le monde.")) .assertNext(response -> 
assertEquals("monde", response)) .verifyComplete(); } @Test public void extractKeyPhrasesForEmptyText() { StepVerifier.create(client.extractKeyPhrases("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void extractKeyPhrasesForFaultyText() { StepVerifier.create(client.extractKeyPhrases("!@ .verifyComplete(); } @Test public void extractKeyPhrasesForBatchInput() { extractBatchKeyPhrasesRunner((inputs) -> StepVerifier.create(client.extractBatchKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchInputShowStatistics() { extractBatchKeyPhrasesShowStatsRunner((inputs, options) -> StepVerifier.create(client.extractBatchKeyPhrasesWithResponse(inputs, options)) .assertNext(response -> validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchStringInput() { extractKeyPhrasesStringInputRunner((inputs) -> StepVerifier.create(client.extractKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForListLanguageHint() { extractKeyPhrasesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.extractKeyPhrasesWithResponse(inputs, language)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for a string input. 
*/ @Test public void analyseSentimentForTextInput() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(SentimentLabel.MIXED, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEGATIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 31, 0), new SentenceSentiment(SentimentLabel.POSITIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 35, 32) )); StepVerifier .create(client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.")) .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Verifies that an TextAnalyticsException is thrown for a empty text input. */ @Test public void analyseSentimentForEmptyText() { StepVerifier.create(client.analyzeSentiment("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Test analyzing sentiment for a faulty input text. */ @Test public void analyseSentimentForFaultyText() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment( SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 1, 0), new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 4, 1) )); StepVerifier.create(client.analyzeSentiment("!@ .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Test analyzing sentiment for a list of string input. 
*/ @Test public void analyseSentimentForBatchStringInput() { analyseSentimentStringInputRunner(inputs -> StepVerifier.create(client.analyzeSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Test analyzing sentiment for a list of string input with language hint. */ @Test public void analyseSentimentForListLanguageHint() { analyseSentimentLanguageHintRunner((inputs, language) -> StepVerifier.create(client.analyzeSentimentWithResponse(inputs, language)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for batch input. */ @Test public void analyseSentimentForBatchInput() { analyseBatchSentimentRunner(inputs -> StepVerifier.create(client.analyzeBatchSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. 
*/ @Test public void analyseSentimentForBatchInputShowStatistics() { analyseBatchSentimentShowStatsRunner((inputs, options) -> StepVerifier.create(client.analyzeBatchSentimentWithResponse(inputs, options)) .assertNext(response -> validateSentiment(true, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test client builder with valid subscription key */ @Test public void validKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(getSubscriptionKey())).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } /** * Test client builder with invalid subscription key */ @Test public void invalidKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with valid subscription key but update to invalid key and make call to server. */ @Test public void updateToInvalidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(getSubscriptionKey()); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(INVALID_KEY); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with invalid subscription key but update to valid key and make call to server. 
*/ @Test public void updateToValidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(INVALID_KEY); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(getSubscriptionKey()); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } /** * Test for missing endpoint */ @Test public void missingEndpoint() { assertThrows(NullPointerException.class, () -> { final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder(); builder.buildAsyncClient(); }); } /** * Test for null service version, which would take take the default service version by default */ @Test public void nullServiceVersion() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey())) .retryPolicy(new RetryPolicy()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(null); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } /** * Test for default pipeline in client builder */ @Test public void defaultPipeline() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) 
.subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey())) .configuration(Configuration.getGlobalConfiguration()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } }
class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase { private TextAnalyticsAsyncClient client; @BeforeAll static void beforeAll() { StepVerifier.setDefaultTimeout(Duration.ofSeconds(30)); } @AfterAll static void afterAll() { StepVerifier.resetDefaultTimeout(); } @Override protected void beforeTest() { client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .pipeline(httpPipeline) .buildAsyncClient()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void detectLanguagesBatchInputShowStatistics() { detectLanguageShowStatisticsRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .assertNext(response -> validateDetectLanguage(true, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test to detect language for each {@code DetectLanguageResult} input of a batch. */ @Test public void detectLanguagesBatchInput() { detectLanguageRunner((inputs) -> StepVerifier.create(client.detectBatchLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Test to detect language for each string input of batch, with given country hint. */ @Test public void detectLanguagesBatchListCountryHint() { detectLanguagesCountryHintRunner((inputs, countryHint) -> StepVerifier.create(client.detectLanguageWithResponse(inputs, countryHint)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test to detect language for each string input of batch. 
*/ @Test public void detectLanguagesBatchStringInput() { detectLanguageStringInputRunner((inputs) -> StepVerifier.create(client.detectLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Verifies that a single DetectedLanguage is returned for a text input to detectLanguage. */ @Test /** * Verifies that an TextAnalyticsException is thrown for a text input with invalid country hint. */ @Test public void detectLanguageInvalidCountryHint() { StepVerifier.create(client.detectLanguageWithResponse("Este es un document escrito en Español.", "en")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that TextAnalyticsException is thrown for a empty text input. */ @Test public void detectLanguageEmptyText() { StepVerifier.create(client.detectLanguage("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed. */ @Test public void detectLanguageFaultyText() { DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0); StepVerifier.create(client.detectLanguage("!@ .assertNext(response -> validatePrimaryLanguage(primaryLanguage, response)) .verifyComplete(); } /** * Verifies that a bad request exception is returned for input documents with same ids. 
*/ @Test public void detectLanguageDuplicateIdInput() { detectLanguageDuplicateIdRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass()))); } @Test public void recognizeEntitiesForTextInput() { final CategorizedEntity categorizedEntity1 = new CategorizedEntity("Seattle", "Location", null, 26, 7, 0.0); final CategorizedEntity categorizedEntity2 = new CategorizedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0); StepVerifier.create(client.recognizeEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateCategorizedEntity(categorizedEntity1, response)) .assertNext(response -> validateCategorizedEntity(categorizedEntity2, response)) .verifyComplete(); } @Test public void recognizeEntitiesForEmptyText() { StepVerifier.create(client.recognizeEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeEntitiesForFaultyText() { StepVerifier.create(client.recognizeEntities("!@ .expectNextCount(0) .verifyComplete(); } @Test public void recognizeEntitiesBatchInputSingleError() { recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> StepVerifier.create(client.recognizeBatchEntities(inputs)) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE))); } @Test public void recognizeEntitiesForBatchInput() { recognizeBatchCategorizedEntityRunner((inputs) -> StepVerifier.create(client.recognizeBatchEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForBatchInputShowStatistics() { recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) -> 
StepVerifier.create(client.recognizeBatchEntitiesWithResponse(inputs, options)) .assertNext(response -> validateCategorizedEntity(true, getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeEntitiesForBatchStringInput() { recognizeCategorizedEntityStringInputRunner((inputs) -> StepVerifier.create(client.recognizeEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForListLanguageHint() { recognizeCatgeorizedEntitiesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeEntitiesWithResponse(inputs, language)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForTextInput() { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 0.0, 7, 26); final LinkedEntity linkedEntity = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch), "en", "Seattle", "https: StepVerifier.create(client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateLinkedEntity(linkedEntity, response)) .verifyComplete(); } @Test public void recognizeLinkedEntitiesForEmptyText() { StepVerifier.create(client.recognizeLinkedEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeLinkedEntitiesForFaultyText() { StepVerifier.create(client.recognizeLinkedEntities("!@ .expectNextCount(0) .verifyComplete(); } @Test public void recognizeLinkedEntitiesForBatchInput() { recognizeBatchLinkedEntityRunner((inputs) -> StepVerifier.create(client.recognizeBatchLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, 
getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchInputShowStatistics() { recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchLinkedEntitiesWithResponse(inputs, options)) .assertNext(response -> validateLinkedEntity(true, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchStringInput() { recognizeLinkedStringInputRunner((inputs) -> StepVerifier.create(client.recognizeLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForListLanguageHint() { recognizeLinkedLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeLinkedEntitiesWithResponse(inputs, language)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForTextInput() { PiiEntity piiEntity = new PiiEntity("859-98-0987", "U.S. 
Social Security Number (SSN)", "", 28, 11, 0.0); StepVerifier.create(client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.")) .assertNext(response -> validatePiiEntity(piiEntity, response)) .verifyComplete(); } @Test public void recognizePiiEntitiesForEmptyText() { StepVerifier.create(client.recognizePiiEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizePiiEntitiesForFaultyText() { StepVerifier.create(client.recognizePiiEntities("!@ .expectNextCount(0) .verifyComplete(); } @Test public void recognizePiiEntitiesForBatchInput() { recognizeBatchPiiRunner((inputs) -> StepVerifier.create(client.recognizeBatchPiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchInputShowStatistics() { recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchPiiEntitiesWithResponse(inputs, options)) .assertNext(response -> validatePiiEntity(true, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchStringInput() { recognizePiiStringInputRunner((inputs) -> StepVerifier.create(client.recognizePiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForListLanguageHint() { recognizePiiLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizePiiEntitiesWithResponse(inputs, language)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForTextInput() { StepVerifier.create(client.extractKeyPhrases("Bonjour tout le monde.")) 
.assertNext(response -> assertEquals("monde", response)) .verifyComplete(); } @Test public void extractKeyPhrasesForEmptyText() { StepVerifier.create(client.extractKeyPhrases("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void extractKeyPhrasesForFaultyText() { StepVerifier.create(client.extractKeyPhrases("!@ .expectNextCount(0) .verifyComplete(); } @Test public void extractKeyPhrasesForBatchInput() { extractBatchKeyPhrasesRunner((inputs) -> StepVerifier.create(client.extractBatchKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchInputShowStatistics() { extractBatchKeyPhrasesShowStatsRunner((inputs, options) -> StepVerifier.create(client.extractBatchKeyPhrasesWithResponse(inputs, options)) .assertNext(response -> validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchStringInput() { extractKeyPhrasesStringInputRunner((inputs) -> StepVerifier.create(client.extractKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForListLanguageHint() { extractKeyPhrasesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.extractKeyPhrasesWithResponse(inputs, language)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for a string input. 
*/ @Test public void analyseSentimentForTextInput() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(SentimentLabel.MIXED, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEGATIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 31, 0), new SentenceSentiment(SentimentLabel.POSITIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 35, 32) )); StepVerifier .create(client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.")) .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Verifies that an TextAnalyticsException is thrown for a empty text input. */ @Test public void analyseSentimentForEmptyText() { StepVerifier.create(client.analyzeSentiment("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Test analyzing sentiment for a faulty input text. */ @Test public void analyseSentimentForFaultyText() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment( SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 1, 0), new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 4, 1) )); StepVerifier.create(client.analyzeSentiment("!@ .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Test analyzing sentiment for a list of string input. 
*/ @Test public void analyseSentimentForBatchStringInput() { analyseSentimentStringInputRunner(inputs -> StepVerifier.create(client.analyzeSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Test analyzing sentiment for a list of string input with language hint. */ @Test public void analyseSentimentForListLanguageHint() { analyseSentimentLanguageHintRunner((inputs, language) -> StepVerifier.create(client.analyzeSentimentWithResponse(inputs, language)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for batch input. */ @Test public void analyseSentimentForBatchInput() { analyseBatchSentimentRunner(inputs -> StepVerifier.create(client.analyzeBatchSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. 
*/ @Test public void analyseSentimentForBatchInputShowStatistics() { analyseBatchSentimentShowStatsRunner((inputs, options) -> StepVerifier.create(client.analyzeBatchSentimentWithResponse(inputs, options)) .assertNext(response -> validateSentiment(true, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test client builder with valid API key */ @Test public void validKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(getApiKey())).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } /** * Test client builder with invalid API key */ @Test public void invalidKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with valid API key but update to invalid key and make call to server. */ @Test public void updateToInvalidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(getApiKey()); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(INVALID_KEY); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with invalid API key but update to valid key and make call to server. 
*/ @Test public void updateToValidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(INVALID_KEY); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(getApiKey()); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } /** * Test for null service version, which would take the default service version by default */ @Test public void nullServiceVersion() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .retryPolicy(new RetryPolicy()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(null); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } /** * Test for default pipeline in client builder */ @Test public void defaultPipeline() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .configuration(Configuration.getGlobalConfiguration()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new 
NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } }
no assert ?
public void recognizeEntitiesForFaultyText() { StepVerifier.create(client.recognizeEntities("!@ .verifyComplete(); }
.verifyComplete();
public void recognizeEntitiesForFaultyText() { StepVerifier.create(client.recognizeEntities("!@ .expectNextCount(0) .verifyComplete(); }
class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase { private TextAnalyticsAsyncClient client; @BeforeAll static void beforeAll() { StepVerifier.setDefaultTimeout(Duration.ofSeconds(30)); } @AfterAll static void afterAll() { StepVerifier.resetDefaultTimeout(); } @Override protected void beforeTest() { client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .pipeline(httpPipeline) .buildAsyncClient()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void detectLanguagesBatchInputShowStatistics() { detectLanguageShowStatisticsRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .assertNext(response -> validateDetectLanguage(true, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test Detect batch input languages. */ @Test public void detectLanguagesBatchInput() { detectLanguageRunner((inputs) -> StepVerifier.create(client.detectBatchLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Test Detect batch languages for List of String input with country Hint. */ @Test public void detectLanguagesBatchListCountryHint() { detectLanguagesCountryHintRunner((inputs, countryHint) -> StepVerifier.create(client.detectLanguageWithResponse(inputs, countryHint)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test Detect batch languages for List of String input. 
*/ @Test public void detectLanguagesBatchStringInput() { detectLanguageStringInputRunner((inputs) -> StepVerifier.create(client.detectLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages. */ @Test public void detectSingleTextLanguage() { DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages(Collections.singletonList(primaryLanguage), Collections.singletonList(response))) .verifyComplete(); } /** * Verifies that an TextAnalyticsException is thrown for a text input with invalid country hint. */ @Test public void detectLanguageInvalidCountryHint() { StepVerifier.create(client.detectLanguageWithResponse("Este es un document escrito en Español.", "en")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that TextAnalyticsException is thrown for a empty text input. */ @Test public void detectLanguageEmptyText() { StepVerifier.create(client.detectLanguage("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed. 
*/ @Test public void detectLanguageFaultyText() { DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0); StepVerifier.create(client.detectLanguage("!@ .assertNext(response -> validateDetectedLanguages(Collections.singletonList(primaryLanguage), Collections.singletonList(response))) .verifyComplete(); } /** * Verifies that a Bad request exception is returned for input documents with same ids. */ @Test public void detectLanguageDuplicateIdInput() { detectLanguageDuplicateIdRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass()))); } @Test public void recognizeEntitiesForTextInput() { final CategorizedEntity categorizedEntity1 = new CategorizedEntity("Seattle", "Location", null, 26, 7, 0.0); final CategorizedEntity categorizedEntity2 = new CategorizedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0); StepVerifier.create(client.recognizeEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateCategorizedEntity(categorizedEntity1, response)) .assertNext(response -> validateCategorizedEntity(categorizedEntity2, response)) .verifyComplete(); } @Test public void recognizeEntitiesForEmptyText() { StepVerifier.create(client.recognizeEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test @Test public void recognizeEntitiesBatchInputSingleError() { recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> StepVerifier.create(client.recognizeBatchEntities(inputs)) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE))); } @Test public void recognizeEntitiesForBatchInput() { recognizeBatchCategorizedEntityRunner((inputs) -> 
StepVerifier.create(client.recognizeBatchEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForBatchInputShowStatistics() { recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchEntitiesWithResponse(inputs, options)) .assertNext(response -> validateCategorizedEntity(true, getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeEntitiesForBatchStringInput() { recognizeCategorizedEntityStringInputRunner((inputs) -> StepVerifier.create(client.recognizeEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForListLanguageHint() { recognizeCatgeorizedEntitiesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeEntitiesWithResponse(inputs, language)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForTextInput() { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 0.0, 7, 26); final LinkedEntity linkedEntity = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch), "en", "Seattle", "https: StepVerifier.create(client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateLinkedEntity(linkedEntity, response)) .verifyComplete(); } @Test public void recognizeLinkedEntitiesForEmptyText() { StepVerifier.create(client.recognizeLinkedEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeLinkedEntitiesForFaultyText() { 
StepVerifier.create(client.recognizeLinkedEntities("!@ .verifyComplete(); } @Test public void recognizeLinkedEntitiesForBatchInput() { recognizeBatchLinkedEntityRunner((inputs) -> StepVerifier.create(client.recognizeBatchLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchInputShowStatistics() { recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchLinkedEntitiesWithResponse(inputs, options)) .assertNext(response -> validateLinkedEntity(true, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchStringInput() { recognizeLinkedStringInputRunner((inputs) -> StepVerifier.create(client.recognizeLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForListLanguageHint() { recognizeLinkedLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeLinkedEntitiesWithResponse(inputs, language)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForTextInput() { PiiEntity piiEntity = new PiiEntity("859-98-0987", "U.S. 
Social Security Number (SSN)", "", 28, 11, 0.0); StepVerifier.create(client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.")) .assertNext(response -> validatePiiEntity(piiEntity, response)) .verifyComplete(); } @Test public void recognizePiiEntitiesForEmptyText() { StepVerifier.create(client.recognizePiiEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizePiiEntitiesForFaultyText() { StepVerifier.create(client.recognizePiiEntities("!@ .verifyComplete(); } @Test public void recognizePiiEntitiesForBatchInput() { recognizeBatchPiiRunner((inputs) -> StepVerifier.create(client.recognizeBatchPiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchInputShowStatistics() { recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchPiiEntitiesWithResponse(inputs, options)) .assertNext(response -> validatePiiEntity(true, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchStringInput() { recognizePiiStringInputRunner((inputs) -> StepVerifier.create(client.recognizePiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForListLanguageHint() { recognizePiiLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizePiiEntitiesWithResponse(inputs, language)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForTextInput() { StepVerifier.create(client.extractKeyPhrases("Bonjour tout le monde.")) .assertNext(response -> 
assertEquals("monde", response)) .verifyComplete(); } @Test public void extractKeyPhrasesForEmptyText() { StepVerifier.create(client.extractKeyPhrases("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void extractKeyPhrasesForFaultyText() { StepVerifier.create(client.extractKeyPhrases("!@ .verifyComplete(); } @Test public void extractKeyPhrasesForBatchInput() { extractBatchKeyPhrasesRunner((inputs) -> StepVerifier.create(client.extractBatchKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchInputShowStatistics() { extractBatchKeyPhrasesShowStatsRunner((inputs, options) -> StepVerifier.create(client.extractBatchKeyPhrasesWithResponse(inputs, options)) .assertNext(response -> validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchStringInput() { extractKeyPhrasesStringInputRunner((inputs) -> StepVerifier.create(client.extractKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForListLanguageHint() { extractKeyPhrasesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.extractKeyPhrasesWithResponse(inputs, language)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for a string input. 
*/ @Test public void analyseSentimentForTextInput() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(SentimentLabel.MIXED, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEGATIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 31, 0), new SentenceSentiment(SentimentLabel.POSITIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 35, 32) )); StepVerifier .create(client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.")) .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Verifies that an TextAnalyticsException is thrown for a empty text input. */ @Test public void analyseSentimentForEmptyText() { StepVerifier.create(client.analyzeSentiment("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Test analyzing sentiment for a faulty input text. */ @Test public void analyseSentimentForFaultyText() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment( SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 1, 0), new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 4, 1) )); StepVerifier.create(client.analyzeSentiment("!@ .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Test analyzing sentiment for a list of string input. 
*/ @Test public void analyseSentimentForBatchStringInput() { analyseSentimentStringInputRunner(inputs -> StepVerifier.create(client.analyzeSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Test analyzing sentiment for a list of string input with language hint. */ @Test public void analyseSentimentForListLanguageHint() { analyseSentimentLanguageHintRunner((inputs, language) -> StepVerifier.create(client.analyzeSentimentWithResponse(inputs, language)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for batch input. */ @Test public void analyseSentimentForBatchInput() { analyseBatchSentimentRunner(inputs -> StepVerifier.create(client.analyzeBatchSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. 
*/ @Test public void analyseSentimentForBatchInputShowStatistics() { analyseBatchSentimentShowStatsRunner((inputs, options) -> StepVerifier.create(client.analyzeBatchSentimentWithResponse(inputs, options)) .assertNext(response -> validateSentiment(true, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test client builder with valid subscription key */ @Test public void validKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(getSubscriptionKey())).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } /** * Test client builder with invalid subscription key */ @Test public void invalidKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with valid subscription key but update to invalid key and make call to server. */ @Test public void updateToInvalidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(getSubscriptionKey()); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(INVALID_KEY); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with invalid subscription key but update to valid key and make call to server. 
*/ @Test public void updateToValidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(INVALID_KEY); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(getSubscriptionKey()); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } /** * Test for missing endpoint */ @Test public void missingEndpoint() { assertThrows(NullPointerException.class, () -> { final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder(); builder.buildAsyncClient(); }); } /** * Test for null service version, which would take take the default service version by default */ @Test public void nullServiceVersion() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey())) .retryPolicy(new RetryPolicy()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(null); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } /** * Test for default pipeline in client builder */ @Test public void defaultPipeline() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) 
.subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey())) .configuration(Configuration.getGlobalConfiguration()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } }
class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase { private TextAnalyticsAsyncClient client; @BeforeAll static void beforeAll() { StepVerifier.setDefaultTimeout(Duration.ofSeconds(30)); } @AfterAll static void afterAll() { StepVerifier.resetDefaultTimeout(); } @Override protected void beforeTest() { client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .pipeline(httpPipeline) .buildAsyncClient()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void detectLanguagesBatchInputShowStatistics() { detectLanguageShowStatisticsRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .assertNext(response -> validateDetectLanguage(true, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test to detect language for each {@code DetectLanguageResult} input of a batch. */ @Test public void detectLanguagesBatchInput() { detectLanguageRunner((inputs) -> StepVerifier.create(client.detectBatchLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Test to detect language for each string input of batch, with given country hint. */ @Test public void detectLanguagesBatchListCountryHint() { detectLanguagesCountryHintRunner((inputs, countryHint) -> StepVerifier.create(client.detectLanguageWithResponse(inputs, countryHint)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test to detect language for each string input of batch. 
*/ @Test public void detectLanguagesBatchStringInput() { detectLanguageStringInputRunner((inputs) -> StepVerifier.create(client.detectLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Verifies that a single DetectedLanguage is returned for a text input to detectLanguage. */ @Test public void detectSingleTextLanguage() { DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(primaryLanguage, response)) .verifyComplete(); } /** * Verifies that an TextAnalyticsException is thrown for a text input with invalid country hint. */ @Test public void detectLanguageInvalidCountryHint() { StepVerifier.create(client.detectLanguageWithResponse("Este es un document escrito en Español.", "en")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that TextAnalyticsException is thrown for a empty text input. */ @Test public void detectLanguageEmptyText() { StepVerifier.create(client.detectLanguage("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed. */ @Test public void detectLanguageFaultyText() { DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0); StepVerifier.create(client.detectLanguage("!@ .assertNext(response -> validatePrimaryLanguage(primaryLanguage, response)) .verifyComplete(); } /** * Verifies that a bad request exception is returned for input documents with same ids. 
*/ @Test public void detectLanguageDuplicateIdInput() { detectLanguageDuplicateIdRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass()))); } @Test public void recognizeEntitiesForTextInput() { final CategorizedEntity categorizedEntity1 = new CategorizedEntity("Seattle", "Location", null, 26, 7, 0.0); final CategorizedEntity categorizedEntity2 = new CategorizedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0); StepVerifier.create(client.recognizeEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateCategorizedEntity(categorizedEntity1, response)) .assertNext(response -> validateCategorizedEntity(categorizedEntity2, response)) .verifyComplete(); } @Test public void recognizeEntitiesForEmptyText() { StepVerifier.create(client.recognizeEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test @Test public void recognizeEntitiesBatchInputSingleError() { recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> StepVerifier.create(client.recognizeBatchEntities(inputs)) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE))); } @Test public void recognizeEntitiesForBatchInput() { recognizeBatchCategorizedEntityRunner((inputs) -> StepVerifier.create(client.recognizeBatchEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForBatchInputShowStatistics() { recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchEntitiesWithResponse(inputs, options)) .assertNext(response -> validateCategorizedEntity(true, 
getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeEntitiesForBatchStringInput() { recognizeCategorizedEntityStringInputRunner((inputs) -> StepVerifier.create(client.recognizeEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForListLanguageHint() { recognizeCatgeorizedEntitiesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeEntitiesWithResponse(inputs, language)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForTextInput() { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 0.0, 7, 26); final LinkedEntity linkedEntity = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch), "en", "Seattle", "https: StepVerifier.create(client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateLinkedEntity(linkedEntity, response)) .verifyComplete(); } @Test public void recognizeLinkedEntitiesForEmptyText() { StepVerifier.create(client.recognizeLinkedEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeLinkedEntitiesForFaultyText() { StepVerifier.create(client.recognizeLinkedEntities("!@ .expectNextCount(0) .verifyComplete(); } @Test public void recognizeLinkedEntitiesForBatchInput() { recognizeBatchLinkedEntityRunner((inputs) -> StepVerifier.create(client.recognizeBatchLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchInputShowStatistics() { 
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchLinkedEntitiesWithResponse(inputs, options)) .assertNext(response -> validateLinkedEntity(true, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchStringInput() { recognizeLinkedStringInputRunner((inputs) -> StepVerifier.create(client.recognizeLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForListLanguageHint() { recognizeLinkedLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeLinkedEntitiesWithResponse(inputs, language)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForTextInput() { PiiEntity piiEntity = new PiiEntity("859-98-0987", "U.S. 
Social Security Number (SSN)", "", 28, 11, 0.0); StepVerifier.create(client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.")) .assertNext(response -> validatePiiEntity(piiEntity, response)) .verifyComplete(); } @Test public void recognizePiiEntitiesForEmptyText() { StepVerifier.create(client.recognizePiiEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizePiiEntitiesForFaultyText() { StepVerifier.create(client.recognizePiiEntities("!@ .expectNextCount(0) .verifyComplete(); } @Test public void recognizePiiEntitiesForBatchInput() { recognizeBatchPiiRunner((inputs) -> StepVerifier.create(client.recognizeBatchPiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchInputShowStatistics() { recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchPiiEntitiesWithResponse(inputs, options)) .assertNext(response -> validatePiiEntity(true, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchStringInput() { recognizePiiStringInputRunner((inputs) -> StepVerifier.create(client.recognizePiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForListLanguageHint() { recognizePiiLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizePiiEntitiesWithResponse(inputs, language)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForTextInput() { StepVerifier.create(client.extractKeyPhrases("Bonjour tout le monde.")) 
.assertNext(response -> assertEquals("monde", response)) .verifyComplete(); } @Test public void extractKeyPhrasesForEmptyText() { StepVerifier.create(client.extractKeyPhrases("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void extractKeyPhrasesForFaultyText() { StepVerifier.create(client.extractKeyPhrases("!@ .expectNextCount(0) .verifyComplete(); } @Test public void extractKeyPhrasesForBatchInput() { extractBatchKeyPhrasesRunner((inputs) -> StepVerifier.create(client.extractBatchKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchInputShowStatistics() { extractBatchKeyPhrasesShowStatsRunner((inputs, options) -> StepVerifier.create(client.extractBatchKeyPhrasesWithResponse(inputs, options)) .assertNext(response -> validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchStringInput() { extractKeyPhrasesStringInputRunner((inputs) -> StepVerifier.create(client.extractKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForListLanguageHint() { extractKeyPhrasesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.extractKeyPhrasesWithResponse(inputs, language)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for a string input. 
*/ @Test public void analyseSentimentForTextInput() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(SentimentLabel.MIXED, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEGATIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 31, 0), new SentenceSentiment(SentimentLabel.POSITIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 35, 32) )); StepVerifier .create(client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.")) .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Verifies that an TextAnalyticsException is thrown for a empty text input. */ @Test public void analyseSentimentForEmptyText() { StepVerifier.create(client.analyzeSentiment("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Test analyzing sentiment for a faulty input text. */ @Test public void analyseSentimentForFaultyText() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment( SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 1, 0), new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 4, 1) )); StepVerifier.create(client.analyzeSentiment("!@ .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Test analyzing sentiment for a list of string input. 
*/ @Test public void analyseSentimentForBatchStringInput() { analyseSentimentStringInputRunner(inputs -> StepVerifier.create(client.analyzeSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Test analyzing sentiment for a list of string input with language hint. */ @Test public void analyseSentimentForListLanguageHint() { analyseSentimentLanguageHintRunner((inputs, language) -> StepVerifier.create(client.analyzeSentimentWithResponse(inputs, language)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for batch input. */ @Test public void analyseSentimentForBatchInput() { analyseBatchSentimentRunner(inputs -> StepVerifier.create(client.analyzeBatchSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. 
*/ @Test public void analyseSentimentForBatchInputShowStatistics() { analyseBatchSentimentShowStatsRunner((inputs, options) -> StepVerifier.create(client.analyzeBatchSentimentWithResponse(inputs, options)) .assertNext(response -> validateSentiment(true, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test client builder with valid API key */ @Test public void validKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(getApiKey())).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } /** * Test client builder with invalid API key */ @Test public void invalidKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with valid API key but update to invalid key and make call to server. */ @Test public void updateToInvalidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(getApiKey()); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(INVALID_KEY); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with invalid API key but update to valid key and make call to server. 
*/ @Test public void updateToValidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(INVALID_KEY); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(getApiKey()); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } /** * Test for null service version, which would take the default service version by default */ @Test public void nullServiceVersion() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .retryPolicy(new RetryPolicy()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(null); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } /** * Test for default pipeline in client builder */ @Test public void defaultPipeline() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .configuration(Configuration.getGlobalConfiguration()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new 
NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } }
updated
public void recognizeEntitiesForFaultyText() { StepVerifier.create(client.recognizeEntities("!@ .verifyComplete(); }
.verifyComplete();
public void recognizeEntitiesForFaultyText() { StepVerifier.create(client.recognizeEntities("!@ .expectNextCount(0) .verifyComplete(); }
class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase { private TextAnalyticsAsyncClient client; @BeforeAll static void beforeAll() { StepVerifier.setDefaultTimeout(Duration.ofSeconds(30)); } @AfterAll static void afterAll() { StepVerifier.resetDefaultTimeout(); } @Override protected void beforeTest() { client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .pipeline(httpPipeline) .buildAsyncClient()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void detectLanguagesBatchInputShowStatistics() { detectLanguageShowStatisticsRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .assertNext(response -> validateDetectLanguage(true, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test Detect batch input languages. */ @Test public void detectLanguagesBatchInput() { detectLanguageRunner((inputs) -> StepVerifier.create(client.detectBatchLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Test Detect batch languages for List of String input with country Hint. */ @Test public void detectLanguagesBatchListCountryHint() { detectLanguagesCountryHintRunner((inputs, countryHint) -> StepVerifier.create(client.detectLanguageWithResponse(inputs, countryHint)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test Detect batch languages for List of String input. 
*/ @Test public void detectLanguagesBatchStringInput() { detectLanguageStringInputRunner((inputs) -> StepVerifier.create(client.detectLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages. */ @Test public void detectSingleTextLanguage() { DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages(Collections.singletonList(primaryLanguage), Collections.singletonList(response))) .verifyComplete(); } /** * Verifies that an TextAnalyticsException is thrown for a text input with invalid country hint. */ @Test public void detectLanguageInvalidCountryHint() { StepVerifier.create(client.detectLanguageWithResponse("Este es un document escrito en Español.", "en")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that TextAnalyticsException is thrown for a empty text input. */ @Test public void detectLanguageEmptyText() { StepVerifier.create(client.detectLanguage("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed. 
*/ @Test public void detectLanguageFaultyText() { DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0); StepVerifier.create(client.detectLanguage("!@ .assertNext(response -> validateDetectedLanguages(Collections.singletonList(primaryLanguage), Collections.singletonList(response))) .verifyComplete(); } /** * Verifies that a Bad request exception is returned for input documents with same ids. */ @Test public void detectLanguageDuplicateIdInput() { detectLanguageDuplicateIdRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass()))); } @Test public void recognizeEntitiesForTextInput() { final CategorizedEntity categorizedEntity1 = new CategorizedEntity("Seattle", "Location", null, 26, 7, 0.0); final CategorizedEntity categorizedEntity2 = new CategorizedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0); StepVerifier.create(client.recognizeEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateCategorizedEntity(categorizedEntity1, response)) .assertNext(response -> validateCategorizedEntity(categorizedEntity2, response)) .verifyComplete(); } @Test public void recognizeEntitiesForEmptyText() { StepVerifier.create(client.recognizeEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test @Test public void recognizeEntitiesBatchInputSingleError() { recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> StepVerifier.create(client.recognizeBatchEntities(inputs)) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE))); } @Test public void recognizeEntitiesForBatchInput() { recognizeBatchCategorizedEntityRunner((inputs) -> 
StepVerifier.create(client.recognizeBatchEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForBatchInputShowStatistics() { recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchEntitiesWithResponse(inputs, options)) .assertNext(response -> validateCategorizedEntity(true, getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeEntitiesForBatchStringInput() { recognizeCategorizedEntityStringInputRunner((inputs) -> StepVerifier.create(client.recognizeEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForListLanguageHint() { recognizeCatgeorizedEntitiesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeEntitiesWithResponse(inputs, language)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForTextInput() { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 0.0, 7, 26); final LinkedEntity linkedEntity = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch), "en", "Seattle", "https: StepVerifier.create(client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateLinkedEntity(linkedEntity, response)) .verifyComplete(); } @Test public void recognizeLinkedEntitiesForEmptyText() { StepVerifier.create(client.recognizeLinkedEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeLinkedEntitiesForFaultyText() { 
StepVerifier.create(client.recognizeLinkedEntities("!@ .verifyComplete(); } @Test public void recognizeLinkedEntitiesForBatchInput() { recognizeBatchLinkedEntityRunner((inputs) -> StepVerifier.create(client.recognizeBatchLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchInputShowStatistics() { recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchLinkedEntitiesWithResponse(inputs, options)) .assertNext(response -> validateLinkedEntity(true, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchStringInput() { recognizeLinkedStringInputRunner((inputs) -> StepVerifier.create(client.recognizeLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForListLanguageHint() { recognizeLinkedLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeLinkedEntitiesWithResponse(inputs, language)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForTextInput() { PiiEntity piiEntity = new PiiEntity("859-98-0987", "U.S. 
Social Security Number (SSN)", "", 28, 11, 0.0); StepVerifier.create(client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.")) .assertNext(response -> validatePiiEntity(piiEntity, response)) .verifyComplete(); } @Test public void recognizePiiEntitiesForEmptyText() { StepVerifier.create(client.recognizePiiEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizePiiEntitiesForFaultyText() { StepVerifier.create(client.recognizePiiEntities("!@ .verifyComplete(); } @Test public void recognizePiiEntitiesForBatchInput() { recognizeBatchPiiRunner((inputs) -> StepVerifier.create(client.recognizeBatchPiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchInputShowStatistics() { recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchPiiEntitiesWithResponse(inputs, options)) .assertNext(response -> validatePiiEntity(true, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchStringInput() { recognizePiiStringInputRunner((inputs) -> StepVerifier.create(client.recognizePiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForListLanguageHint() { recognizePiiLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizePiiEntitiesWithResponse(inputs, language)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForTextInput() { StepVerifier.create(client.extractKeyPhrases("Bonjour tout le monde.")) .assertNext(response -> 
assertEquals("monde", response)) .verifyComplete(); } @Test public void extractKeyPhrasesForEmptyText() { StepVerifier.create(client.extractKeyPhrases("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void extractKeyPhrasesForFaultyText() { StepVerifier.create(client.extractKeyPhrases("!@ .verifyComplete(); } @Test public void extractKeyPhrasesForBatchInput() { extractBatchKeyPhrasesRunner((inputs) -> StepVerifier.create(client.extractBatchKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchInputShowStatistics() { extractBatchKeyPhrasesShowStatsRunner((inputs, options) -> StepVerifier.create(client.extractBatchKeyPhrasesWithResponse(inputs, options)) .assertNext(response -> validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchStringInput() { extractKeyPhrasesStringInputRunner((inputs) -> StepVerifier.create(client.extractKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForListLanguageHint() { extractKeyPhrasesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.extractKeyPhrasesWithResponse(inputs, language)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for a string input. 
*/ @Test public void analyseSentimentForTextInput() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(SentimentLabel.MIXED, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEGATIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 31, 0), new SentenceSentiment(SentimentLabel.POSITIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 35, 32) )); StepVerifier .create(client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.")) .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Verifies that an TextAnalyticsException is thrown for a empty text input. */ @Test public void analyseSentimentForEmptyText() { StepVerifier.create(client.analyzeSentiment("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Test analyzing sentiment for a faulty input text. */ @Test public void analyseSentimentForFaultyText() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment( SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 1, 0), new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 4, 1) )); StepVerifier.create(client.analyzeSentiment("!@ .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Test analyzing sentiment for a list of string input. 
*/ @Test public void analyseSentimentForBatchStringInput() { analyseSentimentStringInputRunner(inputs -> StepVerifier.create(client.analyzeSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Test analyzing sentiment for a list of string input with language hint. */ @Test public void analyseSentimentForListLanguageHint() { analyseSentimentLanguageHintRunner((inputs, language) -> StepVerifier.create(client.analyzeSentimentWithResponse(inputs, language)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for batch input. */ @Test public void analyseSentimentForBatchInput() { analyseBatchSentimentRunner(inputs -> StepVerifier.create(client.analyzeBatchSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. 
*/ @Test public void analyseSentimentForBatchInputShowStatistics() { analyseBatchSentimentShowStatsRunner((inputs, options) -> StepVerifier.create(client.analyzeBatchSentimentWithResponse(inputs, options)) .assertNext(response -> validateSentiment(true, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test client builder with valid subscription key */ @Test public void validKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(getSubscriptionKey())).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } /** * Test client builder with invalid subscription key */ @Test public void invalidKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with valid subscription key but update to invalid key and make call to server. */ @Test public void updateToInvalidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(getSubscriptionKey()); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(INVALID_KEY); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with invalid subscription key but update to valid key and make call to server. 
*/ @Test public void updateToValidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(INVALID_KEY); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(getSubscriptionKey()); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } /** * Test for missing endpoint */ @Test public void missingEndpoint() { assertThrows(NullPointerException.class, () -> { final TextAnalyticsClientBuilder builder = new TextAnalyticsClientBuilder(); builder.buildAsyncClient(); }); } /** * Test for null service version, which would take take the default service version by default */ @Test public void nullServiceVersion() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey())) .retryPolicy(new RetryPolicy()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(null); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } /** * Test for default pipeline in client builder */ @Test public void defaultPipeline() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) 
.subscriptionKey(new TextAnalyticsApiKeyCredential(getSubscriptionKey())) .configuration(Configuration.getGlobalConfiguration()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(response))) .verifyComplete(); } }
class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase { private TextAnalyticsAsyncClient client; @BeforeAll static void beforeAll() { StepVerifier.setDefaultTimeout(Duration.ofSeconds(30)); } @AfterAll static void afterAll() { StepVerifier.resetDefaultTimeout(); } @Override protected void beforeTest() { client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .pipeline(httpPipeline) .buildAsyncClient()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void detectLanguagesBatchInputShowStatistics() { detectLanguageShowStatisticsRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .assertNext(response -> validateDetectLanguage(true, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test to detect language for each {@code DetectLanguageResult} input of a batch. */ @Test public void detectLanguagesBatchInput() { detectLanguageRunner((inputs) -> StepVerifier.create(client.detectBatchLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Test to detect language for each string input of batch, with given country hint. */ @Test public void detectLanguagesBatchListCountryHint() { detectLanguagesCountryHintRunner((inputs, countryHint) -> StepVerifier.create(client.detectLanguageWithResponse(inputs, countryHint)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response.getValue())) .verifyComplete()); } /** * Test to detect language for each string input of batch. 
*/ @Test public void detectLanguagesBatchStringInput() { detectLanguageStringInputRunner((inputs) -> StepVerifier.create(client.detectLanguage(inputs)) .assertNext(response -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), response)) .verifyComplete()); } /** * Verifies that a single DetectedLanguage is returned for a text input to detectLanguage. */ @Test public void detectSingleTextLanguage() { DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 1.0); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(primaryLanguage, response)) .verifyComplete(); } /** * Verifies that an TextAnalyticsException is thrown for a text input with invalid country hint. */ @Test public void detectLanguageInvalidCountryHint() { StepVerifier.create(client.detectLanguageWithResponse("Este es un document escrito en Español.", "en")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that TextAnalyticsException is thrown for a empty text input. */ @Test public void detectLanguageEmptyText() { StepVerifier.create(client.detectLanguage("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed. */ @Test public void detectLanguageFaultyText() { DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0); StepVerifier.create(client.detectLanguage("!@ .assertNext(response -> validatePrimaryLanguage(primaryLanguage, response)) .verifyComplete(); } /** * Verifies that a bad request exception is returned for input documents with same ids. 
*/ @Test public void detectLanguageDuplicateIdInput() { detectLanguageDuplicateIdRunner((inputs, options) -> StepVerifier.create(client.detectBatchLanguageWithResponse(inputs, options)) .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass()))); } @Test public void recognizeEntitiesForTextInput() { final CategorizedEntity categorizedEntity1 = new CategorizedEntity("Seattle", "Location", null, 26, 7, 0.0); final CategorizedEntity categorizedEntity2 = new CategorizedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0); StepVerifier.create(client.recognizeEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateCategorizedEntity(categorizedEntity1, response)) .assertNext(response -> validateCategorizedEntity(categorizedEntity2, response)) .verifyComplete(); } @Test public void recognizeEntitiesForEmptyText() { StepVerifier.create(client.recognizeEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test @Test public void recognizeEntitiesBatchInputSingleError() { recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> StepVerifier.create(client.recognizeBatchEntities(inputs)) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE))); } @Test public void recognizeEntitiesForBatchInput() { recognizeBatchCategorizedEntityRunner((inputs) -> StepVerifier.create(client.recognizeBatchEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForBatchInputShowStatistics() { recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchEntitiesWithResponse(inputs, options)) .assertNext(response -> validateCategorizedEntity(true, 
getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeEntitiesForBatchStringInput() { recognizeCategorizedEntityStringInputRunner((inputs) -> StepVerifier.create(client.recognizeEntities(inputs)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response)) .verifyComplete()); } @Test public void recognizeEntitiesForListLanguageHint() { recognizeCatgeorizedEntitiesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeEntitiesWithResponse(inputs, language)) .assertNext(response -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForTextInput() { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 0.0, 7, 26); final LinkedEntity linkedEntity = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch), "en", "Seattle", "https: StepVerifier.create(client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.")) .assertNext(response -> validateLinkedEntity(linkedEntity, response)) .verifyComplete(); } @Test public void recognizeLinkedEntitiesForEmptyText() { StepVerifier.create(client.recognizeLinkedEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeLinkedEntitiesForFaultyText() { StepVerifier.create(client.recognizeLinkedEntities("!@ .expectNextCount(0) .verifyComplete(); } @Test public void recognizeLinkedEntitiesForBatchInput() { recognizeBatchLinkedEntityRunner((inputs) -> StepVerifier.create(client.recognizeBatchLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchInputShowStatistics() { 
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchLinkedEntitiesWithResponse(inputs, options)) .assertNext(response -> validateLinkedEntity(true, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForBatchStringInput() { recognizeLinkedStringInputRunner((inputs) -> StepVerifier.create(client.recognizeLinkedEntities(inputs)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response)) .verifyComplete()); } @Test public void recognizeLinkedEntitiesForListLanguageHint() { recognizeLinkedLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizeLinkedEntitiesWithResponse(inputs, language)) .assertNext(response -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForTextInput() { PiiEntity piiEntity = new PiiEntity("859-98-0987", "U.S. 
Social Security Number (SSN)", "", 28, 11, 0.0); StepVerifier.create(client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's.")) .assertNext(response -> validatePiiEntity(piiEntity, response)) .verifyComplete(); } @Test public void recognizePiiEntitiesForEmptyText() { StepVerifier.create(client.recognizePiiEntities("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizePiiEntitiesForFaultyText() { StepVerifier.create(client.recognizePiiEntities("!@ .expectNextCount(0) .verifyComplete(); } @Test public void recognizePiiEntitiesForBatchInput() { recognizeBatchPiiRunner((inputs) -> StepVerifier.create(client.recognizeBatchPiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchInputShowStatistics() { recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) -> StepVerifier.create(client.recognizeBatchPiiEntitiesWithResponse(inputs, options)) .assertNext(response -> validatePiiEntity(true, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void recognizePiiEntitiesForBatchStringInput() { recognizePiiStringInputRunner((inputs) -> StepVerifier.create(client.recognizePiiEntities(inputs)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response)) .verifyComplete()); } @Test public void recognizePiiEntitiesForListLanguageHint() { recognizePiiLanguageHintRunner((inputs, language) -> StepVerifier.create(client.recognizePiiEntitiesWithResponse(inputs, language)) .assertNext(response -> validatePiiEntity(false, getExpectedBatchPiiEntities(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForTextInput() { StepVerifier.create(client.extractKeyPhrases("Bonjour tout le monde.")) 
.assertNext(response -> assertEquals("monde", response)) .verifyComplete(); } @Test public void extractKeyPhrasesForEmptyText() { StepVerifier.create(client.extractKeyPhrases("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void extractKeyPhrasesForFaultyText() { StepVerifier.create(client.extractKeyPhrases("!@ .expectNextCount(0) .verifyComplete(); } @Test public void extractKeyPhrasesForBatchInput() { extractBatchKeyPhrasesRunner((inputs) -> StepVerifier.create(client.extractBatchKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchInputShowStatistics() { extractBatchKeyPhrasesShowStatsRunner((inputs, options) -> StepVerifier.create(client.extractBatchKeyPhrasesWithResponse(inputs, options)) .assertNext(response -> validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } @Test public void extractKeyPhrasesForBatchStringInput() { extractKeyPhrasesStringInputRunner((inputs) -> StepVerifier.create(client.extractKeyPhrases(inputs)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response)) .verifyComplete()); } @Test public void extractKeyPhrasesForListLanguageHint() { extractKeyPhrasesLanguageHintRunner((inputs, language) -> StepVerifier.create(client.extractKeyPhrasesWithResponse(inputs, language)) .assertNext(response -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for a string input. 
*/ @Test public void analyseSentimentForTextInput() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(SentimentLabel.MIXED, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEGATIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 31, 0), new SentenceSentiment(SentimentLabel.POSITIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 35, 32) )); StepVerifier .create(client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi.")) .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Verifies that an TextAnalyticsException is thrown for a empty text input. */ @Test public void analyseSentimentForEmptyText() { StepVerifier.create(client.analyzeSentiment("")) .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException && throwable.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Test analyzing sentiment for a faulty input text. */ @Test public void analyseSentimentForFaultyText() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment( SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 1, 0), new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 4, 1) )); StepVerifier.create(client.analyzeSentiment("!@ .assertNext(response -> validateAnalysedSentiment(expectedDocumentSentiment, response)).verifyComplete(); } /** * Test analyzing sentiment for a list of string input. 
*/ @Test public void analyseSentimentForBatchStringInput() { analyseSentimentStringInputRunner(inputs -> StepVerifier.create(client.analyzeSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Test analyzing sentiment for a list of string input with language hint. */ @Test public void analyseSentimentForListLanguageHint() { analyseSentimentLanguageHintRunner((inputs, language) -> StepVerifier.create(client.analyzeSentimentWithResponse(inputs, language)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test analyzing sentiment for batch input. */ @Test public void analyseSentimentForBatchInput() { analyseBatchSentimentRunner(inputs -> StepVerifier.create(client.analyzeBatchSentiment(inputs)) .assertNext(response -> validateSentiment(false, getExpectedBatchTextSentiment(), response)) .verifyComplete()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. 
*/ @Test public void analyseSentimentForBatchInputShowStatistics() { analyseBatchSentimentShowStatsRunner((inputs, options) -> StepVerifier.create(client.analyzeBatchSentimentWithResponse(inputs, options)) .assertNext(response -> validateSentiment(true, getExpectedBatchTextSentiment(), response.getValue())) .verifyComplete()); } /** * Test client builder with valid API key */ @Test public void validKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(getApiKey())).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } /** * Test client builder with invalid API key */ @Test public void invalidKey() { final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildAsyncClient(); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with valid API key but update to invalid key and make call to server. */ @Test public void updateToInvalidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(getApiKey()); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(INVALID_KEY); StepVerifier.create(client.detectLanguage("This is a test English Text")) .verifyError(HttpResponseException.class); } /** * Test client with invalid API key but update to valid key and make call to server. 
*/ @Test public void updateToValidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(INVALID_KEY); final TextAnalyticsAsyncClient client = createClientBuilder(getEndpoint(), credential).buildAsyncClient(); credential.updateCredential(getApiKey()); StepVerifier.create(client.detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } /** * Test for null service version, which would take the default service version by default */ @Test public void nullServiceVersion() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .retryPolicy(new RetryPolicy()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(null); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } /** * Test for default pipeline in client builder */ @Test public void defaultPipeline() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .configuration(Configuration.getGlobalConfiguration()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new 
NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } StepVerifier.create(clientBuilder.buildAsyncClient().detectLanguage("This is a test English Text")) .assertNext(response -> validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), response)) .verifyComplete(); } }
Can use `validatePrimaryLanguage` here and below.
public void detectSingleTextLanguage() { DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 0.0); List<DetectedLanguage> expectedLanguageList = Collections.singletonList(primaryLanguage); validateDetectedLanguages( Collections.singletonList(client.detectLanguage("This is a test English Text")), expectedLanguageList); }
validateDetectedLanguages(
public void detectSingleTextLanguage() { validatePrimaryLanguage(new DetectedLanguage("English", "en", 0.0), client.detectLanguage("This is a test English Text")); }
class TextAnalyticsClientTest extends TextAnalyticsClientTestBase { private TextAnalyticsClient client; @Override protected void beforeTest() { client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .pipeline(httpPipeline) .buildClient()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void detectLanguagesBatchInputShowStatistics() { detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguage(true, getExpectedBatchDetectedLanguages(), client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE).getValue())); } /** * Test Detect batch input languages. */ @Test public void detectLanguagesBatchInput() { detectLanguageRunner((inputs) -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), client.detectBatchLanguages(inputs))); } /** * Test detect batch languages for a list of string input with country hint. */ @Test public void detectLanguagesBatchListCountryHint() { detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguage( false, getExpectedBatchDetectedLanguages(), client.detectLanguagesWithResponse(inputs, countryHint, Context.NONE).getValue())); } /** * Test detect batch languages for a list of string input. */ @Test public void detectLanguagesBatchStringInput() { detectLanguageStringInputRunner((inputs) -> validateDetectLanguage( false, getExpectedBatchDetectedLanguages(), client.detectLanguages(inputs))); } /** * Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages. */ @Test /** * Verifies that an exception is thrown when null text is passed. */ @Test public void detectLanguagesNullInput() { assertThrows(NullPointerException.class, () -> client.detectBatchLanguagesWithResponse(null, null, Context.NONE).getValue()); } /** * Verifies that a TextAnalyticsException is thrown for an empty text input. 
*/ @Test public void detectLanguageEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguage("")); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed. */ @Test public void detectLanguageFaultyText() { DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0); validateDetectedLanguages(Collections.singletonList(client.detectLanguage("!@ Collections.singletonList(primaryLanguage)); } /** * Verifies that a TextAnalyticsException is thrown for a text input with invalid country hint. */ @Test public void detectLanguageInvalidCountryHint() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguageWithResponse("Este es un document escrito en Español.", "en", Context.NONE)); assertTrue(exception.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that a bad request exception is returned for input documents with same IDs. 
*/ @Test public void detectLanguageDuplicateIdInput() { detectLanguageDuplicateIdRunner((inputs, options) -> { HttpResponseException response = assertThrows(HttpResponseException.class, () -> client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE)); assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode()); }); } @Test public void recognizeEntitiesForTextInput() { final CategorizedEntity categorizedEntity1 = new CategorizedEntity("Seattle", "Location", null, 26, 7, 0.0); final CategorizedEntity categorizedEntity2 = new CategorizedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0); final List<CategorizedEntity> entities = client.recognizeEntities("I had a wonderful trip to Seattle last week.").stream().collect(Collectors.toList()); validateCategorizedEntity(categorizedEntity1, entities.get(0)); validateCategorizedEntity(categorizedEntity2, entities.get(1)); } @Test public void recognizeEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeEntitiesForFaultyText() { assertFalse(client.recognizeEntities("!@ } @Test public void recognizeEntitiesBatchInputSingleError() { recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> { DocumentResultCollection<RecognizeEntitiesResult> l = client.recognizeBatchEntities(inputs); for (RecognizeEntitiesResult recognizeEntitiesResult : l) { Exception exception = assertThrows(TextAnalyticsException.class, () -> recognizeEntitiesResult.getEntities()); assertTrue(exception.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE)); } }); } @Test public void recognizeEntitiesForBatchInput() { recognizeBatchCategorizedEntityRunner((inputs) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeBatchEntities(inputs))); } @Test public void 
recognizeEntitiesForBatchInputShowStatistics() { recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) -> validateCategorizedEntity(true, getExpectedBatchCategorizedEntities(), client.recognizeBatchEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void recognizeEntitiesForBatchStringInput() { recognizeCategorizedEntityStringInputRunner((inputs) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeEntities(inputs))); } @Test public void recognizeEntitiesForListLanguageHint() { recognizeCatgeorizedEntitiesLanguageHintRunner((inputs, language) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeEntities(inputs, language, Context.NONE).getValue())); } @Test public void recognizePiiEntitiesForTextInput() { final PiiEntity piiEntity = new PiiEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.0); final PagedIterable<PiiEntity> entities = client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's."); validatePiiEntity(piiEntity, entities.iterator().next()); } @Test public void recognizePiiEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizePiiEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizePiiEntitiesForFaultyText() { assertFalse(client.recognizePiiEntities("!@ } @Test public void recognizePiiEntitiesForBatchInput() { recognizeBatchPiiRunner((inputs) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizeBatchPiiEntities(inputs))); } @Test public void recognizePiiEntitiesForBatchInputShowStatistics() { recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) -> validatePiiEntity(true, getExpectedBatchPiiEntities(), client.recognizeBatchPiiEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test 
public void recognizePiiEntitiesForBatchStringInput() { recognizePiiStringInputRunner((inputs) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntities(inputs))); } @Test public void recognizePiiEntitiesForListLanguageHint() { recognizePiiLanguageHintRunner((inputs, language) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntitiesWithResponse(inputs, language, Context.NONE).getValue())); } @Test public void recognizeLinkedEntitiesForTextInput() { final LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.0, 7, 26); final LinkedEntity linkedEntity1 = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch1), "en", "Seattle", "https: final List<LinkedEntity> linkedEntities = client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.").stream().collect(Collectors.toList()); validateLinkedEntity(linkedEntity1, linkedEntities.get(0)); } @Test public void recognizeLinkedEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeLinkedEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeLinkedEntitiesForFaultyText() { assertFalse(client.recognizeLinkedEntities("!@ } @Test public void recognizeLinkedEntitiesForBatchInput() { recognizeBatchLinkedEntityRunner((inputs) -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntities(inputs))); } @Test public void recognizeLinkedEntitiesForBatchInputShowStatistics() { recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) -> validateLinkedEntity(true, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void recognizeLinkedEntitiesForBatchStringInput() { recognizeLinkedStringInputRunner((inputs) -> validateLinkedEntity(false, 
getExpectedBatchLinkedEntities(), client.recognizeLinkedEntities(inputs))); } @Test public void recognizeLinkedEntitiesForListLanguageHint() { recognizeLinkedLanguageHintRunner((inputs, language) -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntitiesWithResponse(inputs, language, Context.NONE).getValue())); } @Test public void extractKeyPhrasesForTextInput() { assertEquals("monde", client.extractKeyPhrases("Bonjour tout le monde.").iterator().next()); } @Test public void extractKeyPhrasesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.extractKeyPhrases("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void extractKeyPhrasesForFaultyText() { assertFalse(client.extractKeyPhrases("!@ } @Test public void extractKeyPhrasesForBatchInput() { extractBatchKeyPhrasesRunner((inputs) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrases(inputs))); } @Test public void extractKeyPhrasesForBatchInputShowStatistics() { extractBatchKeyPhrasesShowStatsRunner((inputs, options) -> validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrasesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void extractKeyPhrasesForBatchStringInput() { extractKeyPhrasesStringInputRunner((inputs) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrases(inputs))); } @Test public void extractKeyPhrasesForListLanguageHint() { extractKeyPhrasesLanguageHintRunner((inputs, language) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrasesWithResponse(inputs, language, Context.NONE).getValue())); } /** * Test analyzing sentiment for a string input. 
*/ @Test public void analyseSentimentForTextInput() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment( SentimentLabel.MIXED, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEGATIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 31, 0), new SentenceSentiment(SentimentLabel.POSITIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 35, 32) )); DocumentSentiment analyzeSentimentResult = client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi."); validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult); } /** * Verifies that a TextAnalyticsException is thrown for an empty text input. */ @Test public void analyseSentimentForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.analyzeSentiment("")); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Test analyzing sentiment for a faulty input text. */ @Test public void analyseSentimentForFaultyText() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 1, 0), new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 4, 1) )); DocumentSentiment analyzeSentimentResult = client.analyzeSentiment("!@ validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult); } /** * Test analyzing sentiment for a list of string input. */ @Test public void analyseSentimentForBatchStringInput() { analyseSentimentStringInputRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentiment(inputs))); } /** * Test analyzing sentiment for a list of string input with language hint. 
*/ @Test public void analyseSentimentForListLanguageHint() { analyseSentimentLanguageHintRunner((inputs, language) -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentimentWithResponse(inputs, language, Context.NONE).getValue())); } /** * Test analyzing sentiment for batch input. */ @Test public void analyseSentimentForBatchInput() { analyseBatchSentimentRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeBatchSentiment(inputs))); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void analyseSentimentForBatchInputShowStatistics() { analyseBatchSentimentShowStatsRunner((inputs, options) -> validateSentiment(true, getExpectedBatchTextSentiment(), client.analyzeBatchSentimentWithResponse(inputs, options, Context.NONE).getValue())); } /** * Test client builder with valid API key */ @Test public void validKey() { final TextAnalyticsClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(getApiKey())).buildClient(); validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(client.detectLanguage("This is a test English Text"))); } /** * Test client builder with invalid API key */ @Test public void invalidKey() { final TextAnalyticsClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildClient(); assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text")); } /** * Test client with valid API key but update to invalid key and make call to server. 
*/ @Test public void updateToInvalidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(getApiKey()); final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient(); credential.updateCredential(INVALID_KEY); assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text")); } /** * Test client with invalid API key but update to valid key and make call to server. */ @Test public void updateToValidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(INVALID_KEY); final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient(); credential.updateCredential(getApiKey()); validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(client.detectLanguage("This is a test English Text"))); } /** * Test for null service version, which would take take the default service version by default */ @Test public void nullServiceVersion() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .retryPolicy(new RetryPolicy()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(null); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(clientBuilder.buildClient().detectLanguage("This is a test English Text"))); } /** * Test for default pipeline in client builder */ @Test public void defaultPipeline() { final TextAnalyticsClientBuilder clientBuilder = new 
TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .configuration(Configuration.getGlobalConfiguration()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(clientBuilder.buildClient().detectLanguage("This is a test English Text"))); } }
class TextAnalyticsClientTest extends TextAnalyticsClientTestBase { private TextAnalyticsClient client; @Override protected void beforeTest() { client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .pipeline(httpPipeline) .buildClient()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void detectLanguagesBatchInputShowStatistics() { detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguage(true, getExpectedBatchDetectedLanguages(), client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE).getValue())); } /** * Test Detect batch input languages. */ @Test public void detectLanguagesBatchInput() { detectLanguageRunner((inputs) -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), client.detectBatchLanguages(inputs))); } /** * Test detect batch languages for a list of string input with country hint. */ @Test public void detectLanguagesBatchListCountryHint() { detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguage( false, getExpectedBatchDetectedLanguages(), client.detectLanguagesWithResponse(inputs, countryHint, Context.NONE).getValue())); } /** * Test detect batch languages for a list of string input. */ @Test public void detectLanguagesBatchStringInput() { detectLanguageStringInputRunner((inputs) -> validateDetectLanguage( false, getExpectedBatchDetectedLanguages(), client.detectLanguages(inputs))); } /** * Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages. */ @Test /** * Verifies that an exception is thrown when null text is passed. */ @Test public void detectLanguagesNullInput() { assertThrows(NullPointerException.class, () -> client.detectBatchLanguagesWithResponse(null, null, Context.NONE).getValue()); } /** * Verifies that a TextAnalyticsException is thrown for an empty text input. 
*/ @Test public void detectLanguageEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguage("")); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed. */ @Test public void detectLanguageFaultyText() { DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0); validatePrimaryLanguage(client.detectLanguage("!@ } /** * Verifies that a TextAnalyticsException is thrown for a text input with invalid country hint. */ @Test public void detectLanguageInvalidCountryHint() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguageWithResponse("Este es un document escrito en Español.", "en", Context.NONE)); assertTrue(exception.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that a bad request exception is returned for input documents with same IDs. 
*/ @Test public void detectLanguageDuplicateIdInput() { detectLanguageDuplicateIdRunner((inputs, options) -> { HttpResponseException response = assertThrows(HttpResponseException.class, () -> client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE)); assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode()); }); } @Test public void recognizeEntitiesForTextInput() { final CategorizedEntity categorizedEntity1 = new CategorizedEntity("Seattle", "Location", null, 26, 7, 0.0); final CategorizedEntity categorizedEntity2 = new CategorizedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0); final List<CategorizedEntity> entities = client.recognizeEntities("I had a wonderful trip to Seattle last week.").stream().collect(Collectors.toList()); validateCategorizedEntity(categorizedEntity1, entities.get(0)); validateCategorizedEntity(categorizedEntity2, entities.get(1)); } @Test public void recognizeEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeEntitiesForFaultyText() { assertFalse(client.recognizeEntities("!@ } @Test public void recognizeEntitiesBatchInputSingleError() { recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> { DocumentResultCollection<RecognizeEntitiesResult> l = client.recognizeBatchEntities(inputs); for (RecognizeEntitiesResult recognizeEntitiesResult : l) { Exception exception = assertThrows(TextAnalyticsException.class, () -> recognizeEntitiesResult.getEntities()); assertTrue(exception.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE)); } }); } @Test public void recognizeEntitiesForBatchInput() { recognizeBatchCategorizedEntityRunner((inputs) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeBatchEntities(inputs))); } @Test public void 
recognizeEntitiesForBatchInputShowStatistics() { recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) -> validateCategorizedEntity(true, getExpectedBatchCategorizedEntities(), client.recognizeBatchEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void recognizeEntitiesForBatchStringInput() { recognizeCategorizedEntityStringInputRunner((inputs) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeEntities(inputs))); } @Test public void recognizeEntitiesForListLanguageHint() { recognizeCatgeorizedEntitiesLanguageHintRunner((inputs, language) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeEntities(inputs, language, Context.NONE).getValue())); } @Test public void recognizePiiEntitiesForTextInput() { final PiiEntity piiEntity = new PiiEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.0); final PagedIterable<PiiEntity> entities = client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's."); validatePiiEntity(piiEntity, entities.iterator().next()); } @Test public void recognizePiiEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizePiiEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizePiiEntitiesForFaultyText() { assertFalse(client.recognizePiiEntities("!@ } @Test public void recognizePiiEntitiesForBatchInput() { recognizeBatchPiiRunner((inputs) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizeBatchPiiEntities(inputs))); } @Test public void recognizePiiEntitiesForBatchInputShowStatistics() { recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) -> validatePiiEntity(true, getExpectedBatchPiiEntities(), client.recognizeBatchPiiEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test 
public void recognizePiiEntitiesForBatchStringInput() { recognizePiiStringInputRunner((inputs) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntities(inputs))); } @Test public void recognizePiiEntitiesForListLanguageHint() { recognizePiiLanguageHintRunner((inputs, language) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntitiesWithResponse(inputs, language, Context.NONE).getValue())); } @Test public void recognizeLinkedEntitiesForTextInput() { final LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.0, 7, 26); final LinkedEntity linkedEntity1 = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch1), "en", "Seattle", "https: final List<LinkedEntity> linkedEntities = client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.").stream().collect(Collectors.toList()); validateLinkedEntity(linkedEntity1, linkedEntities.get(0)); } @Test public void recognizeLinkedEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeLinkedEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeLinkedEntitiesForFaultyText() { assertFalse(client.recognizeLinkedEntities("!@ } @Test public void recognizeLinkedEntitiesForBatchInput() { recognizeBatchLinkedEntityRunner((inputs) -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntities(inputs))); } @Test public void recognizeLinkedEntitiesForBatchInputShowStatistics() { recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) -> validateLinkedEntity(true, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void recognizeLinkedEntitiesForBatchStringInput() { recognizeLinkedStringInputRunner((inputs) -> validateLinkedEntity(false, 
getExpectedBatchLinkedEntities(), client.recognizeLinkedEntities(inputs))); } @Test public void recognizeLinkedEntitiesForListLanguageHint() { recognizeLinkedLanguageHintRunner((inputs, language) -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntitiesWithResponse(inputs, language, Context.NONE).getValue())); } @Test public void extractKeyPhrasesForTextInput() { assertEquals("monde", client.extractKeyPhrases("Bonjour tout le monde.").iterator().next()); } @Test public void extractKeyPhrasesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.extractKeyPhrases("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void extractKeyPhrasesForFaultyText() { assertFalse(client.extractKeyPhrases("!@ } @Test public void extractKeyPhrasesForBatchInput() { extractBatchKeyPhrasesRunner((inputs) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrases(inputs))); } @Test public void extractKeyPhrasesForBatchInputShowStatistics() { extractBatchKeyPhrasesShowStatsRunner((inputs, options) -> validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrasesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void extractKeyPhrasesForBatchStringInput() { extractKeyPhrasesStringInputRunner((inputs) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrases(inputs))); } @Test public void extractKeyPhrasesForListLanguageHint() { extractKeyPhrasesLanguageHintRunner((inputs, language) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrasesWithResponse(inputs, language, Context.NONE).getValue())); } /** * Test analyzing sentiment for a string input. 
*/ @Test public void analyseSentimentForTextInput() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment( SentimentLabel.MIXED, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEGATIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 31, 0), new SentenceSentiment(SentimentLabel.POSITIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 35, 32) )); DocumentSentiment analyzeSentimentResult = client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi."); validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult); } /** * Verifies that a TextAnalyticsException is thrown for an empty text input. */ @Test public void analyseSentimentForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.analyzeSentiment("")); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Test analyzing sentiment for a faulty input text. */ @Test public void analyseSentimentForFaultyText() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 1, 0), new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 4, 1) )); DocumentSentiment analyzeSentimentResult = client.analyzeSentiment("!@ validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult); } /** * Test analyzing sentiment for a list of string input. */ @Test public void analyseSentimentForBatchStringInput() { analyseSentimentStringInputRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentiment(inputs))); } /** * Test analyzing sentiment for a list of string input with language hint. 
*/ @Test public void analyseSentimentForListLanguageHint() { analyseSentimentLanguageHintRunner((inputs, language) -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentimentWithResponse(inputs, language, Context.NONE).getValue())); } /** * Test analyzing sentiment for batch input. */ @Test public void analyseSentimentForBatchInput() { analyseBatchSentimentRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeBatchSentiment(inputs))); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void analyseSentimentForBatchInputShowStatistics() { analyseBatchSentimentShowStatsRunner((inputs, options) -> validateSentiment(true, getExpectedBatchTextSentiment(), client.analyzeBatchSentimentWithResponse(inputs, options, Context.NONE).getValue())); } /** * Test client builder with valid API key */ @Test public void validKey() { final TextAnalyticsClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(getApiKey())).buildClient(); validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), client.detectLanguage("This is a test English Text")); } /** * Test client builder with invalid API key */ @Test public void invalidKey() { final TextAnalyticsClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildClient(); assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text")); } /** * Test client with valid API key but update to invalid key and make call to server. 
*/ @Test public void updateToInvalidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(getApiKey()); final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient(); credential.updateCredential(INVALID_KEY); assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text")); } /** * Test client with invalid API key but update to valid key and make call to server. */ @Test public void updateToValidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(INVALID_KEY); final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient(); credential.updateCredential(getApiKey()); validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), client.detectLanguage("This is a test English Text")); } /** * Test for null service version, which would take take the default service version by default */ @Test public void nullServiceVersion() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .retryPolicy(new RetryPolicy()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(null); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), clientBuilder.buildClient().detectLanguage("This is a test English Text")); } /** * Test for default pipeline in client builder */ @Test public void defaultPipeline() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) 
.configuration(Configuration.getGlobalConfiguration()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), clientBuilder.buildClient().detectLanguage("This is a test English Text")); } }
use `validatePrimaryLanguage` instead.
public void updateToValidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(INVALID_KEY); final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient(); credential.updateCredential(getApiKey()); validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(client.detectLanguage("This is a test English Text"))); }
validateDetectedLanguages(
public void updateToValidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(INVALID_KEY); final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient(); credential.updateCredential(getApiKey()); validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), client.detectLanguage("This is a test English Text")); }
class TextAnalyticsClientTest extends TextAnalyticsClientTestBase { private TextAnalyticsClient client; @Override protected void beforeTest() { client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .pipeline(httpPipeline) .buildClient()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void detectLanguagesBatchInputShowStatistics() { detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguage(true, getExpectedBatchDetectedLanguages(), client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE).getValue())); } /** * Test Detect batch input languages. */ @Test public void detectLanguagesBatchInput() { detectLanguageRunner((inputs) -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), client.detectBatchLanguages(inputs))); } /** * Test detect batch languages for a list of string input with country hint. */ @Test public void detectLanguagesBatchListCountryHint() { detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguage( false, getExpectedBatchDetectedLanguages(), client.detectLanguagesWithResponse(inputs, countryHint, Context.NONE).getValue())); } /** * Test detect batch languages for a list of string input. */ @Test public void detectLanguagesBatchStringInput() { detectLanguageStringInputRunner((inputs) -> validateDetectLanguage( false, getExpectedBatchDetectedLanguages(), client.detectLanguages(inputs))); } /** * Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages. 
*/ @Test public void detectSingleTextLanguage() { DetectedLanguage primaryLanguage = new DetectedLanguage("English", "en", 0.0); List<DetectedLanguage> expectedLanguageList = Collections.singletonList(primaryLanguage); validateDetectedLanguages( Collections.singletonList(client.detectLanguage("This is a test English Text")), expectedLanguageList); } /** * Verifies that an exception is thrown when null text is passed. */ @Test public void detectLanguagesNullInput() { assertThrows(NullPointerException.class, () -> client.detectBatchLanguagesWithResponse(null, null, Context.NONE).getValue()); } /** * Verifies that a TextAnalyticsException is thrown for an empty text input. */ @Test public void detectLanguageEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguage("")); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed. */ @Test public void detectLanguageFaultyText() { DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0); validateDetectedLanguages(Collections.singletonList(client.detectLanguage("!@ Collections.singletonList(primaryLanguage)); } /** * Verifies that a TextAnalyticsException is thrown for a text input with invalid country hint. */ @Test public void detectLanguageInvalidCountryHint() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguageWithResponse("Este es un document escrito en Español.", "en", Context.NONE)); assertTrue(exception.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that a bad request exception is returned for input documents with same IDs. 
*/ @Test public void detectLanguageDuplicateIdInput() { detectLanguageDuplicateIdRunner((inputs, options) -> { HttpResponseException response = assertThrows(HttpResponseException.class, () -> client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE)); assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode()); }); } @Test public void recognizeEntitiesForTextInput() { final CategorizedEntity categorizedEntity1 = new CategorizedEntity("Seattle", "Location", null, 26, 7, 0.0); final CategorizedEntity categorizedEntity2 = new CategorizedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0); final List<CategorizedEntity> entities = client.recognizeEntities("I had a wonderful trip to Seattle last week.").stream().collect(Collectors.toList()); validateCategorizedEntity(categorizedEntity1, entities.get(0)); validateCategorizedEntity(categorizedEntity2, entities.get(1)); } @Test public void recognizeEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeEntitiesForFaultyText() { assertFalse(client.recognizeEntities("!@ } @Test public void recognizeEntitiesBatchInputSingleError() { recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> { DocumentResultCollection<RecognizeEntitiesResult> l = client.recognizeBatchEntities(inputs); for (RecognizeEntitiesResult recognizeEntitiesResult : l) { Exception exception = assertThrows(TextAnalyticsException.class, () -> recognizeEntitiesResult.getEntities()); assertTrue(exception.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE)); } }); } @Test public void recognizeEntitiesForBatchInput() { recognizeBatchCategorizedEntityRunner((inputs) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeBatchEntities(inputs))); } @Test public void 
recognizeEntitiesForBatchInputShowStatistics() { recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) -> validateCategorizedEntity(true, getExpectedBatchCategorizedEntities(), client.recognizeBatchEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void recognizeEntitiesForBatchStringInput() { recognizeCategorizedEntityStringInputRunner((inputs) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeEntities(inputs))); } @Test public void recognizeEntitiesForListLanguageHint() { recognizeCatgeorizedEntitiesLanguageHintRunner((inputs, language) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeEntities(inputs, language, Context.NONE).getValue())); } @Test public void recognizePiiEntitiesForTextInput() { final PiiEntity piiEntity = new PiiEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.0); final PagedIterable<PiiEntity> entities = client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's."); validatePiiEntity(piiEntity, entities.iterator().next()); } @Test public void recognizePiiEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizePiiEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizePiiEntitiesForFaultyText() { assertFalse(client.recognizePiiEntities("!@ } @Test public void recognizePiiEntitiesForBatchInput() { recognizeBatchPiiRunner((inputs) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizeBatchPiiEntities(inputs))); } @Test public void recognizePiiEntitiesForBatchInputShowStatistics() { recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) -> validatePiiEntity(true, getExpectedBatchPiiEntities(), client.recognizeBatchPiiEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test 
public void recognizePiiEntitiesForBatchStringInput() { recognizePiiStringInputRunner((inputs) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntities(inputs))); } @Test public void recognizePiiEntitiesForListLanguageHint() { recognizePiiLanguageHintRunner((inputs, language) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntitiesWithResponse(inputs, language, Context.NONE).getValue())); } @Test public void recognizeLinkedEntitiesForTextInput() { final LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.0, 7, 26); final LinkedEntity linkedEntity1 = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch1), "en", "Seattle", "https: final List<LinkedEntity> linkedEntities = client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.").stream().collect(Collectors.toList()); validateLinkedEntity(linkedEntity1, linkedEntities.get(0)); } @Test public void recognizeLinkedEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeLinkedEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeLinkedEntitiesForFaultyText() { assertFalse(client.recognizeLinkedEntities("!@ } @Test public void recognizeLinkedEntitiesForBatchInput() { recognizeBatchLinkedEntityRunner((inputs) -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntities(inputs))); } @Test public void recognizeLinkedEntitiesForBatchInputShowStatistics() { recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) -> validateLinkedEntity(true, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void recognizeLinkedEntitiesForBatchStringInput() { recognizeLinkedStringInputRunner((inputs) -> validateLinkedEntity(false, 
getExpectedBatchLinkedEntities(), client.recognizeLinkedEntities(inputs))); } @Test public void recognizeLinkedEntitiesForListLanguageHint() { recognizeLinkedLanguageHintRunner((inputs, language) -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntitiesWithResponse(inputs, language, Context.NONE).getValue())); } @Test public void extractKeyPhrasesForTextInput() { assertEquals("monde", client.extractKeyPhrases("Bonjour tout le monde.").iterator().next()); } @Test public void extractKeyPhrasesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.extractKeyPhrases("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void extractKeyPhrasesForFaultyText() { assertFalse(client.extractKeyPhrases("!@ } @Test public void extractKeyPhrasesForBatchInput() { extractBatchKeyPhrasesRunner((inputs) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrases(inputs))); } @Test public void extractKeyPhrasesForBatchInputShowStatistics() { extractBatchKeyPhrasesShowStatsRunner((inputs, options) -> validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrasesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void extractKeyPhrasesForBatchStringInput() { extractKeyPhrasesStringInputRunner((inputs) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrases(inputs))); } @Test public void extractKeyPhrasesForListLanguageHint() { extractKeyPhrasesLanguageHintRunner((inputs, language) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrasesWithResponse(inputs, language, Context.NONE).getValue())); } /** * Test analyzing sentiment for a string input. 
*/ @Test public void analyseSentimentForTextInput() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment( SentimentLabel.MIXED, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEGATIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 31, 0), new SentenceSentiment(SentimentLabel.POSITIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 35, 32) )); DocumentSentiment analyzeSentimentResult = client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi."); validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult); } /** * Verifies that a TextAnalyticsException is thrown for an empty text input. */ @Test public void analyseSentimentForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.analyzeSentiment("")); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Test analyzing sentiment for a faulty input text. */ @Test public void analyseSentimentForFaultyText() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 1, 0), new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 4, 1) )); DocumentSentiment analyzeSentimentResult = client.analyzeSentiment("!@ validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult); } /** * Test analyzing sentiment for a list of string input. */ @Test public void analyseSentimentForBatchStringInput() { analyseSentimentStringInputRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentiment(inputs))); } /** * Test analyzing sentiment for a list of string input with language hint. 
*/ @Test public void analyseSentimentForListLanguageHint() { analyseSentimentLanguageHintRunner((inputs, language) -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentimentWithResponse(inputs, language, Context.NONE).getValue())); } /** * Test analyzing sentiment for batch input. */ @Test public void analyseSentimentForBatchInput() { analyseBatchSentimentRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeBatchSentiment(inputs))); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void analyseSentimentForBatchInputShowStatistics() { analyseBatchSentimentShowStatsRunner((inputs, options) -> validateSentiment(true, getExpectedBatchTextSentiment(), client.analyzeBatchSentimentWithResponse(inputs, options, Context.NONE).getValue())); } /** * Test client builder with valid API key */ @Test public void validKey() { final TextAnalyticsClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(getApiKey())).buildClient(); validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(client.detectLanguage("This is a test English Text"))); } /** * Test client builder with invalid API key */ @Test public void invalidKey() { final TextAnalyticsClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildClient(); assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text")); } /** * Test client with valid API key but update to invalid key and make call to server. 
*/ @Test public void updateToInvalidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(getApiKey()); final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient(); credential.updateCredential(INVALID_KEY); assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text")); } /** * Test client with invalid API key but update to valid key and make call to server. */ @Test /** * Test for null service version, which would take take the default service version by default */ @Test public void nullServiceVersion() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .retryPolicy(new RetryPolicy()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(null); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(clientBuilder.buildClient().detectLanguage("This is a test English Text"))); } /** * Test for default pipeline in client builder */ @Test public void defaultPipeline() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .configuration(Configuration.getGlobalConfiguration()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) 
.addPolicy(interceptorManager.getRecordPolicy()); } validateDetectedLanguages( Collections.singletonList(new DetectedLanguage("English", "en", 1.0)), Collections.singletonList(clientBuilder.buildClient().detectLanguage("This is a test English Text"))); } }
class TextAnalyticsClientTest extends TextAnalyticsClientTestBase { private TextAnalyticsClient client; @Override protected void beforeTest() { client = clientSetup(httpPipeline -> new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .pipeline(httpPipeline) .buildClient()); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void detectLanguagesBatchInputShowStatistics() { detectLanguageShowStatisticsRunner((inputs, options) -> validateDetectLanguage(true, getExpectedBatchDetectedLanguages(), client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE).getValue())); } /** * Test Detect batch input languages. */ @Test public void detectLanguagesBatchInput() { detectLanguageRunner((inputs) -> validateDetectLanguage(false, getExpectedBatchDetectedLanguages(), client.detectBatchLanguages(inputs))); } /** * Test detect batch languages for a list of string input with country hint. */ @Test public void detectLanguagesBatchListCountryHint() { detectLanguagesCountryHintRunner((inputs, countryHint) -> validateDetectLanguage( false, getExpectedBatchDetectedLanguages(), client.detectLanguagesWithResponse(inputs, countryHint, Context.NONE).getValue())); } /** * Test detect batch languages for a list of string input. */ @Test public void detectLanguagesBatchStringInput() { detectLanguageStringInputRunner((inputs) -> validateDetectLanguage( false, getExpectedBatchDetectedLanguages(), client.detectLanguages(inputs))); } /** * Verifies that a single DetectLanguageResult is returned for a text input to detectLanguages. */ @Test public void detectSingleTextLanguage() { validatePrimaryLanguage(new DetectedLanguage("English", "en", 0.0), client.detectLanguage("This is a test English Text")); } /** * Verifies that an exception is thrown when null text is passed. 
*/ @Test public void detectLanguagesNullInput() { assertThrows(NullPointerException.class, () -> client.detectBatchLanguagesWithResponse(null, null, Context.NONE).getValue()); } /** * Verifies that a TextAnalyticsException is thrown for an empty text input. */ @Test public void detectLanguageEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguage("")); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that detectLanguage returns an "UNKNOWN" result when faulty text is passed. */ @Test public void detectLanguageFaultyText() { DetectedLanguage primaryLanguage = new DetectedLanguage("(Unknown)", "(Unknown)", 0.0); validatePrimaryLanguage(client.detectLanguage("!@ } /** * Verifies that a TextAnalyticsException is thrown for a text input with invalid country hint. */ @Test public void detectLanguageInvalidCountryHint() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.detectLanguageWithResponse("Este es un document escrito en Español.", "en", Context.NONE)); assertTrue(exception.getMessage().equals(INVALID_COUNTRY_HINT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Verifies that a bad request exception is returned for input documents with same IDs. 
*/ @Test public void detectLanguageDuplicateIdInput() { detectLanguageDuplicateIdRunner((inputs, options) -> { HttpResponseException response = assertThrows(HttpResponseException.class, () -> client.detectBatchLanguagesWithResponse(inputs, options, Context.NONE)); assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getResponse().getStatusCode()); }); } @Test public void recognizeEntitiesForTextInput() { final CategorizedEntity categorizedEntity1 = new CategorizedEntity("Seattle", "Location", null, 26, 7, 0.0); final CategorizedEntity categorizedEntity2 = new CategorizedEntity("last week", "DateTime", "DateRange", 34, 9, 0.0); final List<CategorizedEntity> entities = client.recognizeEntities("I had a wonderful trip to Seattle last week.").stream().collect(Collectors.toList()); validateCategorizedEntity(categorizedEntity1, entities.get(0)); validateCategorizedEntity(categorizedEntity2, entities.get(1)); } @Test public void recognizeEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeEntitiesForFaultyText() { assertFalse(client.recognizeEntities("!@ } @Test public void recognizeEntitiesBatchInputSingleError() { recognizeBatchCategorizedEntitySingleErrorRunner((inputs) -> { DocumentResultCollection<RecognizeEntitiesResult> l = client.recognizeBatchEntities(inputs); for (RecognizeEntitiesResult recognizeEntitiesResult : l) { Exception exception = assertThrows(TextAnalyticsException.class, () -> recognizeEntitiesResult.getEntities()); assertTrue(exception.getMessage().equals(BATCH_ERROR_EXCEPTION_MESSAGE)); } }); } @Test public void recognizeEntitiesForBatchInput() { recognizeBatchCategorizedEntityRunner((inputs) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeBatchEntities(inputs))); } @Test public void 
recognizeEntitiesForBatchInputShowStatistics() { recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) -> validateCategorizedEntity(true, getExpectedBatchCategorizedEntities(), client.recognizeBatchEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void recognizeEntitiesForBatchStringInput() { recognizeCategorizedEntityStringInputRunner((inputs) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeEntities(inputs))); } @Test public void recognizeEntitiesForListLanguageHint() { recognizeCatgeorizedEntitiesLanguageHintRunner((inputs, language) -> validateCategorizedEntity(false, getExpectedBatchCategorizedEntities(), client.recognizeEntities(inputs, language, Context.NONE).getValue())); } @Test public void recognizePiiEntitiesForTextInput() { final PiiEntity piiEntity = new PiiEntity("859-98-0987", "U.S. Social Security Number (SSN)", "", 28, 11, 0.0); final PagedIterable<PiiEntity> entities = client.recognizePiiEntities("Microsoft employee with ssn 859-98-0987 is using our awesome API's."); validatePiiEntity(piiEntity, entities.iterator().next()); } @Test public void recognizePiiEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizePiiEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizePiiEntitiesForFaultyText() { assertFalse(client.recognizePiiEntities("!@ } @Test public void recognizePiiEntitiesForBatchInput() { recognizeBatchPiiRunner((inputs) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizeBatchPiiEntities(inputs))); } @Test public void recognizePiiEntitiesForBatchInputShowStatistics() { recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) -> validatePiiEntity(true, getExpectedBatchPiiEntities(), client.recognizeBatchPiiEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test 
public void recognizePiiEntitiesForBatchStringInput() { recognizePiiStringInputRunner((inputs) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntities(inputs))); } @Test public void recognizePiiEntitiesForListLanguageHint() { recognizePiiLanguageHintRunner((inputs, language) -> validatePiiEntity(false, getExpectedBatchPiiEntities(), client.recognizePiiEntitiesWithResponse(inputs, language, Context.NONE).getValue())); } @Test public void recognizeLinkedEntitiesForTextInput() { final LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("Seattle", 0.0, 7, 26); final LinkedEntity linkedEntity1 = new LinkedEntity("Seattle", Collections.singletonList(linkedEntityMatch1), "en", "Seattle", "https: final List<LinkedEntity> linkedEntities = client.recognizeLinkedEntities("I had a wonderful trip to Seattle last week.").stream().collect(Collectors.toList()); validateLinkedEntity(linkedEntity1, linkedEntities.get(0)); } @Test public void recognizeLinkedEntitiesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.recognizeLinkedEntities("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void recognizeLinkedEntitiesForFaultyText() { assertFalse(client.recognizeLinkedEntities("!@ } @Test public void recognizeLinkedEntitiesForBatchInput() { recognizeBatchLinkedEntityRunner((inputs) -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntities(inputs))); } @Test public void recognizeLinkedEntitiesForBatchInputShowStatistics() { recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) -> validateLinkedEntity(true, getExpectedBatchLinkedEntities(), client.recognizeBatchLinkedEntitiesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void recognizeLinkedEntitiesForBatchStringInput() { recognizeLinkedStringInputRunner((inputs) -> validateLinkedEntity(false, 
getExpectedBatchLinkedEntities(), client.recognizeLinkedEntities(inputs))); } @Test public void recognizeLinkedEntitiesForListLanguageHint() { recognizeLinkedLanguageHintRunner((inputs, language) -> validateLinkedEntity(false, getExpectedBatchLinkedEntities(), client.recognizeLinkedEntitiesWithResponse(inputs, language, Context.NONE).getValue())); } @Test public void extractKeyPhrasesForTextInput() { assertEquals("monde", client.extractKeyPhrases("Bonjour tout le monde.").iterator().next()); } @Test public void extractKeyPhrasesForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.extractKeyPhrases("").iterator().hasNext()); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } @Test public void extractKeyPhrasesForFaultyText() { assertFalse(client.extractKeyPhrases("!@ } @Test public void extractKeyPhrasesForBatchInput() { extractBatchKeyPhrasesRunner((inputs) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrases(inputs))); } @Test public void extractKeyPhrasesForBatchInputShowStatistics() { extractBatchKeyPhrasesShowStatsRunner((inputs, options) -> validateExtractKeyPhrase(true, getExpectedBatchKeyPhrases(), client.extractBatchKeyPhrasesWithResponse(inputs, options, Context.NONE).getValue())); } @Test public void extractKeyPhrasesForBatchStringInput() { extractKeyPhrasesStringInputRunner((inputs) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrases(inputs))); } @Test public void extractKeyPhrasesForListLanguageHint() { extractKeyPhrasesLanguageHintRunner((inputs, language) -> validateExtractKeyPhrase(false, getExpectedBatchKeyPhrases(), client.extractKeyPhrasesWithResponse(inputs, language, Context.NONE).getValue())); } /** * Test analyzing sentiment for a string input. 
*/ @Test public void analyseSentimentForTextInput() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment( SentimentLabel.MIXED, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEGATIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 31, 0), new SentenceSentiment(SentimentLabel.POSITIVE, new SentimentScorePerLabel(0.0, 0.0, 0.0), 35, 32) )); DocumentSentiment analyzeSentimentResult = client.analyzeSentiment("The hotel was dark and unclean. The restaurant had amazing gnocchi."); validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult); } /** * Verifies that a TextAnalyticsException is thrown for an empty text input. */ @Test public void analyseSentimentForEmptyText() { Exception exception = assertThrows(TextAnalyticsException.class, () -> client.analyzeSentiment("")); assertTrue(exception.getMessage().equals(INVALID_DOCUMENT_EXPECTED_EXCEPTION_MESSAGE)); } /** * Test analyzing sentiment for a faulty input text. */ @Test public void analyseSentimentForFaultyText() { final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), Arrays.asList( new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 1, 0), new SentenceSentiment(SentimentLabel.NEUTRAL, new SentimentScorePerLabel(0.0, 0.0, 0.0), 4, 1) )); DocumentSentiment analyzeSentimentResult = client.analyzeSentiment("!@ validateAnalysedSentiment(expectedDocumentSentiment, analyzeSentimentResult); } /** * Test analyzing sentiment for a list of string input. */ @Test public void analyseSentimentForBatchStringInput() { analyseSentimentStringInputRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentiment(inputs))); } /** * Test analyzing sentiment for a list of string input with language hint. 
*/ @Test public void analyseSentimentForListLanguageHint() { analyseSentimentLanguageHintRunner((inputs, language) -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeSentimentWithResponse(inputs, language, Context.NONE).getValue())); } /** * Test analyzing sentiment for batch input. */ @Test public void analyseSentimentForBatchInput() { analyseBatchSentimentRunner(inputs -> validateSentiment(false, getExpectedBatchTextSentiment(), client.analyzeBatchSentiment(inputs))); } /** * Verify that we can get statistics on the collection result when given a batch input with options. */ @Test public void analyseSentimentForBatchInputShowStatistics() { analyseBatchSentimentShowStatsRunner((inputs, options) -> validateSentiment(true, getExpectedBatchTextSentiment(), client.analyzeBatchSentimentWithResponse(inputs, options, Context.NONE).getValue())); } /** * Test client builder with valid API key */ @Test public void validKey() { final TextAnalyticsClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(getApiKey())).buildClient(); validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), client.detectLanguage("This is a test English Text")); } /** * Test client builder with invalid API key */ @Test public void invalidKey() { final TextAnalyticsClient client = createClientBuilder(getEndpoint(), new TextAnalyticsApiKeyCredential(INVALID_KEY)).buildClient(); assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text")); } /** * Test client with valid API key but update to invalid key and make call to server. 
*/ @Test public void updateToInvalidKey() { final TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential(getApiKey()); final TextAnalyticsClient client = createClientBuilder(getEndpoint(), credential).buildClient(); credential.updateCredential(INVALID_KEY); assertThrows(HttpResponseException.class, () -> client.detectLanguage("This is a test English Text")); } /** * Test client with invalid API key but update to valid key and make call to server. */ @Test /** * Test for null service version, which would take take the default service version by default */ @Test public void nullServiceVersion() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .retryPolicy(new RetryPolicy()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)) .serviceVersion(null); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), clientBuilder.buildClient().detectLanguage("This is a test English Text")); } /** * Test for default pipeline in client builder */ @Test public void defaultPipeline() { final TextAnalyticsClientBuilder clientBuilder = new TextAnalyticsClientBuilder() .endpoint(getEndpoint()) .apiKey(new TextAnalyticsApiKeyCredential(getApiKey())) .configuration(Configuration.getGlobalConfiguration()) .httpLogOptions(new HttpLogOptions().setLogLevel(HttpLogDetailLevel.BODY_AND_HEADERS)); if (interceptorManager.isPlaybackMode()) { clientBuilder.httpClient(interceptorManager.getPlaybackClient()); } else { clientBuilder.httpClient(new NettyAsyncHttpClientBuilder().wiretap(true).build()) .addPolicy(interceptorManager.getRecordPolicy()); } 
validatePrimaryLanguage(new DetectedLanguage("English", "en", 1.0), clientBuilder.buildClient().detectLanguage("This is a test English Text")); } }
consider renaming this to `detectLanguageBatchWithResponse` to be consistent
Mono<Response<DetectLanguageResult>> detectLanguageWithResponse(String text, String countryHint, Context context) { Objects.requireNonNull(text, "'text' cannot be null."); List<DetectLanguageInput> languageInputs = Collections.singletonList(new DetectLanguageInput("0", text, countryHint)); return detectBatchLanguageWithResponse(languageInputs, null, context) .map(Transforms::processSingleResponseErrorResult); }
return detectBatchLanguageWithResponse(languageInputs, null, context)
new DetectLanguageInput("0", text, countryHint)); return detectLanguageBatchWithResponse(languageInputs, null, context) .map(response -> new SimpleResponse<>(response, Transforms.processSingleResponseErrorResult(response).getValue().getPrimaryLanguage())); } Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguageBatchWithResponse( List<String> textInputs, String countryHint, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); List<DetectLanguageInput> detectLanguageInputs = mapByIndex(textInputs, (index, value) -> new DetectLanguageInput(index, value, countryHint)); return detectLanguageBatchWithResponse(detectLanguageInputs, options, context); }
class DetectLanguageAsyncClient { private final ClientLogger logger = new ClientLogger(DetectLanguageAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code DetectLanguageAsyncClient} that sends requests to the Text Analytics services's detect language * endpoint. * * @param service The proxy service used to perform REST calls. */ DetectLanguageAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DetectLanguageResult>> detectLanguageWithResponse(String text, String countryHint, Context context) { Objects.requireNonNull(text, "'text' cannot be null."); List<DetectLanguageInput> languageInputs = Collections.singletonList( Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectBatchLanguageWithResponse( List<DetectLanguageInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final LanguageBatchInput languageBatchInput = new LanguageBatchInput() .setDocuments(textInputs.stream().map(detectLanguageInput -> new LanguageInput() .setId(detectLanguageInput.getId()).setText(detectLanguageInput.getText()) .setCountryHint(detectLanguageInput.getCountryHint())).collect(Collectors.toList())); return service.languagesWithRestResponseAsync( languageBatchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of language input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of detected language output - {}", response.getValue())) .doOnError(error -> logger.warning("Failed to detect languages - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link LanguageResult} to {@link DocumentResultCollection}. 
* * @param languageResult the {@link LanguageResult} returned by the service. * * @return the {@link DocumentResultCollection} of {@link DetectLanguageResult} to be returned by the SDK. */ private static DocumentResultCollection<DetectLanguageResult> toDocumentResultCollection( final LanguageResult languageResult) { final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>(); for (DocumentLanguage documentLanguage : languageResult.getDocuments()) { DetectedLanguage primaryLanguage = null; List<com.azure.ai.textanalytics.implementation.models.DetectedLanguage> detectedLanguages = documentLanguage.getDetectedLanguages(); if (detectedLanguages.size() >= 1) { com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguageResult = detectedLanguages.get(0); primaryLanguage = new DetectedLanguage(detectedLanguageResult.getName(), detectedLanguageResult.getIso6391Name(), detectedLanguageResult.getScore()); } detectLanguageResults.add(new DetectLanguageResult(documentLanguage.getId(), documentLanguage.getStatistics() == null ? null : Transforms.toTextDocumentStatistics(documentLanguage.getStatistics()), null, primaryLanguage)); } for (DocumentError documentError : languageResult.getErrors()) { com.azure.ai.textanalytics.models.TextAnalyticsError error = Transforms.toTextAnalyticsError(documentError.getError()); detectLanguageResults.add( new DetectLanguageResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(detectLanguageResults, languageResult.getModelVersion(), languageResult.getStatistics() == null ? null : Transforms.toBatchStatistics(languageResult.getStatistics())); } }
class DetectLanguageAsyncClient { private final ClientLogger logger = new ClientLogger(DetectLanguageAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code DetectLanguageAsyncClient} that sends requests to the Text Analytics services's detect language * endpoint. * * @param service The proxy service used to perform REST calls. */ DetectLanguageAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DetectedLanguage>> detectLanguageWithResponse(String text, String countryHint, Context context) { Objects.requireNonNull(text, "'text' cannot be null."); List<DetectLanguageInput> languageInputs = Collections.singletonList( Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguageBatchWithResponse( List<DetectLanguageInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final LanguageBatchInput languageBatchInput = new LanguageBatchInput() .setDocuments(textInputs.stream().map(detectLanguageInput -> new LanguageInput() .setId(detectLanguageInput.getId()).setText(detectLanguageInput.getText()) .setCountryHint(detectLanguageInput.getCountryHint())).collect(Collectors.toList())); return service.languagesWithRestResponseAsync( languageBatchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of language input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of detected language output - {}", response.getValue())) .doOnError(error -> logger.warning("Failed to detect language - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link LanguageResult} to {@link DocumentResultCollection}. 
* * @param languageResult the {@link LanguageResult} returned by the service. * * @return the {@link DocumentResultCollection} of {@link DetectLanguageResult} to be returned by the SDK. */ private static DocumentResultCollection<DetectLanguageResult> toDocumentResultCollection( final LanguageResult languageResult) { final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>(); for (DocumentLanguage documentLanguage : languageResult.getDocuments()) { DetectedLanguage primaryLanguage = null; List<com.azure.ai.textanalytics.implementation.models.DetectedLanguage> detectedLanguages = documentLanguage.getDetectedLanguages(); if (detectedLanguages.size() >= 1) { detectedLanguages.sort( Comparator.comparing(com.azure.ai.textanalytics.implementation.models.DetectedLanguage::getScore)); com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguageResult = detectedLanguages.get(0); primaryLanguage = new DetectedLanguage(detectedLanguageResult.getName(), detectedLanguageResult.getIso6391Name(), detectedLanguageResult.getScore()); } detectLanguageResults.add(new DetectLanguageResult(documentLanguage.getId(), documentLanguage.getStatistics() == null ? null : Transforms.toTextDocumentStatistics(documentLanguage.getStatistics()), null, primaryLanguage)); } for (DocumentError documentError : languageResult.getErrors()) { com.azure.ai.textanalytics.models.TextAnalyticsError error = Transforms.toTextAnalyticsError(documentError.getError()); detectLanguageResults.add( new DetectLanguageResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(detectLanguageResults, languageResult.getModelVersion(), languageResult.getStatistics() == null ? null : Transforms.toBatchStatistics(languageResult.getStatistics())); } }
ditto
public void createDocumentWithErrorClient() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.ERROR, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); Assert.assertTrue(logger.isErrorEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } }
Assert.assertTrue(logger.isErrorEnabled());
public void createDocumentWithErrorClient() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.ERROR, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); assertThat(logger.isErrorEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } }
class LogLevelTest extends TestSuiteBase { public final static String COSMOS_DB_LOGGING_CATEGORY = "com.azure.data.cosmos"; public final static String NETWORK_LOGGING_CATEGORY = "com.azure.data.cosmos.netty-network"; public final static String LOG_PATTERN_1 = "HTTP/1.1 200 Ok."; public final static String LOG_PATTERN_2 = "| 0 1 2 3 4 5 6 7 8 9 a b c d e f |"; public final static String LOG_PATTERN_3 = "USER_EVENT: SslHandshakeCompletionEvent(SUCCESS)"; public final static String LOG_PATTERN_4 = "CONNECT: "; private static final String APPENDER_NAME = "StringWriterAppender"; private static CosmosContainer createdCollection; private static CosmosClient client; public LogLevelTest() { super(createGatewayRxDocumentClient()); } @BeforeClass(groups = { "simple" }, timeOut = SETUP_TIMEOUT) public void beforeClass() { client = clientBuilder().build(); createdCollection = getSharedMultiPartitionCosmosContainer(client); } @AfterMethod(groups = { "simple" }) public void afterMethod() { resetLoggingConfiguration(); } @AfterClass(groups = { "simple" }, timeOut = SHUTDOWN_TIMEOUT) public void afterClass() { resetLoggingConfiguration(); } /** * This test will try to create document with netty wire DEBUG logging and * validate it. 
* * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithDebugLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.DEBUG, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); Assert.assertTrue(logger.isDebugEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire WARN logging and * validate it. * * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithWarningLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.WARN, APPENDER_NAME, consoleWriter); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire TRACE logging and * validate it. 
* * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithTraceLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.TRACE, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); Assert.assertTrue(logger.isTraceEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_1); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_2); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_3); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_4); } finally { safeClose(client); } } @Ignore @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithTraceLevelAtRoot() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(COSMOS_DB_LOGGING_CATEGORY, Level.INFO, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(COSMOS_DB_LOGGING_CATEGORY); Assert.assertTrue(logger.isInfoEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_1); 
assertThat(consoleWriter.toString()).contains(LOG_PATTERN_2); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_3); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_4); } finally { safeClose(client); } } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithDebugLevelAtRoot() throws Exception { final StringWriter consoleWriter = new StringWriter(); final LoggerContext context = (LoggerContext) LogManager.getContext(false); final Configuration configuration = context.getConfiguration(); final AppenderRef[] cosmosAppenderRef = new AppenderRef[] { AppenderRef.createAppenderRef("STDOUT", null, null) }; final LoggerConfig cosmosConfig = LoggerConfig.createLogger(false, Level.DEBUG, COSMOS_DB_LOGGING_CATEGORY, null, cosmosAppenderRef, null, configuration, null); configuration.addLogger(COSMOS_DB_LOGGING_CATEGORY, cosmosConfig); context.updateLoggers(); final WriterAppender appender = WriterAppender.createAppender(PatternLayout.createDefaultLayout(configuration), null, consoleWriter, APPENDER_NAME, false, true); appender.start(); org.apache.logging.log4j.core.Logger logger = context.getLogger(NETWORK_LOGGING_CATEGORY); logger.addAppender(appender); Assert.assertTrue(LoggerFactory.getLogger(COSMOS_DB_LOGGING_CATEGORY).isDebugEnabled()); Assert.assertTrue(LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY).isInfoEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire ERROR logging and * validate it. 
* * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) /** * This test will try to create document with netty wire INFO logging and * validate it. * * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithInfoLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.INFO, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); Assert.assertTrue(logger.isInfoEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } private CosmosItemProperties getDocumentDefinition() { String uuid = UUID.randomUUID().toString(); CosmosItemProperties doc = new CosmosItemProperties( String.format("{ " + "\"id\": \"%s\", " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}", uuid, uuid)); return doc; } /** * Resets the logging configuration. */ static void resetLoggingConfiguration() { final URL resource = LogLevelTest.class.getClassLoader().getResource("log4j2-test.properties"); Assert.assertNotNull(resource); final ConfigurationSource defaultConfigurationSource; try { defaultConfigurationSource = ConfigurationSource.fromUri(resource.toURI()); } catch (URISyntaxException e) { Assert.fail("Should have been able to load test properties from '" + resource + "'. 
Exception" + e ); return; } final Configuration defaultConfiguration = ConfigurationBuilderFactory.newConfigurationBuilder() .setConfigurationSource(defaultConfigurationSource) .build(); final LoggerContext oldContext = (LoggerContext) LogManager.getContext(false); oldContext.stop(); final LoggerContext context = Configurator.initialize(defaultConfiguration); Assert.assertNotSame(oldContext, context); } /** * Adds a {@link WriterAppender} associated with the given {@code loggerName} to the current logging configuration. * * @param loggerName Name of the logger to add. * @param logLevel Level for the logger to log at. * @param appenderName The name of the appender. * @param consoleWriter The {@link Writer} associated with the WriterAppender. */ static void addAppenderAndLogger(String loggerName, Level logLevel, String appenderName, Writer consoleWriter) { final LoggerContext context = (LoggerContext) LogManager.getContext(false); final Configuration configuration = context.getConfiguration(); final WriterAppender appender = WriterAppender.createAppender(PatternLayout.createDefaultLayout(configuration), null, consoleWriter, appenderName, false, true); appender.start(); configuration.addAppender(appender); final AppenderRef[] appenderRefs = new AppenderRef[] { AppenderRef.createAppenderRef(appenderName, null, null) }; final LoggerConfig loggerConfiguration = LoggerConfig.createLogger(false, logLevel, loggerName, null, appenderRefs, null, configuration, null); configuration.addLogger(loggerName, loggerConfiguration); context.updateLoggers(); org.apache.logging.log4j.core.Logger logger = context.getLogger(loggerName); configuration.addLoggerAppender(logger, appender); } }
class LogLevelTest extends TestSuiteBase { public final static String COSMOS_DB_LOGGING_CATEGORY = "com.azure.data.cosmos"; public final static String NETWORK_LOGGING_CATEGORY = "com.azure.data.cosmos.netty-network"; public final static String LOG_PATTERN_1 = "HTTP/1.1 200 Ok."; public final static String LOG_PATTERN_2 = "| 0 1 2 3 4 5 6 7 8 9 a b c d e f |"; public final static String LOG_PATTERN_3 = "USER_EVENT: SslHandshakeCompletionEvent(SUCCESS)"; public final static String LOG_PATTERN_4 = "CONNECT: "; private static final String APPENDER_NAME = "StringWriterAppender"; private static CosmosContainer createdCollection; private static CosmosClient client; public LogLevelTest() { super(createGatewayRxDocumentClient()); } @BeforeClass(groups = { "simple" }, timeOut = SETUP_TIMEOUT) public void beforeClass() { client = clientBuilder().build(); createdCollection = getSharedMultiPartitionCosmosContainer(client); } @AfterMethod(groups = { "simple" }) public void afterMethod() { resetLoggingConfiguration(); } @AfterClass(groups = { "simple" }, timeOut = SHUTDOWN_TIMEOUT) public void afterClass() { resetLoggingConfiguration(); } /** * This test will try to create document with netty wire DEBUG logging and * validate it. 
* * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithDebugLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.DEBUG, APPENDER_NAME, consoleWriter); final Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); assertThat(logger.isDebugEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire WARN logging and * validate it. * * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithWarningLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.WARN, APPENDER_NAME, consoleWriter); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire TRACE logging and * validate it. 
* * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithTraceLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.TRACE, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); assertThat(logger.isTraceEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_1); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_2); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_3); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_4); } finally { safeClose(client); } } @Ignore @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithTraceLevelAtRoot() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(COSMOS_DB_LOGGING_CATEGORY, Level.INFO, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(COSMOS_DB_LOGGING_CATEGORY); assertThat(logger.isInfoEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_1); 
assertThat(consoleWriter.toString()).contains(LOG_PATTERN_2); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_3); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_4); } finally { safeClose(client); } } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithDebugLevelAtRoot() throws Exception { final StringWriter consoleWriter = new StringWriter(); final LoggerContext context = (LoggerContext) LogManager.getContext(false); final Configuration configuration = context.getConfiguration(); final AppenderRef[] cosmosAppenderRef = new AppenderRef[] { AppenderRef.createAppenderRef("STDOUT", null, null) }; final LoggerConfig cosmosConfig = LoggerConfig.createLogger(false, Level.DEBUG, COSMOS_DB_LOGGING_CATEGORY, null, cosmosAppenderRef, null, configuration, null); configuration.addLogger(COSMOS_DB_LOGGING_CATEGORY, cosmosConfig); context.updateLoggers(); final WriterAppender appender = WriterAppender.createAppender(PatternLayout.createDefaultLayout(configuration), null, consoleWriter, APPENDER_NAME, false, true); appender.start(); org.apache.logging.log4j.core.Logger logger = context.getLogger(NETWORK_LOGGING_CATEGORY); logger.addAppender(appender); assertThat(LoggerFactory.getLogger(COSMOS_DB_LOGGING_CATEGORY).isDebugEnabled()).isTrue(); assertThat(LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY).isInfoEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire ERROR logging and * validate it. 
* * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) /** * This test will try to create document with netty wire INFO logging and * validate it. * * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithInfoLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.INFO, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); assertThat(logger.isInfoEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } private CosmosItemProperties getDocumentDefinition() { String uuid = UUID.randomUUID().toString(); CosmosItemProperties doc = new CosmosItemProperties( String.format("{ " + "\"id\": \"%s\", " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}", uuid, uuid)); return doc; } /** * Resets the logging configuration. 
*/ static void resetLoggingConfiguration() { final URL resource = LogLevelTest.class.getClassLoader().getResource("log4j2-test.properties"); assertThat(resource).isNotNull(); final ConfigurationSource defaultConfigurationSource; try { defaultConfigurationSource = ConfigurationSource.fromUri(resource.toURI()); } catch (URISyntaxException e) { fail("Should have been able to load test properties from: " + resource, e); return; } final Configuration defaultConfiguration = ConfigurationBuilderFactory.newConfigurationBuilder() .setConfigurationSource(defaultConfigurationSource) .build(); final LoggerContext oldContext = (LoggerContext) LogManager.getContext(false); oldContext.stop(); final LoggerContext context = Configurator.initialize(defaultConfiguration); assertThat(context).isNotSameAs(oldContext); } /** * Adds a {@link WriterAppender} associated with the given {@code loggerName} to the current logging configuration. * * @param loggerName Name of the logger to add. * @param logLevel Level for the logger to log at. * @param appenderName The name of the appender. * @param consoleWriter The {@link Writer} associated with the WriterAppender. 
*/ static void addAppenderAndLogger(String loggerName, Level logLevel, String appenderName, Writer consoleWriter) { final LoggerContext context = (LoggerContext) LogManager.getContext(false); final Configuration configuration = context.getConfiguration(); final WriterAppender appender = WriterAppender.createAppender(PatternLayout.createDefaultLayout(configuration), null, consoleWriter, appenderName, false, true); appender.start(); configuration.addAppender(appender); final AppenderRef[] appenderRefs = new AppenderRef[] { AppenderRef.createAppenderRef(appenderName, null, null) }; final LoggerConfig loggerConfiguration = LoggerConfig.createLogger(false, logLevel, loggerName, null, appenderRefs, null, configuration, null); configuration.addLogger(loggerName, loggerConfiguration); context.updateLoggers(); org.apache.logging.log4j.core.Logger logger = context.getLogger(loggerName); configuration.addLoggerAppender(logger, appender); } }
here and everywhere else please, ditto
public void createDocumentWithDebugLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.DEBUG, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); Assert.assertTrue(logger.isDebugEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } }
Assert.assertTrue(logger.isDebugEnabled());
public void createDocumentWithDebugLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.DEBUG, APPENDER_NAME, consoleWriter); final Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); assertThat(logger.isDebugEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } }
class LogLevelTest extends TestSuiteBase { public final static String COSMOS_DB_LOGGING_CATEGORY = "com.azure.data.cosmos"; public final static String NETWORK_LOGGING_CATEGORY = "com.azure.data.cosmos.netty-network"; public final static String LOG_PATTERN_1 = "HTTP/1.1 200 Ok."; public final static String LOG_PATTERN_2 = "| 0 1 2 3 4 5 6 7 8 9 a b c d e f |"; public final static String LOG_PATTERN_3 = "USER_EVENT: SslHandshakeCompletionEvent(SUCCESS)"; public final static String LOG_PATTERN_4 = "CONNECT: "; private static final String APPENDER_NAME = "StringWriterAppender"; private static CosmosContainer createdCollection; private static CosmosClient client; public LogLevelTest() { super(createGatewayRxDocumentClient()); } @BeforeClass(groups = { "simple" }, timeOut = SETUP_TIMEOUT) public void beforeClass() { client = clientBuilder().build(); createdCollection = getSharedMultiPartitionCosmosContainer(client); } @AfterMethod(groups = { "simple" }) public void afterMethod() { resetLoggingConfiguration(); } @AfterClass(groups = { "simple" }, timeOut = SHUTDOWN_TIMEOUT) public void afterClass() { resetLoggingConfiguration(); } /** * This test will try to create document with netty wire DEBUG logging and * validate it. * * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) /** * This test will try to create document with netty wire WARN logging and * validate it. 
* * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithWarningLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.WARN, APPENDER_NAME, consoleWriter); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire TRACE logging and * validate it. * * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithTraceLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.TRACE, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); Assert.assertTrue(logger.isTraceEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_1); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_2); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_3); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_4); } finally { safeClose(client); } } 
@Ignore @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithTraceLevelAtRoot() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(COSMOS_DB_LOGGING_CATEGORY, Level.INFO, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(COSMOS_DB_LOGGING_CATEGORY); Assert.assertTrue(logger.isInfoEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_1); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_2); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_3); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_4); } finally { safeClose(client); } } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithDebugLevelAtRoot() throws Exception { final StringWriter consoleWriter = new StringWriter(); final LoggerContext context = (LoggerContext) LogManager.getContext(false); final Configuration configuration = context.getConfiguration(); final AppenderRef[] cosmosAppenderRef = new AppenderRef[] { AppenderRef.createAppenderRef("STDOUT", null, null) }; final LoggerConfig cosmosConfig = LoggerConfig.createLogger(false, Level.DEBUG, COSMOS_DB_LOGGING_CATEGORY, null, cosmosAppenderRef, null, configuration, null); configuration.addLogger(COSMOS_DB_LOGGING_CATEGORY, cosmosConfig); context.updateLoggers(); final WriterAppender appender = WriterAppender.createAppender(PatternLayout.createDefaultLayout(configuration), null, consoleWriter, APPENDER_NAME, false, true); appender.start(); org.apache.logging.log4j.core.Logger 
logger = context.getLogger(NETWORK_LOGGING_CATEGORY); logger.addAppender(appender); Assert.assertTrue(LoggerFactory.getLogger(COSMOS_DB_LOGGING_CATEGORY).isDebugEnabled()); Assert.assertTrue(LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY).isInfoEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire ERROR logging and * validate it. * * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithErrorClient() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.ERROR, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); Assert.assertTrue(logger.isErrorEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire INFO logging and * validate it. 
* * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithInfoLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.INFO, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); Assert.assertTrue(logger.isInfoEnabled()); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } private CosmosItemProperties getDocumentDefinition() { String uuid = UUID.randomUUID().toString(); CosmosItemProperties doc = new CosmosItemProperties( String.format("{ " + "\"id\": \"%s\", " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}", uuid, uuid)); return doc; } /** * Resets the logging configuration. */ static void resetLoggingConfiguration() { final URL resource = LogLevelTest.class.getClassLoader().getResource("log4j2-test.properties"); Assert.assertNotNull(resource); final ConfigurationSource defaultConfigurationSource; try { defaultConfigurationSource = ConfigurationSource.fromUri(resource.toURI()); } catch (URISyntaxException e) { Assert.fail("Should have been able to load test properties from '" + resource + "'. 
Exception" + e ); return; } final Configuration defaultConfiguration = ConfigurationBuilderFactory.newConfigurationBuilder() .setConfigurationSource(defaultConfigurationSource) .build(); final LoggerContext oldContext = (LoggerContext) LogManager.getContext(false); oldContext.stop(); final LoggerContext context = Configurator.initialize(defaultConfiguration); Assert.assertNotSame(oldContext, context); } /** * Adds a {@link WriterAppender} associated with the given {@code loggerName} to the current logging configuration. * * @param loggerName Name of the logger to add. * @param logLevel Level for the logger to log at. * @param appenderName The name of the appender. * @param consoleWriter The {@link Writer} associated with the WriterAppender. */ static void addAppenderAndLogger(String loggerName, Level logLevel, String appenderName, Writer consoleWriter) { final LoggerContext context = (LoggerContext) LogManager.getContext(false); final Configuration configuration = context.getConfiguration(); final WriterAppender appender = WriterAppender.createAppender(PatternLayout.createDefaultLayout(configuration), null, consoleWriter, appenderName, false, true); appender.start(); configuration.addAppender(appender); final AppenderRef[] appenderRefs = new AppenderRef[] { AppenderRef.createAppenderRef(appenderName, null, null) }; final LoggerConfig loggerConfiguration = LoggerConfig.createLogger(false, logLevel, loggerName, null, appenderRefs, null, configuration, null); configuration.addLogger(loggerName, loggerConfiguration); context.updateLoggers(); org.apache.logging.log4j.core.Logger logger = context.getLogger(loggerName); configuration.addLoggerAppender(logger, appender); } }
class LogLevelTest extends TestSuiteBase { public final static String COSMOS_DB_LOGGING_CATEGORY = "com.azure.data.cosmos"; public final static String NETWORK_LOGGING_CATEGORY = "com.azure.data.cosmos.netty-network"; public final static String LOG_PATTERN_1 = "HTTP/1.1 200 Ok."; public final static String LOG_PATTERN_2 = "| 0 1 2 3 4 5 6 7 8 9 a b c d e f |"; public final static String LOG_PATTERN_3 = "USER_EVENT: SslHandshakeCompletionEvent(SUCCESS)"; public final static String LOG_PATTERN_4 = "CONNECT: "; private static final String APPENDER_NAME = "StringWriterAppender"; private static CosmosContainer createdCollection; private static CosmosClient client; public LogLevelTest() { super(createGatewayRxDocumentClient()); } @BeforeClass(groups = { "simple" }, timeOut = SETUP_TIMEOUT) public void beforeClass() { client = clientBuilder().build(); createdCollection = getSharedMultiPartitionCosmosContainer(client); } @AfterMethod(groups = { "simple" }) public void afterMethod() { resetLoggingConfiguration(); } @AfterClass(groups = { "simple" }, timeOut = SHUTDOWN_TIMEOUT) public void afterClass() { resetLoggingConfiguration(); } /** * This test will try to create document with netty wire DEBUG logging and * validate it. * * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) /** * This test will try to create document with netty wire WARN logging and * validate it. 
* * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithWarningLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.WARN, APPENDER_NAME, consoleWriter); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire TRACE logging and * validate it. * * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithTraceLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.TRACE, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); assertThat(logger.isTraceEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_1); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_2); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_3); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_4); } finally { safeClose(client); } 
} @Ignore @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithTraceLevelAtRoot() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(COSMOS_DB_LOGGING_CATEGORY, Level.INFO, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(COSMOS_DB_LOGGING_CATEGORY); assertThat(logger.isInfoEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_1); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_2); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_3); assertThat(consoleWriter.toString()).contains(LOG_PATTERN_4); } finally { safeClose(client); } } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithDebugLevelAtRoot() throws Exception { final StringWriter consoleWriter = new StringWriter(); final LoggerContext context = (LoggerContext) LogManager.getContext(false); final Configuration configuration = context.getConfiguration(); final AppenderRef[] cosmosAppenderRef = new AppenderRef[] { AppenderRef.createAppenderRef("STDOUT", null, null) }; final LoggerConfig cosmosConfig = LoggerConfig.createLogger(false, Level.DEBUG, COSMOS_DB_LOGGING_CATEGORY, null, cosmosAppenderRef, null, configuration, null); configuration.addLogger(COSMOS_DB_LOGGING_CATEGORY, cosmosConfig); context.updateLoggers(); final WriterAppender appender = WriterAppender.createAppender(PatternLayout.createDefaultLayout(configuration), null, consoleWriter, APPENDER_NAME, false, true); appender.start(); org.apache.logging.log4j.core.Logger 
logger = context.getLogger(NETWORK_LOGGING_CATEGORY); logger.addAppender(appender); assertThat(LoggerFactory.getLogger(COSMOS_DB_LOGGING_CATEGORY).isDebugEnabled()).isTrue(); assertThat(LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY).isInfoEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire ERROR logging and * validate it. * * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithErrorClient() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.ERROR, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); assertThat(logger.isErrorEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } /** * This test will try to create document with netty wire INFO logging and * validate it. 
* * @throws Exception */ @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createDocumentWithInfoLevel() throws Exception { final StringWriter consoleWriter = new StringWriter(); addAppenderAndLogger(NETWORK_LOGGING_CATEGORY, Level.INFO, APPENDER_NAME, consoleWriter); Logger logger = LoggerFactory.getLogger(NETWORK_LOGGING_CATEGORY); assertThat(logger.isInfoEnabled()).isTrue(); CosmosClient client = clientBuilder().build(); try { CosmosItemProperties docDefinition = getDocumentDefinition(); Mono<CosmosItemResponse> createObservable = createdCollection.createItem(docDefinition, new CosmosItemRequestOptions()); CosmosResponseValidator<CosmosItemResponse> validator = new CosmosResponseValidator.Builder<CosmosItemResponse>() .withId(docDefinition.id()).build(); validateSuccess(createObservable, validator); assertThat(consoleWriter.toString()).isEmpty(); } finally { safeClose(client); } } private CosmosItemProperties getDocumentDefinition() { String uuid = UUID.randomUUID().toString(); CosmosItemProperties doc = new CosmosItemProperties( String.format("{ " + "\"id\": \"%s\", " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}", uuid, uuid)); return doc; } /** * Resets the logging configuration. 
*/ static void resetLoggingConfiguration() { final URL resource = LogLevelTest.class.getClassLoader().getResource("log4j2-test.properties"); assertThat(resource).isNotNull(); final ConfigurationSource defaultConfigurationSource; try { defaultConfigurationSource = ConfigurationSource.fromUri(resource.toURI()); } catch (URISyntaxException e) { fail("Should have been able to load test properties from: " + resource, e); return; } final Configuration defaultConfiguration = ConfigurationBuilderFactory.newConfigurationBuilder() .setConfigurationSource(defaultConfigurationSource) .build(); final LoggerContext oldContext = (LoggerContext) LogManager.getContext(false); oldContext.stop(); final LoggerContext context = Configurator.initialize(defaultConfiguration); assertThat(context).isNotSameAs(oldContext); } /** * Adds a {@link WriterAppender} associated with the given {@code loggerName} to the current logging configuration. * * @param loggerName Name of the logger to add. * @param logLevel Level for the logger to log at. * @param appenderName The name of the appender. * @param consoleWriter The {@link Writer} associated with the WriterAppender. 
*/ static void addAppenderAndLogger(String loggerName, Level logLevel, String appenderName, Writer consoleWriter) { final LoggerContext context = (LoggerContext) LogManager.getContext(false); final Configuration configuration = context.getConfiguration(); final WriterAppender appender = WriterAppender.createAppender(PatternLayout.createDefaultLayout(configuration), null, consoleWriter, appenderName, false, true); appender.start(); configuration.addAppender(appender); final AppenderRef[] appenderRefs = new AppenderRef[] { AppenderRef.createAppenderRef(appenderName, null, null) }; final LoggerConfig loggerConfiguration = LoggerConfig.createLogger(false, logLevel, loggerName, null, appenderRefs, null, configuration, null); configuration.addLogger(loggerName, loggerConfiguration); context.updateLoggers(); org.apache.logging.log4j.core.Logger logger = context.getLogger(loggerName); configuration.addLoggerAppender(logger, appender); } }
Should this be in the bodyIntern method instead?
public Flux<ByteBuffer> getBody() { return bodyIntern().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }).map(byteBuf -> { if (this.disableBufferCopy) { return byteBuf.nioBuffer(); } return deepCopyBuffer(byteBuf); }); }
}).map(byteBuf -> {
public Flux<ByteBuffer> getBody() { return bodyIntern().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }).map(byteBuf -> this.disableBufferCopy ? byteBuf.nioBuffer() : deepCopyBuffer(byteBuf)); }
class ReactorNettyHttpResponse extends HttpResponse { private final HttpClientResponse reactorNettyResponse; private final Connection reactorNettyConnection; private final boolean disableBufferCopy; ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection, HttpRequest httpRequest, boolean disableBufferCopy) { super(httpRequest); this.reactorNettyResponse = reactorNettyResponse; this.reactorNettyConnection = reactorNettyConnection; this.disableBufferCopy = disableBufferCopy; } @Override public int getStatusCode() { return reactorNettyResponse.status().code(); } @Override public String getHeaderValue(String name) { return reactorNettyResponse.responseHeaders().get(name); } @Override public HttpHeaders getHeaders() { HttpHeaders headers = new HttpHeaders(); reactorNettyResponse.responseHeaders().forEach(e -> headers.put(e.getKey(), e.getValue())); return headers; } @Override private ByteBuffer deepCopyBuffer(ByteBuf byteBuf) { ByteBuffer buffer = byteBuf.nioBuffer(); int offset = buffer.position(); int size = buffer.remaining(); byte[] duplicate = new byte[size]; for (int i = 0; i < size; i++) { duplicate[i] = buffer.get(i + offset); } return ByteBuffer.wrap(duplicate); } @Override public Mono<byte[]> getBodyAsByteArray() { return bodyIntern().aggregate().asByteArray().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString() { return bodyIntern().aggregate().asString().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString(Charset charset) { return bodyIntern().aggregate().asString(charset).doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } 
}); } @Override public void close() { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } } private ByteBufFlux bodyIntern() { return reactorNettyConnection.inbound().receive(); } Connection internConnection() { return reactorNettyConnection; } }
class ReactorNettyHttpResponse extends HttpResponse { private static final ByteBuffer EMPTY_BYTE_BUFFER = ByteBuffer.wrap(new byte[0]); private final HttpClientResponse reactorNettyResponse; private final Connection reactorNettyConnection; private final boolean disableBufferCopy; ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection, HttpRequest httpRequest, boolean disableBufferCopy) { super(httpRequest); this.reactorNettyResponse = reactorNettyResponse; this.reactorNettyConnection = reactorNettyConnection; this.disableBufferCopy = disableBufferCopy; } @Override public int getStatusCode() { return reactorNettyResponse.status().code(); } @Override public String getHeaderValue(String name) { return reactorNettyResponse.responseHeaders().get(name); } @Override public HttpHeaders getHeaders() { HttpHeaders headers = new HttpHeaders(); reactorNettyResponse.responseHeaders().forEach(e -> headers.put(e.getKey(), e.getValue())); return headers; } @Override @Override public Mono<byte[]> getBodyAsByteArray() { return bodyIntern().aggregate().asByteArray().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString() { return bodyIntern().aggregate().asString().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString(Charset charset) { return bodyIntern().aggregate().asString(charset).doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public void close() { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } } private ByteBufFlux bodyIntern() { return 
reactorNettyConnection.inbound().receive(); } Connection internConnection() { return reactorNettyConnection; } private static ByteBuffer deepCopyBuffer(ByteBuf byteBuf) { ByteBuffer buffer = ByteBuffer.allocate(byteBuf.readableBytes()); byteBuf.readBytes(buffer); buffer.rewind(); return buffer; } }
As discussed, changing bodyIntern will result in double copy for other `getBody*` methods that don't return `Flux<ByteBuffer>`
public Flux<ByteBuffer> getBody() { return bodyIntern().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }).map(byteBuf -> { if (this.disableBufferCopy) { return byteBuf.nioBuffer(); } return deepCopyBuffer(byteBuf); }); }
}).map(byteBuf -> {
public Flux<ByteBuffer> getBody() { return bodyIntern().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }).map(byteBuf -> this.disableBufferCopy ? byteBuf.nioBuffer() : deepCopyBuffer(byteBuf)); }
class ReactorNettyHttpResponse extends HttpResponse { private final HttpClientResponse reactorNettyResponse; private final Connection reactorNettyConnection; private final boolean disableBufferCopy; ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection, HttpRequest httpRequest, boolean disableBufferCopy) { super(httpRequest); this.reactorNettyResponse = reactorNettyResponse; this.reactorNettyConnection = reactorNettyConnection; this.disableBufferCopy = disableBufferCopy; } @Override public int getStatusCode() { return reactorNettyResponse.status().code(); } @Override public String getHeaderValue(String name) { return reactorNettyResponse.responseHeaders().get(name); } @Override public HttpHeaders getHeaders() { HttpHeaders headers = new HttpHeaders(); reactorNettyResponse.responseHeaders().forEach(e -> headers.put(e.getKey(), e.getValue())); return headers; } @Override private ByteBuffer deepCopyBuffer(ByteBuf byteBuf) { ByteBuffer buffer = byteBuf.nioBuffer(); int offset = buffer.position(); int size = buffer.remaining(); byte[] duplicate = new byte[size]; for (int i = 0; i < size; i++) { duplicate[i] = buffer.get(i + offset); } return ByteBuffer.wrap(duplicate); } @Override public Mono<byte[]> getBodyAsByteArray() { return bodyIntern().aggregate().asByteArray().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString() { return bodyIntern().aggregate().asString().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString(Charset charset) { return bodyIntern().aggregate().asString(charset).doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } 
}); } @Override public void close() { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } } private ByteBufFlux bodyIntern() { return reactorNettyConnection.inbound().receive(); } Connection internConnection() { return reactorNettyConnection; } }
class ReactorNettyHttpResponse extends HttpResponse { private static final ByteBuffer EMPTY_BYTE_BUFFER = ByteBuffer.wrap(new byte[0]); private final HttpClientResponse reactorNettyResponse; private final Connection reactorNettyConnection; private final boolean disableBufferCopy; ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection, HttpRequest httpRequest, boolean disableBufferCopy) { super(httpRequest); this.reactorNettyResponse = reactorNettyResponse; this.reactorNettyConnection = reactorNettyConnection; this.disableBufferCopy = disableBufferCopy; } @Override public int getStatusCode() { return reactorNettyResponse.status().code(); } @Override public String getHeaderValue(String name) { return reactorNettyResponse.responseHeaders().get(name); } @Override public HttpHeaders getHeaders() { HttpHeaders headers = new HttpHeaders(); reactorNettyResponse.responseHeaders().forEach(e -> headers.put(e.getKey(), e.getValue())); return headers; } @Override @Override public Mono<byte[]> getBodyAsByteArray() { return bodyIntern().aggregate().asByteArray().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString() { return bodyIntern().aggregate().asString().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString(Charset charset) { return bodyIntern().aggregate().asString(charset).doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public void close() { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } } private ByteBufFlux bodyIntern() { return 
reactorNettyConnection.inbound().receive(); } Connection internConnection() { return reactorNettyConnection; } private static ByteBuffer deepCopyBuffer(ByteBuf byteBuf) { ByteBuffer buffer = ByteBuffer.allocate(byteBuf.readableBytes()); byteBuf.readBytes(buffer); buffer.rewind(); return buffer; } }
minor; maybe not now, but we could have a static empty ByteBuffer in class level and if size == 0, we can just return that, instead of creating a new empty `duplicate` reference every time.
private static ByteBuffer deepCopyBuffer(ByteBuf byteBuf) { ByteBuffer buffer = byteBuf.nioBuffer(); int offset = buffer.position(); int size = buffer.remaining(); byte[] duplicate = new byte[size]; for (int i = 0; i < size; i++) { duplicate[i] = buffer.get(i + offset); } return ByteBuffer.wrap(duplicate); }
int size = buffer.remaining();
private static ByteBuffer deepCopyBuffer(ByteBuf byteBuf) { ByteBuffer buffer = ByteBuffer.allocate(byteBuf.readableBytes()); byteBuf.readBytes(buffer); buffer.rewind(); return buffer; }
class ReactorNettyHttpResponse extends HttpResponse { private final HttpClientResponse reactorNettyResponse; private final Connection reactorNettyConnection; private final boolean disableBufferCopy; ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection, HttpRequest httpRequest, boolean disableBufferCopy) { super(httpRequest); this.reactorNettyResponse = reactorNettyResponse; this.reactorNettyConnection = reactorNettyConnection; this.disableBufferCopy = disableBufferCopy; } @Override public int getStatusCode() { return reactorNettyResponse.status().code(); } @Override public String getHeaderValue(String name) { return reactorNettyResponse.responseHeaders().get(name); } @Override public HttpHeaders getHeaders() { HttpHeaders headers = new HttpHeaders(); reactorNettyResponse.responseHeaders().forEach(e -> headers.put(e.getKey(), e.getValue())); return headers; } @Override public Flux<ByteBuffer> getBody() { return bodyIntern().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }).map(byteBuf -> { if (this.disableBufferCopy) { return byteBuf.nioBuffer(); } return deepCopyBuffer(byteBuf); }); } @Override public Mono<byte[]> getBodyAsByteArray() { return bodyIntern().aggregate().asByteArray().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString() { return bodyIntern().aggregate().asString().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString(Charset charset) { return bodyIntern().aggregate().asString(charset).doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { 
reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public void close() { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } } private ByteBufFlux bodyIntern() { return reactorNettyConnection.inbound().receive(); } Connection internConnection() { return reactorNettyConnection; } }
class ReactorNettyHttpResponse extends HttpResponse { private static final ByteBuffer EMPTY_BYTE_BUFFER = ByteBuffer.wrap(new byte[0]); private final HttpClientResponse reactorNettyResponse; private final Connection reactorNettyConnection; private final boolean disableBufferCopy; ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection, HttpRequest httpRequest, boolean disableBufferCopy) { super(httpRequest); this.reactorNettyResponse = reactorNettyResponse; this.reactorNettyConnection = reactorNettyConnection; this.disableBufferCopy = disableBufferCopy; } @Override public int getStatusCode() { return reactorNettyResponse.status().code(); } @Override public String getHeaderValue(String name) { return reactorNettyResponse.responseHeaders().get(name); } @Override public HttpHeaders getHeaders() { HttpHeaders headers = new HttpHeaders(); reactorNettyResponse.responseHeaders().forEach(e -> headers.put(e.getKey(), e.getValue())); return headers; } @Override public Flux<ByteBuffer> getBody() { return bodyIntern().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }).map(byteBuf -> this.disableBufferCopy ? 
byteBuf.nioBuffer() : deepCopyBuffer(byteBuf)); } @Override public Mono<byte[]> getBodyAsByteArray() { return bodyIntern().aggregate().asByteArray().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString() { return bodyIntern().aggregate().asString().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString(Charset charset) { return bodyIntern().aggregate().asString(charset).doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public void close() { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } } private ByteBufFlux bodyIntern() { return reactorNettyConnection.inbound().receive(); } Connection internConnection() { return reactorNettyConnection; } }
Reduce this to a one-liner: `return disableBufferCopy ? byteBuf.nioBuffer() : deepCopyBuffer(byteBuf);`
public Flux<ByteBuffer> getBody() { return bodyIntern().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }).map(byteBuf -> { if (this.disableBufferCopy) { return byteBuf.nioBuffer(); } return deepCopyBuffer(byteBuf); }); }
return deepCopyBuffer(byteBuf);
public Flux<ByteBuffer> getBody() { return bodyIntern().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }).map(byteBuf -> this.disableBufferCopy ? byteBuf.nioBuffer() : deepCopyBuffer(byteBuf)); }
class ReactorNettyHttpResponse extends HttpResponse { private final HttpClientResponse reactorNettyResponse; private final Connection reactorNettyConnection; private final boolean disableBufferCopy; ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection, HttpRequest httpRequest, boolean disableBufferCopy) { super(httpRequest); this.reactorNettyResponse = reactorNettyResponse; this.reactorNettyConnection = reactorNettyConnection; this.disableBufferCopy = disableBufferCopy; } @Override public int getStatusCode() { return reactorNettyResponse.status().code(); } @Override public String getHeaderValue(String name) { return reactorNettyResponse.responseHeaders().get(name); } @Override public HttpHeaders getHeaders() { HttpHeaders headers = new HttpHeaders(); reactorNettyResponse.responseHeaders().forEach(e -> headers.put(e.getKey(), e.getValue())); return headers; } @Override @Override public Mono<byte[]> getBodyAsByteArray() { return bodyIntern().aggregate().asByteArray().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString() { return bodyIntern().aggregate().asString().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString(Charset charset) { return bodyIntern().aggregate().asString(charset).doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public void close() { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } } private ByteBufFlux bodyIntern() { return reactorNettyConnection.inbound().receive(); } Connection internConnection() { return 
reactorNettyConnection; } private static ByteBuffer deepCopyBuffer(ByteBuf byteBuf) { ByteBuffer buffer = byteBuf.nioBuffer(); int offset = buffer.position(); int size = buffer.remaining(); byte[] duplicate = new byte[size]; for (int i = 0; i < size; i++) { duplicate[i] = buffer.get(i + offset); } return ByteBuffer.wrap(duplicate); } }
class ReactorNettyHttpResponse extends HttpResponse { private static final ByteBuffer EMPTY_BYTE_BUFFER = ByteBuffer.wrap(new byte[0]); private final HttpClientResponse reactorNettyResponse; private final Connection reactorNettyConnection; private final boolean disableBufferCopy; ReactorNettyHttpResponse(HttpClientResponse reactorNettyResponse, Connection reactorNettyConnection, HttpRequest httpRequest, boolean disableBufferCopy) { super(httpRequest); this.reactorNettyResponse = reactorNettyResponse; this.reactorNettyConnection = reactorNettyConnection; this.disableBufferCopy = disableBufferCopy; } @Override public int getStatusCode() { return reactorNettyResponse.status().code(); } @Override public String getHeaderValue(String name) { return reactorNettyResponse.responseHeaders().get(name); } @Override public HttpHeaders getHeaders() { HttpHeaders headers = new HttpHeaders(); reactorNettyResponse.responseHeaders().forEach(e -> headers.put(e.getKey(), e.getValue())); return headers; } @Override @Override public Mono<byte[]> getBodyAsByteArray() { return bodyIntern().aggregate().asByteArray().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString() { return bodyIntern().aggregate().asString().doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public Mono<String> getBodyAsString(Charset charset) { return bodyIntern().aggregate().asString(charset).doFinally(s -> { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } }); } @Override public void close() { if (!reactorNettyConnection.isDisposed()) { reactorNettyConnection.channel().eventLoop().execute(reactorNettyConnection::dispose); } } private ByteBufFlux bodyIntern() { return 
reactorNettyConnection.inbound().receive(); } Connection internConnection() { return reactorNettyConnection; } private static ByteBuffer deepCopyBuffer(ByteBuf byteBuf) { ByteBuffer buffer = ByteBuffer.allocate(byteBuf.readableBytes()); byteBuf.readBytes(buffer); buffer.rewind(); return buffer; } }
nit: Random newline change
public static HttpClient createInstance() { if (defaultProvider == null) { throw new IllegalStateException(CANNOT_FIND_HTTP_CLIENT); } return defaultProvider.createInstance(); }
return defaultProvider.createInstance();
public static HttpClient createInstance() { if (defaultProvider == null) { throw new IllegalStateException(CANNOT_FIND_HTTP_CLIENT); } return defaultProvider.createInstance(); }
class HttpClientProviders { private static HttpClientProvider defaultProvider; private static final String CANNOT_FIND_HTTP_CLIENT = "Cannot find any HttpClient provider on the classpath - unable to create a default HttpClient instance"; static { ServiceLoader<HttpClientProvider> serviceLoader = ServiceLoader.load(HttpClientProvider.class); Iterator<HttpClientProvider> it = serviceLoader.iterator(); if (it.hasNext()) { defaultProvider = it.next(); } } private HttpClientProviders() { } }
class HttpClientProviders { private static HttpClientProvider defaultProvider; private static final String CANNOT_FIND_HTTP_CLIENT = "Cannot find any HttpClient provider on the classpath - unable to create a default HttpClient instance"; static { ServiceLoader<HttpClientProvider> serviceLoader = ServiceLoader.load(HttpClientProvider.class); Iterator<HttpClientProvider> it = serviceLoader.iterator(); if (it.hasNext()) { defaultProvider = it.next(); } } private HttpClientProviders() { } }
Recursion should be avoided as much as we can because of obvious reasons. I believe this can easily be converted to a while loop.
private CosmosClientException extractCosmosClientExceptionIfAny(Throwable t) { if (t == null) { return null; } if (t instanceof CosmosClientException) { return (CosmosClientException) t; } return extractCosmosClientExceptionIfAny(t.getCause()); }
return extractCosmosClientExceptionIfAny(t.getCause());
private CosmosClientException extractCosmosClientExceptionIfAny(Throwable t) { if (t == null) { return null; } while(!(t instanceof CosmosClientException)) { t = t.getCause(); } return (CosmosClientException) t; }
class RetryAnalyzer extends RetryAnalyzerCount { private final Logger logger = LoggerFactory.getLogger(RetryAnalyzer.class); private final int waitBetweenRetriesInSeconds = 120; public RetryAnalyzer() { this.setCount(Integer.parseInt(TestConfigurations.MAX_RETRY_LIMIT)); } @Override public boolean retryMethod(ITestResult result) { try { int timeToWaitBeforeRetryInSeconds = getTimeToWaitInSeconds(result); TimeUnit.SECONDS.sleep(timeToWaitBeforeRetryInSeconds); } catch (InterruptedException e) { return false; } return true; } private int getTimeToWaitInSeconds(ITestResult result) { Throwable throwable = result.getThrowable(); CosmosClientException cosmosClientException = extractCosmosClientExceptionIfAny(throwable); if (cosmosClientException == null) { return waitBetweenRetriesInSeconds; } long retryAfterInMilliseconds = cosmosClientException.getRetryAfterInMilliseconds(); if (retryAfterInMilliseconds <= 0) { return waitBetweenRetriesInSeconds; } return Math.max(Math.toIntExact(Duration.ofMillis(retryAfterInMilliseconds).getSeconds()), waitBetweenRetriesInSeconds); } }
class RetryAnalyzer extends RetryAnalyzerCount { private final Logger logger = LoggerFactory.getLogger(RetryAnalyzer.class); private final int waitBetweenRetriesInSeconds = 120; public RetryAnalyzer() { this.setCount(Integer.parseInt(TestConfigurations.MAX_RETRY_LIMIT)); } @Override public boolean retryMethod(ITestResult result) { try { int timeToWaitBeforeRetryInSeconds = getTimeToWaitInSeconds(result); TimeUnit.SECONDS.sleep(timeToWaitBeforeRetryInSeconds); } catch (InterruptedException e) { return false; } return true; } private int getTimeToWaitInSeconds(ITestResult result) { Throwable throwable = result.getThrowable(); CosmosClientException cosmosClientException = extractCosmosClientExceptionIfAny(throwable); if (cosmosClientException == null) { return waitBetweenRetriesInSeconds; } long retryAfterInMilliseconds = cosmosClientException.getRetryAfterInMilliseconds(); if (retryAfterInMilliseconds <= 0) { return waitBetweenRetriesInSeconds; } return Math.max(Math.toIntExact(Duration.ofMillis(retryAfterInMilliseconds).getSeconds()), waitBetweenRetriesInSeconds); } }
why?
private CosmosClientException extractCosmosClientExceptionIfAny(Throwable t) { if (t == null) { return null; } if (t instanceof CosmosClientException) { return (CosmosClientException) t; } return extractCosmosClientExceptionIfAny(t.getCause()); }
return extractCosmosClientExceptionIfAny(t.getCause());
private CosmosClientException extractCosmosClientExceptionIfAny(Throwable t) { if (t == null) { return null; } while(!(t instanceof CosmosClientException)) { t = t.getCause(); } return (CosmosClientException) t; }
class RetryAnalyzer extends RetryAnalyzerCount { private final Logger logger = LoggerFactory.getLogger(RetryAnalyzer.class); private final int waitBetweenRetriesInSeconds = 120; public RetryAnalyzer() { this.setCount(Integer.parseInt(TestConfigurations.MAX_RETRY_LIMIT)); } @Override public boolean retryMethod(ITestResult result) { try { int timeToWaitBeforeRetryInSeconds = getTimeToWaitInSeconds(result); TimeUnit.SECONDS.sleep(timeToWaitBeforeRetryInSeconds); } catch (InterruptedException e) { return false; } return true; } private int getTimeToWaitInSeconds(ITestResult result) { Throwable throwable = result.getThrowable(); CosmosClientException cosmosClientException = extractCosmosClientExceptionIfAny(throwable); if (cosmosClientException == null) { return waitBetweenRetriesInSeconds; } long retryAfterInMilliseconds = cosmosClientException.getRetryAfterInMilliseconds(); if (retryAfterInMilliseconds <= 0) { return waitBetweenRetriesInSeconds; } return Math.max(Math.toIntExact(Duration.ofMillis(retryAfterInMilliseconds).getSeconds()), waitBetweenRetriesInSeconds); } }
class RetryAnalyzer extends RetryAnalyzerCount { private final Logger logger = LoggerFactory.getLogger(RetryAnalyzer.class); private final int waitBetweenRetriesInSeconds = 120; public RetryAnalyzer() { this.setCount(Integer.parseInt(TestConfigurations.MAX_RETRY_LIMIT)); } @Override public boolean retryMethod(ITestResult result) { try { int timeToWaitBeforeRetryInSeconds = getTimeToWaitInSeconds(result); TimeUnit.SECONDS.sleep(timeToWaitBeforeRetryInSeconds); } catch (InterruptedException e) { return false; } return true; } private int getTimeToWaitInSeconds(ITestResult result) { Throwable throwable = result.getThrowable(); CosmosClientException cosmosClientException = extractCosmosClientExceptionIfAny(throwable); if (cosmosClientException == null) { return waitBetweenRetriesInSeconds; } long retryAfterInMilliseconds = cosmosClientException.getRetryAfterInMilliseconds(); if (retryAfterInMilliseconds <= 0) { return waitBetweenRetriesInSeconds; } return Math.max(Math.toIntExact(Duration.ofMillis(retryAfterInMilliseconds).getSeconds()), waitBetweenRetriesInSeconds); } }
The text that needs to be analyzed.
public static void main(String[] args) { TextAnalyticsAsyncClient client = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildAsyncClient(); String text = "My SSN is 555-55-5555"; client.recognizePiiEntities(text).subscribe( entity -> System.out.printf( "Recognized personal identifiable information entity: %s, entity category: %s, entity sub-category: %s, offset: %s, length: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getSubCategory() == null || entity.getSubCategory().isEmpty() ? "N/A" : entity.getSubCategory(), entity.getOffset(), entity.getLength(), entity.getScore()), error -> System.err.println("There was an error recognizing PII entities of the text." + error), () -> System.out.println("PII entities recognized.")); try { TimeUnit.SECONDS.sleep(5); } catch (InterruptedException ignored) { } }
public static void main(String[] args) { TextAnalyticsAsyncClient client = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildAsyncClient(); String text = "My SSN is 555-55-5555"; client.recognizePiiEntities(text).subscribe( entity -> System.out.printf( "Recognized personal identifiable information entity: %s, entity category: %s, entity sub-category: %s, offset: %s, length: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getSubCategory() == null || entity.getSubCategory().isEmpty() ? "N/A" : entity.getSubCategory(), entity.getOffset(), entity.getLength(), entity.getScore()), error -> System.err.println("There was an error recognizing PII entities of the text." + error), () -> System.out.println("PII entities recognized.")); try { TimeUnit.SECONDS.sleep(5); } catch (InterruptedException ignored) { } }
class RecognizePiiAsync { /** * Main method to invoke this demo about how to recognize the PII entities of an input text. * * @param args Unused arguments to the program. */ }
class RecognizePiiAsync { /** * Main method to invoke this demo about how to recognize the PII entities of an input text. * * @param args Unused arguments to the program. */ }
Revert the keys and endpoint information from here
public static void main(String[] args) { TextAnalyticsClient client = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("b2f8b7b697c348dcb0e30055d49f3d0f")) .endpoint("https: .buildClient(); List<TextDocumentInput> inputs = Arrays.asList( new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"), new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en") ); final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); DocumentResultCollection<AnalyzeSentimentResult> sentimentBatchResult = client.analyzeSentimentBatchWithResponse( inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s%n", sentimentBatchResult.getModelVersion()); final TextDocumentBatchStatistics batchStatistics = sentimentBatchResult.getStatistics(); System.out.printf("A batch of document statistics, document count: %s, erroneous document count: %s, transaction count: %s, valid document count: %s.%n", batchStatistics.getDocumentCount(), batchStatistics.getInvalidDocumentCount(), batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : sentimentBatchResult) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); if (analyzeSentimentResult.isError()) { System.out.printf("Cannot analyze sentiment. 
Error: %s%n", analyzeSentimentResult.getError().getMessage()); continue; } final DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n", documentSentiment.getSentiment(), documentSentiment.getSentimentScores().getPositive(), documentSentiment.getSentimentScores().getNeutral(), documentSentiment.getSentimentScores().getNegative()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f, length of sentence: %s, offset of sentence: %s.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getSentimentScores().getPositive(), sentenceSentiment.getSentimentScores().getNeutral(), sentenceSentiment.getSentimentScores().getNegative(), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); } } }
.apiKey(new TextAnalyticsApiKeyCredential("b2f8b7b697c348dcb0e30055d49f3d0f"))
public static void main(String[] args) { TextAnalyticsClient client = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildClient(); List<TextDocumentInput> inputs = Arrays.asList( new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"), new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en") ); final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); DocumentResultCollection<AnalyzeSentimentResult> sentimentBatchResult = client.analyzeSentimentBatchWithResponse( inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s%n", sentimentBatchResult.getModelVersion()); final TextDocumentBatchStatistics batchStatistics = sentimentBatchResult.getStatistics(); System.out.printf("A batch of document statistics, document count: %s, erroneous document count: %s, transaction count: %s, valid document count: %s.%n", batchStatistics.getDocumentCount(), batchStatistics.getInvalidDocumentCount(), batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : sentimentBatchResult) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); if (analyzeSentimentResult.isError()) { System.out.printf("Cannot analyze sentiment. 
Error: %s%n", analyzeSentimentResult.getError().getMessage()); continue; } final DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n", documentSentiment.getSentiment(), documentSentiment.getSentimentScores().getPositive(), documentSentiment.getSentimentScores().getNeutral(), documentSentiment.getSentimentScores().getNegative()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f, length of sentence: %s, offset of sentence: %s.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getSentimentScores().getPositive(), sentenceSentiment.getSentimentScores().getNeutral(), sentenceSentiment.getSentimentScores().getNegative(), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); } } }
class AnalyzeSentimentBatchDocuments { /** * Main method to invoke this demo about how to analyze the sentiments of a batch input text. * * @param args Unused arguments to the program. */ }
class AnalyzeSentimentBatchDocuments { /** * Main method to invoke this demo about how to analyze the sentiments of a batch input text. * * @param args Unused arguments to the program. */ }
Yup. Found this when reviewed. Thanks for pointing this out
public static void main(String[] args) { TextAnalyticsClient client = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("b2f8b7b697c348dcb0e30055d49f3d0f")) .endpoint("https: .buildClient(); List<TextDocumentInput> inputs = Arrays.asList( new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"), new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en") ); final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); DocumentResultCollection<AnalyzeSentimentResult> sentimentBatchResult = client.analyzeSentimentBatchWithResponse( inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s%n", sentimentBatchResult.getModelVersion()); final TextDocumentBatchStatistics batchStatistics = sentimentBatchResult.getStatistics(); System.out.printf("A batch of document statistics, document count: %s, erroneous document count: %s, transaction count: %s, valid document count: %s.%n", batchStatistics.getDocumentCount(), batchStatistics.getInvalidDocumentCount(), batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : sentimentBatchResult) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); if (analyzeSentimentResult.isError()) { System.out.printf("Cannot analyze sentiment. 
Error: %s%n", analyzeSentimentResult.getError().getMessage()); continue; } final DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n", documentSentiment.getSentiment(), documentSentiment.getSentimentScores().getPositive(), documentSentiment.getSentimentScores().getNeutral(), documentSentiment.getSentimentScores().getNegative()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f, length of sentence: %s, offset of sentence: %s.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getSentimentScores().getPositive(), sentenceSentiment.getSentimentScores().getNeutral(), sentenceSentiment.getSentimentScores().getNegative(), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); } } }
.apiKey(new TextAnalyticsApiKeyCredential("b2f8b7b697c348dcb0e30055d49f3d0f"))
public static void main(String[] args) { TextAnalyticsClient client = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildClient(); List<TextDocumentInput> inputs = Arrays.asList( new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"), new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en") ); final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); DocumentResultCollection<AnalyzeSentimentResult> sentimentBatchResult = client.analyzeSentimentBatchWithResponse( inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s%n", sentimentBatchResult.getModelVersion()); final TextDocumentBatchStatistics batchStatistics = sentimentBatchResult.getStatistics(); System.out.printf("A batch of document statistics, document count: %s, erroneous document count: %s, transaction count: %s, valid document count: %s.%n", batchStatistics.getDocumentCount(), batchStatistics.getInvalidDocumentCount(), batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : sentimentBatchResult) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); if (analyzeSentimentResult.isError()) { System.out.printf("Cannot analyze sentiment. 
Error: %s%n", analyzeSentimentResult.getError().getMessage()); continue; } final DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n", documentSentiment.getSentiment(), documentSentiment.getSentimentScores().getPositive(), documentSentiment.getSentimentScores().getNeutral(), documentSentiment.getSentimentScores().getNegative()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f, length of sentence: %s, offset of sentence: %s.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getSentimentScores().getPositive(), sentenceSentiment.getSentimentScores().getNeutral(), sentenceSentiment.getSentimentScores().getNegative(), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); } } }
class AnalyzeSentimentBatchDocuments { /** * Main method to invoke this demo about how to analyze the sentiments of a batch input text. * * @param args Unused arguments to the program. */ }
class AnalyzeSentimentBatchDocuments { /** * Main method to invoke this demo about how to analyze the sentiments of a batch input text. * * @param args Unused arguments to the program. */ }
we need to rotate that key too
public static void main(String[] args) { TextAnalyticsClient client = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("b2f8b7b697c348dcb0e30055d49f3d0f")) .endpoint("https: .buildClient(); List<TextDocumentInput> inputs = Arrays.asList( new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"), new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en") ); final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); DocumentResultCollection<AnalyzeSentimentResult> sentimentBatchResult = client.analyzeSentimentBatchWithResponse( inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s%n", sentimentBatchResult.getModelVersion()); final TextDocumentBatchStatistics batchStatistics = sentimentBatchResult.getStatistics(); System.out.printf("A batch of document statistics, document count: %s, erroneous document count: %s, transaction count: %s, valid document count: %s.%n", batchStatistics.getDocumentCount(), batchStatistics.getInvalidDocumentCount(), batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : sentimentBatchResult) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); if (analyzeSentimentResult.isError()) { System.out.printf("Cannot analyze sentiment. 
Error: %s%n", analyzeSentimentResult.getError().getMessage()); continue; } final DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n", documentSentiment.getSentiment(), documentSentiment.getSentimentScores().getPositive(), documentSentiment.getSentimentScores().getNeutral(), documentSentiment.getSentimentScores().getNegative()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f, length of sentence: %s, offset of sentence: %s.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getSentimentScores().getPositive(), sentenceSentiment.getSentimentScores().getNeutral(), sentenceSentiment.getSentimentScores().getNegative(), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); } } }
.apiKey(new TextAnalyticsApiKeyCredential("b2f8b7b697c348dcb0e30055d49f3d0f"))
public static void main(String[] args) { TextAnalyticsClient client = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildClient(); List<TextDocumentInput> inputs = Arrays.asList( new TextDocumentInput("1", "The hotel was dark and unclean. The restaurant had amazing gnocchi.", "en"), new TextDocumentInput("2", "The restaurant had amazing gnocchi. The hotel was dark and unclean.", "en") ); final TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); DocumentResultCollection<AnalyzeSentimentResult> sentimentBatchResult = client.analyzeSentimentBatchWithResponse( inputs, requestOptions, Context.NONE).getValue(); System.out.printf("Model version: %s%n", sentimentBatchResult.getModelVersion()); final TextDocumentBatchStatistics batchStatistics = sentimentBatchResult.getStatistics(); System.out.printf("A batch of document statistics, document count: %s, erroneous document count: %s, transaction count: %s, valid document count: %s.%n", batchStatistics.getDocumentCount(), batchStatistics.getInvalidDocumentCount(), batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : sentimentBatchResult) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); if (analyzeSentimentResult.isError()) { System.out.printf("Cannot analyze sentiment. 
Error: %s%n", analyzeSentimentResult.getError().getMessage()); continue; } final DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n", documentSentiment.getSentiment(), documentSentiment.getSentimentScores().getPositive(), documentSentiment.getSentimentScores().getNeutral(), documentSentiment.getSentimentScores().getNegative()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f, length of sentence: %s, offset of sentence: %s.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getSentimentScores().getPositive(), sentenceSentiment.getSentimentScores().getNeutral(), sentenceSentiment.getSentimentScores().getNegative(), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); } } }
// NOTE(review): extraction artifact — the class body is empty here; the sample's
// main method appears as a separate record above this one.
class AnalyzeSentimentBatchDocuments {
    /**
     * Main method to invoke this demo about how to analyze the sentiments of a batch input text.
     *
     * @param args Unused arguments to the program.
     */
}
// NOTE(review): duplicate of the preceding record (dataset stores the same
// context twice); the class body is empty in this excerpt.
class AnalyzeSentimentBatchDocuments {
    /**
     * Main method to invoke this demo about how to analyze the sentiments of a batch input text.
     *
     * @param args Unused arguments to the program.
     */
}
nit: "Extracted phrases" — consider rewording this output label so it matches the phrasing used in the rest of the sample.
/**
 * Demonstrates rotating the API key of a Text Analytics client: the first
 * extraction call is made with the original key, the key is then updated via
 * {@link TextAnalyticsApiKeyCredential#updateCredential}, and the same call is
 * repeated with the new key.
 *
 * @param args Unused arguments to the program.
 */
public static void main(String[] args) {
    TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential("{api_key}");

    TextAnalyticsAsyncClient client = new TextAnalyticsClientBuilder()
        .apiKey(credential)
        .endpoint("{endpoint}")
        .buildAsyncClient();

    String text = "My cat might need to see a veterinarian.";

    // Fix: print a consistent "Extracted phrases:" header before each call and
    // emit the bare phrase per item, so both invocations produce the same output
    // shape (previously the first call printed "Recognized phrases: %s." per item
    // with no header, while the second printed a "Recognized phrases:" header).
    System.out.println("Extracted phrases:");
    client.extractKeyPhrases(text).subscribe(
        keyPhrase -> System.out.printf("%s.%n", keyPhrase),
        error -> System.err.println("There was an error extracting key phrases of the text." + error),
        () -> System.out.println("Key phrases extracted."));

    // Rotate to the new key; subsequent requests use it immediately.
    credential.updateCredential("{valid_api_key}");

    System.out.println("Extracted phrases:");
    client.extractKeyPhrases(text).subscribe(
        keyPhrase -> System.out.printf("%s.%n", keyPhrase),
        error -> System.err.println("There was an error extracting key phrases of the text." + error),
        () -> System.out.println("Key phrases extracted."));

    // The subscriptions above are asynchronous; block briefly so the program
    // does not exit before the responses arrive.
    try {
        TimeUnit.SECONDS.sleep(5);
    } catch (InterruptedException ignored) {
    }
}
System.out.println("Recognized phrases:");
/**
 * Shows how to rotate the API key of a Text Analytics client in place: a key
 * phrase extraction is run with the initial key, the credential is updated,
 * and the extraction is run again using the rotated key.
 *
 * @param args Unused arguments to the program.
 */
public static void main(String[] args) {
    // Credential object is kept in a local so it can be rotated later.
    TextAnalyticsApiKeyCredential rotatableCredential = new TextAnalyticsApiKeyCredential("{api_key}");

    TextAnalyticsAsyncClient asyncClient = new TextAnalyticsClientBuilder()
        .apiKey(rotatableCredential)
        .endpoint("{endpoint}")
        .buildAsyncClient();

    String document = "My cat might need to see a veterinarian.";

    // First extraction, performed with the original key.
    System.out.println("Extracted phrases:");
    asyncClient.extractKeyPhrases(document).subscribe(
        keyPhrase -> System.out.printf("%s.%n", keyPhrase),
        error -> System.err.println("There was an error extracting key phrases of the text." + error),
        () -> System.out.println("Key phrases extracted."));

    // Swap in the rotated key; the client picks it up for subsequent requests.
    rotatableCredential.updateCredential("{valid_api_key}");

    // Second extraction, now authenticated with the rotated key.
    System.out.println("Extracted phrases:");
    asyncClient.extractKeyPhrases(document).subscribe(
        keyPhrase -> System.out.printf("%s.%n", keyPhrase),
        error -> System.err.println("There was an error extracting key phrases of the text." + error),
        () -> System.out.println("Key phrases extracted."));

    // Give the non-blocking subscriptions time to complete before the JVM exits.
    try {
        TimeUnit.SECONDS.sleep(5);
    } catch (InterruptedException ignored) {
    }
}
// NOTE(review): extraction artifact — the class body is empty here; the sample's
// main method appears as a separate record above this one.
class RotateApiKeyAsync {
    /**
     * Main method to invoke this demo about how to rotate the existing API key of text analytics client.
     *
     * @param args Unused arguments to the program.
     */
}
// NOTE(review): duplicate of the preceding record (dataset stores the same
// context twice); the class body is empty in this excerpt.
class RotateApiKeyAsync {
    /**
     * Main method to invoke this demo about how to rotate the existing API key of text analytics client.
     *
     * @param args Unused arguments to the program.
     */
}
Maybe we can turn this into a negative test case as well — e.g., also assert that the fields excluded from the selector come back null.
/**
 * Creates a batch of settings under two labels, then asks {@code testRunner} to
 * list them through a selector that matches only the "-fetch-" keys under the
 * second label and projects just KEY, ETAG, CONTENT_TYPE and TAGS. Fields not
 * selected (last-modified, content type, label) must come back null.
 */
void listConfigurationSettingsSelectFieldsRunner(BiFunction<List<ConfigurationSetting>, SettingSelector, Iterable<ConfigurationSetting>> testRunner) {
    final String firstLabel = "my-first-mylabel";
    final String secondLabel = "my-second-mylabel";
    final int settingCount = 8;

    final Map<String, String> sharedTags = new HashMap<>();
    sharedTags.put("tag1", "value1");
    sharedTags.put("tag2", "value2");

    // Selector: second label only, "-fetch-" keys only, projected field subset.
    final SettingSelector selector = new SettingSelector()
        .setLabelFilter("my-second*")
        .setKeyFilter(keyPrefix + "-fetch-*")
        .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS);

    // Alternate plain/-fetch- keys; first four settings get the first label,
    // the rest the second.
    List<ConfigurationSetting> settings = new ArrayList<>(settingCount);
    for (int index = 0; index < settingCount; index++) {
        String settingKey;
        if (index % 2 == 0) {
            settingKey = keyPrefix + "-" + index;
        } else {
            settingKey = keyPrefix + "-fetch-" + index;
        }
        String settingLabel;
        if (index / 4 == 0) {
            settingLabel = firstLabel;
        } else {
            settingLabel = secondLabel;
        }
        settings.add(new ConfigurationSetting().setKey(settingKey).setValue("myValue2").setLabel(settingLabel).setTags(sharedTags));
    }

    // Selected fields must be populated; everything else must be null.
    for (ConfigurationSetting setting : testRunner.apply(settings, selector)) {
        assertNotNull(setting.getETag());
        assertNotNull(setting.getKey());
        assertTrue(setting.getKey().contains(keyPrefix));
        assertNotNull(setting.getTags());
        assertEquals(sharedTags.size(), setting.getTags().size());
        assertNull(setting.getLastModified());
        assertNull(setting.getContentType());
        assertNull(setting.getLabel());
    }
}
.setLabelFilter("my-second*")
/**
 * Creates eight settings split across two labels, then has {@code testRunner}
 * list them with a selector that restricts results to the "-fetch-" keys under
 * the second label and selects only the KEY, ETAG, CONTENT_TYPE and TAGS fields.
 * Asserts that selected fields are populated and unselected ones are null.
 */
void listConfigurationSettingsSelectFieldsRunner(BiFunction<List<ConfigurationSetting>, SettingSelector, Iterable<ConfigurationSetting>> testRunner) {
    final String label = "my-first-mylabel";
    final String label2 = "my-second-mylabel";
    final int numberToCreate = 8;
    final Map<String, String> tags = new HashMap<>();
    tags.put("tag1", "value1");
    tags.put("tag2", "value2");

    // Only "my-second*" labelled, keyPrefix + "-fetch-*" keyed settings match.
    final SettingSelector selector = new SettingSelector()
        .setLabelFilter("my-second*")
        .setKeyFilter(keyPrefix + "-fetch-*")
        .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS);

    List<ConfigurationSetting> settings = new ArrayList<>(numberToCreate);
    for (int value = 0; value < numberToCreate; value++) {
        // Even indices: plain keys; odd indices: "-fetch-" keys (selector matches).
        String key = value % 2 == 0 ? keyPrefix + "-" + value : keyPrefix + "-fetch-" + value;
        // First four settings use label, the remaining four use label2.
        String lbl = value / 4 == 0 ? label : label2;
        settings.add(new ConfigurationSetting().setKey(key).setValue("myValue2").setLabel(lbl).setTags(tags));
    }

    for (ConfigurationSetting setting : testRunner.apply(settings, selector)) {
        assertNotNull(setting.getETag());
        assertNotNull(setting.getKey());
        assertTrue(setting.getKey().contains(keyPrefix));
        assertNotNull(setting.getTags());
        assertEquals(tags.size(), setting.getTags().size());
        assertNull(setting.getLastModified());
        // contentType is null even though CONTENT_TYPE was selected, because the
        // created settings never set one.
        assertNull(setting.getContentType());
        assertNull(setting.getLabel());
    }
}
/**
 * Shared harness for the App Configuration sync/async client tests. Concrete
 * subclasses implement the abstract {@code @Test} methods; the {@code *Runner}
 * helpers here build the test fixtures and drive the subclass-provided lambdas.
 */
class ConfigurationClientTestBase extends TestBase {
    private static final String AZURE_APPCONFIG_CONNECTION_STRING = "AZURE_APPCONFIG_CONNECTION_STRING";
    private static final String KEY_PREFIX = "key";
    private static final String LABEL_PREFIX = "label";
    private static final int PREFIX_LENGTH = 8;
    private static final int RESOURCE_LENGTH = 16;

    // Shared across all tests; lazily resolved in clientSetup (playback stub or env var).
    static String connectionString;

    private final ClientLogger logger = new ClientLogger(ConfigurationClientTestBase.class);

    // Randomized per test so concurrent runs don't collide on setting names.
    String keyPrefix;
    String labelPrefix;

    // Regenerates the random key/label prefixes before each test.
    void beforeTestSetup() {
        keyPrefix = testResourceNamer.randomName(KEY_PREFIX, PREFIX_LENGTH);
        labelPrefix = testResourceNamer.randomName(LABEL_PREFIX, PREFIX_LENGTH);
    }

    /**
     * Resolves the connection string (playback stub in playback mode, otherwise
     * the AZURE_APPCONFIG_CONNECTION_STRING environment variable) and builds a
     * client through {@code clientBuilder}. Fails the test if credentials
     * cannot be constructed.
     */
    <T> T clientSetup(Function<ConfigurationClientCredentials, T> clientBuilder) {
        if (CoreUtils.isNullOrEmpty(connectionString)) {
            // NOTE(review): the playback connection-string literal appears truncated by
            // extraction ("Endpoint=http:"); verify the full literal against the repo source.
            connectionString = interceptorManager.isPlaybackMode() ? "Endpoint=http: : Configuration.getGlobalConfiguration().get(AZURE_APPCONFIG_CONNECTION_STRING);
        }
        Objects.requireNonNull(connectionString, "AZURE_APPCONFIG_CONNECTION_STRING expected to be set.");
        T client;
        try {
            client = clientBuilder.apply(new ConfigurationClientCredentials(connectionString));
        } catch (InvalidKeyException | NoSuchAlgorithmException e) {
            // fail() throws, so the null assignment below is unreachable in practice;
            // it only satisfies definite-assignment for the compiler.
            logger.error("Could not create an configuration client credentials.", e);
            fail();
            client = null;
        }
        return Objects.requireNonNull(client);
    }

    // Random key/label names under the per-test prefixes.
    String getKey() {
        return testResourceNamer.randomName(keyPrefix, RESOURCE_LENGTH);
    }

    String getLabel() {
        return testResourceNamer.randomName(labelPrefix, RESOURCE_LENGTH);
    }

    @Test
    public abstract void addConfigurationSetting();

    // Runs testRunner twice: once without a label, once with one (note the
    // second call mutates and reuses the same setting instance).
    void addConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) {
        final Map<String, String> tags = new HashMap<>();
        tags.put("MyTag", "TagValue");
        tags.put("AnotherTag", "AnotherTagValue");

        final ConfigurationSetting newConfiguration = new ConfigurationSetting()
            .setKey(getKey())
            .setValue("myNewValue")
            .setTags(tags)
            .setContentType("text");

        testRunner.accept(newConfiguration);
        testRunner.accept(newConfiguration.setLabel(getLabel()));
    }

    @Test
    public abstract void addConfigurationSettingEmptyKey();

    @Test
    public abstract void addConfigurationSettingEmptyValue();

    // Exercises both a null value and an explicitly empty value.
    void addConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) {
        String key = getKey();
        ConfigurationSetting setting = new ConfigurationSetting().setKey(key);
        ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue("");

        testRunner.accept(setting);
        testRunner.accept(setting2);
    }

    @Test
    public abstract void addConfigurationSettingNullKey();

    @Test
    public abstract void addExistingSetting();

    void addExistingSettingRunner(Consumer<ConfigurationSetting> testRunner) {
        final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(getKey()).setValue("myNewValue");

        testRunner.accept(newConfiguration);
        testRunner.accept(newConfiguration.setLabel(getLabel()));
    }

    @Test
    public abstract void setConfigurationSetting();

    // Provides an initial setting plus the updated version with the same key.
    void setConfigurationSettingRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) {
        String key = getKey();
        String label = getLabel();

        final ConfigurationSetting setConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue");
        final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdatedValue");

        testRunner.accept(setConfiguration, updateConfiguration);
        testRunner.accept(setConfiguration.setLabel(label), updateConfiguration.setLabel(label));
    }

    @Test
    public abstract void setConfigurationSettingIfETag();

    void setConfigurationSettingIfETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) {
        String key = getKey();
        String label = getLabel();

        final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue");
        final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdateValue");

        testRunner.accept(newConfiguration, updateConfiguration);
        testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label));
    }

    @Test
    public abstract void setConfigurationSettingEmptyKey();

    @Test
    public abstract void setConfigurationSettingEmptyValue();

    void setConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) {
        String key = getKey();
        ConfigurationSetting setting = new ConfigurationSetting().setKey(key);
        ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue("");

        testRunner.accept(setting);
        testRunner.accept(setting2);
    }

    @Test
    public abstract void setConfigurationSettingNullKey();

    @Test
    public abstract void getConfigurationSetting();

    void getConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) {
        String key = getKey();
        final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue");

        testRunner.accept(newConfiguration);
        // Uses a fixed label here rather than getLabel(), unlike the other runners.
        testRunner.accept(newConfiguration.setLabel("myLabel"));
    }

    @Test
    public abstract void getConfigurationSettingNotFound();

    @Test
    public abstract void deleteConfigurationSetting();

    void deleteConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) {
        String key = getKey();
        String label = getLabel();
        final ConfigurationSetting deletableConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue");

        testRunner.accept(deletableConfiguration);
        testRunner.accept(deletableConfiguration.setLabel(label));
    }

    @Test
    public abstract void deleteConfigurationSettingNotFound();

    @Test
    public abstract void deleteConfigurationSettingWithETag();

    void deleteConfigurationSettingWithETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) {
        String key = getKey();
        String label = getLabel();

        final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue");
        final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(newConfiguration.getKey()).setValue("myUpdateValue");

        testRunner.accept(newConfiguration, updateConfiguration);
        testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label));
    }

    @Test
    public abstract void deleteConfigurationSettingNullKey();

    @Test
    public abstract void setReadOnly();

    @Test
    public abstract void clearReadOnly();

    @Test
    public abstract void setReadOnlyWithConfigurationSetting();

    @Test
    public abstract void clearReadOnlyWithConfigurationSetting();

    // Shared fixture for the read-only (lock/unlock) tests above.
    void lockUnlockRunner(Consumer<ConfigurationSetting> testRunner) {
        String key = getKey();

        final ConfigurationSetting lockConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue");
        testRunner.accept(lockConfiguration);
    }

    @Test
    public abstract void listWithKeyAndLabel();

    @Test
    public abstract void listWithMultipleKeys();

    // Verifies the listed results cover exactly the two expected settings, using
    // the custom equals/cleanResponse helpers to ignore service-populated fields.
    void listWithMultipleKeysRunner(String key, String key2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) {
        final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value");
        final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value");
        final Set<ConfigurationSetting> expectedSelection = new HashSet<>(Arrays.asList(setting, setting2));

        testRunner.apply(setting, setting2).forEach(actual -> expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))));
        assertTrue(expectedSelection.isEmpty());
    }

    @Test
    public abstract void listWithMultipleLabels();

    void listWithMultipleLabelsRunner(String key, String label, String label2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) {
        final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label);
        final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2);
        final Set<ConfigurationSetting> expectedSelection = new HashSet<>(Arrays.asList(setting, setting2));

        for (ConfigurationSetting actual : testRunner.apply(setting, setting2)) {
            expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual)));
        }
        assertTrue(expectedSelection.isEmpty());
    }

    @Test
    public abstract void listConfigurationSettingsSelectFields();

    @Test
    public abstract void listConfigurationSettingsAcceptDateTime();

    @Test
    public abstract void listRevisions();

    // Revision listings return key + etag only; value/lastModified must be absent.
    static void validateListRevisions(ConfigurationSetting expected, ConfigurationSetting actual) {
        assertEquals(expected.getKey(), actual.getKey());
        assertNotNull(actual.getETag());
        assertNull(actual.getValue());
        assertNull(actual.getLastModified());
    }

    @Test
    public abstract void listRevisionsWithMultipleKeys();

    void listRevisionsWithMultipleKeysRunner(String key, String key2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) {
        final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value");
        final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setValue("updatedValue");
        final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value");
        final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setValue("updatedValue");
        final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update);
        final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput);

        for (ConfigurationSetting actual : testRunner.apply(testInput)) {
            expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual)));
        }
        assertTrue(expectedSelection.isEmpty());
    }

    @Test
    public abstract void listRevisionsWithMultipleLabels();

    void listRevisionsWithMultipleLabelsRunner(String key, String label, String label2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) {
        final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label);
        final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setLabel(setting.getLabel()).setValue("updatedValue");
        final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2);
        final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setLabel(setting2.getLabel()).setValue("updatedValue");
        final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update);
        final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput);

        for (ConfigurationSetting actual : testRunner.apply(testInput)) {
            expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual)));
        }
        assertTrue(expectedSelection.isEmpty());
    }

    @Test
    public abstract void listRevisionsAcceptDateTime();

    @Test
    public abstract void listRevisionsWithPagination();

    @Test
    public abstract void listConfigurationSettingsWithPagination();

    @Test
    public abstract void listRevisionsWithPaginationAndRepeatStream();

    @Test
    public abstract void listRevisionsWithPaginationAndRepeatIterator();

    @Test
    public abstract void getConfigurationSettingWhenValueNotUpdated();

    @Disabled("This test exists to clean up resources missed due to 429s.")
    @Test
    public abstract void deleteAllSettings();

    @Test
    public abstract void addHeadersFromContextPolicyTest();

    void addHeadersFromContextPolicyRunner(Consumer<ConfigurationSetting> testRunner) {
        final String key = getKey();
        final String value = "newValue";

        final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue(value);
        testRunner.accept(newConfiguration);
    }

    /**
     * Helper method to verify that the RestResponse matches what was expected. This method assumes a response status of 200.
     *
     * @param expected ConfigurationSetting expected to be returned by the service
     * @param response RestResponse returned by the service, the body should contain a ConfigurationSetting
     */
    static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response) {
        assertConfigurationEquals(expected, response, 200);
    }

    /**
     * Helper method to verify that the RestResponse matches what was expected.
     *
     * @param expected ConfigurationSetting expected to be returned by the service
     * @param response RestResponse returned from the service, the body should contain a ConfigurationSetting
     * @param expectedStatusCode Expected HTTP status code returned by the service
     */
    static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response, final int expectedStatusCode) {
        assertNotNull(response);
        assertEquals(expectedStatusCode, response.getStatusCode());

        assertConfigurationEquals(expected, response.getValue());
    }

    /**
     * Helper method to verify that the returned ConfigurationSetting matches what was expected.
     *
     * @param expected ConfigurationSetting expected to be returned by the service
     * @param actual ConfigurationSetting contained in the RestResponse body
     */
    static void assertConfigurationEquals(ConfigurationSetting expected, ConfigurationSetting actual) {
        if (expected != null && actual != null) {
            actual = cleanResponse(expected, actual);
        } else if (expected == actual) {
            // Both null: trivially equal.
            return;
        } else if (expected == null || actual == null) {
            // assertFalse(true, ...) is an unconditional failure with a message (fail() equivalent).
            assertFalse(true, "One of input settings is null");
        }

        equals(expected, actual);
    }

    /**
     * The ConfigurationSetting has some fields that are only manipulated by the service,
     * this helper method cleans those fields on the setting returned by the service so tests are able to pass.
     * @param expected ConfigurationSetting expected to be returned by the service.
     * @param actual ConfigurationSetting returned by the service.
     */
    private static ConfigurationSetting cleanResponse(ConfigurationSetting expected, ConfigurationSetting actual) {
        // Copy actual but take expected's ETag so etag differences are ignored by equals().
        ConfigurationSetting cleanedActual = new ConfigurationSetting()
            .setKey(actual.getKey())
            .setLabel(actual.getLabel())
            .setValue(actual.getValue())
            .setTags(actual.getTags())
            .setContentType(actual.getContentType())
            .setETag(expected.getETag());

        // ConfigurationSetting exposes no setter for lastModified, so reflect it in.
        // NOTE(review): this writes onto 'actual', not the returned 'cleanedActual' —
        // cleanedActual.lastModified remains null; verify this is the intended behavior.
        try {
            Field lastModified = ConfigurationSetting.class.getDeclaredField("lastModified");
            lastModified.setAccessible(true);
            lastModified.set(actual, expected.getLastModified());
        } catch (NoSuchFieldException | IllegalAccessException ex) {
            // Intentionally ignored: if reflection fails the lastModified comparison is skipped.
        }

        // Normalize the service's null label to the NO_LABEL sentinel when expected uses it.
        if (ConfigurationSetting.NO_LABEL.equals(expected.getLabel()) && actual.getLabel() == null) {
            cleanedActual.setLabel(ConfigurationSetting.NO_LABEL);
        }

        return cleanedActual;
    }

    // Convenience overload defaulting to HttpResponseException.
    static void assertRestException(Runnable exceptionThrower, int expectedStatusCode) {
        assertRestException(exceptionThrower, HttpResponseException.class, expectedStatusCode);
    }

    // Runs the action and asserts it throws the given exception type with the given status code.
    static void assertRestException(Runnable exceptionThrower, Class<? extends HttpResponseException> expectedExceptionType, int expectedStatusCode) {
        try {
            exceptionThrower.run();
            fail();
        } catch (Throwable ex) {
            assertRestException(ex, expectedExceptionType, expectedStatusCode);
        }
    }

    /**
     * Helper method to verify the error was a HttpResponseException and it has a specific HTTP response code.
     *
     * @param exception Expected error thrown during the test
     * @param expectedStatusCode Expected HTTP status code contained in the error response
     */
    static void assertRestException(Throwable exception, int expectedStatusCode) {
        assertRestException(exception, HttpResponseException.class, expectedStatusCode);
    }

    static void assertRestException(Throwable exception, Class<? extends HttpResponseException> expectedExceptionType, int expectedStatusCode) {
        // Exact class match (not instanceof), then the embedded response's status code.
        assertEquals(expectedExceptionType, exception.getClass());
        assertEquals(expectedStatusCode, ((HttpResponseException) exception).getResponse().getStatusCode());
    }

    /**
     * Helper method to verify that a command throws an IllegalArgumentException.
     *
     * @param exceptionThrower Command that should throw the exception
     */
    static <T> void assertRunnableThrowsException(Runnable exceptionThrower, Class<T> exception) {
        try {
            exceptionThrower.run();
            fail();
        } catch (Exception ex) {
            assertEquals(exception, ex.getClass());
        }
    }

    /**
     * Helper method to verify that two configuration setting are equal. Users can defined their equal method.
     *
     * @param o1 ConfigurationSetting object 1
     * @param o2 ConfigurationSetting object 2
     * @return boolean value that defines if two ConfigurationSettings are equal
     */
    static boolean equals(ConfigurationSetting o1, ConfigurationSetting o2) {
        if (o1 == o2) {
            return true;
        }

        // NOTE(review): assumes both arguments are non-null past this point (o1.getKey()
        // would NPE otherwise). Tags are compared by content only when o1 has any;
        // emptiness mismatch already returned false above.
        if (!Objects.equals(o1.getKey(), o2.getKey())
            || !Objects.equals(o1.getLabel(), o2.getLabel())
            || !Objects.equals(o1.getValue(), o2.getValue())
            || !Objects.equals(o1.getETag(), o2.getETag())
            || !Objects.equals(o1.getLastModified(), o2.getLastModified())
            || !Objects.equals(o1.isReadOnly(), o2.isReadOnly())
            || !Objects.equals(o1.getContentType(), o2.getContentType())
            || CoreUtils.isNullOrEmpty(o1.getTags()) != CoreUtils.isNullOrEmpty(o2.getTags())) {
            return false;
        }

        if (!CoreUtils.isNullOrEmpty(o1.getTags())) {
            return Objects.equals(o1.getTags(), o2.getTags());
        }

        return true;
    }

    /**
     * A helper method to verify that two lists of ConfigurationSetting are equal each other.
     *
     * @param settings1 List of ConfigurationSetting
     * @param settings2 Another List of ConfigurationSetting
     * @return boolean value that defines if two ConfigurationSetting lists are equal
     */
    static boolean equalsArray(List<ConfigurationSetting> settings1, List<ConfigurationSetting> settings2) {
        if (settings1 == settings2) {
            return true;
        }

        if (settings1 == null || settings2 == null) {
            return false;
        }

        if (settings1.size() != settings2.size()) {
            return false;
        }

        // Order-sensitive element-wise comparison via the custom equals above.
        final int size = settings1.size();
        for (int i = 0; i < size; i++) {
            if (!equals(settings1.get(i), settings2.get(i))) {
                return false;
            }
        }

        return true;
    }

    /**
     * Helper method that sets up HttpHeaders
     *
     * @return the http headers
     */
    static HttpHeaders getCustomizedHeaders() {
        final String headerOne = "my-header1";
        final String headerTwo = "my-header2";
        final String headerThree = "my-header3";

        final String headerOneValue = "my-header1-value";
        final String headerTwoValue = "my-header2-value";
        final String headerThreeValue = "my-header3-value";

        final HttpHeaders headers = new HttpHeaders();
        headers.put(headerOne, headerOneValue);
        headers.put(headerTwo, headerTwoValue);
        headers.put(headerThree, headerThreeValue);

        return headers;
    }

    /**
     * Helper method that check if the {@code headerContainer} contains {@code headers}.
     *
     * @param headers the headers that been checked
     * @param headerContainer The headers container that check if the {@code headers} exist in it.
     */
    static void assertContainsHeaders(HttpHeaders headers, HttpHeaders headerContainer) {
        headers.stream().forEach(httpHeader ->
            assertEquals(headerContainer.getValue(httpHeader.getName()), httpHeader.getValue()));
    }
}
class ConfigurationClientTestBase extends TestBase { private static final String AZURE_APPCONFIG_CONNECTION_STRING = "AZURE_APPCONFIG_CONNECTION_STRING"; private static final String KEY_PREFIX = "key"; private static final String LABEL_PREFIX = "label"; private static final int PREFIX_LENGTH = 8; private static final int RESOURCE_LENGTH = 16; static String connectionString; private final ClientLogger logger = new ClientLogger(ConfigurationClientTestBase.class); String keyPrefix; String labelPrefix; void beforeTestSetup() { keyPrefix = testResourceNamer.randomName(KEY_PREFIX, PREFIX_LENGTH); labelPrefix = testResourceNamer.randomName(LABEL_PREFIX, PREFIX_LENGTH); } <T> T clientSetup(Function<ConfigurationClientCredentials, T> clientBuilder) { if (CoreUtils.isNullOrEmpty(connectionString)) { connectionString = interceptorManager.isPlaybackMode() ? "Endpoint=http: : Configuration.getGlobalConfiguration().get(AZURE_APPCONFIG_CONNECTION_STRING); } Objects.requireNonNull(connectionString, "AZURE_APPCONFIG_CONNECTION_STRING expected to be set."); T client; try { client = clientBuilder.apply(new ConfigurationClientCredentials(connectionString)); } catch (InvalidKeyException | NoSuchAlgorithmException e) { logger.error("Could not create an configuration client credentials.", e); fail(); client = null; } return Objects.requireNonNull(client); } String getKey() { return testResourceNamer.randomName(keyPrefix, RESOURCE_LENGTH); } String getLabel() { return testResourceNamer.randomName(labelPrefix, RESOURCE_LENGTH); } @Test public abstract void addConfigurationSetting(); void addConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("MyTag", "TagValue"); tags.put("AnotherTag", "AnotherTagValue"); final ConfigurationSetting newConfiguration = new ConfigurationSetting() .setKey(getKey()) .setValue("myNewValue") .setTags(tags) .setContentType("text"); testRunner.accept(newConfiguration); 
testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void addConfigurationSettingEmptyKey(); @Test public abstract void addConfigurationSettingEmptyValue(); void addConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void addConfigurationSettingNullKey(); @Test public abstract void addExistingSetting(); void addExistingSettingRunner(Consumer<ConfigurationSetting> testRunner) { final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(getKey()).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void setConfigurationSetting(); void setConfigurationSettingRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting setConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdatedValue"); testRunner.accept(setConfiguration, updateConfiguration); testRunner.accept(setConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingIfETag(); void setConfigurationSettingIfETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdateValue"); testRunner.accept(newConfiguration, 
updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingEmptyKey(); @Test public abstract void setConfigurationSettingEmptyValue(); void setConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void setConfigurationSettingNullKey(); @Test public abstract void getConfigurationSetting(); void getConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel("myLabel")); } @Test public abstract void getConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSetting(); void deleteConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting deletableConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(deletableConfiguration); testRunner.accept(deletableConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSettingWithETag(); void deleteConfigurationSettingWithETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new 
ConfigurationSetting().setKey(newConfiguration.getKey()).setValue("myUpdateValue"); testRunner.accept(newConfiguration, updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNullKey(); @Test public abstract void setReadOnly(); @Test public abstract void clearReadOnly(); @Test public abstract void setReadOnlyWithConfigurationSetting(); @Test public abstract void clearReadOnlyWithConfigurationSetting(); void lockUnlockRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting lockConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(lockConfiguration); } @Test public abstract void listWithKeyAndLabel(); @Test public abstract void listWithMultipleKeys(); void listWithMultipleKeysRunner(String key, String key2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(Arrays.asList(setting, setting2)); testRunner.apply(setting, setting2).forEach(actual -> expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual)))); assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listWithMultipleLabels(); void listWithMultipleLabelsRunner(String key, String label, String label2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final Set<ConfigurationSetting> expectedSelection 
= new HashSet<>(Arrays.asList(setting, setting2)); for (ConfigurationSetting actual : testRunner.apply(setting, setting2)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listConfigurationSettingsSelectFields(); @Test public abstract void listConfigurationSettingsSelectFieldsWithPrefixStarKeyFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithSubstringKeyFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithPrefixStarLabelFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithSubstringLabelFilter(); void listConfigurationSettingsSelectFieldsWithNotSupportedFilterRunner(String keyFilter, String labelFilter, Consumer<SettingSelector> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("tag1", "value1"); tags.put("tag2", "value2"); final SettingSelector selector = new SettingSelector() .setKeyFilter(keyFilter) .setLabelFilter(labelFilter) .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS); testRunner.accept(selector); } @Test public abstract void listConfigurationSettingsAcceptDateTime(); @Test public abstract void listRevisions(); static void validateListRevisions(ConfigurationSetting expected, ConfigurationSetting actual) { assertEquals(expected.getKey(), actual.getKey()); assertNotNull(actual.getETag()); assertNull(actual.getValue()); assertNull(actual.getLastModified()); } @Test public abstract void listRevisionsWithMultipleKeys(); void listRevisionsWithMultipleKeysRunner(String key, String key2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setValue("updatedValue"); final 
ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsWithMultipleLabels(); void listRevisionsWithMultipleLabelsRunner(String key, String label, String label2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setLabel(setting.getLabel()).setValue("updatedValue"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setLabel(setting2.getLabel()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsAcceptDateTime(); @Test public abstract void listRevisionsWithPagination(); @Test public abstract void listConfigurationSettingsWithPagination(); @Test public abstract void 
listRevisionsWithPaginationAndRepeatStream(); @Test public abstract void listRevisionsWithPaginationAndRepeatIterator(); @Test public abstract void getConfigurationSettingWhenValueNotUpdated(); @Disabled("This test exists to clean up resources missed due to 429s.") @Test public abstract void deleteAllSettings(); @Test public abstract void addHeadersFromContextPolicyTest(); void addHeadersFromContextPolicyRunner(Consumer<ConfigurationSetting> testRunner) { final String key = getKey(); final String value = "newValue"; final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue(value); testRunner.accept(newConfiguration); } /** * Helper method to verify that the RestResponse matches what was expected. This method assumes a response status of 200. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned by the service, the body should contain a ConfigurationSetting */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response) { assertConfigurationEquals(expected, response, 200); } /** * Helper method to verify that the RestResponse matches what was expected. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned from the service, the body should contain a ConfigurationSetting * @param expectedStatusCode Expected HTTP status code returned by the service */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response, final int expectedStatusCode) { assertNotNull(response); assertEquals(expectedStatusCode, response.getStatusCode()); assertConfigurationEquals(expected, response.getValue()); } /** * Helper method to verify that the returned ConfigurationSetting matches what was expected. 
* * @param expected ConfigurationSetting expected to be returned by the service * @param actual ConfigurationSetting contained in the RestResponse body */ static void assertConfigurationEquals(ConfigurationSetting expected, ConfigurationSetting actual) { if (expected != null && actual != null) { actual = cleanResponse(expected, actual); } else if (expected == actual) { return; } else if (expected == null || actual == null) { assertFalse(true, "One of input settings is null"); } equals(expected, actual); } /** * The ConfigurationSetting has some fields that are only manipulated by the service, * this helper method cleans those fields on the setting returned by the service so tests are able to pass. * @param expected ConfigurationSetting expected to be returned by the service. * @param actual ConfigurationSetting returned by the service. */ private static ConfigurationSetting cleanResponse(ConfigurationSetting expected, ConfigurationSetting actual) { ConfigurationSetting cleanedActual = new ConfigurationSetting() .setKey(actual.getKey()) .setLabel(actual.getLabel()) .setValue(actual.getValue()) .setTags(actual.getTags()) .setContentType(actual.getContentType()) .setETag(expected.getETag()); try { Field lastModified = ConfigurationSetting.class.getDeclaredField("lastModified"); lastModified.setAccessible(true); lastModified.set(actual, expected.getLastModified()); } catch (NoSuchFieldException | IllegalAccessException ex) { } if (ConfigurationSetting.NO_LABEL.equals(expected.getLabel()) && actual.getLabel() == null) { cleanedActual.setLabel(ConfigurationSetting.NO_LABEL); } return cleanedActual; } static void assertRestException(Runnable exceptionThrower, int expectedStatusCode) { assertRestException(exceptionThrower, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Runnable exceptionThrower, Class<? 
extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { try { exceptionThrower.run(); fail(); } catch (Throwable ex) { assertRestException(ex, expectedExceptionType, expectedStatusCode); } } /** * Helper method to verify the error was a HttpResponseException and it has a specific HTTP response code. * * @param exception Expected error thrown during the test * @param expectedStatusCode Expected HTTP status code contained in the error response */ static void assertRestException(Throwable exception, int expectedStatusCode) { assertRestException(exception, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Throwable exception, Class<? extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { assertEquals(expectedExceptionType, exception.getClass()); assertEquals(expectedStatusCode, ((HttpResponseException) exception).getResponse().getStatusCode()); } /** * Helper method to verify that a command throws an IllegalArgumentException. * * @param exceptionThrower Command that should throw the exception */ static <T> void assertRunnableThrowsException(Runnable exceptionThrower, Class<T> exception) { try { exceptionThrower.run(); fail(); } catch (Exception ex) { assertEquals(exception, ex.getClass()); } } /** * Helper method to verify that two configuration setting are equal. Users can defined their equal method. 
* * @param o1 ConfigurationSetting object 1 * @param o2 ConfigurationSetting object 2 * @return boolean value that defines if two ConfigurationSettings are equal */ static boolean equals(ConfigurationSetting o1, ConfigurationSetting o2) { if (o1 == o2) { return true; } if (!Objects.equals(o1.getKey(), o2.getKey()) || !Objects.equals(o1.getLabel(), o2.getLabel()) || !Objects.equals(o1.getValue(), o2.getValue()) || !Objects.equals(o1.getETag(), o2.getETag()) || !Objects.equals(o1.getLastModified(), o2.getLastModified()) || !Objects.equals(o1.isReadOnly(), o2.isReadOnly()) || !Objects.equals(o1.getContentType(), o2.getContentType()) || CoreUtils.isNullOrEmpty(o1.getTags()) != CoreUtils.isNullOrEmpty(o2.getTags())) { return false; } if (!CoreUtils.isNullOrEmpty(o1.getTags())) { return Objects.equals(o1.getTags(), o2.getTags()); } return true; } /** * A helper method to verify that two lists of ConfigurationSetting are equal each other. * * @param settings1 List of ConfigurationSetting * @param settings2 Another List of ConfigurationSetting * @return boolean value that defines if two ConfigurationSetting lists are equal */ static boolean equalsArray(List<ConfigurationSetting> settings1, List<ConfigurationSetting> settings2) { if (settings1 == settings2) { return true; } if (settings1 == null || settings2 == null) { return false; } if (settings1.size() != settings2.size()) { return false; } final int size = settings1.size(); for (int i = 0; i < size; i++) { if (!equals(settings1.get(i), settings2.get(i))) { return false; } } return true; } /** * Helper method that sets up HttpHeaders * * @return the http headers */ static HttpHeaders getCustomizedHeaders() { final String headerOne = "my-header1"; final String headerTwo = "my-header2"; final String headerThree = "my-header3"; final String headerOneValue = "my-header1-value"; final String headerTwoValue = "my-header2-value"; final String headerThreeValue = "my-header3-value"; final HttpHeaders headers = new HttpHeaders(); 
headers.put(headerOne, headerOneValue); headers.put(headerTwo, headerTwoValue); headers.put(headerThree, headerThreeValue); return headers; } /** * Helper method that check if the {@code headerContainer} contains {@code headers}. * * @param headers the headers that been checked * @param headerContainer The headers container that check if the {@code headers} exist in it. */ static void assertContainsHeaders(HttpHeaders headers, HttpHeaders headerContainer) { headers.stream().forEach(httpHeader -> assertEquals(headerContainer.getValue(httpHeader.getName()), httpHeader.getValue())); } }
if in the future, the service team start to support it again. Then we need to update the negative test case again?
void listConfigurationSettingsSelectFieldsRunner(BiFunction<List<ConfigurationSetting>, SettingSelector, Iterable<ConfigurationSetting>> testRunner) { final String label = "my-first-mylabel"; final String label2 = "my-second-mylabel"; final int numberToCreate = 8; final Map<String, String> tags = new HashMap<>(); tags.put("tag1", "value1"); tags.put("tag2", "value2"); final SettingSelector selector = new SettingSelector() .setLabelFilter("my-second*") .setKeyFilter(keyPrefix + "-fetch-*") .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS); List<ConfigurationSetting> settings = new ArrayList<>(numberToCreate); for (int value = 0; value < numberToCreate; value++) { String key = value % 2 == 0 ? keyPrefix + "-" + value : keyPrefix + "-fetch-" + value; String lbl = value / 4 == 0 ? label : label2; settings.add(new ConfigurationSetting().setKey(key).setValue("myValue2").setLabel(lbl).setTags(tags)); } for (ConfigurationSetting setting : testRunner.apply(settings, selector)) { assertNotNull(setting.getETag()); assertNotNull(setting.getKey()); assertTrue(setting.getKey().contains(keyPrefix)); assertNotNull(setting.getTags()); assertEquals(tags.size(), setting.getTags().size()); assertNull(setting.getLastModified()); assertNull(setting.getContentType()); assertNull(setting.getLabel()); } }
.setLabelFilter("my-second*")
void listConfigurationSettingsSelectFieldsRunner(BiFunction<List<ConfigurationSetting>, SettingSelector, Iterable<ConfigurationSetting>> testRunner) { final String label = "my-first-mylabel"; final String label2 = "my-second-mylabel"; final int numberToCreate = 8; final Map<String, String> tags = new HashMap<>(); tags.put("tag1", "value1"); tags.put("tag2", "value2"); final SettingSelector selector = new SettingSelector() .setLabelFilter("my-second*") .setKeyFilter(keyPrefix + "-fetch-*") .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS); List<ConfigurationSetting> settings = new ArrayList<>(numberToCreate); for (int value = 0; value < numberToCreate; value++) { String key = value % 2 == 0 ? keyPrefix + "-" + value : keyPrefix + "-fetch-" + value; String lbl = value / 4 == 0 ? label : label2; settings.add(new ConfigurationSetting().setKey(key).setValue("myValue2").setLabel(lbl).setTags(tags)); } for (ConfigurationSetting setting : testRunner.apply(settings, selector)) { assertNotNull(setting.getETag()); assertNotNull(setting.getKey()); assertTrue(setting.getKey().contains(keyPrefix)); assertNotNull(setting.getTags()); assertEquals(tags.size(), setting.getTags().size()); assertNull(setting.getLastModified()); assertNull(setting.getContentType()); assertNull(setting.getLabel()); } }
class ConfigurationClientTestBase extends TestBase { private static final String AZURE_APPCONFIG_CONNECTION_STRING = "AZURE_APPCONFIG_CONNECTION_STRING"; private static final String KEY_PREFIX = "key"; private static final String LABEL_PREFIX = "label"; private static final int PREFIX_LENGTH = 8; private static final int RESOURCE_LENGTH = 16; static String connectionString; private final ClientLogger logger = new ClientLogger(ConfigurationClientTestBase.class); String keyPrefix; String labelPrefix; void beforeTestSetup() { keyPrefix = testResourceNamer.randomName(KEY_PREFIX, PREFIX_LENGTH); labelPrefix = testResourceNamer.randomName(LABEL_PREFIX, PREFIX_LENGTH); } <T> T clientSetup(Function<ConfigurationClientCredentials, T> clientBuilder) { if (CoreUtils.isNullOrEmpty(connectionString)) { connectionString = interceptorManager.isPlaybackMode() ? "Endpoint=http: : Configuration.getGlobalConfiguration().get(AZURE_APPCONFIG_CONNECTION_STRING); } Objects.requireNonNull(connectionString, "AZURE_APPCONFIG_CONNECTION_STRING expected to be set."); T client; try { client = clientBuilder.apply(new ConfigurationClientCredentials(connectionString)); } catch (InvalidKeyException | NoSuchAlgorithmException e) { logger.error("Could not create an configuration client credentials.", e); fail(); client = null; } return Objects.requireNonNull(client); } String getKey() { return testResourceNamer.randomName(keyPrefix, RESOURCE_LENGTH); } String getLabel() { return testResourceNamer.randomName(labelPrefix, RESOURCE_LENGTH); } @Test public abstract void addConfigurationSetting(); void addConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("MyTag", "TagValue"); tags.put("AnotherTag", "AnotherTagValue"); final ConfigurationSetting newConfiguration = new ConfigurationSetting() .setKey(getKey()) .setValue("myNewValue") .setTags(tags) .setContentType("text"); testRunner.accept(newConfiguration); 
testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void addConfigurationSettingEmptyKey(); @Test public abstract void addConfigurationSettingEmptyValue(); void addConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void addConfigurationSettingNullKey(); @Test public abstract void addExistingSetting(); void addExistingSettingRunner(Consumer<ConfigurationSetting> testRunner) { final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(getKey()).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void setConfigurationSetting(); void setConfigurationSettingRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting setConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdatedValue"); testRunner.accept(setConfiguration, updateConfiguration); testRunner.accept(setConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingIfETag(); void setConfigurationSettingIfETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdateValue"); testRunner.accept(newConfiguration, 
updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingEmptyKey(); @Test public abstract void setConfigurationSettingEmptyValue(); void setConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void setConfigurationSettingNullKey(); @Test public abstract void getConfigurationSetting(); void getConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel("myLabel")); } @Test public abstract void getConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSetting(); void deleteConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting deletableConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(deletableConfiguration); testRunner.accept(deletableConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSettingWithETag(); void deleteConfigurationSettingWithETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new 
ConfigurationSetting().setKey(newConfiguration.getKey()).setValue("myUpdateValue"); testRunner.accept(newConfiguration, updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNullKey(); @Test public abstract void setReadOnly(); @Test public abstract void clearReadOnly(); @Test public abstract void setReadOnlyWithConfigurationSetting(); @Test public abstract void clearReadOnlyWithConfigurationSetting(); void lockUnlockRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting lockConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(lockConfiguration); } @Test public abstract void listWithKeyAndLabel(); @Test public abstract void listWithMultipleKeys(); void listWithMultipleKeysRunner(String key, String key2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(Arrays.asList(setting, setting2)); testRunner.apply(setting, setting2).forEach(actual -> expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual)))); assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listWithMultipleLabels(); void listWithMultipleLabelsRunner(String key, String label, String label2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final Set<ConfigurationSetting> expectedSelection 
= new HashSet<>(Arrays.asList(setting, setting2)); for (ConfigurationSetting actual : testRunner.apply(setting, setting2)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listConfigurationSettingsSelectFields(); @Test public abstract void listConfigurationSettingsAcceptDateTime(); @Test public abstract void listRevisions(); static void validateListRevisions(ConfigurationSetting expected, ConfigurationSetting actual) { assertEquals(expected.getKey(), actual.getKey()); assertNotNull(actual.getETag()); assertNull(actual.getValue()); assertNull(actual.getLastModified()); } @Test public abstract void listRevisionsWithMultipleKeys(); void listRevisionsWithMultipleKeysRunner(String key, String key2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setValue("updatedValue"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsWithMultipleLabels(); void listRevisionsWithMultipleLabelsRunner(String key, String label, String label2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new 
ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setLabel(setting.getLabel()).setValue("updatedValue"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setLabel(setting2.getLabel()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsAcceptDateTime(); @Test public abstract void listRevisionsWithPagination(); @Test public abstract void listConfigurationSettingsWithPagination(); @Test public abstract void listRevisionsWithPaginationAndRepeatStream(); @Test public abstract void listRevisionsWithPaginationAndRepeatIterator(); @Test public abstract void getConfigurationSettingWhenValueNotUpdated(); @Disabled("This test exists to clean up resources missed due to 429s.") @Test public abstract void deleteAllSettings(); @Test public abstract void addHeadersFromContextPolicyTest(); void addHeadersFromContextPolicyRunner(Consumer<ConfigurationSetting> testRunner) { final String key = getKey(); final String value = "newValue"; final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue(value); testRunner.accept(newConfiguration); } /** * Helper method to verify that the RestResponse matches what was expected. This method assumes a response status of 200. 
* * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned by the service, the body should contain a ConfigurationSetting */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response) { assertConfigurationEquals(expected, response, 200); } /** * Helper method to verify that the RestResponse matches what was expected. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned from the service, the body should contain a ConfigurationSetting * @param expectedStatusCode Expected HTTP status code returned by the service */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response, final int expectedStatusCode) { assertNotNull(response); assertEquals(expectedStatusCode, response.getStatusCode()); assertConfigurationEquals(expected, response.getValue()); } /** * Helper method to verify that the returned ConfigurationSetting matches what was expected. * * @param expected ConfigurationSetting expected to be returned by the service * @param actual ConfigurationSetting contained in the RestResponse body */ static void assertConfigurationEquals(ConfigurationSetting expected, ConfigurationSetting actual) { if (expected != null && actual != null) { actual = cleanResponse(expected, actual); } else if (expected == actual) { return; } else if (expected == null || actual == null) { assertFalse(true, "One of input settings is null"); } equals(expected, actual); } /** * The ConfigurationSetting has some fields that are only manipulated by the service, * this helper method cleans those fields on the setting returned by the service so tests are able to pass. * @param expected ConfigurationSetting expected to be returned by the service. * @param actual ConfigurationSetting returned by the service. 
*/ private static ConfigurationSetting cleanResponse(ConfigurationSetting expected, ConfigurationSetting actual) { ConfigurationSetting cleanedActual = new ConfigurationSetting() .setKey(actual.getKey()) .setLabel(actual.getLabel()) .setValue(actual.getValue()) .setTags(actual.getTags()) .setContentType(actual.getContentType()) .setETag(expected.getETag()); try { Field lastModified = ConfigurationSetting.class.getDeclaredField("lastModified"); lastModified.setAccessible(true); lastModified.set(actual, expected.getLastModified()); } catch (NoSuchFieldException | IllegalAccessException ex) { } if (ConfigurationSetting.NO_LABEL.equals(expected.getLabel()) && actual.getLabel() == null) { cleanedActual.setLabel(ConfigurationSetting.NO_LABEL); } return cleanedActual; } static void assertRestException(Runnable exceptionThrower, int expectedStatusCode) { assertRestException(exceptionThrower, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Runnable exceptionThrower, Class<? extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { try { exceptionThrower.run(); fail(); } catch (Throwable ex) { assertRestException(ex, expectedExceptionType, expectedStatusCode); } } /** * Helper method to verify the error was a HttpResponseException and it has a specific HTTP response code. * * @param exception Expected error thrown during the test * @param expectedStatusCode Expected HTTP status code contained in the error response */ static void assertRestException(Throwable exception, int expectedStatusCode) { assertRestException(exception, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Throwable exception, Class<? 
extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { assertEquals(expectedExceptionType, exception.getClass()); assertEquals(expectedStatusCode, ((HttpResponseException) exception).getResponse().getStatusCode()); } /** * Helper method to verify that a command throws an IllegalArgumentException. * * @param exceptionThrower Command that should throw the exception */ static <T> void assertRunnableThrowsException(Runnable exceptionThrower, Class<T> exception) { try { exceptionThrower.run(); fail(); } catch (Exception ex) { assertEquals(exception, ex.getClass()); } } /** * Helper method to verify that two configuration setting are equal. Users can defined their equal method. * * @param o1 ConfigurationSetting object 1 * @param o2 ConfigurationSetting object 2 * @return boolean value that defines if two ConfigurationSettings are equal */ static boolean equals(ConfigurationSetting o1, ConfigurationSetting o2) { if (o1 == o2) { return true; } if (!Objects.equals(o1.getKey(), o2.getKey()) || !Objects.equals(o1.getLabel(), o2.getLabel()) || !Objects.equals(o1.getValue(), o2.getValue()) || !Objects.equals(o1.getETag(), o2.getETag()) || !Objects.equals(o1.getLastModified(), o2.getLastModified()) || !Objects.equals(o1.isReadOnly(), o2.isReadOnly()) || !Objects.equals(o1.getContentType(), o2.getContentType()) || CoreUtils.isNullOrEmpty(o1.getTags()) != CoreUtils.isNullOrEmpty(o2.getTags())) { return false; } if (!CoreUtils.isNullOrEmpty(o1.getTags())) { return Objects.equals(o1.getTags(), o2.getTags()); } return true; } /** * A helper method to verify that two lists of ConfigurationSetting are equal each other. 
* * @param settings1 List of ConfigurationSetting * @param settings2 Another List of ConfigurationSetting * @return boolean value that defines if two ConfigurationSetting lists are equal */ static boolean equalsArray(List<ConfigurationSetting> settings1, List<ConfigurationSetting> settings2) { if (settings1 == settings2) { return true; } if (settings1 == null || settings2 == null) { return false; } if (settings1.size() != settings2.size()) { return false; } final int size = settings1.size(); for (int i = 0; i < size; i++) { if (!equals(settings1.get(i), settings2.get(i))) { return false; } } return true; } /** * Helper method that sets up HttpHeaders * * @return the http headers */ static HttpHeaders getCustomizedHeaders() { final String headerOne = "my-header1"; final String headerTwo = "my-header2"; final String headerThree = "my-header3"; final String headerOneValue = "my-header1-value"; final String headerTwoValue = "my-header2-value"; final String headerThreeValue = "my-header3-value"; final HttpHeaders headers = new HttpHeaders(); headers.put(headerOne, headerOneValue); headers.put(headerTwo, headerTwoValue); headers.put(headerThree, headerThreeValue); return headers; } /** * Helper method that check if the {@code headerContainer} contains {@code headers}. * * @param headers the headers that been checked * @param headerContainer The headers container that check if the {@code headers} exist in it. */ static void assertContainsHeaders(HttpHeaders headers, HttpHeaders headerContainer) { headers.stream().forEach(httpHeader -> assertEquals(headerContainer.getValue(httpHeader.getName()), httpHeader.getValue())); } }
class ConfigurationClientTestBase extends TestBase { private static final String AZURE_APPCONFIG_CONNECTION_STRING = "AZURE_APPCONFIG_CONNECTION_STRING"; private static final String KEY_PREFIX = "key"; private static final String LABEL_PREFIX = "label"; private static final int PREFIX_LENGTH = 8; private static final int RESOURCE_LENGTH = 16; static String connectionString; private final ClientLogger logger = new ClientLogger(ConfigurationClientTestBase.class); String keyPrefix; String labelPrefix; void beforeTestSetup() { keyPrefix = testResourceNamer.randomName(KEY_PREFIX, PREFIX_LENGTH); labelPrefix = testResourceNamer.randomName(LABEL_PREFIX, PREFIX_LENGTH); } <T> T clientSetup(Function<ConfigurationClientCredentials, T> clientBuilder) { if (CoreUtils.isNullOrEmpty(connectionString)) { connectionString = interceptorManager.isPlaybackMode() ? "Endpoint=http: : Configuration.getGlobalConfiguration().get(AZURE_APPCONFIG_CONNECTION_STRING); } Objects.requireNonNull(connectionString, "AZURE_APPCONFIG_CONNECTION_STRING expected to be set."); T client; try { client = clientBuilder.apply(new ConfigurationClientCredentials(connectionString)); } catch (InvalidKeyException | NoSuchAlgorithmException e) { logger.error("Could not create an configuration client credentials.", e); fail(); client = null; } return Objects.requireNonNull(client); } String getKey() { return testResourceNamer.randomName(keyPrefix, RESOURCE_LENGTH); } String getLabel() { return testResourceNamer.randomName(labelPrefix, RESOURCE_LENGTH); } @Test public abstract void addConfigurationSetting(); void addConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("MyTag", "TagValue"); tags.put("AnotherTag", "AnotherTagValue"); final ConfigurationSetting newConfiguration = new ConfigurationSetting() .setKey(getKey()) .setValue("myNewValue") .setTags(tags) .setContentType("text"); testRunner.accept(newConfiguration); 
testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void addConfigurationSettingEmptyKey(); @Test public abstract void addConfigurationSettingEmptyValue(); void addConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void addConfigurationSettingNullKey(); @Test public abstract void addExistingSetting(); void addExistingSettingRunner(Consumer<ConfigurationSetting> testRunner) { final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(getKey()).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void setConfigurationSetting(); void setConfigurationSettingRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting setConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdatedValue"); testRunner.accept(setConfiguration, updateConfiguration); testRunner.accept(setConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingIfETag(); void setConfigurationSettingIfETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdateValue"); testRunner.accept(newConfiguration, 
updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingEmptyKey(); @Test public abstract void setConfigurationSettingEmptyValue(); void setConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void setConfigurationSettingNullKey(); @Test public abstract void getConfigurationSetting(); void getConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel("myLabel")); } @Test public abstract void getConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSetting(); void deleteConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting deletableConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(deletableConfiguration); testRunner.accept(deletableConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSettingWithETag(); void deleteConfigurationSettingWithETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new 
ConfigurationSetting().setKey(newConfiguration.getKey()).setValue("myUpdateValue"); testRunner.accept(newConfiguration, updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNullKey(); @Test public abstract void setReadOnly(); @Test public abstract void clearReadOnly(); @Test public abstract void setReadOnlyWithConfigurationSetting(); @Test public abstract void clearReadOnlyWithConfigurationSetting(); void lockUnlockRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting lockConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(lockConfiguration); } @Test public abstract void listWithKeyAndLabel(); @Test public abstract void listWithMultipleKeys(); void listWithMultipleKeysRunner(String key, String key2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(Arrays.asList(setting, setting2)); testRunner.apply(setting, setting2).forEach(actual -> expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual)))); assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listWithMultipleLabels(); void listWithMultipleLabelsRunner(String key, String label, String label2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final Set<ConfigurationSetting> expectedSelection 
= new HashSet<>(Arrays.asList(setting, setting2)); for (ConfigurationSetting actual : testRunner.apply(setting, setting2)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listConfigurationSettingsSelectFields(); @Test public abstract void listConfigurationSettingsSelectFieldsWithPrefixStarKeyFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithSubstringKeyFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithPrefixStarLabelFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithSubstringLabelFilter(); void listConfigurationSettingsSelectFieldsWithNotSupportedFilterRunner(String keyFilter, String labelFilter, Consumer<SettingSelector> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("tag1", "value1"); tags.put("tag2", "value2"); final SettingSelector selector = new SettingSelector() .setKeyFilter(keyFilter) .setLabelFilter(labelFilter) .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS); testRunner.accept(selector); } @Test public abstract void listConfigurationSettingsAcceptDateTime(); @Test public abstract void listRevisions(); static void validateListRevisions(ConfigurationSetting expected, ConfigurationSetting actual) { assertEquals(expected.getKey(), actual.getKey()); assertNotNull(actual.getETag()); assertNull(actual.getValue()); assertNull(actual.getLastModified()); } @Test public abstract void listRevisionsWithMultipleKeys(); void listRevisionsWithMultipleKeysRunner(String key, String key2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setValue("updatedValue"); final 
ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsWithMultipleLabels(); void listRevisionsWithMultipleLabelsRunner(String key, String label, String label2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setLabel(setting.getLabel()).setValue("updatedValue"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setLabel(setting2.getLabel()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsAcceptDateTime(); @Test public abstract void listRevisionsWithPagination(); @Test public abstract void listConfigurationSettingsWithPagination(); @Test public abstract void 
listRevisionsWithPaginationAndRepeatStream(); @Test public abstract void listRevisionsWithPaginationAndRepeatIterator(); @Test public abstract void getConfigurationSettingWhenValueNotUpdated(); @Disabled("This test exists to clean up resources missed due to 429s.") @Test public abstract void deleteAllSettings(); @Test public abstract void addHeadersFromContextPolicyTest(); void addHeadersFromContextPolicyRunner(Consumer<ConfigurationSetting> testRunner) { final String key = getKey(); final String value = "newValue"; final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue(value); testRunner.accept(newConfiguration); } /** * Helper method to verify that the RestResponse matches what was expected. This method assumes a response status of 200. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned by the service, the body should contain a ConfigurationSetting */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response) { assertConfigurationEquals(expected, response, 200); } /** * Helper method to verify that the RestResponse matches what was expected. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned from the service, the body should contain a ConfigurationSetting * @param expectedStatusCode Expected HTTP status code returned by the service */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response, final int expectedStatusCode) { assertNotNull(response); assertEquals(expectedStatusCode, response.getStatusCode()); assertConfigurationEquals(expected, response.getValue()); } /** * Helper method to verify that the returned ConfigurationSetting matches what was expected. 
* * @param expected ConfigurationSetting expected to be returned by the service * @param actual ConfigurationSetting contained in the RestResponse body */ static void assertConfigurationEquals(ConfigurationSetting expected, ConfigurationSetting actual) { if (expected != null && actual != null) { actual = cleanResponse(expected, actual); } else if (expected == actual) { return; } else if (expected == null || actual == null) { assertFalse(true, "One of input settings is null"); } equals(expected, actual); } /** * The ConfigurationSetting has some fields that are only manipulated by the service, * this helper method cleans those fields on the setting returned by the service so tests are able to pass. * @param expected ConfigurationSetting expected to be returned by the service. * @param actual ConfigurationSetting returned by the service. */ private static ConfigurationSetting cleanResponse(ConfigurationSetting expected, ConfigurationSetting actual) { ConfigurationSetting cleanedActual = new ConfigurationSetting() .setKey(actual.getKey()) .setLabel(actual.getLabel()) .setValue(actual.getValue()) .setTags(actual.getTags()) .setContentType(actual.getContentType()) .setETag(expected.getETag()); try { Field lastModified = ConfigurationSetting.class.getDeclaredField("lastModified"); lastModified.setAccessible(true); lastModified.set(actual, expected.getLastModified()); } catch (NoSuchFieldException | IllegalAccessException ex) { } if (ConfigurationSetting.NO_LABEL.equals(expected.getLabel()) && actual.getLabel() == null) { cleanedActual.setLabel(ConfigurationSetting.NO_LABEL); } return cleanedActual; } static void assertRestException(Runnable exceptionThrower, int expectedStatusCode) { assertRestException(exceptionThrower, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Runnable exceptionThrower, Class<? 
extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { try { exceptionThrower.run(); fail(); } catch (Throwable ex) { assertRestException(ex, expectedExceptionType, expectedStatusCode); } } /** * Helper method to verify the error was a HttpResponseException and it has a specific HTTP response code. * * @param exception Expected error thrown during the test * @param expectedStatusCode Expected HTTP status code contained in the error response */ static void assertRestException(Throwable exception, int expectedStatusCode) { assertRestException(exception, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Throwable exception, Class<? extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { assertEquals(expectedExceptionType, exception.getClass()); assertEquals(expectedStatusCode, ((HttpResponseException) exception).getResponse().getStatusCode()); } /** * Helper method to verify that a command throws an IllegalArgumentException. * * @param exceptionThrower Command that should throw the exception */ static <T> void assertRunnableThrowsException(Runnable exceptionThrower, Class<T> exception) { try { exceptionThrower.run(); fail(); } catch (Exception ex) { assertEquals(exception, ex.getClass()); } } /** * Helper method to verify that two configuration setting are equal. Users can defined their equal method. 
* * @param o1 ConfigurationSetting object 1 * @param o2 ConfigurationSetting object 2 * @return boolean value that defines if two ConfigurationSettings are equal */ static boolean equals(ConfigurationSetting o1, ConfigurationSetting o2) { if (o1 == o2) { return true; } if (!Objects.equals(o1.getKey(), o2.getKey()) || !Objects.equals(o1.getLabel(), o2.getLabel()) || !Objects.equals(o1.getValue(), o2.getValue()) || !Objects.equals(o1.getETag(), o2.getETag()) || !Objects.equals(o1.getLastModified(), o2.getLastModified()) || !Objects.equals(o1.isReadOnly(), o2.isReadOnly()) || !Objects.equals(o1.getContentType(), o2.getContentType()) || CoreUtils.isNullOrEmpty(o1.getTags()) != CoreUtils.isNullOrEmpty(o2.getTags())) { return false; } if (!CoreUtils.isNullOrEmpty(o1.getTags())) { return Objects.equals(o1.getTags(), o2.getTags()); } return true; } /** * A helper method to verify that two lists of ConfigurationSetting are equal each other. * * @param settings1 List of ConfigurationSetting * @param settings2 Another List of ConfigurationSetting * @return boolean value that defines if two ConfigurationSetting lists are equal */ static boolean equalsArray(List<ConfigurationSetting> settings1, List<ConfigurationSetting> settings2) { if (settings1 == settings2) { return true; } if (settings1 == null || settings2 == null) { return false; } if (settings1.size() != settings2.size()) { return false; } final int size = settings1.size(); for (int i = 0; i < size; i++) { if (!equals(settings1.get(i), settings2.get(i))) { return false; } } return true; } /** * Helper method that sets up HttpHeaders * * @return the http headers */ static HttpHeaders getCustomizedHeaders() { final String headerOne = "my-header1"; final String headerTwo = "my-header2"; final String headerThree = "my-header3"; final String headerOneValue = "my-header1-value"; final String headerTwoValue = "my-header2-value"; final String headerThreeValue = "my-header3-value"; final HttpHeaders headers = new HttpHeaders(); 
headers.put(headerOne, headerOneValue); headers.put(headerTwo, headerTwoValue); headers.put(headerThree, headerThreeValue); return headers; } /** * Helper method that check if the {@code headerContainer} contains {@code headers}. * * @param headers the headers that been checked * @param headerContainer The headers container that check if the {@code headers} exist in it. */ static void assertContainsHeaders(HttpHeaders headers, HttpHeaders headerContainer) { headers.stream().forEach(httpHeader -> assertEquals(headerContainer.getValue(httpHeader.getName()), httpHeader.getValue())); } }
This would be a service API change, so I would expect the service API version to be bumped; the old API version should continue to throw the 400.
/**
 * Seeds eight settings (alternating plain and "-fetch-" keys, split across two labels), runs the
 * supplied list operation with a selector that requests only KEY, ETAG, CONTENT_TYPE and TAGS,
 * and verifies that every returned setting carries exactly those fields and nothing else.
 *
 * @param testRunner operation that creates the given settings and lists with the given selector
 */
void listConfigurationSettingsSelectFieldsRunner(BiFunction<List<ConfigurationSetting>, SettingSelector, Iterable<ConfigurationSetting>> testRunner) {
    final String firstLabel = "my-first-mylabel";
    final String secondLabel = "my-second-mylabel";
    final int total = 8;

    final Map<String, String> sharedTags = new HashMap<>();
    sharedTags.put("tag1", "value1");
    sharedTags.put("tag2", "value2");

    // Selector restricts both the settings matched and the fields populated on each result.
    final SettingSelector fieldSelector = new SettingSelector()
        .setLabelFilter("my-second*")
        .setKeyFilter(keyPrefix + "-fetch-*")
        .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS);

    final List<ConfigurationSetting> seeded = new ArrayList<>(total);
    for (int i = 0; i < total; i++) {
        final String key;
        if (i % 2 == 0) {
            key = keyPrefix + "-" + i;
        } else {
            key = keyPrefix + "-fetch-" + i;
        }
        final String settingLabel = (i / 4 == 0) ? firstLabel : secondLabel;
        seeded.add(new ConfigurationSetting().setKey(key).setValue("myValue2").setLabel(settingLabel).setTags(sharedTags));
    }

    for (ConfigurationSetting returned : testRunner.apply(seeded, fieldSelector)) {
        // Requested fields must be present...
        assertNotNull(returned.getETag());
        assertNotNull(returned.getKey());
        assertTrue(returned.getKey().contains(keyPrefix));
        assertNotNull(returned.getTags());
        assertEquals(sharedTags.size(), returned.getTags().size());
        // ...and everything else must have been stripped by the field selection.
        assertNull(returned.getLastModified());
        assertNull(returned.getContentType());
        assertNull(returned.getLabel());
    }
}
.setLabelFilter("my-second*")
/**
 * Seeds eight settings and verifies a field-restricted list operation.
 *
 * Settings alternate between plain and "-fetch-" keys, and between two labels; the selector
 * matches only the "-fetch-" keys under the second label and requests only the KEY, ETAG,
 * CONTENT_TYPE and TAGS fields. Each returned setting is then checked to carry exactly the
 * requested fields and nothing else.
 *
 * @param testRunner operation that creates the given settings and lists with the given selector
 */
void listConfigurationSettingsSelectFieldsRunner(BiFunction<List<ConfigurationSetting>, SettingSelector, Iterable<ConfigurationSetting>> testRunner) {
    final String label = "my-first-mylabel";
    final String label2 = "my-second-mylabel";
    final int numberToCreate = 8;
    final Map<String, String> tags = new HashMap<>();
    tags.put("tag1", "value1");
    tags.put("tag2", "value2");

    // Selector restricts both which settings match and which fields are populated on results.
    final SettingSelector selector = new SettingSelector()
        .setLabelFilter("my-second*")
        .setKeyFilter(keyPrefix + "-fetch-*")
        .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS);

    List<ConfigurationSetting> settings = new ArrayList<>(numberToCreate);
    for (int value = 0; value < numberToCreate; value++) {
        // Odd indices get the "-fetch-" key form that the key filter matches.
        String key = value % 2 == 0 ? keyPrefix + "-" + value : keyPrefix + "-fetch-" + value;
        // First four settings get label, last four get label2 (only label2 matches the filter).
        String lbl = value / 4 == 0 ? label : label2;
        settings.add(new ConfigurationSetting().setKey(key).setValue("myValue2").setLabel(lbl).setTags(tags));
    }

    for (ConfigurationSetting setting : testRunner.apply(settings, selector)) {
        // Requested fields must be present...
        assertNotNull(setting.getETag());
        assertNotNull(setting.getKey());
        assertTrue(setting.getKey().contains(keyPrefix));
        assertNotNull(setting.getTags());
        assertEquals(tags.size(), setting.getTags().size());
        // ...and unrequested fields must have been stripped by the field selection.
        assertNull(setting.getLastModified());
        assertNull(setting.getContentType());
        assertNull(setting.getLabel());
    }
}
class ConfigurationClientTestBase extends TestBase { private static final String AZURE_APPCONFIG_CONNECTION_STRING = "AZURE_APPCONFIG_CONNECTION_STRING"; private static final String KEY_PREFIX = "key"; private static final String LABEL_PREFIX = "label"; private static final int PREFIX_LENGTH = 8; private static final int RESOURCE_LENGTH = 16; static String connectionString; private final ClientLogger logger = new ClientLogger(ConfigurationClientTestBase.class); String keyPrefix; String labelPrefix; void beforeTestSetup() { keyPrefix = testResourceNamer.randomName(KEY_PREFIX, PREFIX_LENGTH); labelPrefix = testResourceNamer.randomName(LABEL_PREFIX, PREFIX_LENGTH); } <T> T clientSetup(Function<ConfigurationClientCredentials, T> clientBuilder) { if (CoreUtils.isNullOrEmpty(connectionString)) { connectionString = interceptorManager.isPlaybackMode() ? "Endpoint=http: : Configuration.getGlobalConfiguration().get(AZURE_APPCONFIG_CONNECTION_STRING); } Objects.requireNonNull(connectionString, "AZURE_APPCONFIG_CONNECTION_STRING expected to be set."); T client; try { client = clientBuilder.apply(new ConfigurationClientCredentials(connectionString)); } catch (InvalidKeyException | NoSuchAlgorithmException e) { logger.error("Could not create an configuration client credentials.", e); fail(); client = null; } return Objects.requireNonNull(client); } String getKey() { return testResourceNamer.randomName(keyPrefix, RESOURCE_LENGTH); } String getLabel() { return testResourceNamer.randomName(labelPrefix, RESOURCE_LENGTH); } @Test public abstract void addConfigurationSetting(); void addConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("MyTag", "TagValue"); tags.put("AnotherTag", "AnotherTagValue"); final ConfigurationSetting newConfiguration = new ConfigurationSetting() .setKey(getKey()) .setValue("myNewValue") .setTags(tags) .setContentType("text"); testRunner.accept(newConfiguration); 
testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void addConfigurationSettingEmptyKey(); @Test public abstract void addConfigurationSettingEmptyValue(); void addConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void addConfigurationSettingNullKey(); @Test public abstract void addExistingSetting(); void addExistingSettingRunner(Consumer<ConfigurationSetting> testRunner) { final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(getKey()).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void setConfigurationSetting(); void setConfigurationSettingRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting setConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdatedValue"); testRunner.accept(setConfiguration, updateConfiguration); testRunner.accept(setConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingIfETag(); void setConfigurationSettingIfETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdateValue"); testRunner.accept(newConfiguration, 
updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingEmptyKey(); @Test public abstract void setConfigurationSettingEmptyValue(); void setConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void setConfigurationSettingNullKey(); @Test public abstract void getConfigurationSetting(); void getConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel("myLabel")); } @Test public abstract void getConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSetting(); void deleteConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting deletableConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(deletableConfiguration); testRunner.accept(deletableConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSettingWithETag(); void deleteConfigurationSettingWithETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new 
ConfigurationSetting().setKey(newConfiguration.getKey()).setValue("myUpdateValue"); testRunner.accept(newConfiguration, updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNullKey(); @Test public abstract void setReadOnly(); @Test public abstract void clearReadOnly(); @Test public abstract void setReadOnlyWithConfigurationSetting(); @Test public abstract void clearReadOnlyWithConfigurationSetting(); void lockUnlockRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting lockConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(lockConfiguration); } @Test public abstract void listWithKeyAndLabel(); @Test public abstract void listWithMultipleKeys(); void listWithMultipleKeysRunner(String key, String key2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(Arrays.asList(setting, setting2)); testRunner.apply(setting, setting2).forEach(actual -> expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual)))); assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listWithMultipleLabels(); void listWithMultipleLabelsRunner(String key, String label, String label2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final Set<ConfigurationSetting> expectedSelection 
= new HashSet<>(Arrays.asList(setting, setting2)); for (ConfigurationSetting actual : testRunner.apply(setting, setting2)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listConfigurationSettingsSelectFields(); @Test public abstract void listConfigurationSettingsAcceptDateTime(); @Test public abstract void listRevisions(); static void validateListRevisions(ConfigurationSetting expected, ConfigurationSetting actual) { assertEquals(expected.getKey(), actual.getKey()); assertNotNull(actual.getETag()); assertNull(actual.getValue()); assertNull(actual.getLastModified()); } @Test public abstract void listRevisionsWithMultipleKeys(); void listRevisionsWithMultipleKeysRunner(String key, String key2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setValue("updatedValue"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsWithMultipleLabels(); void listRevisionsWithMultipleLabelsRunner(String key, String label, String label2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new 
ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setLabel(setting.getLabel()).setValue("updatedValue"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setLabel(setting2.getLabel()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsAcceptDateTime(); @Test public abstract void listRevisionsWithPagination(); @Test public abstract void listConfigurationSettingsWithPagination(); @Test public abstract void listRevisionsWithPaginationAndRepeatStream(); @Test public abstract void listRevisionsWithPaginationAndRepeatIterator(); @Test public abstract void getConfigurationSettingWhenValueNotUpdated(); @Disabled("This test exists to clean up resources missed due to 429s.") @Test public abstract void deleteAllSettings(); @Test public abstract void addHeadersFromContextPolicyTest(); void addHeadersFromContextPolicyRunner(Consumer<ConfigurationSetting> testRunner) { final String key = getKey(); final String value = "newValue"; final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue(value); testRunner.accept(newConfiguration); } /** * Helper method to verify that the RestResponse matches what was expected. This method assumes a response status of 200. 
* * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned by the service, the body should contain a ConfigurationSetting */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response) { assertConfigurationEquals(expected, response, 200); } /** * Helper method to verify that the RestResponse matches what was expected. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned from the service, the body should contain a ConfigurationSetting * @param expectedStatusCode Expected HTTP status code returned by the service */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response, final int expectedStatusCode) { assertNotNull(response); assertEquals(expectedStatusCode, response.getStatusCode()); assertConfigurationEquals(expected, response.getValue()); } /** * Helper method to verify that the returned ConfigurationSetting matches what was expected. * * @param expected ConfigurationSetting expected to be returned by the service * @param actual ConfigurationSetting contained in the RestResponse body */ static void assertConfigurationEquals(ConfigurationSetting expected, ConfigurationSetting actual) { if (expected != null && actual != null) { actual = cleanResponse(expected, actual); } else if (expected == actual) { return; } else if (expected == null || actual == null) { assertFalse(true, "One of input settings is null"); } equals(expected, actual); } /** * The ConfigurationSetting has some fields that are only manipulated by the service, * this helper method cleans those fields on the setting returned by the service so tests are able to pass. * @param expected ConfigurationSetting expected to be returned by the service. * @param actual ConfigurationSetting returned by the service. 
*/ private static ConfigurationSetting cleanResponse(ConfigurationSetting expected, ConfigurationSetting actual) { ConfigurationSetting cleanedActual = new ConfigurationSetting() .setKey(actual.getKey()) .setLabel(actual.getLabel()) .setValue(actual.getValue()) .setTags(actual.getTags()) .setContentType(actual.getContentType()) .setETag(expected.getETag()); try { Field lastModified = ConfigurationSetting.class.getDeclaredField("lastModified"); lastModified.setAccessible(true); lastModified.set(actual, expected.getLastModified()); } catch (NoSuchFieldException | IllegalAccessException ex) { } if (ConfigurationSetting.NO_LABEL.equals(expected.getLabel()) && actual.getLabel() == null) { cleanedActual.setLabel(ConfigurationSetting.NO_LABEL); } return cleanedActual; } static void assertRestException(Runnable exceptionThrower, int expectedStatusCode) { assertRestException(exceptionThrower, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Runnable exceptionThrower, Class<? extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { try { exceptionThrower.run(); fail(); } catch (Throwable ex) { assertRestException(ex, expectedExceptionType, expectedStatusCode); } } /** * Helper method to verify the error was a HttpResponseException and it has a specific HTTP response code. * * @param exception Expected error thrown during the test * @param expectedStatusCode Expected HTTP status code contained in the error response */ static void assertRestException(Throwable exception, int expectedStatusCode) { assertRestException(exception, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Throwable exception, Class<? 
extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { assertEquals(expectedExceptionType, exception.getClass()); assertEquals(expectedStatusCode, ((HttpResponseException) exception).getResponse().getStatusCode()); } /** * Helper method to verify that a command throws an IllegalArgumentException. * * @param exceptionThrower Command that should throw the exception */ static <T> void assertRunnableThrowsException(Runnable exceptionThrower, Class<T> exception) { try { exceptionThrower.run(); fail(); } catch (Exception ex) { assertEquals(exception, ex.getClass()); } } /** * Helper method to verify that two configuration setting are equal. Users can defined their equal method. * * @param o1 ConfigurationSetting object 1 * @param o2 ConfigurationSetting object 2 * @return boolean value that defines if two ConfigurationSettings are equal */ static boolean equals(ConfigurationSetting o1, ConfigurationSetting o2) { if (o1 == o2) { return true; } if (!Objects.equals(o1.getKey(), o2.getKey()) || !Objects.equals(o1.getLabel(), o2.getLabel()) || !Objects.equals(o1.getValue(), o2.getValue()) || !Objects.equals(o1.getETag(), o2.getETag()) || !Objects.equals(o1.getLastModified(), o2.getLastModified()) || !Objects.equals(o1.isReadOnly(), o2.isReadOnly()) || !Objects.equals(o1.getContentType(), o2.getContentType()) || CoreUtils.isNullOrEmpty(o1.getTags()) != CoreUtils.isNullOrEmpty(o2.getTags())) { return false; } if (!CoreUtils.isNullOrEmpty(o1.getTags())) { return Objects.equals(o1.getTags(), o2.getTags()); } return true; } /** * A helper method to verify that two lists of ConfigurationSetting are equal each other. 
* * @param settings1 List of ConfigurationSetting * @param settings2 Another List of ConfigurationSetting * @return boolean value that defines if two ConfigurationSetting lists are equal */ static boolean equalsArray(List<ConfigurationSetting> settings1, List<ConfigurationSetting> settings2) { if (settings1 == settings2) { return true; } if (settings1 == null || settings2 == null) { return false; } if (settings1.size() != settings2.size()) { return false; } final int size = settings1.size(); for (int i = 0; i < size; i++) { if (!equals(settings1.get(i), settings2.get(i))) { return false; } } return true; } /** * Helper method that sets up HttpHeaders * * @return the http headers */ static HttpHeaders getCustomizedHeaders() { final String headerOne = "my-header1"; final String headerTwo = "my-header2"; final String headerThree = "my-header3"; final String headerOneValue = "my-header1-value"; final String headerTwoValue = "my-header2-value"; final String headerThreeValue = "my-header3-value"; final HttpHeaders headers = new HttpHeaders(); headers.put(headerOne, headerOneValue); headers.put(headerTwo, headerTwoValue); headers.put(headerThree, headerThreeValue); return headers; } /** * Helper method that check if the {@code headerContainer} contains {@code headers}. * * @param headers the headers that been checked * @param headerContainer The headers container that check if the {@code headers} exist in it. */ static void assertContainsHeaders(HttpHeaders headers, HttpHeaders headerContainer) { headers.stream().forEach(httpHeader -> assertEquals(headerContainer.getValue(httpHeader.getName()), httpHeader.getValue())); } }
class ConfigurationClientTestBase extends TestBase { private static final String AZURE_APPCONFIG_CONNECTION_STRING = "AZURE_APPCONFIG_CONNECTION_STRING"; private static final String KEY_PREFIX = "key"; private static final String LABEL_PREFIX = "label"; private static final int PREFIX_LENGTH = 8; private static final int RESOURCE_LENGTH = 16; static String connectionString; private final ClientLogger logger = new ClientLogger(ConfigurationClientTestBase.class); String keyPrefix; String labelPrefix; void beforeTestSetup() { keyPrefix = testResourceNamer.randomName(KEY_PREFIX, PREFIX_LENGTH); labelPrefix = testResourceNamer.randomName(LABEL_PREFIX, PREFIX_LENGTH); } <T> T clientSetup(Function<ConfigurationClientCredentials, T> clientBuilder) { if (CoreUtils.isNullOrEmpty(connectionString)) { connectionString = interceptorManager.isPlaybackMode() ? "Endpoint=http: : Configuration.getGlobalConfiguration().get(AZURE_APPCONFIG_CONNECTION_STRING); } Objects.requireNonNull(connectionString, "AZURE_APPCONFIG_CONNECTION_STRING expected to be set."); T client; try { client = clientBuilder.apply(new ConfigurationClientCredentials(connectionString)); } catch (InvalidKeyException | NoSuchAlgorithmException e) { logger.error("Could not create an configuration client credentials.", e); fail(); client = null; } return Objects.requireNonNull(client); } String getKey() { return testResourceNamer.randomName(keyPrefix, RESOURCE_LENGTH); } String getLabel() { return testResourceNamer.randomName(labelPrefix, RESOURCE_LENGTH); } @Test public abstract void addConfigurationSetting(); void addConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("MyTag", "TagValue"); tags.put("AnotherTag", "AnotherTagValue"); final ConfigurationSetting newConfiguration = new ConfigurationSetting() .setKey(getKey()) .setValue("myNewValue") .setTags(tags) .setContentType("text"); testRunner.accept(newConfiguration); 
testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void addConfigurationSettingEmptyKey(); @Test public abstract void addConfigurationSettingEmptyValue(); void addConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void addConfigurationSettingNullKey(); @Test public abstract void addExistingSetting(); void addExistingSettingRunner(Consumer<ConfigurationSetting> testRunner) { final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(getKey()).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void setConfigurationSetting(); void setConfigurationSettingRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting setConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdatedValue"); testRunner.accept(setConfiguration, updateConfiguration); testRunner.accept(setConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingIfETag(); void setConfigurationSettingIfETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdateValue"); testRunner.accept(newConfiguration, 
updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingEmptyKey(); @Test public abstract void setConfigurationSettingEmptyValue(); void setConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void setConfigurationSettingNullKey(); @Test public abstract void getConfigurationSetting(); void getConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel("myLabel")); } @Test public abstract void getConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSetting(); void deleteConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting deletableConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(deletableConfiguration); testRunner.accept(deletableConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSettingWithETag(); void deleteConfigurationSettingWithETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new 
ConfigurationSetting().setKey(newConfiguration.getKey()).setValue("myUpdateValue"); testRunner.accept(newConfiguration, updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNullKey(); @Test public abstract void setReadOnly(); @Test public abstract void clearReadOnly(); @Test public abstract void setReadOnlyWithConfigurationSetting(); @Test public abstract void clearReadOnlyWithConfigurationSetting(); void lockUnlockRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting lockConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(lockConfiguration); } @Test public abstract void listWithKeyAndLabel(); @Test public abstract void listWithMultipleKeys(); void listWithMultipleKeysRunner(String key, String key2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(Arrays.asList(setting, setting2)); testRunner.apply(setting, setting2).forEach(actual -> expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual)))); assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listWithMultipleLabels(); void listWithMultipleLabelsRunner(String key, String label, String label2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final Set<ConfigurationSetting> expectedSelection 
= new HashSet<>(Arrays.asList(setting, setting2)); for (ConfigurationSetting actual : testRunner.apply(setting, setting2)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listConfigurationSettingsSelectFields(); @Test public abstract void listConfigurationSettingsSelectFieldsWithPrefixStarKeyFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithSubstringKeyFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithPrefixStarLabelFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithSubstringLabelFilter(); void listConfigurationSettingsSelectFieldsWithNotSupportedFilterRunner(String keyFilter, String labelFilter, Consumer<SettingSelector> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("tag1", "value1"); tags.put("tag2", "value2"); final SettingSelector selector = new SettingSelector() .setKeyFilter(keyFilter) .setLabelFilter(labelFilter) .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS); testRunner.accept(selector); } @Test public abstract void listConfigurationSettingsAcceptDateTime(); @Test public abstract void listRevisions(); static void validateListRevisions(ConfigurationSetting expected, ConfigurationSetting actual) { assertEquals(expected.getKey(), actual.getKey()); assertNotNull(actual.getETag()); assertNull(actual.getValue()); assertNull(actual.getLastModified()); } @Test public abstract void listRevisionsWithMultipleKeys(); void listRevisionsWithMultipleKeysRunner(String key, String key2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setValue("updatedValue"); final 
ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsWithMultipleLabels(); void listRevisionsWithMultipleLabelsRunner(String key, String label, String label2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setLabel(setting.getLabel()).setValue("updatedValue"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setLabel(setting2.getLabel()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsAcceptDateTime(); @Test public abstract void listRevisionsWithPagination(); @Test public abstract void listConfigurationSettingsWithPagination(); @Test public abstract void 
listRevisionsWithPaginationAndRepeatStream(); @Test public abstract void listRevisionsWithPaginationAndRepeatIterator(); @Test public abstract void getConfigurationSettingWhenValueNotUpdated(); @Disabled("This test exists to clean up resources missed due to 429s.") @Test public abstract void deleteAllSettings(); @Test public abstract void addHeadersFromContextPolicyTest(); void addHeadersFromContextPolicyRunner(Consumer<ConfigurationSetting> testRunner) { final String key = getKey(); final String value = "newValue"; final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue(value); testRunner.accept(newConfiguration); } /** * Helper method to verify that the RestResponse matches what was expected. This method assumes a response status of 200. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned by the service, the body should contain a ConfigurationSetting */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response) { assertConfigurationEquals(expected, response, 200); } /** * Helper method to verify that the RestResponse matches what was expected. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned from the service, the body should contain a ConfigurationSetting * @param expectedStatusCode Expected HTTP status code returned by the service */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response, final int expectedStatusCode) { assertNotNull(response); assertEquals(expectedStatusCode, response.getStatusCode()); assertConfigurationEquals(expected, response.getValue()); } /** * Helper method to verify that the returned ConfigurationSetting matches what was expected. 
* * @param expected ConfigurationSetting expected to be returned by the service * @param actual ConfigurationSetting contained in the RestResponse body */ static void assertConfigurationEquals(ConfigurationSetting expected, ConfigurationSetting actual) { if (expected != null && actual != null) { actual = cleanResponse(expected, actual); } else if (expected == actual) { return; } else if (expected == null || actual == null) { assertFalse(true, "One of input settings is null"); } equals(expected, actual); } /** * The ConfigurationSetting has some fields that are only manipulated by the service, * this helper method cleans those fields on the setting returned by the service so tests are able to pass. * @param expected ConfigurationSetting expected to be returned by the service. * @param actual ConfigurationSetting returned by the service. */ private static ConfigurationSetting cleanResponse(ConfigurationSetting expected, ConfigurationSetting actual) { ConfigurationSetting cleanedActual = new ConfigurationSetting() .setKey(actual.getKey()) .setLabel(actual.getLabel()) .setValue(actual.getValue()) .setTags(actual.getTags()) .setContentType(actual.getContentType()) .setETag(expected.getETag()); try { Field lastModified = ConfigurationSetting.class.getDeclaredField("lastModified"); lastModified.setAccessible(true); lastModified.set(actual, expected.getLastModified()); } catch (NoSuchFieldException | IllegalAccessException ex) { } if (ConfigurationSetting.NO_LABEL.equals(expected.getLabel()) && actual.getLabel() == null) { cleanedActual.setLabel(ConfigurationSetting.NO_LABEL); } return cleanedActual; } static void assertRestException(Runnable exceptionThrower, int expectedStatusCode) { assertRestException(exceptionThrower, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Runnable exceptionThrower, Class<? 
extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { try { exceptionThrower.run(); fail(); } catch (Throwable ex) { assertRestException(ex, expectedExceptionType, expectedStatusCode); } } /** * Helper method to verify the error was a HttpResponseException and it has a specific HTTP response code. * * @param exception Expected error thrown during the test * @param expectedStatusCode Expected HTTP status code contained in the error response */ static void assertRestException(Throwable exception, int expectedStatusCode) { assertRestException(exception, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Throwable exception, Class<? extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { assertEquals(expectedExceptionType, exception.getClass()); assertEquals(expectedStatusCode, ((HttpResponseException) exception).getResponse().getStatusCode()); } /** * Helper method to verify that a command throws an IllegalArgumentException. * * @param exceptionThrower Command that should throw the exception */ static <T> void assertRunnableThrowsException(Runnable exceptionThrower, Class<T> exception) { try { exceptionThrower.run(); fail(); } catch (Exception ex) { assertEquals(exception, ex.getClass()); } } /** * Helper method to verify that two configuration setting are equal. Users can defined their equal method. 
* * @param o1 ConfigurationSetting object 1 * @param o2 ConfigurationSetting object 2 * @return boolean value that defines if two ConfigurationSettings are equal */ static boolean equals(ConfigurationSetting o1, ConfigurationSetting o2) { if (o1 == o2) { return true; } if (!Objects.equals(o1.getKey(), o2.getKey()) || !Objects.equals(o1.getLabel(), o2.getLabel()) || !Objects.equals(o1.getValue(), o2.getValue()) || !Objects.equals(o1.getETag(), o2.getETag()) || !Objects.equals(o1.getLastModified(), o2.getLastModified()) || !Objects.equals(o1.isReadOnly(), o2.isReadOnly()) || !Objects.equals(o1.getContentType(), o2.getContentType()) || CoreUtils.isNullOrEmpty(o1.getTags()) != CoreUtils.isNullOrEmpty(o2.getTags())) { return false; } if (!CoreUtils.isNullOrEmpty(o1.getTags())) { return Objects.equals(o1.getTags(), o2.getTags()); } return true; } /** * A helper method to verify that two lists of ConfigurationSetting are equal each other. * * @param settings1 List of ConfigurationSetting * @param settings2 Another List of ConfigurationSetting * @return boolean value that defines if two ConfigurationSetting lists are equal */ static boolean equalsArray(List<ConfigurationSetting> settings1, List<ConfigurationSetting> settings2) { if (settings1 == settings2) { return true; } if (settings1 == null || settings2 == null) { return false; } if (settings1.size() != settings2.size()) { return false; } final int size = settings1.size(); for (int i = 0; i < size; i++) { if (!equals(settings1.get(i), settings2.get(i))) { return false; } } return true; } /** * Helper method that sets up HttpHeaders * * @return the http headers */ static HttpHeaders getCustomizedHeaders() { final String headerOne = "my-header1"; final String headerTwo = "my-header2"; final String headerThree = "my-header3"; final String headerOneValue = "my-header1-value"; final String headerTwoValue = "my-header2-value"; final String headerThreeValue = "my-header3-value"; final HttpHeaders headers = new HttpHeaders(); 
headers.put(headerOne, headerOneValue); headers.put(headerTwo, headerTwoValue); headers.put(headerThree, headerThreeValue); return headers; } /** * Helper method that check if the {@code headerContainer} contains {@code headers}. * * @param headers the headers that been checked * @param headerContainer The headers container that check if the {@code headers} exist in it. */ static void assertContainsHeaders(HttpHeaders headers, HttpHeaders headerContainer) { headers.stream().forEach(httpHeader -> assertEquals(headerContainer.getValue(httpHeader.getName()), httpHeader.getValue())); } }
make sense to me. I will add the negative test case
void listConfigurationSettingsSelectFieldsRunner(BiFunction<List<ConfigurationSetting>, SettingSelector, Iterable<ConfigurationSetting>> testRunner) { final String label = "my-first-mylabel"; final String label2 = "my-second-mylabel"; final int numberToCreate = 8; final Map<String, String> tags = new HashMap<>(); tags.put("tag1", "value1"); tags.put("tag2", "value2"); final SettingSelector selector = new SettingSelector() .setLabelFilter("my-second*") .setKeyFilter(keyPrefix + "-fetch-*") .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS); List<ConfigurationSetting> settings = new ArrayList<>(numberToCreate); for (int value = 0; value < numberToCreate; value++) { String key = value % 2 == 0 ? keyPrefix + "-" + value : keyPrefix + "-fetch-" + value; String lbl = value / 4 == 0 ? label : label2; settings.add(new ConfigurationSetting().setKey(key).setValue("myValue2").setLabel(lbl).setTags(tags)); } for (ConfigurationSetting setting : testRunner.apply(settings, selector)) { assertNotNull(setting.getETag()); assertNotNull(setting.getKey()); assertTrue(setting.getKey().contains(keyPrefix)); assertNotNull(setting.getTags()); assertEquals(tags.size(), setting.getTags().size()); assertNull(setting.getLastModified()); assertNull(setting.getContentType()); assertNull(setting.getLabel()); } }
.setLabelFilter("my-second*")
void listConfigurationSettingsSelectFieldsRunner(BiFunction<List<ConfigurationSetting>, SettingSelector, Iterable<ConfigurationSetting>> testRunner) { final String label = "my-first-mylabel"; final String label2 = "my-second-mylabel"; final int numberToCreate = 8; final Map<String, String> tags = new HashMap<>(); tags.put("tag1", "value1"); tags.put("tag2", "value2"); final SettingSelector selector = new SettingSelector() .setLabelFilter("my-second*") .setKeyFilter(keyPrefix + "-fetch-*") .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS); List<ConfigurationSetting> settings = new ArrayList<>(numberToCreate); for (int value = 0; value < numberToCreate; value++) { String key = value % 2 == 0 ? keyPrefix + "-" + value : keyPrefix + "-fetch-" + value; String lbl = value / 4 == 0 ? label : label2; settings.add(new ConfigurationSetting().setKey(key).setValue("myValue2").setLabel(lbl).setTags(tags)); } for (ConfigurationSetting setting : testRunner.apply(settings, selector)) { assertNotNull(setting.getETag()); assertNotNull(setting.getKey()); assertTrue(setting.getKey().contains(keyPrefix)); assertNotNull(setting.getTags()); assertEquals(tags.size(), setting.getTags().size()); assertNull(setting.getLastModified()); assertNull(setting.getContentType()); assertNull(setting.getLabel()); } }
class ConfigurationClientTestBase extends TestBase { private static final String AZURE_APPCONFIG_CONNECTION_STRING = "AZURE_APPCONFIG_CONNECTION_STRING"; private static final String KEY_PREFIX = "key"; private static final String LABEL_PREFIX = "label"; private static final int PREFIX_LENGTH = 8; private static final int RESOURCE_LENGTH = 16; static String connectionString; private final ClientLogger logger = new ClientLogger(ConfigurationClientTestBase.class); String keyPrefix; String labelPrefix; void beforeTestSetup() { keyPrefix = testResourceNamer.randomName(KEY_PREFIX, PREFIX_LENGTH); labelPrefix = testResourceNamer.randomName(LABEL_PREFIX, PREFIX_LENGTH); } <T> T clientSetup(Function<ConfigurationClientCredentials, T> clientBuilder) { if (CoreUtils.isNullOrEmpty(connectionString)) { connectionString = interceptorManager.isPlaybackMode() ? "Endpoint=http: : Configuration.getGlobalConfiguration().get(AZURE_APPCONFIG_CONNECTION_STRING); } Objects.requireNonNull(connectionString, "AZURE_APPCONFIG_CONNECTION_STRING expected to be set."); T client; try { client = clientBuilder.apply(new ConfigurationClientCredentials(connectionString)); } catch (InvalidKeyException | NoSuchAlgorithmException e) { logger.error("Could not create an configuration client credentials.", e); fail(); client = null; } return Objects.requireNonNull(client); } String getKey() { return testResourceNamer.randomName(keyPrefix, RESOURCE_LENGTH); } String getLabel() { return testResourceNamer.randomName(labelPrefix, RESOURCE_LENGTH); } @Test public abstract void addConfigurationSetting(); void addConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("MyTag", "TagValue"); tags.put("AnotherTag", "AnotherTagValue"); final ConfigurationSetting newConfiguration = new ConfigurationSetting() .setKey(getKey()) .setValue("myNewValue") .setTags(tags) .setContentType("text"); testRunner.accept(newConfiguration); 
testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void addConfigurationSettingEmptyKey(); @Test public abstract void addConfigurationSettingEmptyValue(); void addConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void addConfigurationSettingNullKey(); @Test public abstract void addExistingSetting(); void addExistingSettingRunner(Consumer<ConfigurationSetting> testRunner) { final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(getKey()).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void setConfigurationSetting(); void setConfigurationSettingRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting setConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdatedValue"); testRunner.accept(setConfiguration, updateConfiguration); testRunner.accept(setConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingIfETag(); void setConfigurationSettingIfETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdateValue"); testRunner.accept(newConfiguration, 
updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingEmptyKey(); @Test public abstract void setConfigurationSettingEmptyValue(); void setConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void setConfigurationSettingNullKey(); @Test public abstract void getConfigurationSetting(); void getConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel("myLabel")); } @Test public abstract void getConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSetting(); void deleteConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting deletableConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(deletableConfiguration); testRunner.accept(deletableConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSettingWithETag(); void deleteConfigurationSettingWithETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new 
ConfigurationSetting().setKey(newConfiguration.getKey()).setValue("myUpdateValue"); testRunner.accept(newConfiguration, updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNullKey(); @Test public abstract void setReadOnly(); @Test public abstract void clearReadOnly(); @Test public abstract void setReadOnlyWithConfigurationSetting(); @Test public abstract void clearReadOnlyWithConfigurationSetting(); void lockUnlockRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting lockConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(lockConfiguration); } @Test public abstract void listWithKeyAndLabel(); @Test public abstract void listWithMultipleKeys(); void listWithMultipleKeysRunner(String key, String key2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(Arrays.asList(setting, setting2)); testRunner.apply(setting, setting2).forEach(actual -> expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual)))); assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listWithMultipleLabels(); void listWithMultipleLabelsRunner(String key, String label, String label2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final Set<ConfigurationSetting> expectedSelection 
= new HashSet<>(Arrays.asList(setting, setting2)); for (ConfigurationSetting actual : testRunner.apply(setting, setting2)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listConfigurationSettingsSelectFields(); @Test public abstract void listConfigurationSettingsAcceptDateTime(); @Test public abstract void listRevisions(); static void validateListRevisions(ConfigurationSetting expected, ConfigurationSetting actual) { assertEquals(expected.getKey(), actual.getKey()); assertNotNull(actual.getETag()); assertNull(actual.getValue()); assertNull(actual.getLastModified()); } @Test public abstract void listRevisionsWithMultipleKeys(); void listRevisionsWithMultipleKeysRunner(String key, String key2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setValue("updatedValue"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsWithMultipleLabels(); void listRevisionsWithMultipleLabelsRunner(String key, String label, String label2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new 
ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setLabel(setting.getLabel()).setValue("updatedValue"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setLabel(setting2.getLabel()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsAcceptDateTime(); @Test public abstract void listRevisionsWithPagination(); @Test public abstract void listConfigurationSettingsWithPagination(); @Test public abstract void listRevisionsWithPaginationAndRepeatStream(); @Test public abstract void listRevisionsWithPaginationAndRepeatIterator(); @Test public abstract void getConfigurationSettingWhenValueNotUpdated(); @Disabled("This test exists to clean up resources missed due to 429s.") @Test public abstract void deleteAllSettings(); @Test public abstract void addHeadersFromContextPolicyTest(); void addHeadersFromContextPolicyRunner(Consumer<ConfigurationSetting> testRunner) { final String key = getKey(); final String value = "newValue"; final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue(value); testRunner.accept(newConfiguration); } /** * Helper method to verify that the RestResponse matches what was expected. This method assumes a response status of 200. 
* * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned by the service, the body should contain a ConfigurationSetting */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response) { assertConfigurationEquals(expected, response, 200); } /** * Helper method to verify that the RestResponse matches what was expected. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned from the service, the body should contain a ConfigurationSetting * @param expectedStatusCode Expected HTTP status code returned by the service */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response, final int expectedStatusCode) { assertNotNull(response); assertEquals(expectedStatusCode, response.getStatusCode()); assertConfigurationEquals(expected, response.getValue()); } /** * Helper method to verify that the returned ConfigurationSetting matches what was expected. * * @param expected ConfigurationSetting expected to be returned by the service * @param actual ConfigurationSetting contained in the RestResponse body */ static void assertConfigurationEquals(ConfigurationSetting expected, ConfigurationSetting actual) { if (expected != null && actual != null) { actual = cleanResponse(expected, actual); } else if (expected == actual) { return; } else if (expected == null || actual == null) { assertFalse(true, "One of input settings is null"); } equals(expected, actual); } /** * The ConfigurationSetting has some fields that are only manipulated by the service, * this helper method cleans those fields on the setting returned by the service so tests are able to pass. * @param expected ConfigurationSetting expected to be returned by the service. * @param actual ConfigurationSetting returned by the service. 
*/ private static ConfigurationSetting cleanResponse(ConfigurationSetting expected, ConfigurationSetting actual) { ConfigurationSetting cleanedActual = new ConfigurationSetting() .setKey(actual.getKey()) .setLabel(actual.getLabel()) .setValue(actual.getValue()) .setTags(actual.getTags()) .setContentType(actual.getContentType()) .setETag(expected.getETag()); try { Field lastModified = ConfigurationSetting.class.getDeclaredField("lastModified"); lastModified.setAccessible(true); lastModified.set(actual, expected.getLastModified()); } catch (NoSuchFieldException | IllegalAccessException ex) { } if (ConfigurationSetting.NO_LABEL.equals(expected.getLabel()) && actual.getLabel() == null) { cleanedActual.setLabel(ConfigurationSetting.NO_LABEL); } return cleanedActual; } static void assertRestException(Runnable exceptionThrower, int expectedStatusCode) { assertRestException(exceptionThrower, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Runnable exceptionThrower, Class<? extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { try { exceptionThrower.run(); fail(); } catch (Throwable ex) { assertRestException(ex, expectedExceptionType, expectedStatusCode); } } /** * Helper method to verify the error was a HttpResponseException and it has a specific HTTP response code. * * @param exception Expected error thrown during the test * @param expectedStatusCode Expected HTTP status code contained in the error response */ static void assertRestException(Throwable exception, int expectedStatusCode) { assertRestException(exception, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Throwable exception, Class<? 
extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { assertEquals(expectedExceptionType, exception.getClass()); assertEquals(expectedStatusCode, ((HttpResponseException) exception).getResponse().getStatusCode()); } /** * Helper method to verify that a command throws an IllegalArgumentException. * * @param exceptionThrower Command that should throw the exception */ static <T> void assertRunnableThrowsException(Runnable exceptionThrower, Class<T> exception) { try { exceptionThrower.run(); fail(); } catch (Exception ex) { assertEquals(exception, ex.getClass()); } } /** * Helper method to verify that two configuration setting are equal. Users can defined their equal method. * * @param o1 ConfigurationSetting object 1 * @param o2 ConfigurationSetting object 2 * @return boolean value that defines if two ConfigurationSettings are equal */ static boolean equals(ConfigurationSetting o1, ConfigurationSetting o2) { if (o1 == o2) { return true; } if (!Objects.equals(o1.getKey(), o2.getKey()) || !Objects.equals(o1.getLabel(), o2.getLabel()) || !Objects.equals(o1.getValue(), o2.getValue()) || !Objects.equals(o1.getETag(), o2.getETag()) || !Objects.equals(o1.getLastModified(), o2.getLastModified()) || !Objects.equals(o1.isReadOnly(), o2.isReadOnly()) || !Objects.equals(o1.getContentType(), o2.getContentType()) || CoreUtils.isNullOrEmpty(o1.getTags()) != CoreUtils.isNullOrEmpty(o2.getTags())) { return false; } if (!CoreUtils.isNullOrEmpty(o1.getTags())) { return Objects.equals(o1.getTags(), o2.getTags()); } return true; } /** * A helper method to verify that two lists of ConfigurationSetting are equal each other. 
* * @param settings1 List of ConfigurationSetting * @param settings2 Another List of ConfigurationSetting * @return boolean value that defines if two ConfigurationSetting lists are equal */ static boolean equalsArray(List<ConfigurationSetting> settings1, List<ConfigurationSetting> settings2) { if (settings1 == settings2) { return true; } if (settings1 == null || settings2 == null) { return false; } if (settings1.size() != settings2.size()) { return false; } final int size = settings1.size(); for (int i = 0; i < size; i++) { if (!equals(settings1.get(i), settings2.get(i))) { return false; } } return true; } /** * Helper method that sets up HttpHeaders * * @return the http headers */ static HttpHeaders getCustomizedHeaders() { final String headerOne = "my-header1"; final String headerTwo = "my-header2"; final String headerThree = "my-header3"; final String headerOneValue = "my-header1-value"; final String headerTwoValue = "my-header2-value"; final String headerThreeValue = "my-header3-value"; final HttpHeaders headers = new HttpHeaders(); headers.put(headerOne, headerOneValue); headers.put(headerTwo, headerTwoValue); headers.put(headerThree, headerThreeValue); return headers; } /** * Helper method that check if the {@code headerContainer} contains {@code headers}. * * @param headers the headers that been checked * @param headerContainer The headers container that check if the {@code headers} exist in it. */ static void assertContainsHeaders(HttpHeaders headers, HttpHeaders headerContainer) { headers.stream().forEach(httpHeader -> assertEquals(headerContainer.getValue(httpHeader.getName()), httpHeader.getValue())); } }
class ConfigurationClientTestBase extends TestBase { private static final String AZURE_APPCONFIG_CONNECTION_STRING = "AZURE_APPCONFIG_CONNECTION_STRING"; private static final String KEY_PREFIX = "key"; private static final String LABEL_PREFIX = "label"; private static final int PREFIX_LENGTH = 8; private static final int RESOURCE_LENGTH = 16; static String connectionString; private final ClientLogger logger = new ClientLogger(ConfigurationClientTestBase.class); String keyPrefix; String labelPrefix; void beforeTestSetup() { keyPrefix = testResourceNamer.randomName(KEY_PREFIX, PREFIX_LENGTH); labelPrefix = testResourceNamer.randomName(LABEL_PREFIX, PREFIX_LENGTH); } <T> T clientSetup(Function<ConfigurationClientCredentials, T> clientBuilder) { if (CoreUtils.isNullOrEmpty(connectionString)) { connectionString = interceptorManager.isPlaybackMode() ? "Endpoint=http: : Configuration.getGlobalConfiguration().get(AZURE_APPCONFIG_CONNECTION_STRING); } Objects.requireNonNull(connectionString, "AZURE_APPCONFIG_CONNECTION_STRING expected to be set."); T client; try { client = clientBuilder.apply(new ConfigurationClientCredentials(connectionString)); } catch (InvalidKeyException | NoSuchAlgorithmException e) { logger.error("Could not create an configuration client credentials.", e); fail(); client = null; } return Objects.requireNonNull(client); } String getKey() { return testResourceNamer.randomName(keyPrefix, RESOURCE_LENGTH); } String getLabel() { return testResourceNamer.randomName(labelPrefix, RESOURCE_LENGTH); } @Test public abstract void addConfigurationSetting(); void addConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("MyTag", "TagValue"); tags.put("AnotherTag", "AnotherTagValue"); final ConfigurationSetting newConfiguration = new ConfigurationSetting() .setKey(getKey()) .setValue("myNewValue") .setTags(tags) .setContentType("text"); testRunner.accept(newConfiguration); 
testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void addConfigurationSettingEmptyKey(); @Test public abstract void addConfigurationSettingEmptyValue(); void addConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void addConfigurationSettingNullKey(); @Test public abstract void addExistingSetting(); void addExistingSettingRunner(Consumer<ConfigurationSetting> testRunner) { final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(getKey()).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel(getLabel())); } @Test public abstract void setConfigurationSetting(); void setConfigurationSettingRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting setConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdatedValue"); testRunner.accept(setConfiguration, updateConfiguration); testRunner.accept(setConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingIfETag(); void setConfigurationSettingIfETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new ConfigurationSetting().setKey(key).setValue("myUpdateValue"); testRunner.accept(newConfiguration, 
updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void setConfigurationSettingEmptyKey(); @Test public abstract void setConfigurationSettingEmptyValue(); void setConfigurationSettingEmptyValueRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); ConfigurationSetting setting = new ConfigurationSetting().setKey(key); ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key + "-1").setValue(""); testRunner.accept(setting); testRunner.accept(setting2); } @Test public abstract void setConfigurationSettingNullKey(); @Test public abstract void getConfigurationSetting(); void getConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); testRunner.accept(newConfiguration); testRunner.accept(newConfiguration.setLabel("myLabel")); } @Test public abstract void getConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSetting(); void deleteConfigurationSettingRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting deletableConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(deletableConfiguration); testRunner.accept(deletableConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNotFound(); @Test public abstract void deleteConfigurationSettingWithETag(); void deleteConfigurationSettingWithETagRunner(BiConsumer<ConfigurationSetting, ConfigurationSetting> testRunner) { String key = getKey(); String label = getLabel(); final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue("myNewValue"); final ConfigurationSetting updateConfiguration = new 
ConfigurationSetting().setKey(newConfiguration.getKey()).setValue("myUpdateValue"); testRunner.accept(newConfiguration, updateConfiguration); testRunner.accept(newConfiguration.setLabel(label), updateConfiguration.setLabel(label)); } @Test public abstract void deleteConfigurationSettingNullKey(); @Test public abstract void setReadOnly(); @Test public abstract void clearReadOnly(); @Test public abstract void setReadOnlyWithConfigurationSetting(); @Test public abstract void clearReadOnlyWithConfigurationSetting(); void lockUnlockRunner(Consumer<ConfigurationSetting> testRunner) { String key = getKey(); final ConfigurationSetting lockConfiguration = new ConfigurationSetting().setKey(key).setValue("myValue"); testRunner.accept(lockConfiguration); } @Test public abstract void listWithKeyAndLabel(); @Test public abstract void listWithMultipleKeys(); void listWithMultipleKeysRunner(String key, String key2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(Arrays.asList(setting, setting2)); testRunner.apply(setting, setting2).forEach(actual -> expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual)))); assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listWithMultipleLabels(); void listWithMultipleLabelsRunner(String key, String label, String label2, BiFunction<ConfigurationSetting, ConfigurationSetting, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final Set<ConfigurationSetting> expectedSelection 
= new HashSet<>(Arrays.asList(setting, setting2)); for (ConfigurationSetting actual : testRunner.apply(setting, setting2)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listConfigurationSettingsSelectFields(); @Test public abstract void listConfigurationSettingsSelectFieldsWithPrefixStarKeyFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithSubstringKeyFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithPrefixStarLabelFilter(); @Test public abstract void listConfigurationSettingsSelectFieldsWithSubstringLabelFilter(); void listConfigurationSettingsSelectFieldsWithNotSupportedFilterRunner(String keyFilter, String labelFilter, Consumer<SettingSelector> testRunner) { final Map<String, String> tags = new HashMap<>(); tags.put("tag1", "value1"); tags.put("tag2", "value2"); final SettingSelector selector = new SettingSelector() .setKeyFilter(keyFilter) .setLabelFilter(labelFilter) .setFields(SettingFields.KEY, SettingFields.ETAG, SettingFields.CONTENT_TYPE, SettingFields.TAGS); testRunner.accept(selector); } @Test public abstract void listConfigurationSettingsAcceptDateTime(); @Test public abstract void listRevisions(); static void validateListRevisions(ConfigurationSetting expected, ConfigurationSetting actual) { assertEquals(expected.getKey(), actual.getKey()); assertNotNull(actual.getETag()); assertNull(actual.getValue()); assertNull(actual.getLastModified()); } @Test public abstract void listRevisionsWithMultipleKeys(); void listRevisionsWithMultipleKeysRunner(String key, String key2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value"); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setValue("updatedValue"); final 
ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key2).setValue("value"); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsWithMultipleLabels(); void listRevisionsWithMultipleLabelsRunner(String key, String label, String label2, Function<List<ConfigurationSetting>, Iterable<ConfigurationSetting>> testRunner) { final ConfigurationSetting setting = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label); final ConfigurationSetting settingUpdate = new ConfigurationSetting().setKey(setting.getKey()).setLabel(setting.getLabel()).setValue("updatedValue"); final ConfigurationSetting setting2 = new ConfigurationSetting().setKey(key).setValue("value").setLabel(label2); final ConfigurationSetting setting2Update = new ConfigurationSetting().setKey(setting2.getKey()).setLabel(setting2.getLabel()).setValue("updatedValue"); final List<ConfigurationSetting> testInput = Arrays.asList(setting, settingUpdate, setting2, setting2Update); final Set<ConfigurationSetting> expectedSelection = new HashSet<>(testInput); for (ConfigurationSetting actual : testRunner.apply(testInput)) { expectedSelection.removeIf(expected -> equals(expected, cleanResponse(expected, actual))); } assertTrue(expectedSelection.isEmpty()); } @Test public abstract void listRevisionsAcceptDateTime(); @Test public abstract void listRevisionsWithPagination(); @Test public abstract void listConfigurationSettingsWithPagination(); @Test public abstract void 
listRevisionsWithPaginationAndRepeatStream(); @Test public abstract void listRevisionsWithPaginationAndRepeatIterator(); @Test public abstract void getConfigurationSettingWhenValueNotUpdated(); @Disabled("This test exists to clean up resources missed due to 429s.") @Test public abstract void deleteAllSettings(); @Test public abstract void addHeadersFromContextPolicyTest(); void addHeadersFromContextPolicyRunner(Consumer<ConfigurationSetting> testRunner) { final String key = getKey(); final String value = "newValue"; final ConfigurationSetting newConfiguration = new ConfigurationSetting().setKey(key).setValue(value); testRunner.accept(newConfiguration); } /** * Helper method to verify that the RestResponse matches what was expected. This method assumes a response status of 200. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned by the service, the body should contain a ConfigurationSetting */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response) { assertConfigurationEquals(expected, response, 200); } /** * Helper method to verify that the RestResponse matches what was expected. * * @param expected ConfigurationSetting expected to be returned by the service * @param response RestResponse returned from the service, the body should contain a ConfigurationSetting * @param expectedStatusCode Expected HTTP status code returned by the service */ static void assertConfigurationEquals(ConfigurationSetting expected, Response<ConfigurationSetting> response, final int expectedStatusCode) { assertNotNull(response); assertEquals(expectedStatusCode, response.getStatusCode()); assertConfigurationEquals(expected, response.getValue()); } /** * Helper method to verify that the returned ConfigurationSetting matches what was expected. 
* * @param expected ConfigurationSetting expected to be returned by the service * @param actual ConfigurationSetting contained in the RestResponse body */ static void assertConfigurationEquals(ConfigurationSetting expected, ConfigurationSetting actual) { if (expected != null && actual != null) { actual = cleanResponse(expected, actual); } else if (expected == actual) { return; } else if (expected == null || actual == null) { assertFalse(true, "One of input settings is null"); } equals(expected, actual); } /** * The ConfigurationSetting has some fields that are only manipulated by the service, * this helper method cleans those fields on the setting returned by the service so tests are able to pass. * @param expected ConfigurationSetting expected to be returned by the service. * @param actual ConfigurationSetting returned by the service. */ private static ConfigurationSetting cleanResponse(ConfigurationSetting expected, ConfigurationSetting actual) { ConfigurationSetting cleanedActual = new ConfigurationSetting() .setKey(actual.getKey()) .setLabel(actual.getLabel()) .setValue(actual.getValue()) .setTags(actual.getTags()) .setContentType(actual.getContentType()) .setETag(expected.getETag()); try { Field lastModified = ConfigurationSetting.class.getDeclaredField("lastModified"); lastModified.setAccessible(true); lastModified.set(actual, expected.getLastModified()); } catch (NoSuchFieldException | IllegalAccessException ex) { } if (ConfigurationSetting.NO_LABEL.equals(expected.getLabel()) && actual.getLabel() == null) { cleanedActual.setLabel(ConfigurationSetting.NO_LABEL); } return cleanedActual; } static void assertRestException(Runnable exceptionThrower, int expectedStatusCode) { assertRestException(exceptionThrower, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Runnable exceptionThrower, Class<? 
extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { try { exceptionThrower.run(); fail(); } catch (Throwable ex) { assertRestException(ex, expectedExceptionType, expectedStatusCode); } } /** * Helper method to verify the error was a HttpResponseException and it has a specific HTTP response code. * * @param exception Expected error thrown during the test * @param expectedStatusCode Expected HTTP status code contained in the error response */ static void assertRestException(Throwable exception, int expectedStatusCode) { assertRestException(exception, HttpResponseException.class, expectedStatusCode); } static void assertRestException(Throwable exception, Class<? extends HttpResponseException> expectedExceptionType, int expectedStatusCode) { assertEquals(expectedExceptionType, exception.getClass()); assertEquals(expectedStatusCode, ((HttpResponseException) exception).getResponse().getStatusCode()); } /** * Helper method to verify that a command throws an IllegalArgumentException. * * @param exceptionThrower Command that should throw the exception */ static <T> void assertRunnableThrowsException(Runnable exceptionThrower, Class<T> exception) { try { exceptionThrower.run(); fail(); } catch (Exception ex) { assertEquals(exception, ex.getClass()); } } /** * Helper method to verify that two configuration setting are equal. Users can defined their equal method. 
* * @param o1 ConfigurationSetting object 1 * @param o2 ConfigurationSetting object 2 * @return boolean value that defines if two ConfigurationSettings are equal */ static boolean equals(ConfigurationSetting o1, ConfigurationSetting o2) { if (o1 == o2) { return true; } if (!Objects.equals(o1.getKey(), o2.getKey()) || !Objects.equals(o1.getLabel(), o2.getLabel()) || !Objects.equals(o1.getValue(), o2.getValue()) || !Objects.equals(o1.getETag(), o2.getETag()) || !Objects.equals(o1.getLastModified(), o2.getLastModified()) || !Objects.equals(o1.isReadOnly(), o2.isReadOnly()) || !Objects.equals(o1.getContentType(), o2.getContentType()) || CoreUtils.isNullOrEmpty(o1.getTags()) != CoreUtils.isNullOrEmpty(o2.getTags())) { return false; } if (!CoreUtils.isNullOrEmpty(o1.getTags())) { return Objects.equals(o1.getTags(), o2.getTags()); } return true; } /** * A helper method to verify that two lists of ConfigurationSetting are equal each other. * * @param settings1 List of ConfigurationSetting * @param settings2 Another List of ConfigurationSetting * @return boolean value that defines if two ConfigurationSetting lists are equal */ static boolean equalsArray(List<ConfigurationSetting> settings1, List<ConfigurationSetting> settings2) { if (settings1 == settings2) { return true; } if (settings1 == null || settings2 == null) { return false; } if (settings1.size() != settings2.size()) { return false; } final int size = settings1.size(); for (int i = 0; i < size; i++) { if (!equals(settings1.get(i), settings2.get(i))) { return false; } } return true; } /** * Helper method that sets up HttpHeaders * * @return the http headers */ static HttpHeaders getCustomizedHeaders() { final String headerOne = "my-header1"; final String headerTwo = "my-header2"; final String headerThree = "my-header3"; final String headerOneValue = "my-header1-value"; final String headerTwoValue = "my-header2-value"; final String headerThreeValue = "my-header3-value"; final HttpHeaders headers = new HttpHeaders(); 
headers.put(headerOne, headerOneValue); headers.put(headerTwo, headerTwoValue); headers.put(headerThree, headerThreeValue); return headers; } /** * Helper method that check if the {@code headerContainer} contains {@code headers}. * * @param headers the headers that been checked * @param headerContainer The headers container that check if the {@code headers} exist in it. */ static void assertContainsHeaders(HttpHeaders headers, HttpHeaders headerContainer) { headers.stream().forEach(httpHeader -> assertEquals(headerContainer.getValue(httpHeader.getName()), httpHeader.getValue())); } }
are these constants anywhere?
public static BlobHttpHeaders extractHttpHeaders(List<FileAttribute<?>> fileAttributes, ClientLogger logger) { BlobHttpHeaders headers = new BlobHttpHeaders(); for (Iterator<FileAttribute<?>> it = fileAttributes.iterator(); it.hasNext();) { FileAttribute<?> attr = it.next(); boolean propertyFound = true; switch (attr.name()) { case "Content-Type": headers.setContentType(attr.value().toString()); break; case "Content-Language": headers.setContentLanguage(attr.value().toString()); break; case "Content-Disposition": headers.setContentDisposition(attr.value().toString()); break; case "Content-Encoding": headers.setContentEncoding(attr.value().toString()); break; case "Content-MD5": if ((attr.value() instanceof byte[])) { headers.setContentMd5((byte[]) attr.value()); } else { throw Utility.logError(logger, new UnsupportedOperationException("Content-MD5 attribute must be a byte[]")); } break; case "Cache-Control": headers.setCacheControl(attr.value().toString()); break; default: propertyFound = false; break; } if (propertyFound) { it.remove(); } } return headers; }
case "Content-Type":
public static BlobHttpHeaders extractHttpHeaders(List<FileAttribute<?>> fileAttributes, ClientLogger logger) { BlobHttpHeaders headers = new BlobHttpHeaders(); for (Iterator<FileAttribute<?>> it = fileAttributes.iterator(); it.hasNext();) { FileAttribute<?> attr = it.next(); boolean propertyFound = true; switch (attr.name()) { case AzureFileSystemProvider.CONTENT_TYPE: headers.setContentType(attr.value().toString()); break; case AzureFileSystemProvider.CONTENT_LANGUAGE: headers.setContentLanguage(attr.value().toString()); break; case AzureFileSystemProvider.CONTENT_DISPOSITION: headers.setContentDisposition(attr.value().toString()); break; case AzureFileSystemProvider.CONTENT_ENCODING: headers.setContentEncoding(attr.value().toString()); break; case AzureFileSystemProvider.CONTENT_MD5: if ((attr.value() instanceof byte[])) { headers.setContentMd5((byte[]) attr.value()); } else { throw Utility.logError(logger, new UnsupportedOperationException("Content-MD5 attribute must be a byte[]")); } break; case AzureFileSystemProvider.CACHE_CONTROL: headers.setCacheControl(attr.value().toString()); break; default: propertyFound = false; break; } if (propertyFound) { it.remove(); } } return headers; }
class Utility { public static <T extends Exception> T logError(ClientLogger logger, T e) { logger.error(e.getMessage()); return e; } /* Note that this will remove the properties from the list of attributes as it finds them. */ public static Map<String, String> convertAttributesToMetadata(List<FileAttribute<?>> fileAttributes) { Map<String, String> metadata = new HashMap<>(); for (FileAttribute<?> attr : fileAttributes) { metadata.put(attr.name(), attr.value().toString()); } return metadata.isEmpty() ? null : metadata; } }
class Utility { public static <T extends Exception> T logError(ClientLogger logger, T e) { logger.error(e.getMessage()); return e; } /* Note that this will remove the properties from the list of attributes as it finds them. */ public static Map<String, String> convertAttributesToMetadata(List<FileAttribute<?>> fileAttributes) { Map<String, String> metadata = new HashMap<>(); for (FileAttribute<?> attr : fileAttributes) { metadata.put(attr.name(), attr.value().toString()); } return metadata.isEmpty() ? null : metadata; } }
while loop might be more suitable?
public void queryItemsWithContinuationTokenAndPageSize() throws Exception{ List<String> actualIds = new ArrayList<>(); CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); String query = String.format("SELECT * from c where c.id in ('%s', '%s', '%s')", actualIds.get(0), actualIds.get(1), actualIds.get(2)); FeedOptions feedOptions = new FeedOptions(); String continuationToken = null; int pageSize = 1; int initialDocumentCount = 3; int finalDocumentCount = 0; CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator1 = container.queryItems(query, feedOptions, CosmosItemProperties.class); do { Iterable<FeedResponse<CosmosItemProperties>> feedResponseIterable = feedResponseIterator1.iterableByPage(continuationToken, pageSize); Iterator<FeedResponse<CosmosItemProperties>> feedResponse = feedResponseIterable.iterator(); for (; feedResponse.hasNext(); ) { FeedResponse<CosmosItemProperties> fr = feedResponse.next(); int resultSize = fr.getResults().size(); assertThat(resultSize).isEqualTo(pageSize); finalDocumentCount += fr.getResults().size(); continuationToken = fr.getContinuationToken(); } } while(continuationToken != null); assertThat(finalDocumentCount).isEqualTo(initialDocumentCount); }
for (; feedResponse.hasNext(); ) {
public void queryItemsWithContinuationTokenAndPageSize() throws Exception{ List<String> actualIds = new ArrayList<>(); CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); String query = String.format("SELECT * from c where c.id in ('%s', '%s', '%s')", actualIds.get(0), actualIds.get(1), actualIds.get(2)); FeedOptions feedOptions = new FeedOptions(); String continuationToken = null; int pageSize = 1; int initialDocumentCount = 3; int finalDocumentCount = 0; CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator1 = container.queryItems(query, feedOptions, CosmosItemProperties.class); do { Iterable<FeedResponse<CosmosItemProperties>> feedResponseIterable = feedResponseIterator1.iterableByPage(continuationToken, pageSize); for (FeedResponse<CosmosItemProperties> fr : feedResponseIterable) { int resultSize = fr.getResults().size(); assertThat(resultSize).isEqualTo(pageSize); finalDocumentCount += fr.getResults().size(); continuationToken = fr.getContinuationToken(); } } while(continuationToken != null); assertThat(finalDocumentCount).isEqualTo(initialDocumentCount); }
class CosmosItemTest extends TestSuiteBase { private CosmosClient client; private CosmosContainer container; @Factory(dataProvider = "clientBuilders") public CosmosItemTest(CosmosClientBuilder clientBuilder) { super(clientBuilder); } @BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT) public void before_CosmosItemTest() { assertThat(this.client).isNull(); this.client = clientBuilder().buildClient(); CosmosAsyncContainer asyncContainer = getSharedMultiPartitionCosmosContainer(this.client.asyncClient()); container = client.getDatabase(asyncContainer.getDatabase().getId()).getContainer(asyncContainer.getId()); } @AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterClass() { assertThat(this.client).isNotNull(); this.client.close(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); validateItemResponse(properties, itemResponse); properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse1 = container.createItem(properties, new CosmosItemRequestOptions()); validateItemResponse(properties, itemResponse1); } @Test(groups = {"simple"}, timeOut = TIMEOUT) public void createItem_alreadyExists() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); validateItemResponse(properties, itemResponse); properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse1 = container.createItem(properties, new CosmosItemRequestOptions()); validateItemResponse(properties, itemResponse1); try { 
container.createItem(properties, new CosmosItemRequestOptions()); } catch (Exception e) { assertThat(e).isInstanceOf(CosmosClientException.class); assertThat(((CosmosClientException) e).getStatusCode()).isEqualTo(HttpConstants.StatusCodes.CONFLICT); } } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void readItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); CosmosItemResponse<CosmosItemProperties> readResponse1 = container.readItem(properties.getId(), new PartitionKey(properties.get("mypk")), new CosmosItemRequestOptions(), CosmosItemProperties.class); validateItemResponse(properties, readResponse1); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void replaceItem() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); validateItemResponse(properties, itemResponse); String newPropValue = UUID.randomUUID().toString(); BridgeInternal.setProperty(properties, "newProp", newPropValue); CosmosItemRequestOptions options = new CosmosItemRequestOptions(); options.setPartitionKey(new PartitionKey(properties.get("mypk"))); CosmosItemResponse<CosmosItemProperties> replace = container.replaceItem(properties, properties.getId(), new PartitionKey(properties.get("mypk")), options); assertThat(replace.getProperties().get("newProp")).isEqualTo(newPropValue); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void deleteItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); CosmosItemRequestOptions options = new CosmosItemRequestOptions(); CosmosItemResponse<CosmosItemProperties> deleteResponse = 
container.deleteItem(properties.getId(), new PartitionKey(properties.get("mypk")), options); assertThat(deleteResponse.getStatusCode()).isEqualTo(204); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void readAllItems() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); FeedOptions feedOptions = new FeedOptions(); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator3 = container.readAllItems(feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator3.iterator().hasNext()).isTrue(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void queryItems() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); String query = String.format("SELECT * from c where c.id = '%s'", properties.getId()); FeedOptions feedOptions = new FeedOptions(); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator1 = container.queryItems(query, feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator1.iterator().hasNext()).isTrue(); SqlQuerySpec querySpec = new SqlQuerySpec(query); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator3 = container.queryItems(querySpec, feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator3.iterator().hasNext()).isTrue(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) private CosmosItemProperties getDocumentDefinition(String documentId) { final String uuid = UUID.randomUUID().toString(); final CosmosItemProperties properties = new CosmosItemProperties(String.format("{ " + "\"id\": \"%s\", " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}" , documentId, uuid)); return properties; } private void 
validateItemResponse(CosmosItemProperties containerProperties, CosmosItemResponse<CosmosItemProperties> createResponse) { assertThat(createResponse.getProperties().getId()).isNotNull(); assertThat(createResponse.getProperties().getId()) .as("check Resource Id") .isEqualTo(containerProperties.getId()); } }
class CosmosItemTest extends TestSuiteBase { private CosmosClient client; private CosmosContainer container; @Factory(dataProvider = "clientBuilders") public CosmosItemTest(CosmosClientBuilder clientBuilder) { super(clientBuilder); } @BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT) public void before_CosmosItemTest() { assertThat(this.client).isNull(); this.client = clientBuilder().buildClient(); CosmosAsyncContainer asyncContainer = getSharedMultiPartitionCosmosContainer(this.client.asyncClient()); container = client.getDatabase(asyncContainer.getDatabase().getId()).getContainer(asyncContainer.getId()); } @AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterClass() { assertThat(this.client).isNotNull(); this.client.close(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); validateItemResponse(properties, itemResponse); properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse1 = container.createItem(properties, new CosmosItemRequestOptions()); validateItemResponse(properties, itemResponse1); } @Test(groups = {"simple"}, timeOut = TIMEOUT) public void createItem_alreadyExists() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); validateItemResponse(properties, itemResponse); properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse1 = container.createItem(properties, new CosmosItemRequestOptions()); validateItemResponse(properties, itemResponse1); try { 
container.createItem(properties, new CosmosItemRequestOptions()); } catch (Exception e) { assertThat(e).isInstanceOf(CosmosClientException.class); assertThat(((CosmosClientException) e).getStatusCode()).isEqualTo(HttpConstants.StatusCodes.CONFLICT); } } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void readItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); CosmosItemResponse<CosmosItemProperties> readResponse1 = container.readItem(properties.getId(), new PartitionKey(properties.get("mypk")), new CosmosItemRequestOptions(), CosmosItemProperties.class); validateItemResponse(properties, readResponse1); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void replaceItem() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); validateItemResponse(properties, itemResponse); String newPropValue = UUID.randomUUID().toString(); BridgeInternal.setProperty(properties, "newProp", newPropValue); CosmosItemRequestOptions options = new CosmosItemRequestOptions(); options.setPartitionKey(new PartitionKey(properties.get("mypk"))); CosmosItemResponse<CosmosItemProperties> replace = container.replaceItem(properties, properties.getId(), new PartitionKey(properties.get("mypk")), options); assertThat(replace.getProperties().get("newProp")).isEqualTo(newPropValue); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void deleteItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); CosmosItemRequestOptions options = new CosmosItemRequestOptions(); CosmosItemResponse<CosmosItemProperties> deleteResponse = 
container.deleteItem(properties.getId(), new PartitionKey(properties.get("mypk")), options); assertThat(deleteResponse.getStatusCode()).isEqualTo(204); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void readAllItems() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); FeedOptions feedOptions = new FeedOptions(); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator3 = container.readAllItems(feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator3.iterator().hasNext()).isTrue(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void queryItems() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); String query = String.format("SELECT * from c where c.id = '%s'", properties.getId()); FeedOptions feedOptions = new FeedOptions(); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator1 = container.queryItems(query, feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator1.iterator().hasNext()).isTrue(); SqlQuerySpec querySpec = new SqlQuerySpec(query); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator3 = container.queryItems(querySpec, feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator3.iterator().hasNext()).isTrue(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) private CosmosItemProperties getDocumentDefinition(String documentId) { final String uuid = UUID.randomUUID().toString(); final CosmosItemProperties properties = new CosmosItemProperties(String.format("{ " + "\"id\": \"%s\", " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}" , documentId, uuid)); return properties; } private void 
validateItemResponse(CosmosItemProperties containerProperties, CosmosItemResponse<CosmosItemProperties> createResponse) { assertThat(createResponse.getProperties().getId()).isNotNull(); assertThat(createResponse.getProperties().getId()) .as("check Resource Id") .isEqualTo(containerProperties.getId()); } }
yeah, it will be. Stupid intelliJ suggested this :P I will update it.
public void queryItemsWithContinuationTokenAndPageSize() throws Exception{ List<String> actualIds = new ArrayList<>(); CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); String query = String.format("SELECT * from c where c.id in ('%s', '%s', '%s')", actualIds.get(0), actualIds.get(1), actualIds.get(2)); FeedOptions feedOptions = new FeedOptions(); String continuationToken = null; int pageSize = 1; int initialDocumentCount = 3; int finalDocumentCount = 0; CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator1 = container.queryItems(query, feedOptions, CosmosItemProperties.class); do { Iterable<FeedResponse<CosmosItemProperties>> feedResponseIterable = feedResponseIterator1.iterableByPage(continuationToken, pageSize); Iterator<FeedResponse<CosmosItemProperties>> feedResponse = feedResponseIterable.iterator(); for (; feedResponse.hasNext(); ) { FeedResponse<CosmosItemProperties> fr = feedResponse.next(); int resultSize = fr.getResults().size(); assertThat(resultSize).isEqualTo(pageSize); finalDocumentCount += fr.getResults().size(); continuationToken = fr.getContinuationToken(); } } while(continuationToken != null); assertThat(finalDocumentCount).isEqualTo(initialDocumentCount); }
for (; feedResponse.hasNext(); ) {
public void queryItemsWithContinuationTokenAndPageSize() throws Exception{ List<String> actualIds = new ArrayList<>(); CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); properties = getDocumentDefinition(UUID.randomUUID().toString()); container.createItem(properties); actualIds.add(properties.getId()); String query = String.format("SELECT * from c where c.id in ('%s', '%s', '%s')", actualIds.get(0), actualIds.get(1), actualIds.get(2)); FeedOptions feedOptions = new FeedOptions(); String continuationToken = null; int pageSize = 1; int initialDocumentCount = 3; int finalDocumentCount = 0; CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator1 = container.queryItems(query, feedOptions, CosmosItemProperties.class); do { Iterable<FeedResponse<CosmosItemProperties>> feedResponseIterable = feedResponseIterator1.iterableByPage(continuationToken, pageSize); for (FeedResponse<CosmosItemProperties> fr : feedResponseIterable) { int resultSize = fr.getResults().size(); assertThat(resultSize).isEqualTo(pageSize); finalDocumentCount += fr.getResults().size(); continuationToken = fr.getContinuationToken(); } } while(continuationToken != null); assertThat(finalDocumentCount).isEqualTo(initialDocumentCount); }
class CosmosItemTest extends TestSuiteBase { private CosmosClient client; private CosmosContainer container; @Factory(dataProvider = "clientBuilders") public CosmosItemTest(CosmosClientBuilder clientBuilder) { super(clientBuilder); } @BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT) public void before_CosmosItemTest() { assertThat(this.client).isNull(); this.client = clientBuilder().buildClient(); CosmosAsyncContainer asyncContainer = getSharedMultiPartitionCosmosContainer(this.client.asyncClient()); container = client.getDatabase(asyncContainer.getDatabase().getId()).getContainer(asyncContainer.getId()); } @AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterClass() { assertThat(this.client).isNotNull(); this.client.close(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); validateItemResponse(properties, itemResponse); properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse1 = container.createItem(properties, new CosmosItemRequestOptions()); validateItemResponse(properties, itemResponse1); } @Test(groups = {"simple"}, timeOut = TIMEOUT) public void createItem_alreadyExists() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); validateItemResponse(properties, itemResponse); properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse1 = container.createItem(properties, new CosmosItemRequestOptions()); validateItemResponse(properties, itemResponse1); try { 
container.createItem(properties, new CosmosItemRequestOptions()); } catch (Exception e) { assertThat(e).isInstanceOf(CosmosClientException.class); assertThat(((CosmosClientException) e).getStatusCode()).isEqualTo(HttpConstants.StatusCodes.CONFLICT); } } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void readItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); CosmosItemResponse<CosmosItemProperties> readResponse1 = container.readItem(properties.getId(), new PartitionKey(properties.get("mypk")), new CosmosItemRequestOptions(), CosmosItemProperties.class); validateItemResponse(properties, readResponse1); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void replaceItem() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); validateItemResponse(properties, itemResponse); String newPropValue = UUID.randomUUID().toString(); BridgeInternal.setProperty(properties, "newProp", newPropValue); CosmosItemRequestOptions options = new CosmosItemRequestOptions(); options.setPartitionKey(new PartitionKey(properties.get("mypk"))); CosmosItemResponse<CosmosItemProperties> replace = container.replaceItem(properties, properties.getId(), new PartitionKey(properties.get("mypk")), options); assertThat(replace.getProperties().get("newProp")).isEqualTo(newPropValue); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void deleteItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); CosmosItemRequestOptions options = new CosmosItemRequestOptions(); CosmosItemResponse<CosmosItemProperties> deleteResponse = 
container.deleteItem(properties.getId(), new PartitionKey(properties.get("mypk")), options); assertThat(deleteResponse.getStatusCode()).isEqualTo(204); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void readAllItems() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); FeedOptions feedOptions = new FeedOptions(); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator3 = container.readAllItems(feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator3.iterator().hasNext()).isTrue(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void queryItems() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); String query = String.format("SELECT * from c where c.id = '%s'", properties.getId()); FeedOptions feedOptions = new FeedOptions(); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator1 = container.queryItems(query, feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator1.iterator().hasNext()).isTrue(); SqlQuerySpec querySpec = new SqlQuerySpec(query); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator3 = container.queryItems(querySpec, feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator3.iterator().hasNext()).isTrue(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) private CosmosItemProperties getDocumentDefinition(String documentId) { final String uuid = UUID.randomUUID().toString(); final CosmosItemProperties properties = new CosmosItemProperties(String.format("{ " + "\"id\": \"%s\", " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}" , documentId, uuid)); return properties; } private void 
validateItemResponse(CosmosItemProperties containerProperties, CosmosItemResponse<CosmosItemProperties> createResponse) { assertThat(createResponse.getProperties().getId()).isNotNull(); assertThat(createResponse.getProperties().getId()) .as("check Resource Id") .isEqualTo(containerProperties.getId()); } }
class CosmosItemTest extends TestSuiteBase { private CosmosClient client; private CosmosContainer container; @Factory(dataProvider = "clientBuilders") public CosmosItemTest(CosmosClientBuilder clientBuilder) { super(clientBuilder); } @BeforeClass(groups = {"simple"}, timeOut = SETUP_TIMEOUT) public void before_CosmosItemTest() { assertThat(this.client).isNull(); this.client = clientBuilder().buildClient(); CosmosAsyncContainer asyncContainer = getSharedMultiPartitionCosmosContainer(this.client.asyncClient()); container = client.getDatabase(asyncContainer.getDatabase().getId()).getContainer(asyncContainer.getId()); } @AfterClass(groups = {"simple"}, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterClass() { assertThat(this.client).isNotNull(); this.client.close(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void createItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); assertThat(itemResponse.getRequestCharge()).isGreaterThan(0); validateItemResponse(properties, itemResponse); properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse1 = container.createItem(properties, new CosmosItemRequestOptions()); validateItemResponse(properties, itemResponse1); } @Test(groups = {"simple"}, timeOut = TIMEOUT) public void createItem_alreadyExists() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); validateItemResponse(properties, itemResponse); properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse1 = container.createItem(properties, new CosmosItemRequestOptions()); validateItemResponse(properties, itemResponse1); try { 
container.createItem(properties, new CosmosItemRequestOptions()); } catch (Exception e) { assertThat(e).isInstanceOf(CosmosClientException.class); assertThat(((CosmosClientException) e).getStatusCode()).isEqualTo(HttpConstants.StatusCodes.CONFLICT); } } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void readItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); CosmosItemResponse<CosmosItemProperties> readResponse1 = container.readItem(properties.getId(), new PartitionKey(properties.get("mypk")), new CosmosItemRequestOptions(), CosmosItemProperties.class); validateItemResponse(properties, readResponse1); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void replaceItem() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); validateItemResponse(properties, itemResponse); String newPropValue = UUID.randomUUID().toString(); BridgeInternal.setProperty(properties, "newProp", newPropValue); CosmosItemRequestOptions options = new CosmosItemRequestOptions(); options.setPartitionKey(new PartitionKey(properties.get("mypk"))); CosmosItemResponse<CosmosItemProperties> replace = container.replaceItem(properties, properties.getId(), new PartitionKey(properties.get("mypk")), options); assertThat(replace.getProperties().get("newProp")).isEqualTo(newPropValue); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void deleteItem() throws Exception { CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); CosmosItemRequestOptions options = new CosmosItemRequestOptions(); CosmosItemResponse<CosmosItemProperties> deleteResponse = 
container.deleteItem(properties.getId(), new PartitionKey(properties.get("mypk")), options); assertThat(deleteResponse.getStatusCode()).isEqualTo(204); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void readAllItems() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); FeedOptions feedOptions = new FeedOptions(); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator3 = container.readAllItems(feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator3.iterator().hasNext()).isTrue(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void queryItems() throws Exception{ CosmosItemProperties properties = getDocumentDefinition(UUID.randomUUID().toString()); CosmosItemResponse<CosmosItemProperties> itemResponse = container.createItem(properties); String query = String.format("SELECT * from c where c.id = '%s'", properties.getId()); FeedOptions feedOptions = new FeedOptions(); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator1 = container.queryItems(query, feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator1.iterator().hasNext()).isTrue(); SqlQuerySpec querySpec = new SqlQuerySpec(query); CosmosContinuablePagedIterable<CosmosItemProperties> feedResponseIterator3 = container.queryItems(querySpec, feedOptions, CosmosItemProperties.class); assertThat(feedResponseIterator3.iterator().hasNext()).isTrue(); } @Test(groups = { "simple" }, timeOut = TIMEOUT) private CosmosItemProperties getDocumentDefinition(String documentId) { final String uuid = UUID.randomUUID().toString(); final CosmosItemProperties properties = new CosmosItemProperties(String.format("{ " + "\"id\": \"%s\", " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}" , documentId, uuid)); return properties; } private void 
validateItemResponse(CosmosItemProperties containerProperties, CosmosItemResponse<CosmosItemProperties> createResponse) { assertThat(createResponse.getProperties().getId()).isNotNull(); assertThat(createResponse.getProperties().getId()) .as("check Resource Id") .isEqualTo(containerProperties.getId()); } }
Removed cache as per offline discussion
private Mono<DatabaseAccount> getDatabaseAccountAsync(URI serviceEndpoint) { final GlobalEndpointManager that = this; Callable<Mono<DatabaseAccount>> fetchDatabaseAccount = () -> { return that.owner.getDatabaseAccountFromEndpoint(serviceEndpoint).doOnNext(databaseAccount -> { if(databaseAccount != null) { this.latestDatabaseAccount = databaseAccount; } logger.debug("account retrieved: {}", databaseAccount); }).single(); }; Mono<DatabaseAccount> obsoleteValueMono = databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, null, fetchDatabaseAccount); return obsoleteValueMono.flatMap(obsoleteValue -> { if (firstTimeDatabaseAccountInitialization.compareAndSet(true, false)) { return Mono.just(obsoleteValue); } return databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, obsoleteValue, fetchDatabaseAccount).doOnError(t -> { databaseAccountAsyncCache.set(StringUtils.EMPTY, obsoleteValue); }); }); }
databaseAccountAsyncCache.set(StringUtils.EMPTY, obsoleteValue);
private Mono<DatabaseAccount> getDatabaseAccountAsync(URI serviceEndpoint) { return this.owner.getDatabaseAccountFromEndpoint(serviceEndpoint) .doOnNext(databaseAccount -> { if(databaseAccount != null) { this.latestDatabaseAccount = databaseAccount; } logger.debug("account retrieved: {}", databaseAccount); }).single(); }
class GlobalEndpointManager implements AutoCloseable { private static final Logger logger = LoggerFactory.getLogger(GlobalEndpointManager.class); private final int backgroundRefreshLocationTimeIntervalInMS; private final LocationCache locationCache; private final URI defaultEndpoint; private final ConnectionPolicy connectionPolicy; private final DatabaseAccountManagerInternal owner; private final AtomicBoolean isRefreshing; private final AtomicBoolean refreshInBackground; private final ExecutorService executor = Executors.newSingleThreadExecutor(); private final Scheduler scheduler = Schedulers.fromExecutor(executor); private volatile boolean isClosed; private final AsyncCache<String, DatabaseAccount> databaseAccountAsyncCache; private AtomicBoolean firstTimeDatabaseAccountInitialization = new AtomicBoolean(true); private volatile DatabaseAccount latestDatabaseAccount; public GlobalEndpointManager(DatabaseAccountManagerInternal owner, ConnectionPolicy connectionPolicy, Configs configs) { this.backgroundRefreshLocationTimeIntervalInMS = configs.getUnavailableLocationsExpirationTimeInSeconds() * 1000; this.databaseAccountAsyncCache = new AsyncCache<>(); try { this.locationCache = new LocationCache( new ArrayList<>(connectionPolicy.getPreferredLocations() != null ? 
connectionPolicy.getPreferredLocations(): Collections.emptyList() ), owner.getServiceEndpoint(), connectionPolicy.getEnableEndpointDiscovery(), BridgeInternal.getUseMultipleWriteLocations(connectionPolicy), configs); this.owner = owner; this.defaultEndpoint = owner.getServiceEndpoint(); this.connectionPolicy = connectionPolicy; this.isRefreshing = new AtomicBoolean(false); this.refreshInBackground = new AtomicBoolean(false); this.isClosed = false; } catch (Exception e) { throw new IllegalArgumentException(e); } } public void init() { startRefreshLocationTimerAsync(true).block(); } public UnmodifiableList<URI> getReadEndpoints() { return this.locationCache.getReadEndpoints(); } public UnmodifiableList<URI> getWriteEndpoints() { return this.locationCache.getWriteEndpoints(); } public static Mono<DatabaseAccount> getDatabaseAccountFromAnyLocationsAsync( URI defaultEndpoint, List<String> locations, Function<URI, Mono<DatabaseAccount>> getDatabaseAccountFn) { return getDatabaseAccountFn.apply(defaultEndpoint).onErrorResume( e -> { logger.error("Fail to reach global gateway [{}], [{}]", defaultEndpoint, e.getMessage()); if (locations.isEmpty()) { return Mono.error(e); } Flux<Flux<DatabaseAccount>> obs = Flux.range(0, locations.size()) .map(index -> getDatabaseAccountFn.apply(LocationHelper.getLocationEndpoint(defaultEndpoint, locations.get(index))).flux()); Mono<DatabaseAccount> res = Flux.concatDelayError(obs).take(1).single(); return res.doOnError( innerE -> logger.error("Fail to reach location any of locations {} {}", String.join(",", locations), innerE.getMessage())); }); } public URI resolveServiceEndpoint(RxDocumentServiceRequest request) { return this.locationCache.resolveServiceEndpoint(request); } public void markEndpointUnavailableForRead(URI endpoint) { logger.debug("Marking endpoint {} unavailable for read",endpoint); this.locationCache.markEndpointUnavailableForRead(endpoint);; } public void markEndpointUnavailableForWrite(URI endpoint) { 
logger.debug("Marking endpoint {} unavailable for Write",endpoint); this.locationCache.markEndpointUnavailableForWrite(endpoint); } public boolean CanUseMultipleWriteLocations(RxDocumentServiceRequest request) { return this.locationCache.canUseMultipleWriteLocations(request); } public void close() { this.isClosed = true; this.executor.shutdown(); logger.debug("GlobalEndpointManager closed."); } public Mono<Void> refreshLocationAsync(DatabaseAccount databaseAccount, boolean forceRefresh) { return Mono.defer(() -> { logger.debug("refreshLocationAsync() invoked"); if (forceRefresh) { Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); return dbAccount; }).flatMap(dbAccount -> { return Mono.empty(); }); } if (!isRefreshing.compareAndSet(false, true)) { logger.debug("in the middle of another refresh. 
Not invoking a new refresh."); return Mono.empty(); } logger.debug("will refresh"); return this.refreshLocationPrivateAsync(databaseAccount).doOnError(e -> this.isRefreshing.set(false)); }); } public Mono<DatabaseAccount> getDatabaseAccountFromCache(URI defaultEndpoint) { return this.databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, null, () -> this.owner.getDatabaseAccountFromEndpoint(defaultEndpoint).flatMap(databaseAccount -> { if (databaseAccount != null) { this.latestDatabaseAccount = databaseAccount; } Mono<Void> refreshLocationCompletable = this.refreshLocationAsync(databaseAccount, false); return refreshLocationCompletable.then(Mono.just(databaseAccount)); }).single()); } public DatabaseAccount getLatestDatabaseAccount() { return this.latestDatabaseAccount; } private Mono<Void> refreshLocationPrivateAsync(DatabaseAccount databaseAccount) { return Mono.defer(() -> { logger.debug("refreshLocationPrivateAsync() refreshing locations"); if (databaseAccount != null) { this.locationCache.onDatabaseAccountRead(databaseAccount); } Utils.ValueHolder<Boolean> canRefreshInBackground = new Utils.ValueHolder<>(); if (this.locationCache.shouldRefreshEndpoints(canRefreshInBackground)) { logger.debug("shouldRefreshEndpoints: true"); if (databaseAccount == null && !canRefreshInBackground.v) { logger.debug("shouldRefreshEndpoints: can't be done in background"); Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); this.isRefreshing.set(false); return dbAccount; }).flatMap(dbAccount -> { if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } return Mono.empty(); }); } if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } this.isRefreshing.set(false); return Mono.empty(); } else { 
logger.debug("shouldRefreshEndpoints: false, nothing to do."); this.isRefreshing.set(false); return Mono.empty(); } }); } private void startRefreshLocationTimerAsync() { startRefreshLocationTimerAsync(false).subscribe(); } private Mono<Void> startRefreshLocationTimerAsync(boolean initialization) { if (this.isClosed) { logger.debug("startRefreshLocationTimerAsync: nothing to do, it is closed"); return Mono.empty(); } logger.debug("registering a refresh in [{}] ms", this.backgroundRefreshLocationTimeIntervalInMS); LocalDateTime now = LocalDateTime.now(); int delayInMillis = initialization ? 0: this.backgroundRefreshLocationTimeIntervalInMS; this.refreshInBackground.set(true); return Mono.delay(Duration.ofMillis(delayInMillis)) .flatMap( t -> { if (this.isClosed) { logger.warn("client already closed"); return Mono.empty(); } logger.debug("startRefreshLocationTimerAsync() - Invoking refresh, I was registered on [{}]", now); Mono<DatabaseAccount> databaseAccountObs = GlobalEndpointManager.getDatabaseAccountFromAnyLocationsAsync(this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.flatMap(dbAccount -> { logger.debug("db account retrieved"); this.refreshInBackground.set(false); return this.refreshLocationPrivateAsync(dbAccount); }); }).onErrorResume(ex -> { logger.error("startRefreshLocationTimerAsync() - Unable to refresh database account from any location. Exception: {}", ex.toString(), ex); this.startRefreshLocationTimerAsync(); return Mono.empty(); }).subscribeOn(scheduler); } public boolean isClosed() { return this.isClosed; } }
class GlobalEndpointManager implements AutoCloseable { private static final Logger logger = LoggerFactory.getLogger(GlobalEndpointManager.class); private final int backgroundRefreshLocationTimeIntervalInMS; private final LocationCache locationCache; private final URI defaultEndpoint; private final ConnectionPolicy connectionPolicy; private final DatabaseAccountManagerInternal owner; private final AtomicBoolean isRefreshing; private final AtomicBoolean refreshInBackground; private final ExecutorService executor = Executors.newSingleThreadExecutor(); private final Scheduler scheduler = Schedulers.fromExecutor(executor); private volatile boolean isClosed; private AtomicBoolean firstTimeDatabaseAccountInitialization = new AtomicBoolean(true); private volatile DatabaseAccount latestDatabaseAccount; public GlobalEndpointManager(DatabaseAccountManagerInternal owner, ConnectionPolicy connectionPolicy, Configs configs) { this.backgroundRefreshLocationTimeIntervalInMS = configs.getUnavailableLocationsExpirationTimeInSeconds() * 1000; try { this.locationCache = new LocationCache( new ArrayList<>(connectionPolicy.getPreferredLocations() != null ? 
connectionPolicy.getPreferredLocations(): Collections.emptyList() ), owner.getServiceEndpoint(), connectionPolicy.getEnableEndpointDiscovery(), BridgeInternal.getUseMultipleWriteLocations(connectionPolicy), configs); this.owner = owner; this.defaultEndpoint = owner.getServiceEndpoint(); this.connectionPolicy = connectionPolicy; this.isRefreshing = new AtomicBoolean(false); this.refreshInBackground = new AtomicBoolean(false); this.isClosed = false; } catch (Exception e) { throw new IllegalArgumentException(e); } } public void init() { startRefreshLocationTimerAsync(true).block(); } public UnmodifiableList<URI> getReadEndpoints() { return this.locationCache.getReadEndpoints(); } public UnmodifiableList<URI> getWriteEndpoints() { return this.locationCache.getWriteEndpoints(); } public static Mono<DatabaseAccount> getDatabaseAccountFromAnyLocationsAsync( URI defaultEndpoint, List<String> locations, Function<URI, Mono<DatabaseAccount>> getDatabaseAccountFn) { return getDatabaseAccountFn.apply(defaultEndpoint).onErrorResume( e -> { logger.error("Fail to reach global gateway [{}], [{}]", defaultEndpoint, e.getMessage()); if (locations.isEmpty()) { return Mono.error(e); } Flux<Flux<DatabaseAccount>> obs = Flux.range(0, locations.size()) .map(index -> getDatabaseAccountFn.apply(LocationHelper.getLocationEndpoint(defaultEndpoint, locations.get(index))).flux()); Mono<DatabaseAccount> res = Flux.concatDelayError(obs).take(1).single(); return res.doOnError( innerE -> logger.error("Fail to reach location any of locations {} {}", String.join(",", locations), innerE.getMessage())); }); } public URI resolveServiceEndpoint(RxDocumentServiceRequest request) { return this.locationCache.resolveServiceEndpoint(request); } public void markEndpointUnavailableForRead(URI endpoint) { logger.debug("Marking endpoint {} unavailable for read",endpoint); this.locationCache.markEndpointUnavailableForRead(endpoint);; } public void markEndpointUnavailableForWrite(URI endpoint) { 
logger.debug("Marking endpoint {} unavailable for Write",endpoint); this.locationCache.markEndpointUnavailableForWrite(endpoint); } public boolean CanUseMultipleWriteLocations(RxDocumentServiceRequest request) { return this.locationCache.canUseMultipleWriteLocations(request); } public void close() { this.isClosed = true; this.executor.shutdown(); logger.debug("GlobalEndpointManager closed."); } public Mono<Void> refreshLocationAsync(DatabaseAccount databaseAccount, boolean forceRefresh) { return Mono.defer(() -> { logger.debug("refreshLocationAsync() invoked"); if (forceRefresh) { Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); return dbAccount; }).flatMap(dbAccount -> { return Mono.empty(); }); } if (!isRefreshing.compareAndSet(false, true)) { logger.debug("in the middle of another refresh. Not invoking a new refresh."); return Mono.empty(); } logger.debug("will refresh"); return this.refreshLocationPrivateAsync(databaseAccount).doOnError(e -> this.isRefreshing.set(false)); }); } /** * This will provide the latest databaseAccount. 
* If due to some reason last databaseAccount update was null, * this method will return previous valid value * @return DatabaseAccount */ public DatabaseAccount getLatestDatabaseAccount() { return this.latestDatabaseAccount; } private Mono<Void> refreshLocationPrivateAsync(DatabaseAccount databaseAccount) { return Mono.defer(() -> { logger.debug("refreshLocationPrivateAsync() refreshing locations"); if (databaseAccount != null) { this.locationCache.onDatabaseAccountRead(databaseAccount); } Utils.ValueHolder<Boolean> canRefreshInBackground = new Utils.ValueHolder<>(); if (this.locationCache.shouldRefreshEndpoints(canRefreshInBackground)) { logger.debug("shouldRefreshEndpoints: true"); if (databaseAccount == null && !canRefreshInBackground.v) { logger.debug("shouldRefreshEndpoints: can't be done in background"); Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); this.isRefreshing.set(false); return dbAccount; }).flatMap(dbAccount -> { if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } return Mono.empty(); }); } if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } this.isRefreshing.set(false); return Mono.empty(); } else { logger.debug("shouldRefreshEndpoints: false, nothing to do."); this.isRefreshing.set(false); return Mono.empty(); } }); } private void startRefreshLocationTimerAsync() { startRefreshLocationTimerAsync(false).subscribe(); } private Mono<Void> startRefreshLocationTimerAsync(boolean initialization) { if (this.isClosed) { logger.debug("startRefreshLocationTimerAsync: nothing to do, it is closed"); return Mono.empty(); } logger.debug("registering a refresh in [{}] ms", this.backgroundRefreshLocationTimeIntervalInMS); LocalDateTime now = 
LocalDateTime.now(); int delayInMillis = initialization ? 0: this.backgroundRefreshLocationTimeIntervalInMS; this.refreshInBackground.set(true); return Mono.delay(Duration.ofMillis(delayInMillis)) .flatMap( t -> { if (this.isClosed) { logger.warn("client already closed"); return Mono.empty(); } logger.debug("startRefreshLocationTimerAsync() - Invoking refresh, I was registered on [{}]", now); Mono<DatabaseAccount> databaseAccountObs = GlobalEndpointManager.getDatabaseAccountFromAnyLocationsAsync(this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.flatMap(dbAccount -> { logger.debug("db account retrieved"); this.refreshInBackground.set(false); return this.refreshLocationPrivateAsync(dbAccount); }); }).onErrorResume(ex -> { logger.error("startRefreshLocationTimerAsync() - Unable to refresh database account from any location. Exception: {}", ex.toString(), ex); this.startRefreshLocationTimerAsync(); return Mono.empty(); }).subscribeOn(scheduler); } public boolean isClosed() { return this.isClosed; } }
Should be uploading `indexedDoc`. I'm getting paranoid now that we've made this mistake in many places and copy-pasted. 😬
public void getDynamicDocumentCannotAlwaysDetermineCorrectType() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); List<Document> rooms = new ArrayList<>(); rooms.add(new Document(Collections.singletonMap("baseRate", NaN))); Document indexedDoc = new Document(); indexedDoc.put("HotelId", "1"); indexedDoc.put("HotelName", "2015-02-11T12:58:00Z"); indexedDoc.put("Location", GeoPoint.create(40.760586, -73.975403)); indexedDoc.put("Rooms", rooms); Document expectedDoc = new Document(); expectedDoc.put("HotelId", "1"); expectedDoc.put("HotelName", OffsetDateTime.of(2015, 2, 11, 12, 58, 0, 9, ZoneOffset.UTC)); expectedDoc.put("Location", GeoPoint.create(40.760586, -73.975403)); expectedDoc.put("Rooms", Collections.singleton(new Document(Collections.singletonMap("BaseRate", "NaN")))); client.index(new IndexBatch<>().addUploadAction(expectedDoc)); assertEquals(client.getDocumentWithResponse("1", new ArrayList<>(indexedDoc.keySet()), null, Context.NONE).getValue(), expectedDoc); }
client.index(new IndexBatch<>().addUploadAction(expectedDoc));
public void getDynamicDocumentCannotAlwaysDetermineCorrectType() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document indexedDoc = new Document(); indexedDoc.put("HotelId", "1"); indexedDoc.put("HotelName", "2015-02-11T12:58:00Z"); indexedDoc.put("Location", GeoPoint.create(40.760586, -73.975403)); indexedDoc.put("Rooms", Collections.singletonList(new Document(Collections.singletonMap("BaseRate", NaN)))); Document expectedDoc = new Document(); expectedDoc.put("HotelId", "1"); expectedDoc.put("HotelName", OffsetDateTime.of(2015, 2, 11, 12, 58, 0, 0, ZoneOffset.UTC)); expectedDoc.put("Location", GeoPoint.create(40.760586, -73.975403)); expectedDoc.put("Rooms", Collections.singletonList(new Document(Collections.singletonMap("BaseRate", "NaN")))); client.index(new IndexBatch<>().addUploadAction(indexedDoc)); List<String> selectedFields = Arrays.asList("HotelId", "HotelName", "Location", "Rooms/BaseRate"); assertEquals(expectedDoc, client.getDocumentWithResponse("1", selectedFields, null, Context.NONE).getValue()); }
class LookupSyncTests extends LookupTestBase { private SearchIndexClient client; @Test public void canGetStaticallyTypedDocument() throws ParseException { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = prepareExpectedHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetStaticallyTypedDocumentWithNullOrEmptyValues() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = prepareEmptyHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetStaticallyTypedDocumentWithPascalCaseFields() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = preparePascalCaseFieldsHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canRoundtripStaticallyTypedPrimitiveCollections() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); ModelWithPrimitiveCollections expected = preparePrimitivesModel(); uploadDocument(client, expected); Document result = client.getDocument(expected.key()); ModelWithPrimitiveCollections actual = convertToType(result, ModelWithPrimitiveCollections.class); TestHelpers.assetModelsWithPrimitivesEqual(expected, actual); } @Test public void getStaticallyTypedDocumentSetsUnselectedFieldsToNull() throws ParseException { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel indexedDoc = prepareSelectedFieldsHotel(); Hotel 
expected = new Hotel() .hotelName("Countryside Hotel") .description("Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more.") .address(new HotelAddress().city("Durham")) .rooms(Arrays.asList(new HotelRoom().baseRate(2.44), new HotelRoom().baseRate(7.69))); uploadDocument(client, indexedDoc); List<String> selectedFields = Arrays.asList("Description", "HotelName", "Address/City", "Rooms/BaseRate"); Response<Document> response = client.getDocumentWithResponse(indexedDoc.hotelId(), selectedFields, generateRequestOptions(), Context.NONE); Hotel actual = convertToType(response.getValue(), Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetDynamicDocumentWithNullOrEmptyValues() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document expectedDoc = new Document() { { put("HotelId", "1"); put("HotelName", null); put("Tags", Collections.emptyList()); put("ParkingIncluded", null); put("LastRenovationDate", null); put("Rating", null); put("Location", null); put("Address", null); put("Rooms", Collections.singletonList( new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, expectedDoc); List<String> selectedFields = Arrays.asList("HotelId", "HotelName", "Tags", "ParkingIncluded", "LastRenovationDate", "Rating", "Location", "Address", "Rooms/BaseRate", "Rooms/BedOptions", "Rooms/SleepsCount", "Rooms/SmokingAllowed", "Rooms/Tags"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void getDynamicDocumentWithEmptyObjectsReturnsObjectsFullOfNulls() { createHotelIndex(); client = 
getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Address", new Document()); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Address", new Document() { { put("StreetAddress", null); put("City", null); put("StateProvince", null); put("Country", null); put("PostalCode", null); } }); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Address"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void emptyDynamicallyTypedPrimitiveCollectionsRoundtripAsObjectArrays() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); String docKey = "3"; Document originalDoc = new Document() { { put("Key", docKey); put("Dates", new Object[]{}); put("Doubles", new Double[]{}); put("Bools", new boolean[]{}); put("Longs", new Long[]{}); put("Strings", new String[]{}); put("Ints", new int[]{}); put("Points", new Object[]{}); } }; Document expectedDoc = new Document() { { put("Key", docKey); put("Doubles", Collections.emptyList()); put("Bools", Collections.emptyList()); put("Longs", Collections.emptyList()); put("Strings", Collections.emptyList()); put("Ints", Collections.emptyList()); put("Points", Collections.emptyList()); put("Dates", Collections.emptyList()); } }; uploadDocument(client, originalDoc); Document actualDoc = client.getDocument(docKey); assertEquals(expectedDoc, actualDoc); } @Test public void emptyDynamicObjectsInCollectionExpandedOnGetWhenCollectionFieldSelected() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new Document(), new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", 
null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new Document() { { put("Description", null); put("Description_fr", null); put("Type", null); put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } }, new Document() { { put("Description", null); put("Description_fr", null); put("Type", null); put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Rooms"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Override @Override public void canGetDocumentWithBase64EncodedKey() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); String complexKey = Base64.getEncoder().encodeToString(new byte[]{1, 2, 3, 4, 5}); Document expectedDoc = new Document(); expectedDoc.put("HotelId", complexKey); client.index(new IndexBatch<>().addUploadAction(expectedDoc)); assertEquals(client.getDocumentWithResponse(complexKey, new ArrayList<>(expectedDoc.keySet()), null, Context.NONE).getValue(), expectedDoc); } @Override public void roundTrippingDateTimeOffsetNormalizesToUtc() throws ParseException { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); Document indexedDoc = new Document(); indexedDoc.put("HotelId", "1"); indexedDoc.put("LastRenovationDate", dateFormat.parse("2010-06-27T00:00:00-08:00")); Document expectedDoc = new Document(); expectedDoc.put("HotelId", "1"); expectedDoc.put("LastRenovationDate", 
dateFormat.parse("2010-06-27T08:00:00Z")); client.index(new IndexBatch<>().addUploadAction(indexedDoc)); assertEquals(client.getDocument("1"), expectedDoc); } @Test public void emptyDynamicObjectsOmittedFromCollectionOnGetWhenSubFieldsSelected() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new Document(), new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Rooms", Collections.singletonList( new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Rooms/BaseRate", "Rooms/BedOptions", "Rooms/SleepsCount", "Rooms/SmokingAllowed", "Rooms/Tags"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void dynamicallyTypedPrimitiveCollectionsDoNotAllRoundtripCorrectly() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); String docKey = "1"; OffsetDateTime dateTime = OffsetDateTime.parse("2019-08-13T14:30:00Z"); GeoPoint geoPoint = GeoPoint.create(1.0, 100.0); Document indexedDoc = new Document() { { put("Key", docKey); put("Dates", new OffsetDateTime[]{dateTime}); put("Doubles", new Double[]{0.0, 5.8, POSITIVE_INFINITY, NEGATIVE_INFINITY, NaN}); put("Bools", new Boolean[]{true, false}); put("Longs", new Long[]{9999999999999999L, 832372345832523L}); put("Strings", new String[]{"hello", "bye"}); put("Ints", new int[]{1, 2, 3, 4, -13, 5, 0}); put("Points", new 
GeoPoint[]{geoPoint}); } }; Document expectedDoc = new Document() { { put("Key", docKey); put("Doubles", Arrays.asList(0.0, 5.8, "INF", "-INF", "NaN")); put("Bools", Arrays.asList(true, false)); put("Longs", Arrays.asList(9999999999999999L, 832372345832523L)); put("Strings", Arrays.asList("hello", "bye")); put("Ints", Arrays.asList(1, 2, 3, 4, -13, 5, 0)); put("Points", Collections.singletonList(geoPoint)); put("Dates", Collections.singletonList(dateTime)); } }; uploadDocument(client, indexedDoc); Document actualDoc = client.getDocument(docKey); assertEquals(expectedDoc, actualDoc); } }
class LookupSyncTests extends SearchIndexClientTestBase { private static final String INDEX_NAME = "hotels"; private SearchIndexClient client; @Test public void canGetStaticallyTypedDocument() throws ParseException { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = prepareExpectedHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetStaticallyTypedDocumentWithNullOrEmptyValues() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = prepareEmptyHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetStaticallyTypedDocumentWithPascalCaseFields() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = preparePascalCaseFieldsHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canRoundtripStaticallyTypedPrimitiveCollections() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); ModelWithPrimitiveCollections expected = preparePrimitivesModel(); uploadDocument(client, expected); Document result = client.getDocument(expected.key()); ModelWithPrimitiveCollections actual = convertToType(result, ModelWithPrimitiveCollections.class); TestHelpers.assetModelsWithPrimitivesEqual(expected, actual); } @Test public void getStaticallyTypedDocumentSetsUnselectedFieldsToNull() throws ParseException { createHotelIndex(); client = 
getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel indexedDoc = prepareSelectedFieldsHotel(); Hotel expected = new Hotel() .hotelName("Countryside Hotel") .description("Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more.") .address(new HotelAddress().city("Durham")) .rooms(Arrays.asList(new HotelRoom().baseRate(2.44), new HotelRoom().baseRate(7.69))); uploadDocument(client, indexedDoc); List<String> selectedFields = Arrays.asList("Description", "HotelName", "Address/City", "Rooms/BaseRate"); Response<Document> response = client.getDocumentWithResponse(indexedDoc.hotelId(), selectedFields, generateRequestOptions(), Context.NONE); Hotel actual = convertToType(response.getValue(), Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetDynamicDocumentWithNullOrEmptyValues() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document expectedDoc = new Document() { { put("HotelId", "1"); put("HotelName", null); put("Tags", Collections.emptyList()); put("ParkingIncluded", null); put("LastRenovationDate", null); put("Rating", null); put("Location", null); put("Address", null); put("Rooms", Collections.singletonList( new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, expectedDoc); List<String> selectedFields = Arrays.asList("HotelId", "HotelName", "Tags", "ParkingIncluded", "LastRenovationDate", "Rating", "Location", "Address", "Rooms/BaseRate", "Rooms/BedOptions", "Rooms/SleepsCount", "Rooms/SmokingAllowed", "Rooms/Tags"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void 
getDynamicDocumentWithEmptyObjectsReturnsObjectsFullOfNulls() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Address", new Document()); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Address", new Document() { { put("StreetAddress", null); put("City", null); put("StateProvince", null); put("Country", null); put("PostalCode", null); } }); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Address"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void emptyDynamicallyTypedPrimitiveCollectionsRoundtripAsObjectArrays() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); String docKey = "3"; Document originalDoc = new Document() { { put("Key", docKey); put("Dates", new Object[]{}); put("Doubles", new Double[]{}); put("Bools", new boolean[]{}); put("Longs", new Long[]{}); put("Strings", new String[]{}); put("Ints", new int[]{}); put("Points", new Object[]{}); } }; Document expectedDoc = new Document() { { put("Key", docKey); put("Doubles", Collections.emptyList()); put("Bools", Collections.emptyList()); put("Longs", Collections.emptyList()); put("Strings", Collections.emptyList()); put("Ints", Collections.emptyList()); put("Points", Collections.emptyList()); put("Dates", Collections.emptyList()); } }; uploadDocument(client, originalDoc); Document actualDoc = client.getDocument(docKey); assertEquals(expectedDoc, actualDoc); } @Test public void emptyDynamicObjectsInCollectionExpandedOnGetWhenCollectionFieldSelected() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new 
Document(), new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new Document() { { put("Description", null); put("Description_fr", null); put("Type", null); put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } }, new Document() { { put("Description", null); put("Description_fr", null); put("Type", null); put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Rooms"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test @Test public void canGetDocumentWithBase64EncodedKey() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); String complexKey = Base64.getEncoder().encodeToString(new byte[]{1, 2, 3, 4, 5}); Document expectedDoc = new Document(); expectedDoc.put("HotelId", complexKey); client.index(new IndexBatch<>().addUploadAction(expectedDoc)); assertEquals(client.getDocumentWithResponse(complexKey, new ArrayList<>(expectedDoc.keySet()), null, Context.NONE).getValue(), expectedDoc); } @Test public void roundTrippingDateTimeOffsetNormalizesToUtc() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document indexedDoc = new Document(); indexedDoc.put("HotelId", "1"); indexedDoc.put("LastRenovationDate", OffsetDateTime.parse("2010-06-27T00:00:00-08:00", DateTimeFormatter.ISO_DATE_TIME)); Document expectedDoc = new Document(); expectedDoc.put("HotelId", "1"); 
expectedDoc.put("LastRenovationDate", OffsetDateTime.parse("2010-06-27T08:00:00Z", DateTimeFormatter.ISO_DATE_TIME)); client.index(new IndexBatch<>().addUploadAction(indexedDoc)); assertEquals(client.getDocumentWithResponse("1", new ArrayList<>(expectedDoc.keySet()), null, Context.NONE).getValue(), expectedDoc); } @Test public void emptyDynamicObjectsOmittedFromCollectionOnGetWhenSubFieldsSelected() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new Document(), new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Rooms", Collections.singletonList( new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Rooms/BaseRate", "Rooms/BedOptions", "Rooms/SleepsCount", "Rooms/SmokingAllowed", "Rooms/Tags"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void dynamicallyTypedPrimitiveCollectionsDoNotAllRoundtripCorrectly() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); String docKey = "1"; OffsetDateTime dateTime = OffsetDateTime.parse("2019-08-13T14:30:00Z"); GeoPoint geoPoint = GeoPoint.create(1.0, 100.0); Document indexedDoc = new Document() { { put("Key", docKey); put("Dates", new OffsetDateTime[]{dateTime}); put("Doubles", new Double[]{0.0, 5.8, POSITIVE_INFINITY, NEGATIVE_INFINITY, NaN}); put("Bools", new Boolean[]{true, false}); put("Longs", new 
Long[]{9999999999999999L, 832372345832523L}); put("Strings", new String[]{"hello", "bye"}); put("Ints", new int[]{1, 2, 3, 4, -13, 5, 0}); put("Points", new GeoPoint[]{geoPoint}); } }; Document expectedDoc = new Document() { { put("Key", docKey); put("Doubles", Arrays.asList(0.0, 5.8, "INF", "-INF", "NaN")); put("Bools", Arrays.asList(true, false)); put("Longs", Arrays.asList(9999999999999999L, 832372345832523L)); put("Strings", Arrays.asList("hello", "bye")); put("Ints", Arrays.asList(1, 2, 3, 4, -13, 5, 0)); put("Points", Collections.singletonList(geoPoint)); put("Dates", Collections.singletonList(dateTime)); } }; uploadDocument(client, indexedDoc); Document actualDoc = client.getDocument(docKey); assertEquals(expectedDoc, actualDoc); } Hotel prepareExpectedHotel() throws ParseException { return new Hotel().hotelId("1") .hotelName("Fancy Stay") .description("Best hotel in town if you like luxury hotels. They have an amazing infinity pool, a spa, and a really helpful concierge. The location is perfect -- right downtown, close to all the tourist attractions. We highly recommend this hotel.") .descriptionFr("Meilleur hôtel en ville si vous aimez les hôtels de luxe. Ils ont une magnifique piscine à débordement, un spa et un concierge très utile. L'emplacement est parfait – en plein centre, à proximité de toutes les attractions touristiques. 
Nous recommandons fortement cet hôtel.") .category("Luxury") .tags(Arrays.asList("pool", "view", "wifi", "concierge")) .parkingIncluded(false) .smokingAllowed(false) .lastRenovationDate(DATE_FORMAT.parse("2010-06-27T00:00:00Z")) .rating(5) .location(GeoPoint.create(47.678581, -122.131577)) .rooms(new ArrayList<>()); } Hotel prepareEmptyHotel() { return new Hotel().hotelId("1") .tags(new ArrayList<>()) .rooms(Collections.singletonList( new HotelRoom().tags(new ArrayList<>()) )); } Hotel preparePascalCaseFieldsHotel() { return new Hotel().hotelId("123").hotelName("Lord of the Rings").description("J.R.R").descriptionFr("Tolkien"); } Hotel prepareSelectedFieldsHotel() throws ParseException { return new Hotel() .hotelId("2") .hotelName("Countryside Hotel") .description("Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more.") .descriptionFr("Économisez jusqu'à 50% sur les hôtels traditionnels. 
WiFi gratuit, très bien situé près du centre-ville, cuisine complète, laveuse & sécheuse, support 24/7, bowling, centre de fitness et plus encore.") .category("Budget") .tags(Arrays.asList("24-hour front desk service", "coffee in lobby", "restaurant")) .parkingIncluded(false) .smokingAllowed(true) .lastRenovationDate(DATE_FORMAT.parse("2010-06-27T00:00:00Z")) .rating(3) .location(GeoPoint.create(35.904160, -78.940483)) .address(new HotelAddress().streetAddress("6910 Fayetteville Rd").city("Durham").stateProvince("NC").country("USA").postalCode("27713")) .rooms(Arrays.asList( new HotelRoom() .description("Suite, 1 King Bed (Amenities)") .descriptionFr("Suite, 1 très grand lit (Services)") .type("Suite") .baseRate(2.44) .bedOptions("1 King Bed") .sleepsCount(2) .smokingAllowed(true) .tags(Collections.singletonList("coffee maker")), new HotelRoom() .description("Budget Room, 1 Queen Bed (Amenities)") .descriptionFr("Chambre Économique, 1 grand lit (Services)") .type("Budget Room") .baseRate(7.69) .bedOptions("1 Queen Bed") .sleepsCount(2) .smokingAllowed(false) .tags(Collections.singletonList("coffee maker")))); } ModelWithPrimitiveCollections preparePrimitivesModel() { return new ModelWithPrimitiveCollections() .key("1") .bools(new Boolean[]{true, false}) .dates(new OffsetDateTime[]{ OffsetDateTime.parse("2019-04-14T14:24:00Z"), OffsetDateTime.parse("1999-12-31T23:59:59Z")}) .doubles(new Double[]{NEGATIVE_INFINITY, 0.0, 2.78, NaN, 3.14, POSITIVE_INFINITY}) .ints(new int[]{1, 2, 3, 4, -13, 5, 0}) .longs(new Long[]{-9_999_999_999_999_999L, 832_372_345_832_523L}) .points(new GeoPoint[]{ GeoPoint.create(49.0, -67.0), GeoPoint.create(47.0, 21.0)}) .strings(new String[]{"hello", "2019-04-14T14:56:00-07:00"}); } String setupIndexWithDataTypes() { Index index = new Index() .setName("data-types-tests-index") .setFields(Arrays.asList( new Field() .setName("Key") .setType(DataType.EDM_STRING) .setKey(true) .setRetrievable(true), new Field() .setName("Bools") 
.setType(DataType.Collection(DataType.EDM_BOOLEAN)) .setRetrievable(true), new Field() .setName("Dates") .setType(DataType.Collection(DataType.EDM_DATE_TIME_OFFSET)) .setRetrievable(true), new Field() .setName("Doubles") .setType(DataType.Collection(DataType.EDM_DOUBLE)) .setRetrievable(true), new Field() .setName("Points") .setType(DataType.Collection(DataType.EDM_GEOGRAPHY_POINT)) .setRetrievable(true), new Field() .setName("Ints") .setType(DataType.Collection(DataType.EDM_INT32)) .setRetrievable(true), new Field() .setName("Longs") .setType(DataType.Collection(DataType.EDM_INT64)) .setRetrievable(true), new Field() .setName("Strings") .setType(DataType.Collection(DataType.EDM_STRING)) .setRetrievable(true) )); setupIndex(index); return index.getName(); } }
Ah, right. Making changes in UserAgent one. Forgot to update here. Thanks!
private void getKeyClient(HttpClient httpClient, KeyServiceVersion serviceVersion) { HttpPipeline httpPipeline = getHttpPipeline(httpClient, serviceVersion); client = new KeyClientBuilder() .vaultUrl(getEndpoint()) .pipeline(httpPipeline) .buildClient(); }
.buildClient();
private void getKeyClient(HttpClient httpClient, KeyServiceVersion serviceVersion) { HttpPipeline httpPipeline = getHttpPipeline(httpClient, serviceVersion); client = new KeyClientBuilder() .vaultUrl(getEndpoint()) .pipeline(httpPipeline) .serviceVersion(serviceVersion) .buildClient(); }
class KeyClientTest extends KeyClientTestBase { private KeyClient client; @Override protected void beforeTest() { beforeTestSetup(); } /** * Tests that a key can be created in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); setKeyRunner((expected) -> assertKeyEquals(expected, client.createKey(expected))); } /** * Tests that an attempt to create a key with empty string name throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyEmptyName(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.createKey("", KeyType.RSA), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); } /** * Tests that we cannot create keys when key type is null. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyNullType(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); setKeyEmptyValueRunner((key) -> { assertRestException(() -> client.createKey(key.getName(), key.getKeyType()), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); }); } /** * Verifies that an exception is thrown when null key object is passed for creation. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyNull(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRunnableThrowsException(() -> client.createKey(null), NullPointerException.class); assertRunnableThrowsException(() -> client.createKey(null), NullPointerException.class); } /** * Tests that a key is able to be updated when it exists. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); updateKeyRunner((original, updated) -> { assertKeyEquals(original, client.createKey(original)); KeyVaultKey keyToUpdate = client.getKey(original.getName()); client.updateKeyProperties(keyToUpdate.getProperties().setExpiresOn(updated.getExpiresOn())); assertKeyEquals(updated, client.getKey(original.getName())); }); } /** * Tests that a key is able to be updated when it is disabled. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateDisabledKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); updateDisabledKeyRunner((original, updated) -> { assertKeyEquals(original, client.createKey(original)); KeyVaultKey keyToUpdate = client.getKey(original.getName()); client.updateKeyProperties(keyToUpdate.getProperties().setExpiresOn(updated.getExpiresOn())); assertKeyEquals(updated, client.getKey(original.getName())); }); } /** * Tests that an existing key can be retrieved. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); getKeyRunner((original) -> { client.createKey(original); assertKeyEquals(original, client.getKey(original.getName())); }); } /** * Tests that a specific version of the key can be retrieved. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKeySpecificVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); getKeySpecificVersionRunner((key, keyWithNewVal) -> { KeyVaultKey keyVersionOne = client.createKey(key); KeyVaultKey keyVersionTwo = client.createKey(keyWithNewVal); assertKeyEquals(key, client.getKey(keyVersionOne.getName(), keyVersionOne.getProperties().getVersion())); assertKeyEquals(keyWithNewVal, client.getKey(keyVersionTwo.getName(), keyVersionTwo.getProperties().getVersion())); }); } /** * Tests that an attempt to get a non-existing key throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.getKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that an existing key can be deleted. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); deleteKeyRunner((keyToDelete) -> { assertKeyEquals(keyToDelete, client.createKey(keyToDelete)); SyncPoller<DeletedKey, Void> deletedKeyPoller = client.beginDeleteKey(keyToDelete.getName()); PollResponse<DeletedKey> pollResponse = deletedKeyPoller.poll(); DeletedKey deletedKey = pollResponse.getValue(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(2000); pollResponse = deletedKeyPoller.poll(); } assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); assertNotNull(deletedKey.getScheduledPurgeDate()); assertEquals(keyToDelete.getName(), deletedKey.getName()); client.purgeDeletedKey(keyToDelete.getName()); pollOnKeyPurge(keyToDelete.getName()); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.beginDeleteKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that an attempt to retrieve a non existing deleted key throws an error on a soft-delete enabled vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.getDeletedKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a deleted key can be recovered on a soft-delete enabled vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); recoverDeletedKeyRunner((keyToDeleteAndRecover) -> { assertKeyEquals(keyToDeleteAndRecover, client.createKey(keyToDeleteAndRecover)); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToDeleteAndRecover.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } assertNotNull(pollResponse.getValue()); SyncPoller<KeyVaultKey, Void> recoverPoller = client.beginRecoverDeletedKey(keyToDeleteAndRecover.getName()); PollResponse<KeyVaultKey> recoverPollResponse = recoverPoller.poll(); KeyVaultKey recoveredKey = recoverPollResponse.getValue(); recoverPollResponse = recoverPoller.poll(); while (!recoverPollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); recoverPollResponse = recoverPoller.poll(); } assertEquals(keyToDeleteAndRecover.getName(), recoveredKey.getName()); assertEquals(keyToDeleteAndRecover.getNotBefore(), recoveredKey.getProperties().getNotBefore()); assertEquals(keyToDeleteAndRecover.getExpiresOn(), recoveredKey.getProperties().getExpiresOn()); }); } /** * Tests that an attempt to recover a non existing deleted key throws an error on a soft-delete enabled vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.beginRecoverDeletedKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a key can be backed up in the key vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); backupKeyRunner((keyToBackup) -> { assertKeyEquals(keyToBackup, client.createKey(keyToBackup)); byte[] backupBytes = (client.backupKey(keyToBackup.getName())); assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); }); } /** * Tests that an attempt to backup a non existing key throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.backupKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a key can be backed up in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); restoreKeyRunner((keyToBackupAndRestore) -> { assertKeyEquals(keyToBackupAndRestore, client.createKey(keyToBackupAndRestore)); byte[] backupBytes = (client.backupKey(keyToBackupAndRestore.getName())); assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToBackupAndRestore.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } client.purgeDeletedKey(keyToBackupAndRestore.getName()); pollOnKeyPurge(keyToBackupAndRestore.getName()); sleepInRecordMode(60000); KeyVaultKey restoredKey = client.restoreKeyBackup(backupBytes); assertEquals(keyToBackupAndRestore.getName(), restoredKey.getName()); assertEquals(keyToBackupAndRestore.getExpiresOn(), 
restoredKey.getProperties().getExpiresOn()); }); } /** * Tests that an attempt to restore a key from malformed backup bytes throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreKeyFromMalformedBackup(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); byte[] keyBackupBytes = "non-existing".getBytes(); assertRestException(() -> client.restoreKeyBackup(keyBackupBytes), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); } /** * Tests that keys can be listed in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); listKeysRunner((keys) -> { HashMap<String, CreateKeyOptions> keysToList = keys; for (CreateKeyOptions key : keysToList.values()) { assertKeyEquals(key, client.createKey(key)); sleepInRecordMode(5000); } for (KeyProperties actualKey : client.listPropertiesOfKeys()) { if (keys.containsKey(actualKey.getName())) { CreateKeyOptions expectedKey = keys.get(actualKey.getName()); assertEquals(expectedKey.getExpiresOn(), actualKey.getExpiresOn()); assertEquals(expectedKey.getNotBefore(), actualKey.getNotBefore()); keys.remove(actualKey.getName()); } } assertEquals(0, keys.size()); }); } /** * Tests that a deleted key can be retrieved on a soft-delete enabled vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); getDeletedKeyRunner((keyToDeleteAndGet) -> { assertKeyEquals(keyToDeleteAndGet, client.createKey(keyToDeleteAndGet)); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToDeleteAndGet.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } sleepInRecordMode(30000); DeletedKey deletedKey = client.getDeletedKey(keyToDeleteAndGet.getName()); assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); assertNotNull(deletedKey.getScheduledPurgeDate()); assertEquals(keyToDeleteAndGet.getName(), deletedKey.getName()); client.purgeDeletedKey(keyToDeleteAndGet.getName()); pollOnKeyPurge(keyToDeleteAndGet.getName()); sleepInRecordMode(10000); }); } /** * Tests that deleted keys can be listed in the key vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listDeletedKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); listDeletedKeysRunner((keys) -> { HashMap<String, CreateKeyOptions> keysToDelete = keys; for (CreateKeyOptions key : keysToDelete.values()) { assertKeyEquals(key, client.createKey(key)); } for (CreateKeyOptions key : keysToDelete.values()) { SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(key.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } } sleepInRecordMode(300000); Iterable<DeletedKey> deletedKeys = client.listDeletedKeys(); assertTrue(deletedKeys.iterator().hasNext()); for (DeletedKey deletedKey : deletedKeys) { assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); } }); } /** * Tests that key versions can be listed in the key vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listKeyVersions(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); listKeyVersionsRunner((keys) -> { List<CreateKeyOptions> keyVersions = keys; String keyName = null; for (CreateKeyOptions key : keyVersions) { keyName = key.getName(); assertKeyEquals(key, client.createKey(key)); } Iterable<KeyProperties> keyVersionsOutput = client.listPropertiesOfKeyVersions(keyName); List<KeyProperties> keyVersionsList = new ArrayList<>(); keyVersionsOutput.forEach(keyVersionsList::add); assertEquals(keyVersions.size(), keyVersionsList.size()); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyName); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } client.purgeDeletedKey(keyName); pollOnKeyPurge(keyName); }); } private DeletedKey pollOnKeyDeletion(String keyName) { int pendingPollCount = 0; while (pendingPollCount < 30) { DeletedKey deletedKey = null; try { deletedKey = client.getDeletedKey(keyName); } catch (ResourceNotFoundException e) { } if (deletedKey == null) { sleepInRecordMode(2000); pendingPollCount += 1; continue; } else { return deletedKey; } } System.err.printf("Deleted Key %s not found \n", keyName); return null; } private DeletedKey pollOnKeyPurge(String keyName) { int pendingPollCount = 0; while (pendingPollCount < 10) { DeletedKey deletedKey = null; try { deletedKey = client.getDeletedKey(keyName); } catch (ResourceNotFoundException e) { } if (deletedKey != null) { sleepInRecordMode(2000); pendingPollCount += 1; continue; } else { return deletedKey; } } System.err.printf("Deleted Key %s was not purged \n", keyName); return null; } }
class KeyClientTest extends KeyClientTestBase { private KeyClient client; @Override protected void beforeTest() { beforeTestSetup(); } /** * Tests that a key can be created in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); setKeyRunner((expected) -> assertKeyEquals(expected, client.createKey(expected))); } /** * Tests that an attempt to create a key with empty string name throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyEmptyName(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.createKey("", KeyType.RSA), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); } /** * Tests that we cannot create keys when key type is null. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyNullType(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); setKeyEmptyValueRunner((key) -> { assertRestException(() -> client.createKey(key.getName(), key.getKeyType()), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); }); } /** * Verifies that an exception is thrown when null key object is passed for creation. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyNull(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRunnableThrowsException(() -> client.createKey(null), NullPointerException.class); assertRunnableThrowsException(() -> client.createKey(null), NullPointerException.class); } /** * Tests that a key is able to be updated when it exists. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); updateKeyRunner((original, updated) -> { assertKeyEquals(original, client.createKey(original)); KeyVaultKey keyToUpdate = client.getKey(original.getName()); client.updateKeyProperties(keyToUpdate.getProperties().setExpiresOn(updated.getExpiresOn())); assertKeyEquals(updated, client.getKey(original.getName())); }); } /** * Tests that a key is able to be updated when it is disabled. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateDisabledKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); updateDisabledKeyRunner((original, updated) -> { assertKeyEquals(original, client.createKey(original)); KeyVaultKey keyToUpdate = client.getKey(original.getName()); client.updateKeyProperties(keyToUpdate.getProperties().setExpiresOn(updated.getExpiresOn())); assertKeyEquals(updated, client.getKey(original.getName())); }); } /** * Tests that an existing key can be retrieved. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); getKeyRunner((original) -> { client.createKey(original); assertKeyEquals(original, client.getKey(original.getName())); }); } /** * Tests that a specific version of the key can be retrieved. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKeySpecificVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); getKeySpecificVersionRunner((key, keyWithNewVal) -> { KeyVaultKey keyVersionOne = client.createKey(key); KeyVaultKey keyVersionTwo = client.createKey(keyWithNewVal); assertKeyEquals(key, client.getKey(keyVersionOne.getName(), keyVersionOne.getProperties().getVersion())); assertKeyEquals(keyWithNewVal, client.getKey(keyVersionTwo.getName(), keyVersionTwo.getProperties().getVersion())); }); } /** * Tests that an attempt to get a non-existing key throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.getKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that an existing key can be deleted. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); deleteKeyRunner((keyToDelete) -> { assertKeyEquals(keyToDelete, client.createKey(keyToDelete)); SyncPoller<DeletedKey, Void> deletedKeyPoller = client.beginDeleteKey(keyToDelete.getName()); PollResponse<DeletedKey> pollResponse = deletedKeyPoller.poll(); DeletedKey deletedKey = pollResponse.getValue(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(2000); pollResponse = deletedKeyPoller.poll(); } assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); assertNotNull(deletedKey.getScheduledPurgeDate()); assertEquals(keyToDelete.getName(), deletedKey.getName()); client.purgeDeletedKey(keyToDelete.getName()); pollOnKeyPurge(keyToDelete.getName()); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.beginDeleteKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that an attempt to retrieve a non existing deleted key throws an error on a soft-delete enabled vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.getDeletedKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a deleted key can be recovered on a soft-delete enabled vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); recoverDeletedKeyRunner((keyToDeleteAndRecover) -> { assertKeyEquals(keyToDeleteAndRecover, client.createKey(keyToDeleteAndRecover)); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToDeleteAndRecover.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } assertNotNull(pollResponse.getValue()); SyncPoller<KeyVaultKey, Void> recoverPoller = client.beginRecoverDeletedKey(keyToDeleteAndRecover.getName()); PollResponse<KeyVaultKey> recoverPollResponse = recoverPoller.poll(); KeyVaultKey recoveredKey = recoverPollResponse.getValue(); recoverPollResponse = recoverPoller.poll(); while (!recoverPollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); recoverPollResponse = recoverPoller.poll(); } assertEquals(keyToDeleteAndRecover.getName(), recoveredKey.getName()); assertEquals(keyToDeleteAndRecover.getNotBefore(), recoveredKey.getProperties().getNotBefore()); assertEquals(keyToDeleteAndRecover.getExpiresOn(), recoveredKey.getProperties().getExpiresOn()); }); } /** * Tests that an attempt to recover a non existing deleted key throws an error on a soft-delete enabled vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.beginRecoverDeletedKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a key can be backed up in the key vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); backupKeyRunner((keyToBackup) -> { assertKeyEquals(keyToBackup, client.createKey(keyToBackup)); byte[] backupBytes = (client.backupKey(keyToBackup.getName())); assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); }); } /** * Tests that an attempt to backup a non existing key throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); assertRestException(() -> client.backupKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a key can be backed up in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); restoreKeyRunner((keyToBackupAndRestore) -> { assertKeyEquals(keyToBackupAndRestore, client.createKey(keyToBackupAndRestore)); byte[] backupBytes = (client.backupKey(keyToBackupAndRestore.getName())); assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToBackupAndRestore.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } client.purgeDeletedKey(keyToBackupAndRestore.getName()); pollOnKeyPurge(keyToBackupAndRestore.getName()); sleepInRecordMode(60000); KeyVaultKey restoredKey = client.restoreKeyBackup(backupBytes); assertEquals(keyToBackupAndRestore.getName(), restoredKey.getName()); assertEquals(keyToBackupAndRestore.getExpiresOn(), 
restoredKey.getProperties().getExpiresOn()); }); } /** * Tests that an attempt to restore a key from malformed backup bytes throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreKeyFromMalformedBackup(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); byte[] keyBackupBytes = "non-existing".getBytes(); assertRestException(() -> client.restoreKeyBackup(keyBackupBytes), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); } /** * Tests that keys can be listed in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); listKeysRunner((keys) -> { HashMap<String, CreateKeyOptions> keysToList = keys; for (CreateKeyOptions key : keysToList.values()) { assertKeyEquals(key, client.createKey(key)); sleepInRecordMode(5000); } for (KeyProperties actualKey : client.listPropertiesOfKeys()) { if (keys.containsKey(actualKey.getName())) { CreateKeyOptions expectedKey = keys.get(actualKey.getName()); assertEquals(expectedKey.getExpiresOn(), actualKey.getExpiresOn()); assertEquals(expectedKey.getNotBefore(), actualKey.getNotBefore()); keys.remove(actualKey.getName()); } } assertEquals(0, keys.size()); }); } /** * Tests that a deleted key can be retrieved on a soft-delete enabled vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); getDeletedKeyRunner((keyToDeleteAndGet) -> { assertKeyEquals(keyToDeleteAndGet, client.createKey(keyToDeleteAndGet)); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToDeleteAndGet.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } sleepInRecordMode(30000); DeletedKey deletedKey = client.getDeletedKey(keyToDeleteAndGet.getName()); assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); assertNotNull(deletedKey.getScheduledPurgeDate()); assertEquals(keyToDeleteAndGet.getName(), deletedKey.getName()); client.purgeDeletedKey(keyToDeleteAndGet.getName()); pollOnKeyPurge(keyToDeleteAndGet.getName()); sleepInRecordMode(10000); }); } /** * Tests that deleted keys can be listed in the key vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listDeletedKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); listDeletedKeysRunner((keys) -> { HashMap<String, CreateKeyOptions> keysToDelete = keys; for (CreateKeyOptions key : keysToDelete.values()) { assertKeyEquals(key, client.createKey(key)); } for (CreateKeyOptions key : keysToDelete.values()) { SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(key.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } } sleepInRecordMode(300000); Iterable<DeletedKey> deletedKeys = client.listDeletedKeys(); assertTrue(deletedKeys.iterator().hasNext()); for (DeletedKey deletedKey : deletedKeys) { assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); } }); } /** * Tests that key versions can be listed in the key vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listKeyVersions(HttpClient httpClient, KeyServiceVersion serviceVersion) { getKeyClient(httpClient, serviceVersion); listKeyVersionsRunner((keys) -> { List<CreateKeyOptions> keyVersions = keys; String keyName = null; for (CreateKeyOptions key : keyVersions) { keyName = key.getName(); assertKeyEquals(key, client.createKey(key)); } Iterable<KeyProperties> keyVersionsOutput = client.listPropertiesOfKeyVersions(keyName); List<KeyProperties> keyVersionsList = new ArrayList<>(); keyVersionsOutput.forEach(keyVersionsList::add); assertEquals(keyVersions.size(), keyVersionsList.size()); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyName); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } client.purgeDeletedKey(keyName); pollOnKeyPurge(keyName); }); } private DeletedKey pollOnKeyDeletion(String keyName) { int pendingPollCount = 0; while (pendingPollCount < 30) { DeletedKey deletedKey = null; try { deletedKey = client.getDeletedKey(keyName); } catch (ResourceNotFoundException e) { } if (deletedKey == null) { sleepInRecordMode(2000); pendingPollCount += 1; continue; } else { return deletedKey; } } System.err.printf("Deleted Key %s not found \n", keyName); return null; } private DeletedKey pollOnKeyPurge(String keyName) { int pendingPollCount = 0; while (pendingPollCount < 10) { DeletedKey deletedKey = null; try { deletedKey = client.getDeletedKey(keyName); } catch (ResourceNotFoundException e) { } if (deletedKey != null) { sleepInRecordMode(2000); pendingPollCount += 1; continue; } else { return deletedKey; } } System.err.printf("Deleted Key %s was not purged \n", keyName); return null; } }
Using `indexDoc` now
public void getDynamicDocumentCannotAlwaysDetermineCorrectType() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); List<Document> rooms = new ArrayList<>(); rooms.add(new Document(Collections.singletonMap("baseRate", NaN))); Document indexedDoc = new Document(); indexedDoc.put("HotelId", "1"); indexedDoc.put("HotelName", "2015-02-11T12:58:00Z"); indexedDoc.put("Location", GeoPoint.create(40.760586, -73.975403)); indexedDoc.put("Rooms", rooms); Document expectedDoc = new Document(); expectedDoc.put("HotelId", "1"); expectedDoc.put("HotelName", OffsetDateTime.of(2015, 2, 11, 12, 58, 0, 9, ZoneOffset.UTC)); expectedDoc.put("Location", GeoPoint.create(40.760586, -73.975403)); expectedDoc.put("Rooms", Collections.singleton(new Document(Collections.singletonMap("BaseRate", "NaN")))); client.index(new IndexBatch<>().addUploadAction(expectedDoc)); assertEquals(client.getDocumentWithResponse("1", new ArrayList<>(indexedDoc.keySet()), null, Context.NONE).getValue(), expectedDoc); }
client.index(new IndexBatch<>().addUploadAction(expectedDoc));
public void getDynamicDocumentCannotAlwaysDetermineCorrectType() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document indexedDoc = new Document(); indexedDoc.put("HotelId", "1"); indexedDoc.put("HotelName", "2015-02-11T12:58:00Z"); indexedDoc.put("Location", GeoPoint.create(40.760586, -73.975403)); indexedDoc.put("Rooms", Collections.singletonList(new Document(Collections.singletonMap("BaseRate", NaN)))); Document expectedDoc = new Document(); expectedDoc.put("HotelId", "1"); expectedDoc.put("HotelName", OffsetDateTime.of(2015, 2, 11, 12, 58, 0, 0, ZoneOffset.UTC)); expectedDoc.put("Location", GeoPoint.create(40.760586, -73.975403)); expectedDoc.put("Rooms", Collections.singletonList(new Document(Collections.singletonMap("BaseRate", "NaN")))); client.index(new IndexBatch<>().addUploadAction(indexedDoc)); List<String> selectedFields = Arrays.asList("HotelId", "HotelName", "Location", "Rooms/BaseRate"); assertEquals(expectedDoc, client.getDocumentWithResponse("1", selectedFields, null, Context.NONE).getValue()); }
class LookupSyncTests extends LookupTestBase { private SearchIndexClient client; @Test public void canGetStaticallyTypedDocument() throws ParseException { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = prepareExpectedHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetStaticallyTypedDocumentWithNullOrEmptyValues() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = prepareEmptyHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetStaticallyTypedDocumentWithPascalCaseFields() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = preparePascalCaseFieldsHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canRoundtripStaticallyTypedPrimitiveCollections() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); ModelWithPrimitiveCollections expected = preparePrimitivesModel(); uploadDocument(client, expected); Document result = client.getDocument(expected.key()); ModelWithPrimitiveCollections actual = convertToType(result, ModelWithPrimitiveCollections.class); TestHelpers.assetModelsWithPrimitivesEqual(expected, actual); } @Test public void getStaticallyTypedDocumentSetsUnselectedFieldsToNull() throws ParseException { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel indexedDoc = prepareSelectedFieldsHotel(); Hotel 
expected = new Hotel() .hotelName("Countryside Hotel") .description("Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more.") .address(new HotelAddress().city("Durham")) .rooms(Arrays.asList(new HotelRoom().baseRate(2.44), new HotelRoom().baseRate(7.69))); uploadDocument(client, indexedDoc); List<String> selectedFields = Arrays.asList("Description", "HotelName", "Address/City", "Rooms/BaseRate"); Response<Document> response = client.getDocumentWithResponse(indexedDoc.hotelId(), selectedFields, generateRequestOptions(), Context.NONE); Hotel actual = convertToType(response.getValue(), Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetDynamicDocumentWithNullOrEmptyValues() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document expectedDoc = new Document() { { put("HotelId", "1"); put("HotelName", null); put("Tags", Collections.emptyList()); put("ParkingIncluded", null); put("LastRenovationDate", null); put("Rating", null); put("Location", null); put("Address", null); put("Rooms", Collections.singletonList( new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, expectedDoc); List<String> selectedFields = Arrays.asList("HotelId", "HotelName", "Tags", "ParkingIncluded", "LastRenovationDate", "Rating", "Location", "Address", "Rooms/BaseRate", "Rooms/BedOptions", "Rooms/SleepsCount", "Rooms/SmokingAllowed", "Rooms/Tags"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void getDynamicDocumentWithEmptyObjectsReturnsObjectsFullOfNulls() { createHotelIndex(); client = 
getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Address", new Document()); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Address", new Document() { { put("StreetAddress", null); put("City", null); put("StateProvince", null); put("Country", null); put("PostalCode", null); } }); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Address"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void emptyDynamicallyTypedPrimitiveCollectionsRoundtripAsObjectArrays() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); String docKey = "3"; Document originalDoc = new Document() { { put("Key", docKey); put("Dates", new Object[]{}); put("Doubles", new Double[]{}); put("Bools", new boolean[]{}); put("Longs", new Long[]{}); put("Strings", new String[]{}); put("Ints", new int[]{}); put("Points", new Object[]{}); } }; Document expectedDoc = new Document() { { put("Key", docKey); put("Doubles", Collections.emptyList()); put("Bools", Collections.emptyList()); put("Longs", Collections.emptyList()); put("Strings", Collections.emptyList()); put("Ints", Collections.emptyList()); put("Points", Collections.emptyList()); put("Dates", Collections.emptyList()); } }; uploadDocument(client, originalDoc); Document actualDoc = client.getDocument(docKey); assertEquals(expectedDoc, actualDoc); } @Test public void emptyDynamicObjectsInCollectionExpandedOnGetWhenCollectionFieldSelected() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new Document(), new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", 
null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new Document() { { put("Description", null); put("Description_fr", null); put("Type", null); put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } }, new Document() { { put("Description", null); put("Description_fr", null); put("Type", null); put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Rooms"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Override @Override public void canGetDocumentWithBase64EncodedKey() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); String complexKey = Base64.getEncoder().encodeToString(new byte[]{1, 2, 3, 4, 5}); Document expectedDoc = new Document(); expectedDoc.put("HotelId", complexKey); client.index(new IndexBatch<>().addUploadAction(expectedDoc)); assertEquals(client.getDocumentWithResponse(complexKey, new ArrayList<>(expectedDoc.keySet()), null, Context.NONE).getValue(), expectedDoc); } @Override public void roundTrippingDateTimeOffsetNormalizesToUtc() throws ParseException { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); Document indexedDoc = new Document(); indexedDoc.put("HotelId", "1"); indexedDoc.put("LastRenovationDate", dateFormat.parse("2010-06-27T00:00:00-08:00")); Document expectedDoc = new Document(); expectedDoc.put("HotelId", "1"); expectedDoc.put("LastRenovationDate", 
dateFormat.parse("2010-06-27T08:00:00Z")); client.index(new IndexBatch<>().addUploadAction(indexedDoc)); assertEquals(client.getDocument("1"), expectedDoc); } @Test public void emptyDynamicObjectsOmittedFromCollectionOnGetWhenSubFieldsSelected() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new Document(), new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Rooms", Collections.singletonList( new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Rooms/BaseRate", "Rooms/BedOptions", "Rooms/SleepsCount", "Rooms/SmokingAllowed", "Rooms/Tags"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void dynamicallyTypedPrimitiveCollectionsDoNotAllRoundtripCorrectly() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); String docKey = "1"; OffsetDateTime dateTime = OffsetDateTime.parse("2019-08-13T14:30:00Z"); GeoPoint geoPoint = GeoPoint.create(1.0, 100.0); Document indexedDoc = new Document() { { put("Key", docKey); put("Dates", new OffsetDateTime[]{dateTime}); put("Doubles", new Double[]{0.0, 5.8, POSITIVE_INFINITY, NEGATIVE_INFINITY, NaN}); put("Bools", new Boolean[]{true, false}); put("Longs", new Long[]{9999999999999999L, 832372345832523L}); put("Strings", new String[]{"hello", "bye"}); put("Ints", new int[]{1, 2, 3, 4, -13, 5, 0}); put("Points", new 
GeoPoint[]{geoPoint}); } }; Document expectedDoc = new Document() { { put("Key", docKey); put("Doubles", Arrays.asList(0.0, 5.8, "INF", "-INF", "NaN")); put("Bools", Arrays.asList(true, false)); put("Longs", Arrays.asList(9999999999999999L, 832372345832523L)); put("Strings", Arrays.asList("hello", "bye")); put("Ints", Arrays.asList(1, 2, 3, 4, -13, 5, 0)); put("Points", Collections.singletonList(geoPoint)); put("Dates", Collections.singletonList(dateTime)); } }; uploadDocument(client, indexedDoc); Document actualDoc = client.getDocument(docKey); assertEquals(expectedDoc, actualDoc); } }
class LookupSyncTests extends SearchIndexClientTestBase { private static final String INDEX_NAME = "hotels"; private SearchIndexClient client; @Test public void canGetStaticallyTypedDocument() throws ParseException { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = prepareExpectedHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetStaticallyTypedDocumentWithNullOrEmptyValues() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = prepareEmptyHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetStaticallyTypedDocumentWithPascalCaseFields() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel expected = preparePascalCaseFieldsHotel(); uploadDocument(client, expected); Document result = client.getDocument(expected.hotelId()); Hotel actual = convertToType(result, Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canRoundtripStaticallyTypedPrimitiveCollections() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); ModelWithPrimitiveCollections expected = preparePrimitivesModel(); uploadDocument(client, expected); Document result = client.getDocument(expected.key()); ModelWithPrimitiveCollections actual = convertToType(result, ModelWithPrimitiveCollections.class); TestHelpers.assetModelsWithPrimitivesEqual(expected, actual); } @Test public void getStaticallyTypedDocumentSetsUnselectedFieldsToNull() throws ParseException { createHotelIndex(); client = 
getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Hotel indexedDoc = prepareSelectedFieldsHotel(); Hotel expected = new Hotel() .hotelName("Countryside Hotel") .description("Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more.") .address(new HotelAddress().city("Durham")) .rooms(Arrays.asList(new HotelRoom().baseRate(2.44), new HotelRoom().baseRate(7.69))); uploadDocument(client, indexedDoc); List<String> selectedFields = Arrays.asList("Description", "HotelName", "Address/City", "Rooms/BaseRate"); Response<Document> response = client.getDocumentWithResponse(indexedDoc.hotelId(), selectedFields, generateRequestOptions(), Context.NONE); Hotel actual = convertToType(response.getValue(), Hotel.class); TestHelpers.assertHotelsEqual(expected, actual); } @Test public void canGetDynamicDocumentWithNullOrEmptyValues() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document expectedDoc = new Document() { { put("HotelId", "1"); put("HotelName", null); put("Tags", Collections.emptyList()); put("ParkingIncluded", null); put("LastRenovationDate", null); put("Rating", null); put("Location", null); put("Address", null); put("Rooms", Collections.singletonList( new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, expectedDoc); List<String> selectedFields = Arrays.asList("HotelId", "HotelName", "Tags", "ParkingIncluded", "LastRenovationDate", "Rating", "Location", "Address", "Rooms/BaseRate", "Rooms/BedOptions", "Rooms/SleepsCount", "Rooms/SmokingAllowed", "Rooms/Tags"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void 
getDynamicDocumentWithEmptyObjectsReturnsObjectsFullOfNulls() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Address", new Document()); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Address", new Document() { { put("StreetAddress", null); put("City", null); put("StateProvince", null); put("Country", null); put("PostalCode", null); } }); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Address"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void emptyDynamicallyTypedPrimitiveCollectionsRoundtripAsObjectArrays() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); String docKey = "3"; Document originalDoc = new Document() { { put("Key", docKey); put("Dates", new Object[]{}); put("Doubles", new Double[]{}); put("Bools", new boolean[]{}); put("Longs", new Long[]{}); put("Strings", new String[]{}); put("Ints", new int[]{}); put("Points", new Object[]{}); } }; Document expectedDoc = new Document() { { put("Key", docKey); put("Doubles", Collections.emptyList()); put("Bools", Collections.emptyList()); put("Longs", Collections.emptyList()); put("Strings", Collections.emptyList()); put("Ints", Collections.emptyList()); put("Points", Collections.emptyList()); put("Dates", Collections.emptyList()); } }; uploadDocument(client, originalDoc); Document actualDoc = client.getDocument(docKey); assertEquals(expectedDoc, actualDoc); } @Test public void emptyDynamicObjectsInCollectionExpandedOnGetWhenCollectionFieldSelected() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new 
Document(), new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new Document() { { put("Description", null); put("Description_fr", null); put("Type", null); put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } }, new Document() { { put("Description", null); put("Description_fr", null); put("Type", null); put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Rooms"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test @Test public void canGetDocumentWithBase64EncodedKey() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); String complexKey = Base64.getEncoder().encodeToString(new byte[]{1, 2, 3, 4, 5}); Document expectedDoc = new Document(); expectedDoc.put("HotelId", complexKey); client.index(new IndexBatch<>().addUploadAction(expectedDoc)); assertEquals(client.getDocumentWithResponse(complexKey, new ArrayList<>(expectedDoc.keySet()), null, Context.NONE).getValue(), expectedDoc); } @Test public void roundTrippingDateTimeOffsetNormalizesToUtc() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document indexedDoc = new Document(); indexedDoc.put("HotelId", "1"); indexedDoc.put("LastRenovationDate", OffsetDateTime.parse("2010-06-27T00:00:00-08:00", DateTimeFormatter.ISO_DATE_TIME)); Document expectedDoc = new Document(); expectedDoc.put("HotelId", "1"); 
expectedDoc.put("LastRenovationDate", OffsetDateTime.parse("2010-06-27T08:00:00Z", DateTimeFormatter.ISO_DATE_TIME)); client.index(new IndexBatch<>().addUploadAction(indexedDoc)); assertEquals(client.getDocumentWithResponse("1", new ArrayList<>(expectedDoc.keySet()), null, Context.NONE).getValue(), expectedDoc); } @Test public void emptyDynamicObjectsOmittedFromCollectionOnGetWhenSubFieldsSelected() { createHotelIndex(); client = getSearchIndexClientBuilder(INDEX_NAME).buildClient(); Document originalDoc = new Document() { { put("HotelId", "1"); put("Rooms", Arrays.asList( new Document(), new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; Document expectedDoc = new Document() { { put("HotelId", "1"); put("Rooms", Collections.singletonList( new Document() { { put("BaseRate", null); put("BedOptions", null); put("SleepsCount", null); put("SmokingAllowed", null); put("Tags", Collections.emptyList()); } } )); } }; uploadDocument(client, originalDoc); List<String> selectedFields = Arrays.asList("HotelId", "Rooms/BaseRate", "Rooms/BedOptions", "Rooms/SleepsCount", "Rooms/SmokingAllowed", "Rooms/Tags"); Response<Document> response = client.getDocumentWithResponse("1", selectedFields, generateRequestOptions(), Context.NONE); assertEquals(expectedDoc, response.getValue()); } @Test public void dynamicallyTypedPrimitiveCollectionsDoNotAllRoundtripCorrectly() { String indexName = setupIndexWithDataTypes(); client = getSearchIndexClientBuilder(indexName).buildClient(); String docKey = "1"; OffsetDateTime dateTime = OffsetDateTime.parse("2019-08-13T14:30:00Z"); GeoPoint geoPoint = GeoPoint.create(1.0, 100.0); Document indexedDoc = new Document() { { put("Key", docKey); put("Dates", new OffsetDateTime[]{dateTime}); put("Doubles", new Double[]{0.0, 5.8, POSITIVE_INFINITY, NEGATIVE_INFINITY, NaN}); put("Bools", new Boolean[]{true, false}); put("Longs", new 
Long[]{9999999999999999L, 832372345832523L}); put("Strings", new String[]{"hello", "bye"}); put("Ints", new int[]{1, 2, 3, 4, -13, 5, 0}); put("Points", new GeoPoint[]{geoPoint}); } }; Document expectedDoc = new Document() { { put("Key", docKey); put("Doubles", Arrays.asList(0.0, 5.8, "INF", "-INF", "NaN")); put("Bools", Arrays.asList(true, false)); put("Longs", Arrays.asList(9999999999999999L, 832372345832523L)); put("Strings", Arrays.asList("hello", "bye")); put("Ints", Arrays.asList(1, 2, 3, 4, -13, 5, 0)); put("Points", Collections.singletonList(geoPoint)); put("Dates", Collections.singletonList(dateTime)); } }; uploadDocument(client, indexedDoc); Document actualDoc = client.getDocument(docKey); assertEquals(expectedDoc, actualDoc); } Hotel prepareExpectedHotel() throws ParseException { return new Hotel().hotelId("1") .hotelName("Fancy Stay") .description("Best hotel in town if you like luxury hotels. They have an amazing infinity pool, a spa, and a really helpful concierge. The location is perfect -- right downtown, close to all the tourist attractions. We highly recommend this hotel.") .descriptionFr("Meilleur hôtel en ville si vous aimez les hôtels de luxe. Ils ont une magnifique piscine à débordement, un spa et un concierge très utile. L'emplacement est parfait – en plein centre, à proximité de toutes les attractions touristiques. 
Nous recommandons fortement cet hôtel.") .category("Luxury") .tags(Arrays.asList("pool", "view", "wifi", "concierge")) .parkingIncluded(false) .smokingAllowed(false) .lastRenovationDate(DATE_FORMAT.parse("2010-06-27T00:00:00Z")) .rating(5) .location(GeoPoint.create(47.678581, -122.131577)) .rooms(new ArrayList<>()); } Hotel prepareEmptyHotel() { return new Hotel().hotelId("1") .tags(new ArrayList<>()) .rooms(Collections.singletonList( new HotelRoom().tags(new ArrayList<>()) )); } Hotel preparePascalCaseFieldsHotel() { return new Hotel().hotelId("123").hotelName("Lord of the Rings").description("J.R.R").descriptionFr("Tolkien"); } Hotel prepareSelectedFieldsHotel() throws ParseException { return new Hotel() .hotelId("2") .hotelName("Countryside Hotel") .description("Save up to 50% off traditional hotels. Free WiFi, great location near downtown, full kitchen, washer & dryer, 24/7 support, bowling alley, fitness center and more.") .descriptionFr("Économisez jusqu'à 50% sur les hôtels traditionnels. 
WiFi gratuit, très bien situé près du centre-ville, cuisine complète, laveuse & sécheuse, support 24/7, bowling, centre de fitness et plus encore.") .category("Budget") .tags(Arrays.asList("24-hour front desk service", "coffee in lobby", "restaurant")) .parkingIncluded(false) .smokingAllowed(true) .lastRenovationDate(DATE_FORMAT.parse("2010-06-27T00:00:00Z")) .rating(3) .location(GeoPoint.create(35.904160, -78.940483)) .address(new HotelAddress().streetAddress("6910 Fayetteville Rd").city("Durham").stateProvince("NC").country("USA").postalCode("27713")) .rooms(Arrays.asList( new HotelRoom() .description("Suite, 1 King Bed (Amenities)") .descriptionFr("Suite, 1 très grand lit (Services)") .type("Suite") .baseRate(2.44) .bedOptions("1 King Bed") .sleepsCount(2) .smokingAllowed(true) .tags(Collections.singletonList("coffee maker")), new HotelRoom() .description("Budget Room, 1 Queen Bed (Amenities)") .descriptionFr("Chambre Économique, 1 grand lit (Services)") .type("Budget Room") .baseRate(7.69) .bedOptions("1 Queen Bed") .sleepsCount(2) .smokingAllowed(false) .tags(Collections.singletonList("coffee maker")))); } ModelWithPrimitiveCollections preparePrimitivesModel() { return new ModelWithPrimitiveCollections() .key("1") .bools(new Boolean[]{true, false}) .dates(new OffsetDateTime[]{ OffsetDateTime.parse("2019-04-14T14:24:00Z"), OffsetDateTime.parse("1999-12-31T23:59:59Z")}) .doubles(new Double[]{NEGATIVE_INFINITY, 0.0, 2.78, NaN, 3.14, POSITIVE_INFINITY}) .ints(new int[]{1, 2, 3, 4, -13, 5, 0}) .longs(new Long[]{-9_999_999_999_999_999L, 832_372_345_832_523L}) .points(new GeoPoint[]{ GeoPoint.create(49.0, -67.0), GeoPoint.create(47.0, 21.0)}) .strings(new String[]{"hello", "2019-04-14T14:56:00-07:00"}); } String setupIndexWithDataTypes() { Index index = new Index() .setName("data-types-tests-index") .setFields(Arrays.asList( new Field() .setName("Key") .setType(DataType.EDM_STRING) .setKey(true) .setRetrievable(true), new Field() .setName("Bools") 
.setType(DataType.Collection(DataType.EDM_BOOLEAN)) .setRetrievable(true), new Field() .setName("Dates") .setType(DataType.Collection(DataType.EDM_DATE_TIME_OFFSET)) .setRetrievable(true), new Field() .setName("Doubles") .setType(DataType.Collection(DataType.EDM_DOUBLE)) .setRetrievable(true), new Field() .setName("Points") .setType(DataType.Collection(DataType.EDM_GEOGRAPHY_POINT)) .setRetrievable(true), new Field() .setName("Ints") .setType(DataType.Collection(DataType.EDM_INT32)) .setRetrievable(true), new Field() .setName("Longs") .setType(DataType.Collection(DataType.EDM_INT64)) .setRetrievable(true), new Field() .setName("Strings") .setType(DataType.Collection(DataType.EDM_STRING)) .setRetrievable(true) )); setupIndex(index); return index.getName(); } }
You might want to raise the exception with the inner error here when the error is null.
public Mono<DetectedLanguage> detectLanguage(String text, String countryHint) { return detectLanguageBatch(Collections.singletonList(text), countryHint, null) .map(documentCollection -> { final Iterator<DetectLanguageResult> iterator = documentCollection.iterator(); if (!iterator.hasNext()) { throw logger.logExceptionAsError( new IllegalStateException("An empty collection returned which is an unexpected error.")); } final DetectLanguageResult languageResult = iterator.next(); if (languageResult.isError()) { throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(languageResult.getError())); } return languageResult.getPrimaryLanguage(); }); }
throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(languageResult.getError()));
public Mono<DetectedLanguage> detectLanguage(String text, String countryHint) { return detectLanguageBatch(Collections.singletonList(text), countryHint, null) .map(documentCollection -> { final Iterator<DetectLanguageResult> iterator = documentCollection.iterator(); if (!iterator.hasNext()) { throw logger.logExceptionAsError( new IllegalStateException("An empty collection returned which is an unexpected error.")); } final DetectLanguageResult languageResult = iterator.next(); if (languageResult.isError()) { throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(languageResult.getError())); } return languageResult.getPrimaryLanguage(); }); }
class TextAnalyticsAsyncClient { private final ClientLogger logger = new ClientLogger(TextAnalyticsAsyncClient.class); private final TextAnalyticsClientImpl service; private final TextAnalyticsServiceVersion serviceVersion; private final String defaultCountryHint; private final String defaultLanguage; final DetectLanguageAsyncClient detectLanguageAsyncClient; final AnalyzeSentimentAsyncClient analyzeSentimentAsyncClient; final ExtractKeyPhraseAsyncClient extractKeyPhraseAsyncClient; final RecognizeEntityAsyncClient recognizeEntityAsyncClient; final RecognizePiiEntityAsyncClient recognizePiiEntityAsyncClient; final RecognizeLinkedEntityAsyncClient recognizeLinkedEntityAsyncClient; /** * Create a {@code TextAnalyticsAsyncClient} that sends requests to the Text Analytics services's endpoint. Each * service call goes through the {@link TextAnalyticsClientBuilder * * @param service The proxy service used to perform REST calls. * @param serviceVersion The versions of Azure Text Analytics supported by this client library. * @param defaultCountryHint The default country hint. * @param defaultLanguage The default language. */ TextAnalyticsAsyncClient(TextAnalyticsClientImpl service, TextAnalyticsServiceVersion serviceVersion, String defaultCountryHint, String defaultLanguage) { this.service = service; this.serviceVersion = serviceVersion; this.defaultCountryHint = defaultCountryHint; this.defaultLanguage = defaultLanguage; this.detectLanguageAsyncClient = new DetectLanguageAsyncClient(service); this.analyzeSentimentAsyncClient = new AnalyzeSentimentAsyncClient(service); this.extractKeyPhraseAsyncClient = new ExtractKeyPhraseAsyncClient(service); this.recognizeEntityAsyncClient = new RecognizeEntityAsyncClient(service); this.recognizePiiEntityAsyncClient = new RecognizePiiEntityAsyncClient(service); this.recognizeLinkedEntityAsyncClient = new RecognizeLinkedEntityAsyncClient(service); } /** * Get default country hint code. 
* * @return the default country hint code */ public String getDefaultCountryHint() { return defaultCountryHint; } /** * Get default language when the builder is setup. * * @return the default language */ public String getDefaultLanguage() { return defaultLanguage; } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public TextAnalyticsServiceVersion getServiceVersion() { return serviceVersion; } /** * Returns the detected language and a numeric score between zero and one. Scores close to one indicate 100% * certainty that the identified language is true. * * <p><strong>Code sample</strong></p> * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage * * @param text The text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DetectedLanguage detected language} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DetectedLanguage> detectLanguage(String text) { return detectLanguage(text, defaultCountryHint); } /** * Returns a {@link Response} containing the detected language and a numeric score between zero and one. Scores * close to one indicate 100% certainty that the identified language is true. * * <p><strong>Code sample</strong></p> * <p>Detects language with http response in a text with a provided country hint. 
Subscribes to the call * asynchronously and prints out the detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage * * @param text The text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not * specified. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DetectedLanguage detected language} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) /** * Returns the detected language for a batch of input. * * <p><strong>Code sample</strong></p> * <p>Detects language in a list of string inputs. Subscribes to the call asynchronously and prints out the * detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch * * @param textInputs The list of texts to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch(Iterable<String> textInputs) { return detectLanguageBatch(textInputs, defaultCountryHint, null); } /** * Returns the detected language for a batch of input with the provided country hint. * * <p><strong>Code sample</strong></p> * <p>Detects language in a list of string inputs with a provided country hint for the batch. 
Subscribes to the * call asynchronously and prints out the detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch * * @param textInputs The list of texts to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param countryHint A country hint for the entire batch. Accepts two letter country codes specified by ISO * 3166-1 alpha-2. Defaults to "US" if not specified. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link DocumentResultCollection batch} of the * {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch( Iterable<String> textInputs, String countryHint, TextAnalyticsRequestOptions options) { return detectLanguageBatchWithResponse( mapByIndex(textInputs, (index, value) -> new DetectLanguageInput(index, value, countryHint)), options) .flatMap(FluxUtil::toMono); } /** * Returns the detected language for a batch of input. * * <p><strong>Code sample</strong></p> * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatchWithResponse * * @param textInputs The list of {@link DetectLanguageInput inputs/documents} to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. 
* * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguageBatchWithResponse( Iterable<DetectLanguageInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext( context -> detectLanguageAsyncClient.detectLanguageBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of general categorized entities in the provided text. For a list of supported entity types, * check: <a href="https: * check: <a href="https: * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text. Subscribes to the call asynchronously and prints out the recognized entity * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities * * @param text the text to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<CategorizedEntity> recognizeEntities(String text) { return recognizeEntities(text, defaultLanguage); } /** * Returns a list of general categorized entities in the provided text. For a list of supported entity types, * check: <a href="https: * check: <a href="https: * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text with provided language hint. 
Subscribes to the call asynchronously and prints * out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities * * @param text the text to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as * default. * * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<CategorizedEntity> recognizeEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizeEntityAsyncClient.recognizeEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of general categorized entities for the provided list of texts. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text. Subscribes to the call asynchronously and prints out the entity details * when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatch * * @param textInputs A list of texts to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeEntitiesResult categorized entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch(Iterable<String> textInputs) { return recognizeEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of general categorized entities for the provided list of texts. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text with the provided language hint. Subscribes to the call asynchronously and * prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatch * * @param textInputs A list of texts to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as * default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeEntitiesResult categorized entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return recognizeEntitiesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of general categorized entities for the provided list of text inputs. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a list of TextDocumentInput. 
Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link RecognizeEntitiesResult categorized entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizeEntitiesResult>>> recognizeEntitiesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizeEntityAsyncClient.recognizeEntitiesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text. For the list of * supported entity types, check <a href="https: * for the list of enabled languages. * * <p>Recognize PII entities in a text. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntities * * @param text the text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link PiiEntity PII entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. 
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<PiiEntity> recognizePiiEntities(String text) { return recognizePiiEntities(text, defaultLanguage); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text. For the list of * supported entity types, check: <a href="https: * href="https: * * <p>Recognize PII entities in a text with provided language hint. Subscribes to the call asynchronously and * prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntities * * @param text the text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the {@link PiiEntity PII entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<PiiEntity> recognizePiiEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizePiiEntityAsyncClient.recognizePiiEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of texts. For the list * of supported entity types, check: <a href="https: * check: <a href="https: * * <p>Recognize PII entities in a list of string inputs. 
Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatch * * @param textInputs A list of text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizePiiEntitiesResult PII entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch( Iterable<String> textInputs) { return recognizePiiEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of texts. For the list * of supported entity types, check <a href="https: * check: <a href="https: * * <p>Recognize PII entities in a list of string inputs with provided language hint. Subscribes to the call * asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatch * * @param textInputs A list of text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizePiiEntitiesResult PII entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return recognizePiiEntitiesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the batch of document inputs. For * the list of supported entity types,check: <a href="https: * check: <a href="https: * * <p>Recognize PII entities in a list of TextDocumentInput with provided statistics options. Subscribes to the * call asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link RecognizePiiEntitiesResult PII entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizePiiEntitiesResult>>> recognizePiiEntitiesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of recognized entities with links to a well-known knowledge base for the provided text. See * <a href="https: * * <p>Recognize linked entities in a text. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntities * * @param text the text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text) { return recognizeLinkedEntities(text, defaultLanguage); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the provided text. See * <a href="https: * * <p>Recognize linked entities in a text with provided language hint. Subscribes to the call asynchronously * and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntities * * @param text the text to recognize linked entities for. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of texts. See * <a href="https: * * <p>Recognize linked entities in a list of string inputs. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatch * * @param textInputs A list of text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeLinkedEntitiesResult linked entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch( Iterable<String> textInputs) { return recognizeLinkedEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of texts. See * <a href="https: * * <p>Recognize linked entities in a list of string inputs with provided language hint. Subscribes to the call * asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatch * * @param textInputs A list of text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeLinkedEntitiesResult linked entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return recognizeLinkedEntitiesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of inputs. See * <a href="https: * * <p>Recognize linked entities in a list of TextDocumentInput and provided reuqest options to show statistics. 
* Subscribes to the call asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link RecognizeLinkedEntitiesResult linked entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizeLinkedEntitiesResult>>> recognizeLinkedEntitiesBatchWithResponse(Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesBatchWithResponse( textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of strings denoting the key phrases in the input text. * * <p>Extract key phrases in a text. Subscribes to the call asynchronously and prints out the * key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrases * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the key phrases of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<String> extractKeyPhrases(String text) { return extractKeyPhrases(text, defaultLanguage); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. * * <p>Extract key phrases in a text with a provided language. Subscribes to the call asynchronously and prints * out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrases * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the key phrases of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<String> extractKeyPhrases(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> extractKeyPhraseAsyncClient.extractKeyPhrasesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of strings denoting the key phrases in the input text. * * <p>Extract key phrases in a list of string inputs. Subscribes to the call asynchronously and prints out the * key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatch * * @param textInputs A list of text to be analyzed. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link ExtractKeyPhraseResult key phrases} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch(Iterable<String> textInputs) { return extractKeyPhrasesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. * * <p>Extract key phrases in a list of string inputs with a provided language. Subscribes to the call asynchronously * and prints out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link ExtractKeyPhraseResult key phrases}. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return extractKeyPhrasesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. * * <p>Extract key phrases in a list of TextDocumentInput with request options. Subscribes to the call asynchronously * and prints out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link ExtractKeyPhraseResult key phrases}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<ExtractKeyPhraseResult>>> extractKeyPhrasesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> extractKeyPhraseAsyncClient.extractKeyPhrasesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and * Neutral) for the document and each sentence within it. 
* * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentiment * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentSentiment> analyzeSentiment(String text) { try { return analyzeSentiment(text, defaultLanguage); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentiment * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentSentiment> analyzeSentiment(String text, String language) { return analyzeSentimentBatch(Collections.singletonList(text), language, null) .map(documentCollection -> { final Iterator<AnalyzeSentimentResult> iterator = documentCollection.iterator(); if (!iterator.hasNext()) { throw logger.logExceptionAsError( new IllegalStateException("An empty collection returned which is an unexpected error.")); } final AnalyzeSentimentResult sentimentResult = iterator.next(); if (sentimentResult.isError()) { throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(sentimentResult.getError())); } return sentimentResult.getDocumentSentiment(); }); } /** * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link AnalyzeSentimentResult text sentiment} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch(Iterable<String> textInputs) { return analyzeSentimentBatch(textInputs, defaultLanguage, null); } /** * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput. 
Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link AnalyzeSentimentResult text sentiment}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return analyzeSentimentBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput with provided request options. Subscribes to the call * asynchronously and prints out the sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link AnalyzeSentimentResult text sentiment}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> analyzeSentimentAsyncClient.analyzeSentimentBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } }
class TextAnalyticsAsyncClient { private final ClientLogger logger = new ClientLogger(TextAnalyticsAsyncClient.class); private final TextAnalyticsClientImpl service; private final TextAnalyticsServiceVersion serviceVersion; private final String defaultCountryHint; private final String defaultLanguage; final DetectLanguageAsyncClient detectLanguageAsyncClient; final AnalyzeSentimentAsyncClient analyzeSentimentAsyncClient; final ExtractKeyPhraseAsyncClient extractKeyPhraseAsyncClient; final RecognizeEntityAsyncClient recognizeEntityAsyncClient; final RecognizePiiEntityAsyncClient recognizePiiEntityAsyncClient; final RecognizeLinkedEntityAsyncClient recognizeLinkedEntityAsyncClient; /** * Create a {@code TextAnalyticsAsyncClient} that sends requests to the Text Analytics services's endpoint. Each * service call goes through the {@link TextAnalyticsClientBuilder * * @param service The proxy service used to perform REST calls. * @param serviceVersion The versions of Azure Text Analytics supported by this client library. * @param defaultCountryHint The default country hint. * @param defaultLanguage The default language. */ TextAnalyticsAsyncClient(TextAnalyticsClientImpl service, TextAnalyticsServiceVersion serviceVersion, String defaultCountryHint, String defaultLanguage) { this.service = service; this.serviceVersion = serviceVersion; this.defaultCountryHint = defaultCountryHint; this.defaultLanguage = defaultLanguage; this.detectLanguageAsyncClient = new DetectLanguageAsyncClient(service); this.analyzeSentimentAsyncClient = new AnalyzeSentimentAsyncClient(service); this.extractKeyPhraseAsyncClient = new ExtractKeyPhraseAsyncClient(service); this.recognizeEntityAsyncClient = new RecognizeEntityAsyncClient(service); this.recognizePiiEntityAsyncClient = new RecognizePiiEntityAsyncClient(service); this.recognizeLinkedEntityAsyncClient = new RecognizeLinkedEntityAsyncClient(service); } /** * Get default country hint code. 
* * @return the default country hint code */ public String getDefaultCountryHint() { return defaultCountryHint; } /** * Get default language when the builder is setup. * * @return the default language */ public String getDefaultLanguage() { return defaultLanguage; } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public TextAnalyticsServiceVersion getServiceVersion() { return serviceVersion; } /** * Returns the detected language and a confidence score between zero and one. Scores close to one indicate 100% * certainty that the identified language is true. * * This method will use the default country hint that sets up in * {@link TextAnalyticsClientBuilder * the country hint. * * <p><strong>Code sample</strong></p> * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage * * @param text The text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DetectedLanguage detected language} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DetectedLanguage> detectLanguage(String text) { return detectLanguage(text, defaultCountryHint); } /** * Returns a {@link Response} containing the detected language and a confidence score between zero and one. Scores * close to one indicate 100% certainty that the identified language is true. * * <p><strong>Code sample</strong></p> * <p>Detects language with http response in a text with a provided country hint. 
Subscribes to the call * asynchronously and prints out the detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage * * @param text The text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not * specified. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DetectedLanguage detected language} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) /** * Returns the detected language for a batch of input. * * This method will use the default country hint that sets up in * {@link TextAnalyticsClientBuilder * the country hint. * * <p><strong>Code sample</strong></p> * <p>Detects language in a list of string inputs. Subscribes to the call asynchronously and prints out the * detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch * * @param textInputs The list of texts to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch(Iterable<String> textInputs) { return detectLanguageBatch(textInputs, defaultCountryHint, null); } /** * Returns the detected language for a batch of input with the provided country hint. 
* * <p><strong>Code sample</strong></p> * <p>Detects language in a list of string inputs with a provided country hint for the batch. Subscribes to the * call asynchronously and prints out the detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch * * @param textInputs The list of texts to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param countryHint A country hint for the entire batch. Accepts two letter country codes specified by ISO * 3166-1 alpha-2. Defaults to "US" if not specified. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link DocumentResultCollection batch} of the * {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch( Iterable<String> textInputs, String countryHint, TextAnalyticsRequestOptions options) { return detectLanguageBatchWithResponse( mapByIndex(textInputs, (index, value) -> new DetectLanguageInput(index, value, countryHint)), options) .flatMap(FluxUtil::toMono); } /** * Returns the detected language for a batch of input. * * <p><strong>Code sample</strong></p> * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatchWithResponse * * @param textInputs The list of {@link DetectLanguageInput inputs/documents} to be analyzed. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguageBatchWithResponse( Iterable<DetectLanguageInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext( context -> detectLanguageAsyncClient.detectLanguageBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of general categorized entities in the provided text. For a list of supported entity types, * check: <a href="https: * check: <a href="https: * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text. Subscribes to the call asynchronously and prints out the recognized entity * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities * * @param text the text to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<CategorizedEntity> recognizeEntities(String text) { return recognizeEntities(text, defaultLanguage); } /** * Returns a list of general categorized entities in the provided text. For a list of supported entity types, * check: <a href="https: * check: <a href="https: * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text with provided language hint. Subscribes to the call asynchronously and prints * out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities * * @param text the text to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as * default. * * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<CategorizedEntity> recognizeEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizeEntityAsyncClient.recognizeEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of general categorized entities for the provided list of texts. * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text. 
Subscribes to the call asynchronously and prints out the entity details * when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatch * * @param textInputs A list of texts to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeEntitiesResult categorized entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch(Iterable<String> textInputs) { return recognizeEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of general categorized entities for the provided list of texts. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text with the provided language hint. Subscribes to the call asynchronously and * prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatch * * @param textInputs A list of texts to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as * default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeEntitiesResult categorized entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return recognizeEntitiesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of general categorized entities for the provided list of text inputs. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link RecognizeEntitiesResult categorized entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizeEntitiesResult>>> recognizeEntitiesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizeEntityAsyncClient.recognizeEntitiesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text. For the list of * supported entity types, check <a href="https: * for the list of enabled languages. 
* * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Recognize Personally Identifiable Information entities in a text. Subscribes to the call asynchronously and * prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntities * * @param text the text to recognize Personally Identifiable Information entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link PiiEntity Personally Identifiable Information entities} of * the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<PiiEntity> recognizePiiEntities(String text) { return recognizePiiEntities(text, defaultLanguage); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text. For the list of * supported entity types, check: <a href="https: * href="https: * * <p>Recognize Personally Identifiable Information entities in a text with provided language hint. Subscribes to * the call asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntities * * @param text the text to recognize Personally Identifiable Information entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the {@link PiiEntity Personally Identifiable Information entities} of * the text. 
* * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<PiiEntity> recognizePiiEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizePiiEntityAsyncClient.recognizePiiEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of texts. For the list * of supported entity types, check: <a href="https: * check: <a href="https: * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Recognize Personally Identifiable Information entities in a list of string inputs. Subscribes to the call * asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatch * * @param textInputs A list of text to recognize Personally Identifiable Information entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizePiiEntitiesResult Personally Identifiable Information entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch( Iterable<String> textInputs) { return recognizePiiEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of texts. 
For the list * of supported entity types, check <a href="https: * check: <a href="https: * * <p>Recognize Personally Identifiable Information entities in a list of string inputs with provided language hint. * Subscribes to the call asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatch * * @param textInputs A list of text to recognize Personally Identifiable Information entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizePiiEntitiesResult Personally Identifiable Information entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return recognizePiiEntitiesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the batch of document inputs. For * the list of supported entity types,check: <a href="https: * check: <a href="https: * * <p>Recognize Personally Identifiable Information entities in a list of TextDocumentInput with provided * statistics options. 
Subscribes to the call asynchronously and prints out the entity details when a response is * received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize * Personally Identifiable Information entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of * {@link RecognizePiiEntitiesResult Personally Identifiable Information entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizePiiEntitiesResult>>> recognizePiiEntitiesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of recognized entities with links to a well-known knowledge base for the provided text. See * <a href="https: * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Recognize linked entities in a text. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntities * * @param text the text to recognize linked entities for. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text) { return recognizeLinkedEntities(text, defaultLanguage); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the provided text. See * <a href="https: * * <p>Recognize linked entities in a text with provided language hint. Subscribes to the call asynchronously * and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntities * * @param text the text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of texts. 
See * <a href="https: * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Recognize linked entities in a list of string inputs. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatch * * @param textInputs A list of text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeLinkedEntitiesResult linked entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch( Iterable<String> textInputs) { return recognizeLinkedEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of texts. See * <a href="https: * * <p>Recognize linked entities in a list of string inputs with provided language hint. Subscribes to the call * asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatch * * @param textInputs A list of text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. 
* * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeLinkedEntitiesResult linked entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return recognizeLinkedEntitiesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of inputs. See * <a href="https: * * <p>Recognize linked entities in a list of TextDocumentInput and provided reuqest options to show statistics. * Subscribes to the call asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link RecognizeLinkedEntitiesResult linked entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizeLinkedEntitiesResult>>> recognizeLinkedEntitiesBatchWithResponse(Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesBatchWithResponse( textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of strings denoting the key phrases in the input text. * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Extract key phrases in a text. Subscribes to the call asynchronously and prints out the * key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrases * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the key phrases of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<String> extractKeyPhrases(String text) { return extractKeyPhrases(text, defaultLanguage); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. * * <p>Extract key phrases in a text with a provided language. Subscribes to the call asynchronously and prints * out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrases * * @param text the text to be analyzed. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the key phrases of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<String> extractKeyPhrases(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> extractKeyPhraseAsyncClient.extractKeyPhrasesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of strings denoting the key phrases in the input text. * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Extract key phrases in a list of string inputs. Subscribes to the call asynchronously and prints out the * key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link ExtractKeyPhraseResult key phrases} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch(Iterable<String> textInputs) { return extractKeyPhrasesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. 
* * <p>Extract key phrases in a list of string inputs with a provided language. Subscribes to the call asynchronously * and prints out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link ExtractKeyPhraseResult key phrases}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return extractKeyPhrasesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. * * <p>Extract key phrases in a list of TextDocumentInput with request options. Subscribes to the call asynchronously * and prints out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link ExtractKeyPhraseResult key phrases}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<ExtractKeyPhraseResult>>> extractKeyPhrasesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> extractKeyPhraseAsyncClient.extractKeyPhrasesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and * Neutral) for the document and each sentence within it. * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentiment * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentSentiment> analyzeSentiment(String text) { try { return analyzeSentiment(text, defaultLanguage); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentiment * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentSentiment> analyzeSentiment(String text, String language) { return analyzeSentimentBatch(Collections.singletonList(text), language, null) .map(documentCollection -> { final Iterator<AnalyzeSentimentResult> iterator = documentCollection.iterator(); if (!iterator.hasNext()) { throw logger.logExceptionAsError( new IllegalStateException("An empty collection returned which is an unexpected error.")); } final AnalyzeSentimentResult sentimentResult = iterator.next(); if (sentimentResult.isError()) { throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(sentimentResult.getError())); } return sentimentResult.getDocumentSentiment(); }); } /** * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and * Neutral) for the document and each sentence within it. * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link AnalyzeSentimentResult text sentiment} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch(Iterable<String> textInputs) { return analyzeSentimentBatch(textInputs, defaultLanguage, null); } /** * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link AnalyzeSentimentResult text sentiment}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return analyzeSentimentBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput with provided request options. 
Subscribes to the call * asynchronously and prints out the sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link AnalyzeSentimentResult text sentiment}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> analyzeSentimentAsyncClient.analyzeSentimentBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } }
`documentCollection` 's error is either the `InnerError` or `TextAnalyticsError`. before calling .map(), the caller already take care of the scenario transform, such that ` ``` Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguageBatchWithResponse() called toDocumentResultCollection() which called Transforms.toTextAnalyticsError() ```
public Mono<DetectedLanguage> detectLanguage(String text, String countryHint) { return detectLanguageBatch(Collections.singletonList(text), countryHint, null) .map(documentCollection -> { final Iterator<DetectLanguageResult> iterator = documentCollection.iterator(); if (!iterator.hasNext()) { throw logger.logExceptionAsError( new IllegalStateException("An empty collection returned which is an unexpected error.")); } final DetectLanguageResult languageResult = iterator.next(); if (languageResult.isError()) { throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(languageResult.getError())); } return languageResult.getPrimaryLanguage(); }); }
throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(languageResult.getError()));
public Mono<DetectedLanguage> detectLanguage(String text, String countryHint) { return detectLanguageBatch(Collections.singletonList(text), countryHint, null) .map(documentCollection -> { final Iterator<DetectLanguageResult> iterator = documentCollection.iterator(); if (!iterator.hasNext()) { throw logger.logExceptionAsError( new IllegalStateException("An empty collection returned which is an unexpected error.")); } final DetectLanguageResult languageResult = iterator.next(); if (languageResult.isError()) { throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(languageResult.getError())); } return languageResult.getPrimaryLanguage(); }); }
class TextAnalyticsAsyncClient { private final ClientLogger logger = new ClientLogger(TextAnalyticsAsyncClient.class); private final TextAnalyticsClientImpl service; private final TextAnalyticsServiceVersion serviceVersion; private final String defaultCountryHint; private final String defaultLanguage; final DetectLanguageAsyncClient detectLanguageAsyncClient; final AnalyzeSentimentAsyncClient analyzeSentimentAsyncClient; final ExtractKeyPhraseAsyncClient extractKeyPhraseAsyncClient; final RecognizeEntityAsyncClient recognizeEntityAsyncClient; final RecognizePiiEntityAsyncClient recognizePiiEntityAsyncClient; final RecognizeLinkedEntityAsyncClient recognizeLinkedEntityAsyncClient; /** * Create a {@code TextAnalyticsAsyncClient} that sends requests to the Text Analytics services's endpoint. Each * service call goes through the {@link TextAnalyticsClientBuilder * * @param service The proxy service used to perform REST calls. * @param serviceVersion The versions of Azure Text Analytics supported by this client library. * @param defaultCountryHint The default country hint. * @param defaultLanguage The default language. */ TextAnalyticsAsyncClient(TextAnalyticsClientImpl service, TextAnalyticsServiceVersion serviceVersion, String defaultCountryHint, String defaultLanguage) { this.service = service; this.serviceVersion = serviceVersion; this.defaultCountryHint = defaultCountryHint; this.defaultLanguage = defaultLanguage; this.detectLanguageAsyncClient = new DetectLanguageAsyncClient(service); this.analyzeSentimentAsyncClient = new AnalyzeSentimentAsyncClient(service); this.extractKeyPhraseAsyncClient = new ExtractKeyPhraseAsyncClient(service); this.recognizeEntityAsyncClient = new RecognizeEntityAsyncClient(service); this.recognizePiiEntityAsyncClient = new RecognizePiiEntityAsyncClient(service); this.recognizeLinkedEntityAsyncClient = new RecognizeLinkedEntityAsyncClient(service); } /** * Get default country hint code. 
* * @return the default country hint code */ public String getDefaultCountryHint() { return defaultCountryHint; } /** * Get default language when the builder is setup. * * @return the default language */ public String getDefaultLanguage() { return defaultLanguage; } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public TextAnalyticsServiceVersion getServiceVersion() { return serviceVersion; } /** * Returns the detected language and a numeric score between zero and one. Scores close to one indicate 100% * certainty that the identified language is true. * * <p><strong>Code sample</strong></p> * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage * * @param text The text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DetectedLanguage detected language} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DetectedLanguage> detectLanguage(String text) { return detectLanguage(text, defaultCountryHint); } /** * Returns a {@link Response} containing the detected language and a numeric score between zero and one. Scores * close to one indicate 100% certainty that the identified language is true. * * <p><strong>Code sample</strong></p> * <p>Detects language with http response in a text with a provided country hint. 
Subscribes to the call * asynchronously and prints out the detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage * * @param text The text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not * specified. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DetectedLanguage detected language} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) /** * Returns the detected language for a batch of input. * * <p><strong>Code sample</strong></p> * <p>Detects language in a list of string inputs. Subscribes to the call asynchronously and prints out the * detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch * * @param textInputs The list of texts to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch(Iterable<String> textInputs) { return detectLanguageBatch(textInputs, defaultCountryHint, null); } /** * Returns the detected language for a batch of input with the provided country hint. * * <p><strong>Code sample</strong></p> * <p>Detects language in a list of string inputs with a provided country hint for the batch. 
Subscribes to the * call asynchronously and prints out the detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch * * @param textInputs The list of texts to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param countryHint A country hint for the entire batch. Accepts two letter country codes specified by ISO * 3166-1 alpha-2. Defaults to "US" if not specified. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link DocumentResultCollection batch} of the * {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch( Iterable<String> textInputs, String countryHint, TextAnalyticsRequestOptions options) { return detectLanguageBatchWithResponse( mapByIndex(textInputs, (index, value) -> new DetectLanguageInput(index, value, countryHint)), options) .flatMap(FluxUtil::toMono); } /** * Returns the detected language for a batch of input. * * <p><strong>Code sample</strong></p> * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatchWithResponse * * @param textInputs The list of {@link DetectLanguageInput inputs/documents} to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. 
* * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguageBatchWithResponse( Iterable<DetectLanguageInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext( context -> detectLanguageAsyncClient.detectLanguageBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of general categorized entities in the provided text. For a list of supported entity types, * check: <a href="https: * check: <a href="https: * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text. Subscribes to the call asynchronously and prints out the recognized entity * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities * * @param text the text to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<CategorizedEntity> recognizeEntities(String text) { return recognizeEntities(text, defaultLanguage); } /** * Returns a list of general categorized entities in the provided text. For a list of supported entity types, * check: <a href="https: * check: <a href="https: * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text with provided language hint. 
Subscribes to the call asynchronously and prints * out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities * * @param text the text to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as * default. * * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<CategorizedEntity> recognizeEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizeEntityAsyncClient.recognizeEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of general categorized entities for the provided list of texts. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text. Subscribes to the call asynchronously and prints out the entity details * when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatch * * @param textInputs A list of texts to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeEntitiesResult categorized entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch(Iterable<String> textInputs) { return recognizeEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of general categorized entities for the provided list of texts. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text with the provided language hint. Subscribes to the call asynchronously and * prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatch * * @param textInputs A list of texts to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as * default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeEntitiesResult categorized entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return recognizeEntitiesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of general categorized entities for the provided list of text inputs. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a list of TextDocumentInput. 
Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link RecognizeEntitiesResult categorized entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizeEntitiesResult>>> recognizeEntitiesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizeEntityAsyncClient.recognizeEntitiesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text. For the list of * supported entity types, check <a href="https: * for the list of enabled languages. * * <p>Recognize PII entities in a text. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntities * * @param text the text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link PiiEntity PII entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. 
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<PiiEntity> recognizePiiEntities(String text) { return recognizePiiEntities(text, defaultLanguage); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text. For the list of * supported entity types, check: <a href="https: * href="https: * * <p>Recognize PII entities in a text with provided language hint. Subscribes to the call asynchronously and * prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntities * * @param text the text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the {@link PiiEntity PII entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<PiiEntity> recognizePiiEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizePiiEntityAsyncClient.recognizePiiEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of texts. For the list * of supported entity types, check: <a href="https: * check: <a href="https: * * <p>Recognize PII entities in a list of string inputs. 
Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatch * * @param textInputs A list of text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizePiiEntitiesResult PII entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch( Iterable<String> textInputs) { return recognizePiiEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of texts. For the list * of supported entity types, check <a href="https: * check: <a href="https: * * <p>Recognize PII entities in a list of string inputs with provided language hint. Subscribes to the call * asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatch * * @param textInputs A list of text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizePiiEntitiesResult PII entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return recognizePiiEntitiesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the batch of document inputs. For * the list of supported entity types,check: <a href="https: * check: <a href="https: * * <p>Recognize PII entities in a list of TextDocumentInput with provided statistics options. Subscribes to the * call asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link RecognizePiiEntitiesResult PII entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizePiiEntitiesResult>>> recognizePiiEntitiesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of recognized entities with links to a well-known knowledge base for the provided text. See * <a href="https: * * <p>Recognize linked entities in a text. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntities * * @param text the text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text) { return recognizeLinkedEntities(text, defaultLanguage); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the provided text. See * <a href="https: * * <p>Recognize linked entities in a text with provided language hint. Subscribes to the call asynchronously * and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntities * * @param text the text to recognize linked entities for. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of texts. See * <a href="https: * * <p>Recognize linked entities in a list of string inputs. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatch * * @param textInputs A list of text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeLinkedEntitiesResult linked entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch( Iterable<String> textInputs) { return recognizeLinkedEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of texts. See * <a href="https: * * <p>Recognize linked entities in a list of string inputs with provided language hint. Subscribes to the call * asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatch * * @param textInputs A list of text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeLinkedEntitiesResult linked entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return recognizeLinkedEntitiesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of inputs. See * <a href="https: * * <p>Recognize linked entities in a list of TextDocumentInput and provided reuqest options to show statistics. 
* Subscribes to the call asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link RecognizeLinkedEntitiesResult linked entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizeLinkedEntitiesResult>>> recognizeLinkedEntitiesBatchWithResponse(Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesBatchWithResponse( textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of strings denoting the key phrases in the input text. * * <p>Extract key phrases in a text. Subscribes to the call asynchronously and prints out the * key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrases * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the key phrases of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<String> extractKeyPhrases(String text) { return extractKeyPhrases(text, defaultLanguage); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. * * <p>Extract key phrases in a text with a provided language. Subscribes to the call asynchronously and prints * out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrases * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the key phrases of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<String> extractKeyPhrases(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> extractKeyPhraseAsyncClient.extractKeyPhrasesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of strings denoting the key phrases in the input text. * * <p>Extract key phrases in a list of string inputs. Subscribes to the call asynchronously and prints out the * key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatch * * @param textInputs A list of text to be analyzed. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link ExtractKeyPhraseResult key phrases} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch(Iterable<String> textInputs) { return extractKeyPhrasesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. * * <p>Extract key phrases in a list of string inputs with a provided language. Subscribes to the call asynchronously * and prints out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link ExtractKeyPhraseResult key phrases}. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return extractKeyPhrasesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. * * <p>Extract key phrases in a list of TextDocumentInput with request options. Subscribes to the call asynchronously * and prints out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link ExtractKeyPhraseResult key phrases}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<ExtractKeyPhraseResult>>> extractKeyPhrasesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> extractKeyPhraseAsyncClient.extractKeyPhrasesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and * Neutral) for the document and each sentence within it. 
* * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentiment * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentSentiment> analyzeSentiment(String text) { try { return analyzeSentiment(text, defaultLanguage); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentiment * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentSentiment> analyzeSentiment(String text, String language) { return analyzeSentimentBatch(Collections.singletonList(text), language, null) .map(documentCollection -> { final Iterator<AnalyzeSentimentResult> iterator = documentCollection.iterator(); if (!iterator.hasNext()) { throw logger.logExceptionAsError( new IllegalStateException("An empty collection returned which is an unexpected error.")); } final AnalyzeSentimentResult sentimentResult = iterator.next(); if (sentimentResult.isError()) { throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(sentimentResult.getError())); } return sentimentResult.getDocumentSentiment(); }); } /** * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link AnalyzeSentimentResult text sentiment} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch(Iterable<String> textInputs) { return analyzeSentimentBatch(textInputs, defaultLanguage, null); } /** * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput. 
Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link AnalyzeSentimentResult text sentiment}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return analyzeSentimentBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput with provided request options. Subscribes to the call * asynchronously and prints out the sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link AnalyzeSentimentResult text sentiment}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> analyzeSentimentAsyncClient.analyzeSentimentBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } }
class TextAnalyticsAsyncClient { private final ClientLogger logger = new ClientLogger(TextAnalyticsAsyncClient.class); private final TextAnalyticsClientImpl service; private final TextAnalyticsServiceVersion serviceVersion; private final String defaultCountryHint; private final String defaultLanguage; final DetectLanguageAsyncClient detectLanguageAsyncClient; final AnalyzeSentimentAsyncClient analyzeSentimentAsyncClient; final ExtractKeyPhraseAsyncClient extractKeyPhraseAsyncClient; final RecognizeEntityAsyncClient recognizeEntityAsyncClient; final RecognizePiiEntityAsyncClient recognizePiiEntityAsyncClient; final RecognizeLinkedEntityAsyncClient recognizeLinkedEntityAsyncClient; /** * Create a {@code TextAnalyticsAsyncClient} that sends requests to the Text Analytics services's endpoint. Each * service call goes through the {@link TextAnalyticsClientBuilder * * @param service The proxy service used to perform REST calls. * @param serviceVersion The versions of Azure Text Analytics supported by this client library. * @param defaultCountryHint The default country hint. * @param defaultLanguage The default language. */ TextAnalyticsAsyncClient(TextAnalyticsClientImpl service, TextAnalyticsServiceVersion serviceVersion, String defaultCountryHint, String defaultLanguage) { this.service = service; this.serviceVersion = serviceVersion; this.defaultCountryHint = defaultCountryHint; this.defaultLanguage = defaultLanguage; this.detectLanguageAsyncClient = new DetectLanguageAsyncClient(service); this.analyzeSentimentAsyncClient = new AnalyzeSentimentAsyncClient(service); this.extractKeyPhraseAsyncClient = new ExtractKeyPhraseAsyncClient(service); this.recognizeEntityAsyncClient = new RecognizeEntityAsyncClient(service); this.recognizePiiEntityAsyncClient = new RecognizePiiEntityAsyncClient(service); this.recognizeLinkedEntityAsyncClient = new RecognizeLinkedEntityAsyncClient(service); } /** * Get default country hint code. 
* * @return the default country hint code */ public String getDefaultCountryHint() { return defaultCountryHint; } /** * Get default language when the builder is setup. * * @return the default language */ public String getDefaultLanguage() { return defaultLanguage; } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public TextAnalyticsServiceVersion getServiceVersion() { return serviceVersion; } /** * Returns the detected language and a confidence score between zero and one. Scores close to one indicate 100% * certainty that the identified language is true. * * This method will use the default country hint that sets up in * {@link TextAnalyticsClientBuilder * the country hint. * * <p><strong>Code sample</strong></p> * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage * * @param text The text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DetectedLanguage detected language} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DetectedLanguage> detectLanguage(String text) { return detectLanguage(text, defaultCountryHint); } /** * Returns a {@link Response} containing the detected language and a confidence score between zero and one. Scores * close to one indicate 100% certainty that the identified language is true. * * <p><strong>Code sample</strong></p> * <p>Detects language with http response in a text with a provided country hint. 
Subscribes to the call * asynchronously and prints out the detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage * * @param text The text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not * specified. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DetectedLanguage detected language} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) /** * Returns the detected language for a batch of input. * * This method will use the default country hint that sets up in * {@link TextAnalyticsClientBuilder * the country hint. * * <p><strong>Code sample</strong></p> * <p>Detects language in a list of string inputs. Subscribes to the call asynchronously and prints out the * detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch * * @param textInputs The list of texts to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch(Iterable<String> textInputs) { return detectLanguageBatch(textInputs, defaultCountryHint, null); } /** * Returns the detected language for a batch of input with the provided country hint. 
* * <p><strong>Code sample</strong></p> * <p>Detects language in a list of string inputs with a provided country hint for the batch. Subscribes to the * call asynchronously and prints out the detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch * * @param textInputs The list of texts to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param countryHint A country hint for the entire batch. Accepts two letter country codes specified by ISO * 3166-1 alpha-2. Defaults to "US" if not specified. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link DocumentResultCollection batch} of the * {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch( Iterable<String> textInputs, String countryHint, TextAnalyticsRequestOptions options) { return detectLanguageBatchWithResponse( mapByIndex(textInputs, (index, value) -> new DetectLanguageInput(index, value, countryHint)), options) .flatMap(FluxUtil::toMono); } /** * Returns the detected language for a batch of input. * * <p><strong>Code sample</strong></p> * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatchWithResponse * * @param textInputs The list of {@link DetectLanguageInput inputs/documents} to be analyzed. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguageBatchWithResponse( Iterable<DetectLanguageInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext( context -> detectLanguageAsyncClient.detectLanguageBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of general categorized entities in the provided text. For a list of supported entity types, * check: <a href="https: * check: <a href="https: * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text. Subscribes to the call asynchronously and prints out the recognized entity * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities * * @param text the text to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<CategorizedEntity> recognizeEntities(String text) {
    return recognizeEntities(text, defaultLanguage);
}

/**
 * Returns a list of general categorized entities in the provided text, analyzed with the given
 * language hint.
 *
 * @param text the text to recognize entities for.
 * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for
 * English as default.
 *
 * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of
 * the text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<CategorizedEntity> recognizeEntities(String text, String language) {
    try {
        // The supplier defers the service call until the flux is subscribed.
        return new PagedFlux<>(() -> withContext(context ->
            recognizeEntityAsyncClient.recognizeEntitiesWithResponse(text, language, context)));
    } catch (RuntimeException ex) {
        return pagedFluxError(logger, ex);
    }
}

/**
 * Returns a list of general categorized entities for the provided list of texts.
 *
 * <p>This overload uses the default language configured in {@link TextAnalyticsClientBuilder}.
 *
 * @param textInputs A list of texts to recognize entities for.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link RecognizeEntitiesResult categorized entity} of the text.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch(Iterable<String> textInputs) {
    return recognizeEntitiesBatch(textInputs, defaultLanguage, null);
}

/**
 * Returns a list of general categorized entities for the provided list of texts, analyzed with
 * the provided language hint.
 *
 * @param textInputs A list of texts to recognize entities for.
 * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for
 * English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link RecognizeEntitiesResult categorized entity}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch(
    Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
    // Wrap each raw string in a TextDocumentInput keyed by its position, delegate, unwrap value.
    return recognizeEntitiesBatchWithResponse(
        mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options)
        .flatMap(FluxUtil::toMono);
}

/**
 * Returns a list of general categorized entities for the provided list of text inputs.
 *
 * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize entities
 * for.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 *
 * @return A {@link Mono} containing a {@link Response} whose value contains the
 * {@link DocumentResultCollection batch} of {@link RecognizeEntitiesResult categorized entity}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<DocumentResultCollection<RecognizeEntitiesResult>>> recognizeEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) {
    try {
        return withContext(context ->
            recognizeEntityAsyncClient.recognizeEntitiesBatchWithResponse(textInputs, options, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}

/**
 * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text; see
 * the service documentation for the supported entity types and enabled languages.
*
 * <p>This overload uses the default language configured in {@link TextAnalyticsClientBuilder}.
 *
 * <p>Recognize Personally Identifiable Information entities in a text. Subscribes to the call
 * asynchronously and prints out the entity details when a response is received.</p>
 *
 * @param text the text to recognize Personally Identifiable Information entities for.
 *
 * @return A {@link PagedFlux} containing the {@link PiiEntity Personally Identifiable
 * Information entities} of the text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<PiiEntity> recognizePiiEntities(String text) {
    return recognizePiiEntities(text, defaultLanguage);
}

/**
 * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text,
 * analyzed with the provided language hint.
 *
 * @param text the text to recognize Personally Identifiable Information entities for.
 * @param language The 2 letter ISO 639-1 representation of language for the text. If not set,
 * uses "en" for English as default.
 *
 * @return A {@link PagedFlux} containing the {@link PiiEntity Personally Identifiable
 * Information entities} of the text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<PiiEntity> recognizePiiEntities(String text, String language) {
    try {
        // The supplier defers the service call until the flux is subscribed.
        return new PagedFlux<>(() -> withContext(context ->
            recognizePiiEntityAsyncClient.recognizePiiEntitiesWithResponse(text, language, context)));
    } catch (RuntimeException ex) {
        return pagedFluxError(logger, ex);
    }
}

/**
 * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of
 * texts.
 *
 * <p>This overload uses the default language configured in {@link TextAnalyticsClientBuilder}.
 *
 * @param textInputs A list of text to recognize Personally Identifiable Information entities
 * for.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link RecognizePiiEntitiesResult Personally Identifiable Information entity} of the text.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch(
    Iterable<String> textInputs) {
    return recognizePiiEntitiesBatch(textInputs, defaultLanguage, null);
}

/**
 * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of
 * texts, analyzed with the provided language hint.
 *
 * @param textInputs A list of text to recognize Personally Identifiable Information entities
 * for.
 * @param language The 2 letter ISO 639-1 representation of language for the text. If not set,
 * uses "en" for English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link RecognizePiiEntitiesResult Personally Identifiable Information entity}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch(
    Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
    // Wrap each raw string in a TextDocumentInput keyed by its position, delegate, unwrap value.
    return recognizePiiEntitiesBatchWithResponse(
        mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options)
        .flatMap(FluxUtil::toMono);
}

/**
 * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the batch of
 * document inputs, with the provided request options.
 *
 * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize
 * Personally Identifiable Information entities for.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 *
 * @return A {@link Mono} containing a {@link Response} whose value contains the
 * {@link DocumentResultCollection batch} of
 * {@link RecognizePiiEntitiesResult Personally Identifiable Information entity}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<DocumentResultCollection<RecognizePiiEntitiesResult>>> recognizePiiEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) {
    try {
        return withContext(context ->
            recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithResponse(textInputs, options, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}

/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the
 * provided text.
 *
 * <p>This overload uses the default language configured in {@link TextAnalyticsClientBuilder}.
 *
 * <p>Recognize linked entities in a text. Subscribes to the call asynchronously and prints out
 * the entity details when a response is received.</p>
 *
 * @param text the text to recognize linked entities for.
* For text length limits, maximum batch size, and supported text encoding, see the service
 * documentation.
 *
 * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text) {
    return recognizeLinkedEntities(text, defaultLanguage);
}

/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the
 * provided text, analyzed with the given language hint.
 *
 * @param text the text to recognize linked entities for.
 * @param language The 2 letter ISO 639-1 representation of language for the text. If not set,
 * uses "en" for English as default.
 *
 * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text, String language) {
    try {
        // The supplier defers the service call until the flux is subscribed.
        return new PagedFlux<>(() -> withContext(context ->
            recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesWithResponse(text, language, context)));
    } catch (RuntimeException ex) {
        return pagedFluxError(logger, ex);
    }
}

/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the list
 * of texts.
 *
 * <p>This overload uses the default language configured in {@link TextAnalyticsClientBuilder}.
 *
 * @param textInputs A list of text to recognize linked entities for.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link RecognizeLinkedEntitiesResult linked entity} of the text.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch(
    Iterable<String> textInputs) {
    return recognizeLinkedEntitiesBatch(textInputs, defaultLanguage, null);
}

/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the list
 * of texts, analyzed with the provided language hint.
 *
 * @param textInputs A list of text to recognize linked entities for.
 * @param language The 2 letter ISO 639-1 representation of language for the text. If not set,
 * uses "en" for English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link RecognizeLinkedEntitiesResult linked entity}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch(
    Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
    // Wrap each raw string in a TextDocumentInput keyed by its position, delegate, unwrap value.
    return recognizeLinkedEntitiesBatchWithResponse(
        mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options)
        .flatMap(FluxUtil::toMono);
}

/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the list
 * of inputs, with the provided request options.
 *
 * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize linked
 * entities for.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 *
 * @return A {@link Mono} containing a {@link Response} whose value contains the
 * {@link DocumentResultCollection batch} of {@link RecognizeLinkedEntitiesResult linked entity}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<DocumentResultCollection<RecognizeLinkedEntitiesResult>>>
    recognizeLinkedEntitiesBatchWithResponse(Iterable<TextDocumentInput> textInputs,
    TextAnalyticsRequestOptions options) {
    try {
        return withContext(context -> recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesBatchWithResponse(
            textInputs, options, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}

/**
 * Returns a list of strings denoting the key phrases in the input text.
 *
 * <p>This overload uses the default language configured in {@link TextAnalyticsClientBuilder}.
 *
 * @param text the text to be analyzed.
 *
 * @return A {@link PagedFlux} containing the key phrases of the text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<String> extractKeyPhrases(String text) {
    return extractKeyPhrases(text, defaultLanguage);
}

/**
 * Returns a list of strings denoting the key phrases in the input text, analyzed with the
 * provided language; see the service documentation for the list of enabled languages.
 *
 * @param text the text to be analyzed.
 * @param language The 2 letter ISO 639-1 representation of language for the text. If not set,
 * uses "en" for English as default.
 *
 * @return A {@link PagedFlux} containing the key phrases of the text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<String> extractKeyPhrases(String text, String language) {
    try {
        // The supplier defers the service call until the flux is subscribed.
        return new PagedFlux<>(() -> withContext(context ->
            extractKeyPhraseAsyncClient.extractKeyPhrasesWithResponse(text, language, context)));
    } catch (RuntimeException ex) {
        return pagedFluxError(logger, ex);
    }
}

/**
 * Returns a list of strings denoting the key phrases in the input texts.
 *
 * <p>This overload uses the default language configured in {@link TextAnalyticsClientBuilder}.
 *
 * @param textInputs A list of text to be analyzed.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link ExtractKeyPhraseResult key phrases} of the text.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch(Iterable<String> textInputs) {
    return extractKeyPhrasesBatch(textInputs, defaultLanguage, null);
}

/**
 * Returns a list of strings denoting the key phrases in the input texts, analyzed with the
 * provided language.
 *
 * @param textInputs A list of text to be analyzed.
 * @param language The 2 letter ISO 639-1 representation of language for the text. If not set,
 * uses "en" for English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link ExtractKeyPhraseResult key phrases}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch(
    Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
    // Wrap each raw string in a TextDocumentInput keyed by its position, delegate, unwrap value.
    return extractKeyPhrasesBatchWithResponse(
        mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options)
        .flatMap(FluxUtil::toMono);
}

/**
 * Returns a list of strings denoting the key phrases in the input documents, with the provided
 * request options.
 *
 * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 *
 * @return A {@link Mono} containing a {@link Response} whose value contains the
 * {@link DocumentResultCollection batch} of {@link ExtractKeyPhraseResult key phrases}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<DocumentResultCollection<ExtractKeyPhraseResult>>> extractKeyPhrasesBatchWithResponse(
    Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) {
    try {
        return withContext(context ->
            extractKeyPhraseAsyncClient.extractKeyPhrasesBatchWithResponse(textInputs, options, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}

/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it.
 *
 * <p>This overload uses the default language configured in {@link TextAnalyticsClientBuilder}.
 *
 * @param text the text to be analyzed.
 *
 * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the
 * text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentSentiment> analyzeSentiment(String text) { try { return analyzeSentiment(text, defaultLanguage); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentiment * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentSentiment> analyzeSentiment(String text, String language) { return analyzeSentimentBatch(Collections.singletonList(text), language, null) .map(documentCollection -> { final Iterator<AnalyzeSentimentResult> iterator = documentCollection.iterator(); if (!iterator.hasNext()) { throw logger.logExceptionAsError( new IllegalStateException("An empty collection returned which is an unexpected error.")); } final AnalyzeSentimentResult sentimentResult = iterator.next(); if (sentimentResult.isError()) { throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(sentimentResult.getError())); } return sentimentResult.getDocumentSentiment(); }); } /** * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and * Neutral) for the document and each sentence within it. * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link AnalyzeSentimentResult text sentiment} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch(Iterable<String> textInputs) { return analyzeSentimentBatch(textInputs, defaultLanguage, null); } /** * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link AnalyzeSentimentResult text sentiment}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return analyzeSentimentBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and * Neutral) for the document and each sentence within it. * * <p>Analyze sentiment in a list of TextDocumentInput with provided request options. 
Subscribes to the call * asynchronously and prints out the sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link AnalyzeSentimentResult text sentiment}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> analyzeSentimentAsyncClient.analyzeSentimentBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } }
We do this because there is another type with this name, right? is it the generated one? what is the difference between both?
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
Yes. There is one generated one. Auto-generated one has setter methods.
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
and why we can't use that one?
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
Is this the autogenerated one? why we use one here and in other places the one from `com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel`
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(),
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
It might be me and my limited knowledge of Java. It just seems strange how we are using for some things one and not other
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(),
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
NIT: will never be empty
public Mono<DetectedLanguage> detectLanguage(String text, String countryHint) { return detectLanguageBatch(Collections.singletonList(text), countryHint, null) .map(documentCollection -> { final Iterator<DetectLanguageResult> iterator = documentCollection.iterator(); if (!iterator.hasNext()) { throw logger.logExceptionAsError( new IllegalStateException("An empty collection returned which is an unexpected error.")); } final DetectLanguageResult languageResult = iterator.next(); if (languageResult.isError()) { throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(languageResult.getError())); } return languageResult.getPrimaryLanguage(); }); }
public Mono<DetectedLanguage> detectLanguage(String text, String countryHint) { return detectLanguageBatch(Collections.singletonList(text), countryHint, null) .map(documentCollection -> { final Iterator<DetectLanguageResult> iterator = documentCollection.iterator(); if (!iterator.hasNext()) { throw logger.logExceptionAsError( new IllegalStateException("An empty collection returned which is an unexpected error.")); } final DetectLanguageResult languageResult = iterator.next(); if (languageResult.isError()) { throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(languageResult.getError())); } return languageResult.getPrimaryLanguage(); }); }
class TextAnalyticsAsyncClient {
    private final ClientLogger logger = new ClientLogger(TextAnalyticsAsyncClient.class);
    // Generated proxy that performs the actual REST calls.
    private final TextAnalyticsClientImpl service;
    private final TextAnalyticsServiceVersion serviceVersion;
    // Defaults applied by the convenience overloads when the caller supplies no hint/language.
    private final String defaultCountryHint;
    private final String defaultLanguage;
    // Per-feature sub-clients; package-private so sibling classes (and tests) can reach them.
    final DetectLanguageAsyncClient detectLanguageAsyncClient;
    final AnalyzeSentimentAsyncClient analyzeSentimentAsyncClient;
    final ExtractKeyPhraseAsyncClient extractKeyPhraseAsyncClient;
    final RecognizeEntityAsyncClient recognizeEntityAsyncClient;
    final RecognizePiiEntityAsyncClient recognizePiiEntityAsyncClient;
    final RecognizeLinkedEntityAsyncClient recognizeLinkedEntityAsyncClient;

    /**
     * Create a {@code TextAnalyticsAsyncClient} that sends requests to the Text Analytics services's endpoint. Each
     * service call goes through the {@link TextAnalyticsClientBuilder
     *
     * @param service The proxy service used to perform REST calls.
     * @param serviceVersion The versions of Azure Text Analytics supported by this client library.
     * @param defaultCountryHint The default country hint.
     * @param defaultLanguage The default language.
     */
    TextAnalyticsAsyncClient(TextAnalyticsClientImpl service, TextAnalyticsServiceVersion serviceVersion,
        String defaultCountryHint, String defaultLanguage) {
        this.service = service;
        this.serviceVersion = serviceVersion;
        this.defaultCountryHint = defaultCountryHint;
        this.defaultLanguage = defaultLanguage;
        // All sub-clients share the same underlying service proxy.
        this.detectLanguageAsyncClient = new DetectLanguageAsyncClient(service);
        this.analyzeSentimentAsyncClient = new AnalyzeSentimentAsyncClient(service);
        this.extractKeyPhraseAsyncClient = new ExtractKeyPhraseAsyncClient(service);
        this.recognizeEntityAsyncClient = new RecognizeEntityAsyncClient(service);
        this.recognizePiiEntityAsyncClient = new RecognizePiiEntityAsyncClient(service);
        this.recognizeLinkedEntityAsyncClient = new RecognizeLinkedEntityAsyncClient(service);
    }

    /**
     * Get default country hint code.
* * @return the default country hint code */ public String getDefaultCountryHint() { return defaultCountryHint; } /** * Get default language when the builder is setup. * * @return the default language */ public String getDefaultLanguage() { return defaultLanguage; } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public TextAnalyticsServiceVersion getServiceVersion() { return serviceVersion; } /** * Returns the detected language and a numeric score between zero and one. Scores close to one indicate 100% * certainty that the identified language is true. * * <p><strong>Code sample</strong></p> * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage * * @param text The text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DetectedLanguage detected language} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DetectedLanguage> detectLanguage(String text) { return detectLanguage(text, defaultCountryHint); } /** * Returns a {@link Response} containing the detected language and a numeric score between zero and one. Scores * close to one indicate 100% certainty that the identified language is true. * * <p><strong>Code sample</strong></p> * <p>Detects language with http response in a text with a provided country hint. 
Subscribes to the call * asynchronously and prints out the detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage * * @param text The text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not * specified. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DetectedLanguage detected language} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.SINGLE) /** * Returns the detected language for a batch of input. * * <p><strong>Code sample</strong></p> * <p>Detects language in a list of string inputs. Subscribes to the call asynchronously and prints out the * detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch * * @param textInputs The list of texts to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch(Iterable<String> textInputs) { return detectLanguageBatch(textInputs, defaultCountryHint, null); } /** * Returns the detected language for a batch of input with the provided country hint. * * <p><strong>Code sample</strong></p> * <p>Detects language in a list of string inputs with a provided country hint for the batch. 
Subscribes to the * call asynchronously and prints out the detected language details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch * * @param textInputs The list of texts to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param countryHint A country hint for the entire batch. Accepts two letter country codes specified by ISO * 3166-1 alpha-2. Defaults to "US" if not specified. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link DocumentResultCollection batch} of the * {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch( Iterable<String> textInputs, String countryHint, TextAnalyticsRequestOptions options) { return detectLanguageBatchWithResponse( mapByIndex(textInputs, (index, value) -> new DetectLanguageInput(index, value, countryHint)), options) .flatMap(FluxUtil::toMono); } /** * Returns the detected language for a batch of input. * * <p><strong>Code sample</strong></p> * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatchWithResponse * * @param textInputs The list of {@link DetectLanguageInput inputs/documents} to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. 
* * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link DetectLanguageResult detected language}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguageBatchWithResponse( Iterable<DetectLanguageInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext( context -> detectLanguageAsyncClient.detectLanguageBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of general categorized entities in the provided text. For a list of supported entity types, * check: <a href="https: * check: <a href="https: * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text. Subscribes to the call asynchronously and prints out the recognized entity * details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities * * @param text the text to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<CategorizedEntity> recognizeEntities(String text) { return recognizeEntities(text, defaultLanguage); } /** * Returns a list of general categorized entities in the provided text. For a list of supported entity types, * check: <a href="https: * check: <a href="https: * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text with provided language hint. 
Subscribes to the call asynchronously and prints * out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities * * @param text the text to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as * default. * * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<CategorizedEntity> recognizeEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizeEntityAsyncClient.recognizeEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of general categorized entities for the provided list of texts. * * <p><strong>Code sample</strong></p> * <p>Recognize entities in a text. Subscribes to the call asynchronously and prints out the entity details * when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatch * * @param textInputs A list of texts to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeEntitiesResult categorized entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch(Iterable<String> textInputs) {
    // Delegate to the full overload with the default language and no extra options.
    return recognizeEntitiesBatch(textInputs, defaultLanguage, null);
}

/**
 * Returns a list of general categorized entities for the provided list of texts.
 *
 * @param textInputs A list of texts to recognize entities for.
 * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as
 * default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link RecognizeEntitiesResult categorized entity}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch(
    Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
    // Index each raw string as a TextDocumentInput, call the with-response variant, unwrap the envelope.
    final Iterable<TextDocumentInput> documentInputs =
        mapByIndex(textInputs, (id, value) -> new TextDocumentInput(id, value, language));
    return recognizeEntitiesBatchWithResponse(documentInputs, options).flatMap(FluxUtil::toMono);
}

/**
 * Returns a list of general categorized entities for the provided list of text inputs.
 *
 * <p>Recognize entities in a list of TextDocumentInput.
Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link RecognizeEntitiesResult categorized entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizeEntitiesResult>>> recognizeEntitiesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizeEntityAsyncClient.recognizeEntitiesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text. For the list of * supported entity types, check <a href="https: * for the list of enabled languages. * * <p>Recognize PII entities in a text. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntities * * @param text the text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the {@link PiiEntity PII entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. 
* @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<PiiEntity> recognizePiiEntities(String text) { return recognizePiiEntities(text, defaultLanguage); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text. For the list of * supported entity types, check: <a href="https: * href="https: * * <p>Recognize PII entities in a text with provided language hint. Subscribes to the call asynchronously and * prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntities * * @param text the text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the {@link PiiEntity PII entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<PiiEntity> recognizePiiEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizePiiEntityAsyncClient.recognizePiiEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of texts. For the list * of supported entity types, check: <a href="https: * check: <a href="https: * * <p>Recognize PII entities in a list of string inputs. 
Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatch * * @param textInputs A list of text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizePiiEntitiesResult PII entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch( Iterable<String> textInputs) { return recognizePiiEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of texts. For the list * of supported entity types, check <a href="https: * check: <a href="https: * * <p>Recognize PII entities in a list of string inputs with provided language hint. Subscribes to the call * asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatch * * @param textInputs A list of text to recognize PII entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizePiiEntitiesResult PII entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch(
    Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
    // Index each raw string as a TextDocumentInput, call the with-response variant, unwrap the envelope.
    final Iterable<TextDocumentInput> documentInputs =
        mapByIndex(textInputs, (id, value) -> new TextDocumentInput(id, value, language));
    return recognizePiiEntitiesBatchWithResponse(documentInputs, options).flatMap(FluxUtil::toMono);
}

/**
 * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the batch of document inputs.
 *
 * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize PII entities for.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
 * and show statistics.
 *
 * @return A {@link Mono} containing a {@link Response} whose {@link Response
 * {@link DocumentResultCollection batch} of {@link RecognizePiiEntitiesResult PII entity}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<DocumentResultCollection<RecognizePiiEntitiesResult>>> recognizePiiEntitiesBatchWithResponse(
    Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) {
    // Surface synchronous failures as an error Mono instead of throwing to the caller.
    try {
        return withContext(ctx ->
            recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithResponse(textInputs, options, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}

/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the provided text.
 *
 * @param text the text to recognize linked entities for.
 *
 * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text) {
    // Delegate to the language overload using the client-wide default.
    return recognizeLinkedEntities(text, defaultLanguage);
}

/**
 * Returns a list of recognized entities with links to a well-known knowledge base for the provided text.
 *
 * @param text the text to recognize linked entities for.
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of texts. See * <a href="https: * * <p>Recognize linked entities in a list of string inputs. Subscribes to the call asynchronously and prints out the * entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatch * * @param textInputs A list of text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeLinkedEntitiesResult linked entity} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch( Iterable<String> textInputs) { return recognizeLinkedEntitiesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of texts. See * <a href="https: * * <p>Recognize linked entities in a list of string inputs with provided language hint. Subscribes to the call * asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatch * * @param textInputs A list of text to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link RecognizeLinkedEntitiesResult linked entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return recognizeLinkedEntitiesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of recognized entities with links to a well-known knowledge base for the list of inputs. See * <a href="https: * * <p>Recognize linked entities in a list of TextDocumentInput and provided reuqest options to show statistics. 
* Subscribes to the call asynchronously and prints out the entity details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize linked entities for. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link RecognizeLinkedEntitiesResult linked entity}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<RecognizeLinkedEntitiesResult>>> recognizeLinkedEntitiesBatchWithResponse(Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesBatchWithResponse( textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a list of strings denoting the key phrases in the input text. * * <p>Extract key phrases in a text. Subscribes to the call asynchronously and prints out the * key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrases * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link PagedFlux} containing the key phrases of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<String> extractKeyPhrases(String text) {
    // Delegate to the language overload using the client-wide default.
    return extractKeyPhrases(text, defaultLanguage);
}

/**
 * Returns a list of strings denoting the key phrases in the input text.
 *
 * @param text the text to be analyzed.
 * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for
 * English as default.
 *
 * @return A {@link PagedFlux} containing the key phrases of the text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<String> extractKeyPhrases(String text, String language) {
    // Synchronous failures are surfaced through the paged flux rather than thrown to the caller.
    try {
        return new PagedFlux<>(() -> withContext(
            ctx -> extractKeyPhraseAsyncClient.extractKeyPhrasesWithResponse(text, language, ctx)));
    } catch (RuntimeException e) {
        return pagedFluxError(logger, e);
    }
}

/**
 * Returns a list of strings denoting the key phrases in the input text.
 *
 * @param textInputs A list of text to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link ExtractKeyPhraseResult key phrases} of the text.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch(Iterable<String> textInputs) {
        // Delegate to the full overload using the client-wide default language and no request options.
        return extractKeyPhrasesBatch(textInputs, defaultLanguage, null);
    }

    /**
     * Returns a list of strings denoting the key phrases in the input texts. See the Text Analytics
     * supported-languages documentation for the list of enabled languages.
     *
     * <p>Extract key phrases in a list of string inputs with a provided language. Subscribes to the call
     * asynchronously and prints out the key phrases when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatch}
     *
     * @param textInputs A list of text to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for
     * English as default.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link ExtractKeyPhraseResult key phrases}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch(
        Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
        // Wrap each raw string into a TextDocumentInput (id = element index) so the document-batch
        // overload can be reused, then unwrap the Response into its value.
        return extractKeyPhrasesBatchWithResponse(
            mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options)
            .flatMap(FluxUtil::toMono);
    }

    /**
     * Returns a list of strings denoting the key phrases in the input documents. See the Text Analytics
     * supported-languages documentation for the list of enabled languages.
     *
     * <p>Extract key phrases in a list of TextDocumentInput with request options. Subscribes to the call
     * asynchronously and prints out the key phrases when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse}
     *
     * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing a {@link Response} whose value contains the
     * {@link DocumentResultCollection batch} of {@link ExtractKeyPhraseResult key phrases}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<DocumentResultCollection<ExtractKeyPhraseResult>>> extractKeyPhrasesBatchWithResponse(
        Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) {
        try {
            // withContext captures the subscriber context so it can be propagated to the service call.
            return withContext(context ->
                extractKeyPhraseAsyncClient.extractKeyPhrasesBatchWithResponse(textInputs, options, context));
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the returned Mono rather than throwing at call time.
            return monoError(logger, ex);
        }
    }

    /**
     * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and
     * Neutral) for the document and each sentence within it.
     *
     * <p>Analyze sentiment in a text. Subscribes to the call asynchronously and prints out the
     * sentiment details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentiment}
     *
     * @param text the text to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentSentiment> analyzeSentiment(String text) {
        try {
            // Delegate to the language-aware overload with the client-wide default language.
            return analyzeSentiment(text, defaultLanguage);
        } catch (RuntimeException ex) {
            return monoError(logger, ex);
        }
    }

    /**
     * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and
     * Neutral) for the document and each sentence within it.
     *
     * <p>Analyze sentiment in a text with a provided language. Subscribes to the call asynchronously and prints
     * out the sentiment details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentiment}
     *
     * @param text the text to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for
     * English as default.
     *
     * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentSentiment> analyzeSentiment(String text, String language) {
        // Run the single text through the batch API and unwrap the lone result.
        return analyzeSentimentBatch(Collections.singletonList(text), language, null)
            .map(documentCollection -> {
                final Iterator<AnalyzeSentimentResult> iterator = documentCollection.iterator();
                if (!iterator.hasNext()) {
                    // One input should always yield exactly one result; an empty batch is a service anomaly.
                    throw logger.logExceptionAsError(
                        new IllegalStateException("An empty collection returned which is an unexpected error."));
                }

                final AnalyzeSentimentResult sentimentResult = iterator.next();
                if (sentimentResult.isError()) {
                    // Map the per-document service error onto a TextAnalyticsException for the caller.
                    throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(sentimentResult.getError()));
                }

                return sentimentResult.getDocumentSentiment();
            });
    }

    /**
     * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and
     * Neutral) for each document and each sentence within it.
     *
     * <p>Analyze sentiment in a list of string inputs. Subscribes to the call asynchronously and prints out the
     * sentiment details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatch}
     *
     * @param textInputs A list of text to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link AnalyzeSentimentResult text sentiment} of the text.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch(Iterable<String> textInputs) {
        // Delegate to the full overload using the client-wide default language and no request options.
        return analyzeSentimentBatch(textInputs, defaultLanguage, null);
    }

    /**
     * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and
     * Neutral) for each document and each sentence within it.
     *
     * <p>Analyze sentiment in a list of string inputs with a provided language. Subscribes to the call
     * asynchronously and prints out the sentiment details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatch}
     *
     * @param textInputs A list of text to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for
     * English as default.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link AnalyzeSentimentResult text sentiment}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch(
        Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
        // Wrap each raw string into a TextDocumentInput (id = element index) so the document-batch
        // overload can be reused, then unwrap the Response into its value.
        return analyzeSentimentBatchWithResponse(
            mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options)
            .flatMap(FluxUtil::toMono);
    }

    /**
     * Returns a sentiment prediction, as well as sentiment scores for each sentiment class (Positive, Negative, and
     * Neutral) for each document and each sentence within it.
     *
     * <p>Analyze sentiment in a list of TextDocumentInput with provided request options. Subscribes to the call
     * asynchronously and prints out the sentiment details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentimentBatchWithResponse}
     *
     * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing a {@link Response} whose value contains the
     * {@link DocumentResultCollection batch} of {@link AnalyzeSentimentResult text sentiment}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse(
        Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) {
        try {
            // withContext captures the subscriber context so it can be propagated to the service call.
            return withContext(context ->
                analyzeSentimentAsyncClient.analyzeSentimentBatchWithResponse(textInputs, options, context));
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the returned Mono rather than throwing at call time.
            return monoError(logger, ex);
        }
    }
}
class TextAnalyticsAsyncClient {
    private final ClientLogger logger = new ClientLogger(TextAnalyticsAsyncClient.class);
    private final TextAnalyticsClientImpl service;
    private final TextAnalyticsServiceVersion serviceVersion;
    // Defaults applied when a caller does not supply a per-call country hint / language.
    private final String defaultCountryHint;
    private final String defaultLanguage;

    // Per-operation sub-clients; package-private so they are reachable from sibling classes/tests.
    final DetectLanguageAsyncClient detectLanguageAsyncClient;
    final AnalyzeSentimentAsyncClient analyzeSentimentAsyncClient;
    final ExtractKeyPhraseAsyncClient extractKeyPhraseAsyncClient;
    final RecognizeEntityAsyncClient recognizeEntityAsyncClient;
    final RecognizePiiEntityAsyncClient recognizePiiEntityAsyncClient;
    final RecognizeLinkedEntityAsyncClient recognizeLinkedEntityAsyncClient;

    /**
     * Create a {@code TextAnalyticsAsyncClient} that sends requests to the Text Analytics service's endpoint. Each
     * service call goes through the pipeline configured by the {@link TextAnalyticsClientBuilder}.
     *
     * @param service The proxy service used to perform REST calls.
     * @param serviceVersion The versions of Azure Text Analytics supported by this client library.
     * @param defaultCountryHint The default country hint.
     * @param defaultLanguage The default language.
     */
    TextAnalyticsAsyncClient(TextAnalyticsClientImpl service, TextAnalyticsServiceVersion serviceVersion,
        String defaultCountryHint, String defaultLanguage) {
        this.service = service;
        this.serviceVersion = serviceVersion;
        this.defaultCountryHint = defaultCountryHint;
        this.defaultLanguage = defaultLanguage;
        this.detectLanguageAsyncClient = new DetectLanguageAsyncClient(service);
        this.analyzeSentimentAsyncClient = new AnalyzeSentimentAsyncClient(service);
        this.extractKeyPhraseAsyncClient = new ExtractKeyPhraseAsyncClient(service);
        this.recognizeEntityAsyncClient = new RecognizeEntityAsyncClient(service);
        this.recognizePiiEntityAsyncClient = new RecognizePiiEntityAsyncClient(service);
        this.recognizeLinkedEntityAsyncClient = new RecognizeLinkedEntityAsyncClient(service);
    }

    /**
     * Get default country hint code.
     *
     * @return the default country hint code
     */
    public String getDefaultCountryHint() {
        return defaultCountryHint;
    }

    /**
     * Get default language when the builder is setup.
     *
     * @return the default language
     */
    public String getDefaultLanguage() {
        return defaultLanguage;
    }

    /**
     * Gets the service version the client is using.
     *
     * @return the service version the client is using.
     */
    public TextAnalyticsServiceVersion getServiceVersion() {
        return serviceVersion;
    }

    /**
     * Returns the detected language and a confidence score between zero and one. Scores close to one indicate 100%
     * certainty that the identified language is true.
     *
     * This method will use the default country hint configured on the {@link TextAnalyticsClientBuilder} as
     * the country hint.
     *
     * <p><strong>Code sample</strong></p>
     * <p>Detects language in a text. Subscribes to the call asynchronously and prints out the detected language
     * details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage}
     *
     * @param text The text to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link Mono} containing the {@link DetectedLanguage detected language} of the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DetectedLanguage> detectLanguage(String text) {
        // Delegate to the country-hint-aware overload with the client-wide default country hint.
        return detectLanguage(text, defaultCountryHint);
    }

    /**
     * Returns the detected language and a confidence score between zero and one. Scores close to one indicate 100%
     * certainty that the identified language is true.
     *
     * <p><strong>Code sample</strong></p>
     * <p>Detects language in a text with a provided country hint. Subscribes to the call asynchronously and prints
     * out the detected language details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguage}
     *
     * @param text The text to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param countryHint Accepts two letter country codes specified by ISO 3166-1 alpha-2. Defaults to "US" if not
     * specified.
     *
     * @return A {@link Mono} containing the {@link DetectedLanguage detected language} of the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DetectedLanguage> detectLanguage(String text, String countryHint) {
        // NOTE(review): the body of this overload was missing from the original source, leaving the
        // @ServiceMethod annotation orphaned even though detectLanguage(String) delegates to it. Restored
        // by direct analogy with analyzeSentiment(String, String): run the single text through the batch
        // API and unwrap the lone result. Confirm getPrimaryLanguage() is the correct accessor on
        // DetectLanguageResult before merging.
        return detectLanguageBatch(Collections.singletonList(text), countryHint, null)
            .map(documentCollection -> {
                final Iterator<DetectLanguageResult> iterator = documentCollection.iterator();
                if (!iterator.hasNext()) {
                    // One input should always yield exactly one result; an empty batch is a service anomaly.
                    throw logger.logExceptionAsError(
                        new IllegalStateException("An empty collection returned which is an unexpected error."));
                }

                final DetectLanguageResult languageResult = iterator.next();
                if (languageResult.isError()) {
                    // Map the per-document service error onto a TextAnalyticsException for the caller.
                    throw logger.logExceptionAsError(Transforms.toTextAnalyticsException(languageResult.getError()));
                }

                return languageResult.getPrimaryLanguage();
            });
    }

    /**
     * Returns the detected language for a batch of input.
     *
     * This method will use the default country hint configured on the {@link TextAnalyticsClientBuilder} as
     * the country hint.
     *
     * <p><strong>Code sample</strong></p>
     * <p>Detects language in a list of string inputs. Subscribes to the call asynchronously and prints out the
     * detected language details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch}
     *
     * @param textInputs The list of texts to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link DetectLanguageResult detected language}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch(Iterable<String> textInputs) {
        // Delegate to the full overload using the client-wide default country hint and no request options.
        return detectLanguageBatch(textInputs, defaultCountryHint, null);
    }

    /**
     * Returns the detected language for a batch of input with the provided country hint.
     *
     * <p><strong>Code sample</strong></p>
     * <p>Detects language in a list of string inputs with a provided country hint for the batch. Subscribes to the
     * call asynchronously and prints out the detected language details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatch}
     *
     * @param textInputs The list of texts to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param countryHint A country hint for the entire batch. Accepts two letter country codes specified by ISO
     * 3166-1 alpha-2. Defaults to "US" if not specified.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing a {@link DocumentResultCollection batch} of the
     * {@link DetectLanguageResult detected language}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<DetectLanguageResult>> detectLanguageBatch(
        Iterable<String> textInputs, String countryHint, TextAnalyticsRequestOptions options) {
        // Wrap each raw string into a DetectLanguageInput (id = element index) so the document-batch
        // overload can be reused, then unwrap the Response into its value.
        return detectLanguageBatchWithResponse(
            mapByIndex(textInputs, (index, value) -> new DetectLanguageInput(index, value, countryHint)), options)
            .flatMap(FluxUtil::toMono);
    }

    /**
     * Returns the detected language for a batch of input.
     *
     * <p><strong>Code sample</strong></p>
     * <p>Detects language in a batch of documents. Subscribes to the call asynchronously and prints out the
     * detected language details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.detectLanguageBatchWithResponse}
     *
     * @param textInputs The list of {@link DetectLanguageInput inputs/documents} to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing a {@link Response} whose value contains the
     * {@link DocumentResultCollection batch} of {@link DetectLanguageResult detected language}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<DocumentResultCollection<DetectLanguageResult>>> detectLanguageBatchWithResponse(
        Iterable<DetectLanguageInput> textInputs, TextAnalyticsRequestOptions options) {
        try {
            // withContext captures the subscriber context so it can be propagated to the service call.
            return withContext(
                context -> detectLanguageAsyncClient.detectLanguageBatchWithResponse(textInputs, options, context));
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the returned Mono rather than throwing at call time.
            return monoError(logger, ex);
        }
    }

    /**
     * Returns a list of general categorized entities in the provided text.
     *
     * This method will use the default language configured on the {@link TextAnalyticsClientBuilder} as
     * the language.
     *
     * <p><strong>Code sample</strong></p>
     * <p>Recognize entities in a text. Subscribes to the call asynchronously and prints out the recognized entity
     * details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities}
     *
     * @param text the text to recognize entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<CategorizedEntity> recognizeEntities(String text) {
        // Delegate to the language-aware overload with the client-wide default language.
        return recognizeEntities(text, defaultLanguage);
    }

    /**
     * Returns a list of general categorized entities in the provided text.
     *
     * <p><strong>Code sample</strong></p>
     * <p>Recognize entities in a text with provided language hint. Subscribes to the call asynchronously and prints
     * out the entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntities}
     *
     * @param text the text to recognize entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as
     * default.
     *
     * @return A {@link PagedFlux} containing the {@link CategorizedEntity categorized entities} of the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<CategorizedEntity> recognizeEntities(String text, String language) {
        try {
            // The supplier defers the service call until the PagedFlux is subscribed to.
            return new PagedFlux<>(() -> withContext(context ->
                recognizeEntityAsyncClient.recognizeEntitiesWithResponse(text, language, context)));
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the returned PagedFlux rather than throwing at call time.
            return pagedFluxError(logger, ex);
        }
    }

    /**
     * Returns a list of general categorized entities for the provided list of texts.
     *
     * This method will use the default language configured on the {@link TextAnalyticsClientBuilder} as
     * the language.
     *
     * <p><strong>Code sample</strong></p>
     * <p>Recognize entities in a list of texts. Subscribes to the call asynchronously and prints out the entity
     * details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatch}
     *
     * @param textInputs A list of texts to recognize entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link RecognizeEntitiesResult categorized entity} of the text.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch(Iterable<String> textInputs) {
        // Delegate to the full overload using the client-wide default language and no request options.
        return recognizeEntitiesBatch(textInputs, defaultLanguage, null);
    }

    /**
     * Returns a list of general categorized entities for the provided list of texts.
     *
     * <p><strong>Code sample</strong></p>
     * <p>Recognize entities in a list of texts with the provided language hint. Subscribes to the call asynchronously
     * and prints out the entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatch}
     *
     * @param textInputs A list of texts to recognize entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param language The 2 letter ISO 639-1 representation of language. If not set, uses "en" for English as
     * default.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link RecognizeEntitiesResult categorized entity}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<RecognizeEntitiesResult>> recognizeEntitiesBatch(
        Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
        // Wrap each raw string into a TextDocumentInput (id = element index) so the document-batch
        // overload can be reused, then unwrap the Response into its value.
        return recognizeEntitiesBatchWithResponse(
            mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options)
            .flatMap(FluxUtil::toMono);
    }

    /**
     * Returns a list of general categorized entities for the provided list of text inputs.
     *
     * <p><strong>Code sample</strong></p>
     * <p>Recognize entities in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the
     * entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse}
     *
     * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing a {@link Response} whose value contains the
     * {@link DocumentResultCollection batch} of {@link RecognizeEntitiesResult categorized entity}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<DocumentResultCollection<RecognizeEntitiesResult>>> recognizeEntitiesBatchWithResponse(
        Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) {
        try {
            // withContext captures the subscriber context so it can be propagated to the service call.
            return withContext(context ->
                recognizeEntityAsyncClient.recognizeEntitiesBatchWithResponse(textInputs, options, context));
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the returned Mono rather than throwing at call time.
            return monoError(logger, ex);
        }
    }

    /**
     * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text.
     *
     * This method will use the default language configured on the {@link TextAnalyticsClientBuilder} as
     * the language.
     *
     * <p>Recognize Personally Identifiable Information entities in a text. Subscribes to the call asynchronously and
     * prints out the entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntities}
     *
     * @param text the text to recognize Personally Identifiable Information entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link PagedFlux} containing the {@link PiiEntity Personally Identifiable Information entities} of
     * the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<PiiEntity> recognizePiiEntities(String text) {
        // Delegate to the language-aware overload with the client-wide default language.
        return recognizePiiEntities(text, defaultLanguage);
    }

    /**
     * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the text.
     *
     * <p>Recognize Personally Identifiable Information entities in a text with provided language hint. Subscribes to
     * the call asynchronously and prints out the entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntities}
     *
     * @param text the text to recognize Personally Identifiable Information entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for
     * English as default.
     *
     * @return A {@link PagedFlux} containing the {@link PiiEntity Personally Identifiable Information entities} of
     * the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<PiiEntity> recognizePiiEntities(String text, String language) {
        try {
            // The supplier defers the service call until the PagedFlux is subscribed to.
            return new PagedFlux<>(() -> withContext(context ->
                recognizePiiEntityAsyncClient.recognizePiiEntitiesWithResponse(text, language, context)));
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the returned PagedFlux rather than throwing at call time.
            return pagedFluxError(logger, ex);
        }
    }

    /**
     * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of texts.
     *
     * This method will use the default language configured on the {@link TextAnalyticsClientBuilder} as
     * the language.
     *
     * <p>Recognize Personally Identifiable Information entities in a list of string inputs. Subscribes to the call
     * asynchronously and prints out the entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatch}
     *
     * @param textInputs A list of text to recognize Personally Identifiable Information entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link RecognizePiiEntitiesResult Personally Identifiable Information entity} of the text.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch(
        Iterable<String> textInputs) {
        // Delegate to the full overload using the client-wide default language and no request options.
        return recognizePiiEntitiesBatch(textInputs, defaultLanguage, null);
    }

    /**
     * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the list of texts.
     *
     * <p>Recognize Personally Identifiable Information entities in a list of string inputs with provided language
     * hint. Subscribes to the call asynchronously and prints out the entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatch}
     *
     * @param textInputs A list of text to recognize Personally Identifiable Information entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for
     * English as default.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link RecognizePiiEntitiesResult Personally Identifiable Information entity}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<RecognizePiiEntitiesResult>> recognizePiiEntitiesBatch(
        Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
        // Wrap each raw string into a TextDocumentInput (id = element index) so the document-batch
        // overload can be reused, then unwrap the Response into its value.
        return recognizePiiEntitiesBatchWithResponse(
            mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options)
            .flatMap(FluxUtil::toMono);
    }

    /**
     * Returns a list of personal information entities ("SSN", "Bank Account", etc) in the batch of document inputs.
     *
     * <p>Recognize Personally Identifiable Information entities in a list of TextDocumentInput with provided
     * statistics options. Subscribes to the call asynchronously and prints out the entity details when a response is
     * received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse}
     *
     * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize
     * Personally Identifiable Information entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing a {@link Response} whose value contains the
     * {@link DocumentResultCollection batch} of
     * {@link RecognizePiiEntitiesResult Personally Identifiable Information entity}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<DocumentResultCollection<RecognizePiiEntitiesResult>>> recognizePiiEntitiesBatchWithResponse(
        Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) {
        try {
            // withContext captures the subscriber context so it can be propagated to the service call.
            return withContext(context ->
                recognizePiiEntityAsyncClient.recognizePiiEntitiesBatchWithResponse(textInputs, options, context));
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the returned Mono rather than throwing at call time.
            return monoError(logger, ex);
        }
    }

    /**
     * Returns a list of recognized entities with links to a well-known knowledge base for the provided text.
     *
     * This method will use the default language configured on the {@link TextAnalyticsClientBuilder} as
     * the language.
     *
     * <p>Recognize linked entities in a text. Subscribes to the call asynchronously and prints out the
     * entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntities}
     *
     * @param text the text to recognize linked entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text) {
        // Delegate to the language-aware overload with the client-wide default language.
        return recognizeLinkedEntities(text, defaultLanguage);
    }

    /**
     * Returns a list of recognized entities with links to a well-known knowledge base for the provided text.
     *
     * <p>Recognize linked entities in a text with provided language hint. Subscribes to the call asynchronously
     * and prints out the entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntities}
     *
     * @param text the text to recognize linked entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for
     * English as default.
     *
     * @return A {@link PagedFlux} containing the {@link LinkedEntity linked entities} of the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<LinkedEntity> recognizeLinkedEntities(String text, String language) {
        try {
            // The supplier defers the service call until the PagedFlux is subscribed to.
            return new PagedFlux<>(() -> withContext(context ->
                recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesWithResponse(text, language, context)));
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the returned PagedFlux rather than throwing at call time.
            return pagedFluxError(logger, ex);
        }
    }

    /**
     * Returns a list of recognized entities with links to a well-known knowledge base for the list of texts.
     *
     * This method will use the default language configured on the {@link TextAnalyticsClientBuilder} as
     * the language.
     *
     * <p>Recognize linked entities in a list of string inputs. Subscribes to the call asynchronously and prints out
     * the entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatch}
     *
     * @param textInputs A list of text to recognize linked entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link RecognizeLinkedEntitiesResult linked entity} of the text.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch(
        Iterable<String> textInputs) {
        // Delegate to the full overload using the client-wide default language and no request options.
        return recognizeLinkedEntitiesBatch(textInputs, defaultLanguage, null);
    }

    /**
     * Returns a list of recognized entities with links to a well-known knowledge base for the list of texts.
     *
     * <p>Recognize linked entities in a list of string inputs with provided language hint. Subscribes to the call
     * asynchronously and prints out the entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatch}
     *
     * @param textInputs A list of text to recognize linked entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for
     * English as default.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
     * {@link RecognizeLinkedEntitiesResult linked entity}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<DocumentResultCollection<RecognizeLinkedEntitiesResult>> recognizeLinkedEntitiesBatch(
        Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
        // Wrap each raw string into a TextDocumentInput (id = element index) so the document-batch
        // overload can be reused, then unwrap the Response into its value.
        return recognizeLinkedEntitiesBatchWithResponse(
            mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options)
            .flatMap(FluxUtil::toMono);
    }

    /**
     * Returns a list of recognized entities with links to a well-known knowledge base for the list of inputs.
     *
     * <p>Recognize linked entities in a list of TextDocumentInput and provided request options to show statistics.
     * Subscribes to the call asynchronously and prints out the entity details when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse}
     *
     * @param textInputs A list of {@link TextDocumentInput inputs/documents} to recognize linked entities for.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents
     * and show statistics.
     *
     * @return A {@link Mono} containing a {@link Response} whose value contains the
     * {@link DocumentResultCollection batch} of {@link RecognizeLinkedEntitiesResult linked entity}.
     *
     * @throws NullPointerException if {@code textInputs} is {@code null}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<DocumentResultCollection<RecognizeLinkedEntitiesResult>>>
        recognizeLinkedEntitiesBatchWithResponse(Iterable<TextDocumentInput> textInputs,
        TextAnalyticsRequestOptions options) {
        try {
            // withContext captures the subscriber context so it can be propagated to the service call.
            return withContext(context -> recognizeLinkedEntityAsyncClient.recognizeLinkedEntitiesBatchWithResponse(
                textInputs, options, context));
        } catch (RuntimeException ex) {
            // Surface synchronous failures through the returned Mono rather than throwing at call time.
            return monoError(logger, ex);
        }
    }

    /**
     * Returns a list of strings denoting the key phrases in the input text.
     *
     * This method will use the default language configured on the {@link TextAnalyticsClientBuilder} as
     * the language.
     *
     * <p>Extract key phrases in a text. Subscribes to the call asynchronously and prints out the
     * key phrases when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrases}
     *
     * @param text the text to be analyzed.
     * For text length limits, maximum batch size, and supported text encoding, see the Text Analytics
     * data-limits documentation.
     *
     * @return A {@link PagedFlux} containing the key phrases of the text.
     *
     * @throws NullPointerException if {@code text} is {@code null}.
     * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<String> extractKeyPhrases(String text) {
        // Delegate to the language-aware overload with the client-wide default language.
        return extractKeyPhrases(text, defaultLanguage);
    }

    /**
     * Returns a list of strings denoting the key phrases in the input text. See the Text Analytics
     * supported-languages documentation for the list of enabled languages.
     *
     * <p>Extract key phrases in a text with a provided language. Subscribes to the call asynchronously and prints
     * out the key phrases when a response is received.</p>
     *
     * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrases}
     *
     * @param text the text to be analyzed.
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * * @return A {@link PagedFlux} containing the key phrases of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedFlux<String> extractKeyPhrases(String text, String language) { try { return new PagedFlux<>(() -> withContext(context -> extractKeyPhraseAsyncClient.extractKeyPhrasesWithResponse(text, language, context))); } catch (RuntimeException ex) { return pagedFluxError(logger, ex); } } /** * Returns a list of strings denoting the key phrases in the input text. * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Extract key phrases in a list of string inputs. Subscribes to the call asynchronously and prints out the * key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link ExtractKeyPhraseResult key phrases} of the text. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch(Iterable<String> textInputs) { return extractKeyPhrasesBatch(textInputs, defaultLanguage, null); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. 
* * <p>Extract key phrases in a list of string inputs with a provided language. Subscribes to the call asynchronously * and prints out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatch * * @param textInputs A list of text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param language The 2 letter ISO 639-1 representation of language for the text. If not set, uses "en" for * English as default. * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the * {@link ExtractKeyPhraseResult key phrases}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<DocumentResultCollection<ExtractKeyPhraseResult>> extractKeyPhrasesBatch( Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) { return extractKeyPhrasesBatchWithResponse( mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options) .flatMap(FluxUtil::toMono); } /** * Returns a list of strings denoting the key phrases in the input text. See <a href="https: * for the list of enabled languages. * * <p>Extract key phrases in a list of TextDocumentInput with request options. Subscribes to the call asynchronously * and prints out the key phrases when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse * * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed. 
* For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model for documents * and show statistics. * * @return A {@link Mono} containing a {@link Response} whose {@link Response * {@link DocumentResultCollection batch} of {@link ExtractKeyPhraseResult key phrases}. * * @throws NullPointerException if {@code textInputs} is {@code null}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<DocumentResultCollection<ExtractKeyPhraseResult>>> extractKeyPhrasesBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) { try { return withContext(context -> extractKeyPhraseAsyncClient.extractKeyPhrasesBatchWithResponse(textInputs, options, context)); } catch (RuntimeException ex) { return monoError(logger, ex); } } /** * Returns a sentiment prediction, as well as confidence scores for each sentiment label (Positive, Negative, and * Neutral) for the document and each sentence within it. * * This method will use the default language that sets up in * {@link TextAnalyticsClientBuilder * the language. * * <p>Analyze sentiment in a list of TextDocumentInput. Subscribes to the call asynchronously and prints out the * sentiment details when a response is received.</p> * * {@codesnippet com.azure.ai.textanalytics.TextAnalyticsAsyncClient.analyzeSentiment * * @param text the text to be analyzed. * For text length limits, maximum batch size, and supported text encoding, see * <a href="https: * * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text. * * @throws NullPointerException if {@code text} is {@code null}. * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}. 
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentSentiment> analyzeSentiment(String text) {
    try {
        // Delegate to the language-aware overload using the client-wide default language.
        return analyzeSentiment(text, defaultLanguage);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}

/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for the document and each sentence within it, using the
 * caller-supplied language hint.
 *
 * @param text the text to be analyzed.
 * @param language The 2 letter ISO 639-1 representation of language for the text. If not set,
 * uses "en" for English as default.
 *
 * @return A {@link Mono} containing the {@link DocumentSentiment document sentiment} of the text.
 *
 * @throws NullPointerException if {@code text} is {@code null}.
 * @throws TextAnalyticsException if the response returned with an {@link TextAnalyticsError error}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentSentiment> analyzeSentiment(String text, String language) {
    // FIX: wrap in try/catch so a synchronous RuntimeException (e.g. from building the batch
    // request) is returned through the reactive error channel, consistent with every sibling
    // method in this class, instead of being thrown from a method that returns a Mono.
    try {
        return analyzeSentimentBatch(Collections.singletonList(text), language, null)
            .map(documentCollection -> {
                final Iterator<AnalyzeSentimentResult> iterator = documentCollection.iterator();
                if (!iterator.hasNext()) {
                    throw logger.logExceptionAsError(
                        new IllegalStateException("An empty collection returned which is an unexpected error."));
                }
                final AnalyzeSentimentResult sentimentResult = iterator.next();
                if (sentimentResult.isError()) {
                    throw logger.logExceptionAsError(
                        Transforms.toTextAnalyticsException(sentimentResult.getError()));
                }
                return sentimentResult.getDocumentSentiment();
            });
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}

/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for each document and each sentence within it, using the
 * client-wide default language.
 *
 * @param textInputs A list of text to be analyzed.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link AnalyzeSentimentResult text sentiment} of the text.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch(Iterable<String> textInputs) {
    return analyzeSentimentBatch(textInputs, defaultLanguage, null);
}

/**
 * Returns a sentiment prediction, as well as confidence scores for each sentiment label
 * (Positive, Negative, and Neutral) for each document and each sentence within it, using the
 * provided language hint and request options.
 *
 * @param textInputs A list of text to be analyzed.
 * @param language The 2 letter ISO 639-1 representation of language for the text. If not set,
 * uses "en" for English as default.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 *
 * @return A {@link Mono} containing the {@link DocumentResultCollection batch} of the
 * {@link AnalyzeSentimentResult text sentiment}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DocumentResultCollection<AnalyzeSentimentResult>> analyzeSentimentBatch(
    Iterable<String> textInputs, String language, TextAnalyticsRequestOptions options) {
    return analyzeSentimentBatchWithResponse(
        mapByIndex(textInputs, (index, value) -> new TextDocumentInput(index, value, language)), options)
        .flatMap(FluxUtil::toMono);
}

/**
 * Returns sentiment results together with the raw HTTP {@link Response} for a batch of
 * {@link TextDocumentInput documents}, with request options to configure the scoring model and
 * statistics.
 *
 * @param textInputs A list of {@link TextDocumentInput inputs/documents} to be analyzed.
 * @param options The {@link TextAnalyticsRequestOptions options} to configure the scoring model
 * for documents and show statistics.
 *
 * @return A {@link Mono} containing a {@link Response} whose value is the
 * {@link DocumentResultCollection batch} of {@link AnalyzeSentimentResult text sentiment}.
 *
 * @throws NullPointerException if {@code textInputs} is {@code null}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse(
    Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options) {
    try {
        return withContext(context ->
            analyzeSentimentAsyncClient.analyzeSentimentBatchWithResponse(textInputs, options, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
}
You don't want to expose the setter to users, and it is bad practice to modify auto-generated code. The setter could be removed during code generation with the appropriate settings. Created an issue to regenerate the code base: https://github.com/Azure/azure-sdk-for-java/issues/8344
/**
 * Converts the service-model {@link DocumentSentiment} into the SDK-model
 * {@link AnalyzeSentimentResult}, mapping the document-level and sentence-level sentiment
 * labels and confidence scores.
 *
 * @param documentSentiment the {@link DocumentSentiment} returned by the service.
 * @return the {@link AnalyzeSentimentResult} to be returned by the SDK.
 */
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) {
    final SentimentLabel documentSentimentLabel = toSentimentLabel(documentSentiment.getSentiment());

    final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel
        confidenceScorePerLabel = documentSentiment.getDocumentScores();

    final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream()
        .map(sentenceSentiment -> {
            final SentimentLabel sentimentLabel = toSentimentLabel(sentenceSentiment.getSentiment());
            final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel
                confidenceScorePerSentence = sentenceSentiment.getSentenceScores();
            return new SentenceSentiment(
                sentimentLabel,
                new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(),
                    confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()),
                sentenceSentiment.getLength(), sentenceSentiment.getOffset());
        }).collect(Collectors.toList());

    return new AnalyzeSentimentResult(
        documentSentiment.getId(),
        documentSentiment.getStatistics() == null
            ? null : toTextDocumentStatistics(documentSentiment.getStatistics()),
        null,
        new com.azure.ai.textanalytics.models.DocumentSentiment(
            documentSentimentLabel,
            new SentimentConfidenceScorePerLabel(
                confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(),
                confidenceScorePerLabel.getPositive()),
            sentenceSentiments));
}

/**
 * Translates a raw service sentiment value into a {@link SentimentLabel}, logging a warning
 * when the value is not a recognized label. FIX: this validation was previously duplicated
 * verbatim for the document level and the sentence level; it is now shared.
 *
 * NOTE(review): returns {@code null} for unrecognized values (matching the original inline
 * behavior) and callers still pass that null onward — confirm whether a hard failure is wanted.
 *
 * @param serviceSentiment the sentiment value from the service model.
 * @return the matching {@link SentimentLabel}, or {@code null} if the value is unrecognized.
 */
private SentimentLabel toSentimentLabel(Object serviceSentiment) {
    final SentimentLabel label = SentimentLabel.fromString(serviceSentiment.toString());
    if (label == null) {
        logger.logExceptionAsWarning(
            new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.",
                serviceSentiment)));
    }
    return label;
}
final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel
/**
 * Converts a service-model {@link DocumentSentiment} into the SDK-model
 * {@link AnalyzeSentimentResult}, mapping document- and sentence-level sentiment labels and
 * confidence scores. Unrecognized sentiment values are logged as warnings.
 *
 * @param documentSentiment the {@link DocumentSentiment} returned by the service.
 * @return the {@link AnalyzeSentimentResult} to be returned by the SDK.
 */
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) {
    // Document-level label; a null result means the service returned a value the SDK enum
    // does not recognize — logged as a warning only.
    final SentimentLabel docLabel = SentimentLabel.fromString(documentSentiment.getSentiment().toString());
    if (docLabel == null) {
        logger.logExceptionAsWarning(new RuntimeException(
            String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment())));
    }

    final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel docScores =
        documentSentiment.getDocumentScores();

    // Map each sentence, validating its label the same way as the document label.
    final List<SentenceSentiment> sentences = documentSentiment.getSentences().stream()
        .map(ss -> {
            final SentimentLabel label = SentimentLabel.fromString(ss.getSentiment().toString());
            if (label == null) {
                logger.logExceptionAsWarning(new RuntimeException(
                    String.format(Locale.ROOT, "'%s' is not valid text sentiment.", ss.getSentiment())));
            }
            final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel scores =
                ss.getSentenceScores();
            return new SentenceSentiment(label,
                new SentimentConfidenceScorePerLabel(scores.getNegative(), scores.getNeutral(),
                    scores.getPositive()),
                ss.getLength(), ss.getOffset());
        })
        .collect(Collectors.toList());

    return new AnalyzeSentimentResult(
        documentSentiment.getId(),
        documentSentiment.getStatistics() == null
            ? null : toTextDocumentStatistics(documentSentiment.getStatistics()),
        null,
        new com.azure.ai.textanalytics.models.DocumentSentiment(docLabel,
            new SentimentConfidenceScorePerLabel(docScores.getNegative(), docScores.getNeutral(),
                docScores.getPositive()),
            sentences));
}
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
/**
 * Internal asynchronous client that calls the Text Analytics sentiment endpoint and converts
 * the service responses into SDK model types.
 */
class AnalyzeSentimentAsyncClient {
    private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class);
    private final TextAnalyticsClientImpl service;

    /**
     * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics
     * service's sentiment analysis endpoint.
     *
     * @param service The proxy service used to perform REST calls.
     */
    AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) {
        this.service = service;
    }

    Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse(
        Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) {
        Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");

        // Options are optional; pull out the pieces the generated service method needs.
        final String modelVersion = options == null ? null : options.getModelVersion();
        final MultiLanguageBatchInput batchInput =
            new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(textInputs));

        return service.sentimentWithRestResponseAsync(batchInput, modelVersion,
            options == null ? null : options.showStatistics(), context)
            .doOnSubscribe(ignored ->
                logger.info("A batch of text sentiment input - {}", textInputs.toString()))
            .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response))
            .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error))
            .map(response ->
                new SimpleResponse<>(response, toDocumentResultCollection(response.getValue())));
    }

    /**
     * Converts the service {@link SentimentResponse} into the SDK's
     * {@link DocumentResultCollection} of {@link AnalyzeSentimentResult}, carrying over both
     * successful documents and per-document errors.
     *
     * @param sentimentResponse the {@link SentimentResponse} returned by the service.
     * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be
     * returned by the SDK.
     */
    private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection(
        final SentimentResponse sentimentResponse) {
        final List<AnalyzeSentimentResult> results = new ArrayList<>();
        for (DocumentSentiment document : sentimentResponse.getDocuments()) {
            results.add(convertToAnalyzeSentimentResult(document));
        }
        for (DocumentError documentError : sentimentResponse.getErrors()) {
            results.add(new AnalyzeSentimentResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new DocumentResultCollection<>(results, sentimentResponse.getModelVersion(),
            sentimentResponse.getStatistics() == null
                ? null : toBatchStatistics(sentimentResponse.getStatistics()));
    }

    // NOTE(review): the original snippet carried Javadoc for convertToAnalyzeSentimentResult
    // (DocumentSentiment -> AnalyzeSentimentResult) but the method itself is not present in
    // this snippet — presumably defined elsewhere in the file; confirm.
}
This one is not an autogenerated class, and you don't want to expose an autogenerated class to users. The other place an autogenerated class is used is for internal mapping to the non-autogenerated class.
/**
 * Converts a service-model {@link DocumentSentiment} into the SDK-model
 * {@link AnalyzeSentimentResult}, mapping document- and sentence-level sentiment labels and
 * confidence scores. Unrecognized sentiment values are logged as warnings.
 *
 * @param documentSentiment the {@link DocumentSentiment} returned by the service.
 * @return the {@link AnalyzeSentimentResult} to be returned by the SDK.
 */
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) {
    // Document-level label; null means the service value is unknown to the SDK enum.
    final SentimentLabel docLabel = SentimentLabel.fromString(documentSentiment.getSentiment().toString());
    if (docLabel == null) {
        logger.logExceptionAsWarning(new RuntimeException(
            String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment())));
    }

    final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel docScores =
        documentSentiment.getDocumentScores();

    // Sentence-level mapping mirrors the document-level validation.
    final List<SentenceSentiment> sentences = documentSentiment.getSentences().stream()
        .map(ss -> {
            final SentimentLabel label = SentimentLabel.fromString(ss.getSentiment().toString());
            if (label == null) {
                logger.logExceptionAsWarning(new RuntimeException(
                    String.format(Locale.ROOT, "'%s' is not valid text sentiment.", ss.getSentiment())));
            }
            final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel scores =
                ss.getSentenceScores();
            return new SentenceSentiment(label,
                new SentimentConfidenceScorePerLabel(scores.getNegative(), scores.getNeutral(),
                    scores.getPositive()),
                ss.getLength(), ss.getOffset());
        })
        .collect(Collectors.toList());

    return new AnalyzeSentimentResult(
        documentSentiment.getId(),
        documentSentiment.getStatistics() == null
            ? null : toTextDocumentStatistics(documentSentiment.getStatistics()),
        null,
        new com.azure.ai.textanalytics.models.DocumentSentiment(docLabel,
            new SentimentConfidenceScorePerLabel(docScores.getNegative(), docScores.getNeutral(),
                docScores.getPositive()),
            sentences));
}
new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(),
/**
 * Converts a service-model {@link DocumentSentiment} into the SDK-model
 * {@link AnalyzeSentimentResult}, mapping document- and sentence-level sentiment labels and
 * confidence scores. Unrecognized sentiment values are logged as warnings.
 *
 * @param documentSentiment the {@link DocumentSentiment} returned by the service.
 * @return the {@link AnalyzeSentimentResult} to be returned by the SDK.
 */
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) {
    // Document-level label; null means the service value is unknown to the SDK enum.
    final SentimentLabel docLabel = SentimentLabel.fromString(documentSentiment.getSentiment().toString());
    if (docLabel == null) {
        logger.logExceptionAsWarning(new RuntimeException(
            String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment())));
    }

    final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel docScores =
        documentSentiment.getDocumentScores();

    // Sentence-level mapping mirrors the document-level validation.
    final List<SentenceSentiment> sentences = documentSentiment.getSentences().stream()
        .map(ss -> {
            final SentimentLabel label = SentimentLabel.fromString(ss.getSentiment().toString());
            if (label == null) {
                logger.logExceptionAsWarning(new RuntimeException(
                    String.format(Locale.ROOT, "'%s' is not valid text sentiment.", ss.getSentiment())));
            }
            final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel scores =
                ss.getSentenceScores();
            return new SentenceSentiment(label,
                new SentimentConfidenceScorePerLabel(scores.getNegative(), scores.getNeutral(),
                    scores.getPositive()),
                ss.getLength(), ss.getOffset());
        })
        .collect(Collectors.toList());

    return new AnalyzeSentimentResult(
        documentSentiment.getId(),
        documentSentiment.getStatistics() == null
            ? null : toTextDocumentStatistics(documentSentiment.getStatistics()),
        null,
        new com.azure.ai.textanalytics.models.DocumentSentiment(docLabel,
            new SentimentConfidenceScorePerLabel(docScores.getNegative(), docScores.getNeutral(),
                docScores.getPositive()),
            sentences));
}
/**
 * Internal asynchronous client that calls the Text Analytics sentiment endpoint and converts
 * the service responses into SDK model types.
 */
class AnalyzeSentimentAsyncClient {
    private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class);
    private final TextAnalyticsClientImpl service;

    /**
     * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics
     * service's sentiment analysis endpoint.
     *
     * @param service The proxy service used to perform REST calls.
     */
    AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) {
        this.service = service;
    }

    Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse(
        Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) {
        Objects.requireNonNull(textInputs, "'textInputs' cannot be null.");

        // Options are optional; pull out the pieces the generated service method needs.
        final String modelVersion = options == null ? null : options.getModelVersion();
        final MultiLanguageBatchInput batchInput =
            new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(textInputs));

        return service.sentimentWithRestResponseAsync(batchInput, modelVersion,
            options == null ? null : options.showStatistics(), context)
            .doOnSubscribe(ignored ->
                logger.info("A batch of text sentiment input - {}", textInputs.toString()))
            .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response))
            .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error))
            .map(response ->
                new SimpleResponse<>(response, toDocumentResultCollection(response.getValue())));
    }

    /**
     * Converts the service {@link SentimentResponse} into the SDK's
     * {@link DocumentResultCollection} of {@link AnalyzeSentimentResult}, carrying over both
     * successful documents and per-document errors.
     *
     * @param sentimentResponse the {@link SentimentResponse} returned by the service.
     * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be
     * returned by the SDK.
     */
    private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection(
        final SentimentResponse sentimentResponse) {
        final List<AnalyzeSentimentResult> results = new ArrayList<>();
        for (DocumentSentiment document : sentimentResponse.getDocuments()) {
            results.add(convertToAnalyzeSentimentResult(document));
        }
        for (DocumentError documentError : sentimentResponse.getErrors()) {
            results.add(new AnalyzeSentimentResult(documentError.getId(), null,
                toTextAnalyticsError(documentError.getError()), null));
        }
        return new DocumentResultCollection<>(results, sentimentResponse.getModelVersion(),
            sentimentResponse.getStatistics() == null
                ? null : toBatchStatistics(sentimentResponse.getStatistics()));
    }

    // NOTE(review): the original snippet carried Javadoc for convertToAnalyzeSentimentResult
    // (DocumentSentiment -> AnalyzeSentimentResult) but the method itself is not present in
    // this snippet — presumably defined elsewhere in the file; confirm.
}
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
is there a reason why there are so many format changes in the PR?
public void detectLanguage() { String inputText = "Bonjour tout le monde"; textAnalyticsAsyncClient.detectLanguage(inputText).subscribe(detectedLanguage -> System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore())); }
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()));
public void detectLanguage() { String inputText = "Bonjour tout le monde"; textAnalyticsAsyncClient.detectLanguage(inputText).subscribe(detectedLanguage -> System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore())); }
class TextAnalyticsAsyncClientJavaDocCodeSnippets { TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient(); /** * Code snippet for creating a {@link TextAnalyticsAsyncClient} * * @return The TextAnalyticsAsyncClient object */ public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() { TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildAsyncClient(); return textAnalyticsAsyncClient; } /** * Code snippet for updating the existing API key. */ public void rotateApiKey() { TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential("{api_key}"); TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder() .apiKey(credential) .endpoint("{endpoint}") .buildAsyncClient(); credential.updateCredential("{new_api_key}"); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageWithCountryHint() { String input = "This text is in English"; String countryHint = "US"; textAnalyticsAsyncClient.detectLanguage(input, countryHint).subscribe(detectedLanguage -> System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageStringList() { final List<String> textInputs = Arrays.asList( "This is written in English", "Este es un document escrito en Español."); textAnalyticsAsyncClient.detectLanguageBatch(textInputs).subscribe(batchResult -> { final TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for 
(DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "This is written in English", "Este es un document escrito en Español." ); textAnalyticsAsyncClient.detectLanguageBatch(textInputs1, "US", null).subscribe( batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void detectBatchLanguagesWithResponse() { List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList( new DetectLanguageInput("1", "This is written in English.", "US"), new DetectLanguageInput("2", "Este es un document escrito en Español.", "es") ); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<DetectLanguageResult> batchResult = response.getValue(); TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, 
transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntities() { String inputText = "Satya Nadella is the CEO of Microsoft"; textAnalyticsAsyncClient.recognizeEntities(inputText) .subscribe(entity -> System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntitiesWithLanguage() { String inputText1 = "Satya Nadella is the CEO of Microsoft"; textAnalyticsAsyncClient.recognizeEntities(inputText1, "en") .subscribe(entity -> System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntitiesStringList() { List<String> textInputs = Arrays.asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft." 
); textAnalyticsAsyncClient.recognizeEntitiesBatch(textInputs).subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : batchResult) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized entity: %s, entity category: %s, entity sub-category: %s, offset: %s, length: %s," + " score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getSubCategory() == null || entity.getSubCategory().isEmpty() ? "N/A" : entity.getSubCategory(), entity.getOffset(), entity.getLength(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeEntitiesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft."); textAnalyticsAsyncClient.recognizeEntitiesBatch(textInputs1, "en", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : batchResult) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "I had a wonderful trip to Seattle 
last week."), new TextDocumentInput("1", "I work at Microsoft.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntities() { String inputText = "My SSN is 555-55-5555"; textAnalyticsAsyncClient.recognizePiiEntities(inputText).subscribe(piiEntity -> System.out.printf( "Recognized categorized entity: %s, category: %s, score: %.2f.%n", piiEntity.getText(), piiEntity.getCategory(), piiEntity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntitiesWithLanguage() { String inputText1 = "My SSN is 555-55-5555"; textAnalyticsAsyncClient.recognizePiiEntities(inputText1, "en") .subscribe(entity -> System.out.printf("Recognized PII entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntitiesStringList() { List<String> textInputs = Arrays.asList( "My SSN is 555-55-5555.", "Visa card 0111 1111 1111 1111."); 
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(textInputs).subscribe(recognizeEntitiesResults -> { TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf("Recognized PII entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizePiiEntitiesStringListWithOptions() { List<String> textInputs = Arrays.asList( "My SSN is 555-55-5555.", "Visa card 0111 1111 1111 1111." ); textAnalyticsAsyncClient.recognizePiiEntitiesBatch(textInputs, "US", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : batchResult) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf("Recognized PII entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchPiiEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "My SSN is 555-55-5555."), new TextDocumentInput("1", "Visa card 0111 1111 1111 1111.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); 
textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizePiiEntitiesResult> recognizeEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf("Recognized PII entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntities() { String inputText = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsAsyncClient.recognizeLinkedEntities(inputText).subscribe(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, ID: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceLinkId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntitiesWithLanguage() { String inputText = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsAsyncClient.recognizeLinkedEntities(inputText, "en") .subscribe(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, ID: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceLinkId(), 
linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntitiesStringList() { List<String> textInputs = Arrays.asList( "Old Faithful is a geyser at Yellowstone Park.", "Mount Shasta has lenticular clouds." ); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(textInputs).subscribe(recognizeLinkedEntitiesResults -> { TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, ID: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceLinkId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeLinkedEntitiesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "Old Faithful is a geyser at Yellowstone Park.", "Mount Shasta has lenticular clouds." 
); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(textInputs1, "en", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : batchResult) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, ID: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceLinkId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchLinkedEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park."), new TextDocumentInput("1", "Mount Shasta has lenticular clouds.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for 
(RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, ID: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceLinkId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrases() { System.out.println("Extracted phrases:"); textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrasesWithLanguage() { System.out.println("Extracted phrases:"); textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr") .subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrasesStringList() { List<String> textInputs = Arrays.asList( "Hello world. 
This is some input text that I love.", "Bonjour tout le monde"); textAnalyticsAsyncClient.extractKeyPhrasesBatch(textInputs).subscribe(extractKeyPhraseResults -> { TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void extractKeyPhrasesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "Hello world. This is some input text that I love.", "Bonjour tout le monde"); textAnalyticsAsyncClient.extractKeyPhrasesBatch(textInputs1, "en", null).subscribe( extractKeyPhraseResults -> { TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void extractBatchKeyPhrasesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."), new TextDocumentInput("1", "I work at Microsoft.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); 
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentiment() { String inputText = "The hotel was dark and unclean."; textAnalyticsAsyncClient.analyzeSentiment(inputText).subscribe(documentSentiment -> { System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf( "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentimentWithLanguage() { String inputText1 = "The hotel was dark and unclean."; textAnalyticsAsyncClient.analyzeSentiment(inputText1, "en") .subscribe(documentSentiment -> { System.out.printf("Recognized sentiment class: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", 
sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentimentStringList() { List<String> textInputs = Arrays.asList( "The hotel was dark and unclean.", "The restaurant had amazing gnocchi."); textAnalyticsAsyncClient.analyzeSentimentBatch(textInputs).subscribe(analyzeSentimentResults -> { TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void analyzeSentimentStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "The hotel was dark and unclean.", "The restaurant had amazing gnocchi." 
); textAnalyticsAsyncClient.analyzeSentimentBatch(textInputs1, "en", null).subscribe( analyzeSentimentResults -> { TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, " + "neutral score: %.2f, negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void analyzeBatchSentimentWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "The hotel was dark and unclean."), new TextDocumentInput("1", "The restaurant had amazing gnocchi.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<AnalyzeSentimentResult> analyzeSentimentResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), 
batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } }
class TextAnalyticsAsyncClientJavaDocCodeSnippets { TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient(); /** * Code snippet for creating a {@link TextAnalyticsAsyncClient} * * @return The TextAnalyticsAsyncClient object */ public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() { TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildAsyncClient(); return textAnalyticsAsyncClient; } /** * Code snippet for updating the existing API key. */ public void rotateApiKey() { TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential("{api_key}"); TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder() .apiKey(credential) .endpoint("{endpoint}") .buildAsyncClient(); credential.updateCredential("{new_api_key}"); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageWithCountryHint() { String input = "This text is in English"; String countryHint = "US"; textAnalyticsAsyncClient.detectLanguage(input, countryHint).subscribe(detectedLanguage -> System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageStringList() { final List<String> textInputs = Arrays.asList( "This is written in English", "Este es un document escrito en Español."); textAnalyticsAsyncClient.detectLanguageBatch(textInputs).subscribe(batchResult -> { final TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for 
(DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "This is written in English", "Este es un document escrito en Español." ); textAnalyticsAsyncClient.detectLanguageBatch(textInputs1, "US", null).subscribe( batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void detectBatchLanguagesWithResponse() { List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList( new DetectLanguageInput("1", "This is written in English.", "US"), new DetectLanguageInput("2", "Este es un document escrito en Español.", "es") ); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<DetectLanguageResult> batchResult = response.getValue(); TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, 
transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntities() { String inputText = "Satya Nadella is the CEO of Microsoft"; textAnalyticsAsyncClient.recognizeEntities(inputText) .subscribe(entity -> System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntitiesWithLanguage() { String inputText1 = "Satya Nadella is the CEO of Microsoft"; textAnalyticsAsyncClient.recognizeEntities(inputText1, "en") .subscribe(entity -> System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntitiesStringList() { List<String> textInputs = Arrays.asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft." 
); textAnalyticsAsyncClient.recognizeEntitiesBatch(textInputs).subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : batchResult) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized entity: %s, entity category: %s, entity sub-category: %s, offset: %s, length: %s," + " score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getSubCategory() == null || entity.getSubCategory().isEmpty() ? "N/A" : entity.getSubCategory(), entity.getOffset(), entity.getLength(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeEntitiesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft."); textAnalyticsAsyncClient.recognizeEntitiesBatch(textInputs1, "en", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : batchResult) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "I had a wonderful trip to Seattle 
last week."), new TextDocumentInput("1", "I work at Microsoft.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntities() { String inputText = "My SSN is 555-55-5555"; textAnalyticsAsyncClient.recognizePiiEntities(inputText).subscribe(piiEntity -> System.out.printf( "Recognized categorized entity: %s, category: %s, score: %.2f.%n", piiEntity.getText(), piiEntity.getCategory(), piiEntity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntitiesWithLanguage() { String inputText1 = "My SSN is 555-55-5555"; textAnalyticsAsyncClient.recognizePiiEntities(inputText1, "en") .subscribe(entity -> System.out.printf( "Recognized Personally Identifiable Information entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntitiesStringList() { List<String> textInputs = Arrays.asList( "My SSN is 555-55-5555.", "Visa card 0111 1111 1111 1111."); 
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(textInputs).subscribe(recognizeEntitiesResults -> { TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized Personally Identifiable Information entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizePiiEntitiesStringListWithOptions() { List<String> textInputs = Arrays.asList( "My SSN is 555-55-5555.", "Visa card 0111 1111 1111 1111." ); textAnalyticsAsyncClient.recognizePiiEntitiesBatch(textInputs, "US", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : batchResult) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized Personally Identifiable Information entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchPiiEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "My SSN is 555-55-5555."), new TextDocumentInput("1", "Visa card 0111 1111 1111 1111.")); TextAnalyticsRequestOptions requestOptions = new 
TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizePiiEntitiesResult> recognizeEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized Personally Identifiable Information entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntities() { String inputText = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsAsyncClient.recognizeLinkedEntities(inputText).subscribe(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntitiesWithLanguage() { String inputText = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsAsyncClient.recognizeLinkedEntities(inputText, "en") .subscribe(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, 
entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntitiesStringList() { List<String> textInputs = Arrays.asList( "Old Faithful is a geyser at Yellowstone Park.", "Mount Shasta has lenticular clouds." ); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(textInputs).subscribe(recognizeLinkedEntitiesResults -> { TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeLinkedEntitiesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "Old Faithful is 
a geyser at Yellowstone Park.", "Mount Shasta has lenticular clouds." ); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(textInputs1, "en", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : batchResult) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchLinkedEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park."), new TextDocumentInput("1", "Mount Shasta has lenticular clouds.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", 
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrases() { System.out.println("Extracted phrases:"); textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrasesWithLanguage() { System.out.println("Extracted phrases:"); textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr") .subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrasesStringList() { List<String> textInputs = Arrays.asList( "Hello world. 
This is some input text that I love.", "Bonjour tout le monde"); textAnalyticsAsyncClient.extractKeyPhrasesBatch(textInputs).subscribe(extractKeyPhraseResults -> { TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void extractKeyPhrasesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "Hello world. This is some input text that I love.", "Bonjour tout le monde"); textAnalyticsAsyncClient.extractKeyPhrasesBatch(textInputs1, "en", null).subscribe( extractKeyPhraseResults -> { TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void extractBatchKeyPhrasesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."), new TextDocumentInput("1", "I work at Microsoft.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); 
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentiment() { String inputText = "The hotel was dark and unclean."; textAnalyticsAsyncClient.analyzeSentiment(inputText).subscribe(documentSentiment -> { System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf( "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentimentWithLanguage() { String inputText1 = "The hotel was dark and unclean."; textAnalyticsAsyncClient.analyzeSentiment(inputText1, "en") .subscribe(documentSentiment -> { System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", 
sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentimentStringList() { List<String> textInputs = Arrays.asList( "The hotel was dark and unclean.", "The restaurant had amazing gnocchi."); textAnalyticsAsyncClient.analyzeSentimentBatch(textInputs).subscribe(analyzeSentimentResults -> { TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void analyzeSentimentStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "The hotel was dark and unclean.", "The restaurant had amazing gnocchi." 
); textAnalyticsAsyncClient.analyzeSentimentBatch(textInputs1, "en", null).subscribe( analyzeSentimentResults -> { TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, " + "neutral score: %.2f, negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void analyzeBatchSentimentWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "The hotel was dark and unclean."), new TextDocumentInput("1", "The restaurant had amazing gnocchi.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<AnalyzeSentimentResult> analyzeSentimentResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), 
batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } }
short line. Java use a tool to embeded code to readme, so maintaining a short line number is better for user experience.
public void detectLanguage() { String inputText = "Bonjour tout le monde"; textAnalyticsAsyncClient.detectLanguage(inputText).subscribe(detectedLanguage -> System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore())); }
detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()));
public void detectLanguage() { String inputText = "Bonjour tout le monde"; textAnalyticsAsyncClient.detectLanguage(inputText).subscribe(detectedLanguage -> System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore())); }
class TextAnalyticsAsyncClientJavaDocCodeSnippets { TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient(); /** * Code snippet for creating a {@link TextAnalyticsAsyncClient} * * @return The TextAnalyticsAsyncClient object */ public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() { TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildAsyncClient(); return textAnalyticsAsyncClient; } /** * Code snippet for updating the existing API key. */ public void rotateApiKey() { TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential("{api_key}"); TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder() .apiKey(credential) .endpoint("{endpoint}") .buildAsyncClient(); credential.updateCredential("{new_api_key}"); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageWithCountryHint() { String input = "This text is in English"; String countryHint = "US"; textAnalyticsAsyncClient.detectLanguage(input, countryHint).subscribe(detectedLanguage -> System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageStringList() { final List<String> textInputs = Arrays.asList( "This is written in English", "Este es un document escrito en Español."); textAnalyticsAsyncClient.detectLanguageBatch(textInputs).subscribe(batchResult -> { final TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for 
(DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "This is written in English", "Este es un document escrito en Español." ); textAnalyticsAsyncClient.detectLanguageBatch(textInputs1, "US", null).subscribe( batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void detectBatchLanguagesWithResponse() { List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList( new DetectLanguageInput("1", "This is written in English.", "US"), new DetectLanguageInput("2", "Este es un document escrito en Español.", "es") ); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<DetectLanguageResult> batchResult = response.getValue(); TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, 
transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntities() { String inputText = "Satya Nadella is the CEO of Microsoft"; textAnalyticsAsyncClient.recognizeEntities(inputText) .subscribe(entity -> System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntitiesWithLanguage() { String inputText1 = "Satya Nadella is the CEO of Microsoft"; textAnalyticsAsyncClient.recognizeEntities(inputText1, "en") .subscribe(entity -> System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntitiesStringList() { List<String> textInputs = Arrays.asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft." 
); textAnalyticsAsyncClient.recognizeEntitiesBatch(textInputs).subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : batchResult) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized entity: %s, entity category: %s, entity sub-category: %s, offset: %s, length: %s," + " score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getSubCategory() == null || entity.getSubCategory().isEmpty() ? "N/A" : entity.getSubCategory(), entity.getOffset(), entity.getLength(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeEntitiesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft."); textAnalyticsAsyncClient.recognizeEntitiesBatch(textInputs1, "en", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : batchResult) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "I had a wonderful trip to Seattle 
last week."), new TextDocumentInput("1", "I work at Microsoft.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntities() { String inputText = "My SSN is 555-55-5555"; textAnalyticsAsyncClient.recognizePiiEntities(inputText).subscribe(piiEntity -> System.out.printf( "Recognized categorized entity: %s, category: %s, score: %.2f.%n", piiEntity.getText(), piiEntity.getCategory(), piiEntity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntitiesWithLanguage() { String inputText1 = "My SSN is 555-55-5555"; textAnalyticsAsyncClient.recognizePiiEntities(inputText1, "en") .subscribe(entity -> System.out.printf("Recognized PII entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntitiesStringList() { List<String> textInputs = Arrays.asList( "My SSN is 555-55-5555.", "Visa card 0111 1111 1111 1111."); 
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(textInputs).subscribe(recognizeEntitiesResults -> { TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf("Recognized PII entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizePiiEntitiesStringListWithOptions() { List<String> textInputs = Arrays.asList( "My SSN is 555-55-5555.", "Visa card 0111 1111 1111 1111." ); textAnalyticsAsyncClient.recognizePiiEntitiesBatch(textInputs, "US", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : batchResult) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf("Recognized PII entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchPiiEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "My SSN is 555-55-5555."), new TextDocumentInput("1", "Visa card 0111 1111 1111 1111.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); 
textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizePiiEntitiesResult> recognizeEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf("Recognized PII entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntities() { String inputText = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsAsyncClient.recognizeLinkedEntities(inputText).subscribe(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, ID: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceLinkId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntitiesWithLanguage() { String inputText = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsAsyncClient.recognizeLinkedEntities(inputText, "en") .subscribe(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, ID: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceLinkId(), 
linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntitiesStringList() { List<String> textInputs = Arrays.asList( "Old Faithful is a geyser at Yellowstone Park.", "Mount Shasta has lenticular clouds." ); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(textInputs).subscribe(recognizeLinkedEntitiesResults -> { TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, ID: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceLinkId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeLinkedEntitiesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "Old Faithful is a geyser at Yellowstone Park.", "Mount Shasta has lenticular clouds." 
); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(textInputs1, "en", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : batchResult) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, ID: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceLinkId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchLinkedEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park."), new TextDocumentInput("1", "Mount Shasta has lenticular clouds.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for 
(RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, ID: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceLinkId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrases() { System.out.println("Extracted phrases:"); textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrasesWithLanguage() { System.out.println("Extracted phrases:"); textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr") .subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrasesStringList() { List<String> textInputs = Arrays.asList( "Hello world. 
This is some input text that I love.", "Bonjour tout le monde"); textAnalyticsAsyncClient.extractKeyPhrasesBatch(textInputs).subscribe(extractKeyPhraseResults -> { TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void extractKeyPhrasesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "Hello world. This is some input text that I love.", "Bonjour tout le monde"); textAnalyticsAsyncClient.extractKeyPhrasesBatch(textInputs1, "en", null).subscribe( extractKeyPhraseResults -> { TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void extractBatchKeyPhrasesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."), new TextDocumentInput("1", "I work at Microsoft.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); 
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentiment() { String inputText = "The hotel was dark and unclean."; textAnalyticsAsyncClient.analyzeSentiment(inputText).subscribe(documentSentiment -> { System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf( "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentimentWithLanguage() { String inputText1 = "The hotel was dark and unclean."; textAnalyticsAsyncClient.analyzeSentiment(inputText1, "en") .subscribe(documentSentiment -> { System.out.printf("Recognized sentiment class: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", 
sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentimentStringList() { List<String> textInputs = Arrays.asList( "The hotel was dark and unclean.", "The restaurant had amazing gnocchi."); textAnalyticsAsyncClient.analyzeSentimentBatch(textInputs).subscribe(analyzeSentimentResults -> { TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void analyzeSentimentStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "The hotel was dark and unclean.", "The restaurant had amazing gnocchi." 
); textAnalyticsAsyncClient.analyzeSentimentBatch(textInputs1, "en", null).subscribe( analyzeSentimentResults -> { TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, " + "neutral score: %.2f, negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void analyzeBatchSentimentWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "The hotel was dark and unclean."), new TextDocumentInput("1", "The restaurant had amazing gnocchi.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<AnalyzeSentimentResult> analyzeSentimentResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), 
batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } }
class TextAnalyticsAsyncClientJavaDocCodeSnippets { TextAnalyticsAsyncClient textAnalyticsAsyncClient = createTextAnalyticsAsyncClient(); /** * Code snippet for creating a {@link TextAnalyticsAsyncClient} * * @return The TextAnalyticsAsyncClient object */ public TextAnalyticsAsyncClient createTextAnalyticsAsyncClient() { TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildAsyncClient(); return textAnalyticsAsyncClient; } /** * Code snippet for updating the existing API key. */ public void rotateApiKey() { TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential("{api_key}"); TextAnalyticsAsyncClient textAnalyticsAsyncClient = new TextAnalyticsClientBuilder() .apiKey(credential) .endpoint("{endpoint}") .buildAsyncClient(); credential.updateCredential("{new_api_key}"); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageWithCountryHint() { String input = "This text is in English"; String countryHint = "US"; textAnalyticsAsyncClient.detectLanguage(input, countryHint).subscribe(detectedLanguage -> System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageStringList() { final List<String> textInputs = Arrays.asList( "This is written in English", "Este es un document escrito en Español."); textAnalyticsAsyncClient.detectLanguageBatch(textInputs).subscribe(batchResult -> { final TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for 
(DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void detectLanguageStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "This is written in English", "Este es un document escrito en Español." ); textAnalyticsAsyncClient.detectLanguageBatch(textInputs1, "US", null).subscribe( batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void detectBatchLanguagesWithResponse() { List<DetectLanguageInput> detectLanguageInputs1 = Arrays.asList( new DetectLanguageInput("1", "This is written in English.", "US"), new DetectLanguageInput("2", "Este es un document escrito en Español.", "es") ); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.detectLanguageBatchWithResponse(detectLanguageInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<DetectLanguageResult> batchResult = response.getValue(); TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, 
transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (DetectLanguageResult detectLanguageResult : batchResult) { DetectedLanguage detectedLanguage = detectLanguageResult.getPrimaryLanguage(); System.out.printf("Detected language name: %s, ISO 6391 Name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntities() { String inputText = "Satya Nadella is the CEO of Microsoft"; textAnalyticsAsyncClient.recognizeEntities(inputText) .subscribe(entity -> System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntitiesWithLanguage() { String inputText1 = "Satya Nadella is the CEO of Microsoft"; textAnalyticsAsyncClient.recognizeEntities(inputText1, "en") .subscribe(entity -> System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeEntitiesStringList() { List<String> textInputs = Arrays.asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft." 
); textAnalyticsAsyncClient.recognizeEntitiesBatch(textInputs).subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : batchResult) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized entity: %s, entity category: %s, entity sub-category: %s, offset: %s, length: %s," + " score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getSubCategory() == null || entity.getSubCategory().isEmpty() ? "N/A" : entity.getSubCategory(), entity.getOffset(), entity.getLength(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeEntitiesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "I had a wonderful trip to Seattle last week.", "I work at Microsoft."); textAnalyticsAsyncClient.recognizeEntitiesBatch(textInputs1, "en", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : batchResult) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf("Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "I had a wonderful trip to Seattle 
last week."), new TextDocumentInput("1", "I work at Microsoft.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.recognizeEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizeEntitiesResult> recognizeEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (CategorizedEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized categorized entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntities() { String inputText = "My SSN is 555-55-5555"; textAnalyticsAsyncClient.recognizePiiEntities(inputText).subscribe(piiEntity -> System.out.printf( "Recognized categorized entity: %s, category: %s, score: %.2f.%n", piiEntity.getText(), piiEntity.getCategory(), piiEntity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntitiesWithLanguage() { String inputText1 = "My SSN is 555-55-5555"; textAnalyticsAsyncClient.recognizePiiEntities(inputText1, "en") .subscribe(entity -> System.out.printf( "Recognized Personally Identifiable Information entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore())); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizePiiEntitiesStringList() { List<String> textInputs = Arrays.asList( "My SSN is 555-55-5555.", "Visa card 0111 1111 1111 1111."); 
textAnalyticsAsyncClient.recognizePiiEntitiesBatch(textInputs).subscribe(recognizeEntitiesResults -> { TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized Personally Identifiable Information entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizePiiEntitiesStringListWithOptions() { List<String> textInputs = Arrays.asList( "My SSN is 555-55-5555.", "Visa card 0111 1111 1111 1111." ); textAnalyticsAsyncClient.recognizePiiEntitiesBatch(textInputs, "US", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : batchResult) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized Personally Identifiable Information entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchPiiEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "My SSN is 555-55-5555."), new TextDocumentInput("1", "Visa card 0111 1111 1111 1111.")); TextAnalyticsRequestOptions requestOptions = new 
TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.recognizePiiEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizePiiEntitiesResult> recognizeEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizePiiEntitiesResult recognizeEntitiesResult : recognizeEntitiesResults) { for (PiiEntity entity : recognizeEntitiesResult.getEntities()) { System.out.printf( "Recognized Personally Identifiable Information entity: %s, category: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getScore()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntities() { String inputText = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsAsyncClient.recognizeLinkedEntities(inputText).subscribe(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntitiesWithLanguage() { String inputText = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsAsyncClient.recognizeLinkedEntities(inputText, "en") .subscribe(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, 
entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void recognizeLinkedEntitiesStringList() { List<String> textInputs = Arrays.asList( "Old Faithful is a geyser at Yellowstone Park.", "Mount Shasta has lenticular clouds." ); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(textInputs).subscribe(recognizeLinkedEntitiesResults -> { TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeLinkedEntitiesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "Old Faithful is 
a geyser at Yellowstone Park.", "Mount Shasta has lenticular clouds." ); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatch(textInputs1, "en", null) .subscribe(batchResult -> { TextDocumentBatchStatistics batchStatistics = batchResult.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : batchResult) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void recognizeBatchLinkedEntitiesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "Old Faithful is a geyser at Yellowstone Park."), new TextDocumentInput("1", "Mount Shasta has lenticular clouds.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.recognizeLinkedEntitiesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = recognizeLinkedEntitiesResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", 
batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (RecognizeLinkedEntitiesResult recognizeLinkedEntitiesResult : recognizeLinkedEntitiesResults) { for (LinkedEntity linkedEntity : recognizeLinkedEntitiesResult.getEntities()) { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); for (LinkedEntityMatch linkedEntityMatch : linkedEntity.getLinkedEntityMatches()) { System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getOffset(), linkedEntityMatch.getLength(), linkedEntityMatch.getScore()); } } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrases() { System.out.println("Extracted phrases:"); textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde").subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrasesWithLanguage() { System.out.println("Extracted phrases:"); textAnalyticsAsyncClient.extractKeyPhrases("Bonjour tout le monde", "fr") .subscribe(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void extractKeyPhrasesStringList() { List<String> textInputs = Arrays.asList( "Hello world. 
This is some input text that I love.", "Bonjour tout le monde"); textAnalyticsAsyncClient.extractKeyPhrasesBatch(textInputs).subscribe(extractKeyPhraseResults -> { TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void extractKeyPhrasesStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "Hello world. This is some input text that I love.", "Bonjour tout le monde"); textAnalyticsAsyncClient.extractKeyPhrasesBatch(textInputs1, "en", null).subscribe( extractKeyPhraseResults -> { TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void extractBatchKeyPhrasesWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "I had a wonderful trip to Seattle last week."), new TextDocumentInput("1", "I work at Microsoft.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); 
textAnalyticsAsyncClient.extractKeyPhrasesBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<ExtractKeyPhraseResult> extractKeyPhraseResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = extractKeyPhraseResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (ExtractKeyPhraseResult extractKeyPhraseResult : extractKeyPhraseResults) { System.out.println("Extracted phrases:"); for (String keyPhrase : extractKeyPhraseResult.getKeyPhrases()) { System.out.printf("%s.%n", keyPhrase); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentiment() { String inputText = "The hotel was dark and unclean."; textAnalyticsAsyncClient.analyzeSentiment(inputText).subscribe(documentSentiment -> { System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf( "Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentimentWithLanguage() { String inputText1 = "The hotel was dark and unclean."; textAnalyticsAsyncClient.analyzeSentiment(inputText1, "en") .subscribe(documentSentiment -> { System.out.printf("Recognized sentiment label: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", 
sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient */ public void analyzeSentimentStringList() { List<String> textInputs = Arrays.asList( "The hotel was dark and unclean.", "The restaurant had amazing gnocchi."); textAnalyticsAsyncClient.analyzeSentimentBatch(textInputs).subscribe(analyzeSentimentResults -> { TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void analyzeSentimentStringListWithOptions() { List<String> textInputs1 = Arrays.asList( "The hotel was dark and unclean.", "The restaurant had amazing gnocchi." 
); textAnalyticsAsyncClient.analyzeSentimentBatch(textInputs1, "en", null).subscribe( analyzeSentimentResults -> { TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, " + "neutral score: %.2f, negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } /** * Code snippet for {@link TextAnalyticsAsyncClient * TextAnalyticsRequestOptions)} */ public void analyzeBatchSentimentWithResponse() { List<TextDocumentInput> textDocumentInputs1 = Arrays.asList( new TextDocumentInput("0", "The hotel was dark and unclean."), new TextDocumentInput("1", "The restaurant had amazing gnocchi.")); TextAnalyticsRequestOptions requestOptions = new TextAnalyticsRequestOptions().setShowStatistics(true); textAnalyticsAsyncClient.analyzeSentimentBatchWithResponse(textDocumentInputs1, requestOptions) .subscribe(response -> { DocumentResultCollection<AnalyzeSentimentResult> analyzeSentimentResults = response.getValue(); TextDocumentBatchStatistics batchStatistics = analyzeSentimentResults.getStatistics(); System.out.printf("Batch statistics, transaction count: %s, valid document count: %s.%n", batchStatistics.getTransactionCount(), 
batchStatistics.getValidDocumentCount()); for (AnalyzeSentimentResult analyzeSentimentResult : analyzeSentimentResults) { System.out.printf("Document ID: %s%n", analyzeSentimentResult.getId()); DocumentSentiment documentSentiment = analyzeSentimentResult.getDocumentSentiment(); System.out.printf("Recognized document sentiment: %s.%n", documentSentiment.getSentiment()); for (SentenceSentiment sentenceSentiment : documentSentiment.getSentences()) { System.out.printf("Recognized sentence sentiment: %s, positive score: %.2f, neutral score: %.2f, " + "negative score: %.2f.%n", sentenceSentiment.getSentiment(), sentenceSentiment.getConfidenceScores().getPositive(), sentenceSentiment.getConfidenceScores().getNeutral(), sentenceSentiment.getConfidenceScores().getNegative()); } } }); } }
ugh so confusing. Thanks for clarifying it Shawn :)
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(),
private AnalyzeSentimentResult convertToAnalyzeSentimentResult(final DocumentSentiment documentSentiment) { final SentimentLabel documentSentimentLabel = SentimentLabel.fromString(documentSentiment. getSentiment().toString()); if (documentSentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", documentSentiment.getSentiment()))); } final com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getDocumentScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { SentimentLabel sentimentLabel = SentimentLabel.fromString(sentenceSentiment .getSentiment().toString()); if (sentimentLabel == null) { logger.logExceptionAsWarning( new RuntimeException(String.format(Locale.ROOT, "'%s' is not valid text sentiment.", sentenceSentiment.getSentiment()))); } com.azure.ai.textanalytics.implementation.models.SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getSentenceScores(); return new SentenceSentiment( sentimentLabel, new SentimentConfidenceScorePerLabel(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive()), sentenceSentiment.getLength(), sentenceSentiment.getOffset()); }).collect(Collectors.toList()); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( documentSentimentLabel, new SentimentConfidenceScorePerLabel( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), sentenceSentiments)); }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
class AnalyzeSentimentAsyncClient { private final ClientLogger logger = new ClientLogger(AnalyzeSentimentAsyncClient.class); private final TextAnalyticsClientImpl service; /** * Create a {@code AnalyzeSentimentAsyncClient} that sends requests to the Text Analytics services's sentiment * analysis endpoint. * * @param service The proxy service used to perform REST calls. */ AnalyzeSentimentAsyncClient(TextAnalyticsClientImpl service) { this.service = service; } Mono<Response<DocumentResultCollection<AnalyzeSentimentResult>>> analyzeSentimentBatchWithResponse( Iterable<TextDocumentInput> textInputs, TextAnalyticsRequestOptions options, Context context) { Objects.requireNonNull(textInputs, "'textInputs' cannot be null."); final MultiLanguageBatchInput batchInput = new MultiLanguageBatchInput() .setDocuments(toMultiLanguageInput(textInputs)); return service.sentimentWithRestResponseAsync( batchInput, options == null ? null : options.getModelVersion(), options == null ? null : options.showStatistics(), context) .doOnSubscribe(ignoredValue -> logger.info("A batch of text sentiment input - {}", textInputs.toString())) .doOnSuccess(response -> logger.info("A batch of text sentiment output - {}", response)) .doOnError(error -> logger.warning("Failed to analyze text sentiment - {}", error)) .map(response -> new SimpleResponse<>(response, toDocumentResultCollection(response.getValue()))); } /** * Helper method to convert the service response of {@link SentimentResponse} to {@link DocumentResultCollection}. * * @param sentimentResponse the {@link SentimentResponse} returned by the service. * * @return the {@link DocumentResultCollection} of {@link AnalyzeSentimentResult} to be returned by the SDK. 
*/ private DocumentResultCollection<AnalyzeSentimentResult> toDocumentResultCollection( final SentimentResponse sentimentResponse) { List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); for (DocumentSentiment documentSentiment : sentimentResponse.getDocuments()) { analyzeSentimentResults.add(convertToAnalyzeSentimentResult(documentSentiment)); } for (DocumentError documentError : sentimentResponse.getErrors()) { final com.azure.ai.textanalytics.models.TextAnalyticsError error = toTextAnalyticsError(documentError.getError()); analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, error, null)); } return new DocumentResultCollection<>(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment the {@link DocumentSentiment} returned by the service. * * @return the {@link AnalyzeSentimentResult} to be returned by the SDK. */ }
Can you catch `Exception` here? We don't want any exception to be thrown directly; it should be surfaced through the returned `Mono` instead.
public Mono<AccessToken> getToken(TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId + "/"; if (pubClient == null) { try { PersistentTokenCacheAccessAspect accessAspect = new PersistentTokenCacheAccessAspect(); pubClient = PublicClientApplication.builder(this.clientId) .authority(authorityUrl) .setTokenCacheAccessAspect(accessAspect) .build(); } catch (IOException | PlatformNotSupportedException e) { return Mono.error(e); } } return Mono.fromFuture(pubClient.getAccounts()) .flatMap(set -> { IAccount requestedAccount; Map<String, IAccount> accounts = new HashMap<>(); for (IAccount cached : set) { if (username == null || username.equals(cached.username())) { if (!accounts.containsKey(cached.homeAccountId())) { accounts.put(cached.homeAccountId(), cached); } } } if (accounts.size() == 0) { if (username == null) { return Mono.error(new RuntimeException("No accounts were discovered in the shared token cache." + " To fix, authenticate through tooling supporting azure developer sign on.")); } else { return Mono.error(new RuntimeException(String.format("User account '%s' was not found in the " + "shared token cache. Discovered Accounts: [ '%s' ]", username, set.stream() .map(IAccount::username).distinct().collect(Collectors.joining(", "))))); } } else if (accounts.size() > 1) { if (username == null) { return Mono.error(new RuntimeException("Multiple accounts were discovered in the shared token " + "cache. To fix, set the AZURE_USERNAME and AZURE_TENANT_ID environment variable to the " + "preferred username, or specify it when constructing SharedTokenCacheCredential.")); } else { return Mono.error(new RuntimeException("Multiple entries for the user account " + username + " were found in the shared token cache. 
This is not currently supported by the" + " SharedTokenCacheCredential.")); } } else { requestedAccount = accounts.values().iterator().next(); } SilentParameters params = SilentParameters.builder( new HashSet<>(request.getScopes()), requestedAccount) .authorityUrl(authorityUrl) .build(); CompletableFuture<IAuthenticationResult> future; try { future = pubClient.acquireTokenSilently(params); return Mono.fromFuture(() -> future).map(result -> new AccessToken(result.accessToken(), result.expiresOnDate().toInstant().atOffset(ZoneOffset.UTC))); } catch (MalformedURLException e) { e.printStackTrace(); return Mono.error(new RuntimeException("Token was not found")); } }); }
} catch (IOException | PlatformNotSupportedException e) {
public Mono<AccessToken> getToken(TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId + "/"; if (pubClient == null) { try { PersistentTokenCacheAccessAspect accessAspect = new PersistentTokenCacheAccessAspect(); pubClient = PublicClientApplication.builder(this.clientId) .authority(authorityUrl) .setTokenCacheAccessAspect(accessAspect) .build(); } catch (Exception e) { return Mono.error(e); } } return Mono.fromFuture(pubClient.getAccounts()) .flatMap(set -> { IAccount requestedAccount; Map<String, IAccount> accounts = new HashMap<>(); for (IAccount cached : set) { if (username == null || username.equals(cached.username())) { if (!accounts.containsKey(cached.homeAccountId())) { accounts.put(cached.homeAccountId(), cached); } } } if (accounts.size() == 0) { if (username == null) { return Mono.error(new RuntimeException("No accounts were discovered in the shared token cache." + " To fix, authenticate through tooling supporting azure developer sign on.")); } else { return Mono.error(new RuntimeException(String.format("User account '%s' was not found in the " + "shared token cache. Discovered Accounts: [ '%s' ]", username, set.stream() .map(IAccount::username).distinct().collect(Collectors.joining(", "))))); } } else if (accounts.size() > 1) { if (username == null) { return Mono.error(new RuntimeException("Multiple accounts were discovered in the shared token " + "cache. To fix, set the AZURE_USERNAME and AZURE_TENANT_ID environment variable to the " + "preferred username, or specify it when constructing SharedTokenCacheCredential.")); } else { return Mono.error(new RuntimeException("Multiple entries for the user account " + username + " were found in the shared token cache. 
This is not currently supported by the" + " SharedTokenCacheCredential.")); } } else { requestedAccount = accounts.values().iterator().next(); } SilentParameters params = SilentParameters.builder( new HashSet<>(request.getScopes()), requestedAccount) .authorityUrl(authorityUrl) .build(); CompletableFuture<IAuthenticationResult> future; try { future = pubClient.acquireTokenSilently(params); return Mono.fromFuture(() -> future).map(result -> new AccessToken(result.accessToken(), result.expiresOnDate().toInstant().atOffset(ZoneOffset.UTC))); } catch (MalformedURLException e) { e.printStackTrace(); return Mono.error(new RuntimeException("Token was not found")); } }); }
class SharedTokenCacheCredential implements TokenCredential { private final String username; private final String clientId; private final String tenantId; private final IdentityClientOptions options; private PublicClientApplication pubClient = null; /** * Creates an instance of the Shared Token Cache Credential Provider. * * @param username the username of the account for the application * @param clientId the client ID of the application * @param identityClientOptions the options for configuring the identity client */ SharedTokenCacheCredential(String username, String clientId, String tenantId, IdentityClientOptions identityClientOptions) { Configuration configuration = Configuration.getGlobalConfiguration().clone(); if (username == null) { this.username = configuration.get(Configuration.PROPERTY_AZURE_USERNAME); } else { this.username = username; } if (clientId == null) { this.clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID); } else { this.clientId = clientId; } if (tenantId == null) { this.tenantId = configuration.contains(Configuration.PROPERTY_AZURE_TENANT_ID) ? configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID) : "common"; } else { this.tenantId = tenantId; } this.options = identityClientOptions; } /** * Gets token from shared token cache * */ @Override }
class SharedTokenCacheCredential implements TokenCredential { private final String username; private final String clientId; private final String tenantId; private final IdentityClientOptions options; private PublicClientApplication pubClient = null; /** * Creates an instance of the Shared Token Cache Credential Provider. * * @param username the username of the account for the application * @param clientId the client ID of the application * @param identityClientOptions the options for configuring the identity client */ SharedTokenCacheCredential(String username, String clientId, String tenantId, IdentityClientOptions identityClientOptions) { Configuration configuration = Configuration.getGlobalConfiguration().clone(); if (username == null) { this.username = configuration.get(Configuration.PROPERTY_AZURE_USERNAME); } else { this.username = username; } if (clientId == null) { this.clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID); } else { this.clientId = clientId; } if (tenantId == null) { this.tenantId = configuration.contains(Configuration.PROPERTY_AZURE_TENANT_ID) ? configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID) : "common"; } else { this.tenantId = tenantId; } this.options = identityClientOptions; } /** * Gets token from shared token cache * */ @Override }
done
public Mono<AccessToken> getToken(TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId + "/"; if (pubClient == null) { try { PersistentTokenCacheAccessAspect accessAspect = new PersistentTokenCacheAccessAspect(); pubClient = PublicClientApplication.builder(this.clientId) .authority(authorityUrl) .setTokenCacheAccessAspect(accessAspect) .build(); } catch (IOException | PlatformNotSupportedException e) { return Mono.error(e); } } return Mono.fromFuture(pubClient.getAccounts()) .flatMap(set -> { IAccount requestedAccount; Map<String, IAccount> accounts = new HashMap<>(); for (IAccount cached : set) { if (username == null || username.equals(cached.username())) { if (!accounts.containsKey(cached.homeAccountId())) { accounts.put(cached.homeAccountId(), cached); } } } if (accounts.size() == 0) { if (username == null) { return Mono.error(new RuntimeException("No accounts were discovered in the shared token cache." + " To fix, authenticate through tooling supporting azure developer sign on.")); } else { return Mono.error(new RuntimeException(String.format("User account '%s' was not found in the " + "shared token cache. Discovered Accounts: [ '%s' ]", username, set.stream() .map(IAccount::username).distinct().collect(Collectors.joining(", "))))); } } else if (accounts.size() > 1) { if (username == null) { return Mono.error(new RuntimeException("Multiple accounts were discovered in the shared token " + "cache. To fix, set the AZURE_USERNAME and AZURE_TENANT_ID environment variable to the " + "preferred username, or specify it when constructing SharedTokenCacheCredential.")); } else { return Mono.error(new RuntimeException("Multiple entries for the user account " + username + " were found in the shared token cache. 
This is not currently supported by the" + " SharedTokenCacheCredential.")); } } else { requestedAccount = accounts.values().iterator().next(); } SilentParameters params = SilentParameters.builder( new HashSet<>(request.getScopes()), requestedAccount) .authorityUrl(authorityUrl) .build(); CompletableFuture<IAuthenticationResult> future; try { future = pubClient.acquireTokenSilently(params); return Mono.fromFuture(() -> future).map(result -> new AccessToken(result.accessToken(), result.expiresOnDate().toInstant().atOffset(ZoneOffset.UTC))); } catch (MalformedURLException e) { e.printStackTrace(); return Mono.error(new RuntimeException("Token was not found")); } }); }
} catch (IOException | PlatformNotSupportedException e) {
public Mono<AccessToken> getToken(TokenRequestContext request) { String authorityUrl = options.getAuthorityHost().replaceAll("/+$", "") + "/" + tenantId + "/"; if (pubClient == null) { try { PersistentTokenCacheAccessAspect accessAspect = new PersistentTokenCacheAccessAspect(); pubClient = PublicClientApplication.builder(this.clientId) .authority(authorityUrl) .setTokenCacheAccessAspect(accessAspect) .build(); } catch (Exception e) { return Mono.error(e); } } return Mono.fromFuture(pubClient.getAccounts()) .flatMap(set -> { IAccount requestedAccount; Map<String, IAccount> accounts = new HashMap<>(); for (IAccount cached : set) { if (username == null || username.equals(cached.username())) { if (!accounts.containsKey(cached.homeAccountId())) { accounts.put(cached.homeAccountId(), cached); } } } if (accounts.size() == 0) { if (username == null) { return Mono.error(new RuntimeException("No accounts were discovered in the shared token cache." + " To fix, authenticate through tooling supporting azure developer sign on.")); } else { return Mono.error(new RuntimeException(String.format("User account '%s' was not found in the " + "shared token cache. Discovered Accounts: [ '%s' ]", username, set.stream() .map(IAccount::username).distinct().collect(Collectors.joining(", "))))); } } else if (accounts.size() > 1) { if (username == null) { return Mono.error(new RuntimeException("Multiple accounts were discovered in the shared token " + "cache. To fix, set the AZURE_USERNAME and AZURE_TENANT_ID environment variable to the " + "preferred username, or specify it when constructing SharedTokenCacheCredential.")); } else { return Mono.error(new RuntimeException("Multiple entries for the user account " + username + " were found in the shared token cache. 
This is not currently supported by the" + " SharedTokenCacheCredential.")); } } else { requestedAccount = accounts.values().iterator().next(); } SilentParameters params = SilentParameters.builder( new HashSet<>(request.getScopes()), requestedAccount) .authorityUrl(authorityUrl) .build(); CompletableFuture<IAuthenticationResult> future; try { future = pubClient.acquireTokenSilently(params); return Mono.fromFuture(() -> future).map(result -> new AccessToken(result.accessToken(), result.expiresOnDate().toInstant().atOffset(ZoneOffset.UTC))); } catch (MalformedURLException e) { e.printStackTrace(); return Mono.error(new RuntimeException("Token was not found")); } }); }
class SharedTokenCacheCredential implements TokenCredential { private final String username; private final String clientId; private final String tenantId; private final IdentityClientOptions options; private PublicClientApplication pubClient = null; /** * Creates an instance of the Shared Token Cache Credential Provider. * * @param username the username of the account for the application * @param clientId the client ID of the application * @param identityClientOptions the options for configuring the identity client */ SharedTokenCacheCredential(String username, String clientId, String tenantId, IdentityClientOptions identityClientOptions) { Configuration configuration = Configuration.getGlobalConfiguration().clone(); if (username == null) { this.username = configuration.get(Configuration.PROPERTY_AZURE_USERNAME); } else { this.username = username; } if (clientId == null) { this.clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID); } else { this.clientId = clientId; } if (tenantId == null) { this.tenantId = configuration.contains(Configuration.PROPERTY_AZURE_TENANT_ID) ? configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID) : "common"; } else { this.tenantId = tenantId; } this.options = identityClientOptions; } /** * Gets token from shared token cache * */ @Override }
class SharedTokenCacheCredential implements TokenCredential { private final String username; private final String clientId; private final String tenantId; private final IdentityClientOptions options; private PublicClientApplication pubClient = null; /** * Creates an instance of the Shared Token Cache Credential Provider. * * @param username the username of the account for the application * @param clientId the client ID of the application * @param identityClientOptions the options for configuring the identity client */ SharedTokenCacheCredential(String username, String clientId, String tenantId, IdentityClientOptions identityClientOptions) { Configuration configuration = Configuration.getGlobalConfiguration().clone(); if (username == null) { this.username = configuration.get(Configuration.PROPERTY_AZURE_USERNAME); } else { this.username = username; } if (clientId == null) { this.clientId = configuration.get(Configuration.PROPERTY_AZURE_CLIENT_ID); } else { this.clientId = clientId; } if (tenantId == null) { this.tenantId = configuration.contains(Configuration.PROPERTY_AZURE_TENANT_ID) ? configuration.get(Configuration.PROPERTY_AZURE_TENANT_ID) : "common"; } else { this.tenantId = tenantId; } this.options = identityClientOptions; } /** * Gets token from shared token cache * */ @Override }
If the method takes a `Context` parameter, shouldn't it be named with the `withResponse` suffix?
/**
 * Code snippet for handling an {@link HttpResponseException} raised by a batch call.
 * <p>
 * Fixes the Spanish sample text: "document escrito" -> "documento escrito".
 */
public void handlingException() {
    // NOTE(review): both inputs share document id "1"; the service is expected to
    // reject duplicate ids, which is what this snippet demonstrates catching.
    List<DetectLanguageInput> inputs = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "us"),
        new DetectLanguageInput("1", "Este es un documento escrito en Español.", "es")
    );
    try {
        textAnalyticsClient.detectLanguageBatch(inputs, null, Context.NONE);
    } catch (HttpResponseException e) {
        System.out.println(e.getMessage());
    }
}
textAnalyticsClient.detectLanguageBatch(inputs, null, Context.NONE);
/**
 * Code snippet for handling an {@link HttpResponseException} raised by a batch call.
 */
public void handlingException() {
    // Two documents deliberately sharing id "1" to provoke a service error.
    final List<DetectLanguageInput> documents = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "us"),
        new DetectLanguageInput("1", "Este es un documento escrito en Español.", "es"));
    try {
        textAnalyticsClient.detectLanguageBatch(documents, null, Context.NONE);
    } catch (HttpResponseException e) {
        System.out.println(e.getMessage());
    }
}
class ReadmeSamples { private TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient(); /** * Code snippet for configuring http client. */ public void configureHttpClient() { HttpClient client = new NettyAsyncHttpClientBuilder() .port(8080) .wiretap(true) .build(); } /** * Code snippet for getting sync client using the API key authentication. */ public void useApiKeySyncClient() { TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildClient(); } /** * Code snippet for getting async client using API key authentication. */ public void useApiKeyAsyncClient() { TextAnalyticsAsyncClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildAsyncClient(); } /** * Code snippet for getting async client using AAD authentication. */ public void useAadAsyncClient() { TextAnalyticsAsyncClient textAnalyticsClient = new TextAnalyticsClientBuilder() .endpoint("{endpoint}") .credential(new DefaultAzureCredentialBuilder().build()) .buildAsyncClient(); } /** * Code snippet for rotating API key of the client */ public void rotatingApiKey() { TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential("{api_key}"); TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(credential) .endpoint("{endpoint}") .buildClient(); credential.updateCredential("{new_api_key}"); } /** * Code snippet for handling exception */ /** * Code snippet for analyzing sentiment of a text. */ public void analyzeSentiment() { String text = "The hotel was dark and unclean. 
I like microsoft."; DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(text); System.out.printf("Analyzed document sentiment: %s.%n", documentSentiment.getSentiment()); documentSentiment.getSentences().forEach(sentenceSentiment -> System.out.printf("Analyzed sentence sentiment: %s.%n", sentenceSentiment.getSentiment())); } /** * Code snippet for detecting language in a text. */ public void detectLanguages() { String inputText = "Bonjour tout le monde"; DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage(inputText); System.out.printf("Detected language name: %s, ISO 6391 name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } /** * Code snippet for recognizing category entity in a text. */ public void recognizeCategorizedEntity() { String text = "Satya Nadella is the CEO of Microsoft"; textAnalyticsClient.recognizeCategorizedEntities(text).forEach(entity -> System.out.printf("Recognized categorized entity: %s, category: %s, subCategory: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getSubCategory(), entity.getScore())); } /** * Code snippet for recognizing Personally Identifiable Information entity in a text. */ public void recognizePiiEntity() { String text = "My SSN is 555-55-5555"; textAnalyticsClient.recognizePiiEntities(text).forEach(piiEntity -> System.out.printf("Recognized Personally Identifiable Information entity: %s, category: %s, subCategory: %s, score: %.2f.%n", piiEntity.getText(), piiEntity.getCategory(), piiEntity.getSubCategory(), piiEntity.getScore())); } /** * Code snippet for recognizing linked entity in a text. 
*/ public void recognizeLinkedEntity() { String text = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsClient.recognizeLinkedEntities(text).forEach(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); linkedEntity.getLinkedEntityMatches().forEach(linkedEntityMatch -> System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getGraphemeOffset(), linkedEntityMatch.getGraphemeLength(), linkedEntityMatch.getScore())); }); } /** * Code snippet for extracting key phrases in a text. */ public void extractKeyPhrases() { String text = "My cat might need to see a veterinarian."; System.out.println("Extracted phrases:"); textAnalyticsClient.extractKeyPhrases(text).forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } }
class ReadmeSamples { private TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient(); /** * Code snippet for configuring http client. */ public void configureHttpClient() { HttpClient client = new NettyAsyncHttpClientBuilder() .port(8080) .wiretap(true) .build(); } /** * Code snippet for getting sync client using the API key authentication. */ public void useApiKeySyncClient() { TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildClient(); } /** * Code snippet for getting async client using API key authentication. */ public void useApiKeyAsyncClient() { TextAnalyticsAsyncClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildAsyncClient(); } /** * Code snippet for getting async client using AAD authentication. */ public void useAadAsyncClient() { TextAnalyticsAsyncClient textAnalyticsClient = new TextAnalyticsClientBuilder() .endpoint("{endpoint}") .credential(new DefaultAzureCredentialBuilder().build()) .buildAsyncClient(); } /** * Code snippet for rotating API key of the client */ public void rotatingApiKey() { TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential("{api_key}"); TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(credential) .endpoint("{endpoint}") .buildClient(); credential.updateCredential("{new_api_key}"); } /** * Code snippet for handling exception */ /** * Code snippet for analyzing sentiment of a text. */ public void analyzeSentiment() { String text = "The hotel was dark and unclean. 
I like microsoft."; DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(text); System.out.printf("Analyzed document sentiment: %s.%n", documentSentiment.getSentiment()); documentSentiment.getSentences().forEach(sentenceSentiment -> System.out.printf("Analyzed sentence sentiment: %s.%n", sentenceSentiment.getSentiment())); } /** * Code snippet for detecting language in a text. */ public void detectLanguages() { String inputText = "Bonjour tout le monde"; DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage(inputText); System.out.printf("Detected language name: %s, ISO 6391 name: %s, score: %f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } /** * Code snippet for recognizing category entity in a text. */ public void recognizeEntity() { String text = "Satya Nadella is the CEO of Microsoft"; textAnalyticsClient.recognizeEntities(text).forEach(entity -> System.out.printf("Recognized entity: %s, category: %s, subCategory: %s, score: %f.%n", entity.getText(), entity.getCategory(), entity.getSubCategory(), entity.getConfidenceScore())); } /** * Code snippet for recognizing Personally Identifiable Information entity in a text. */ public void recognizePiiEntity() { String text = "My SSN is 555-55-5555"; textAnalyticsClient.recognizePiiEntities(text).forEach(piiEntity -> System.out.printf("Recognized Personally Identifiable Information entity: %s, category: %s, subCategory: %s, score: %f.%n", piiEntity.getText(), piiEntity.getCategory(), piiEntity.getSubCategory(), piiEntity.getConfidenceScore())); } /** * Code snippet for recognizing linked entity in a text. 
*/ public void recognizeLinkedEntity() { String text = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsClient.recognizeLinkedEntities(text).forEach(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); linkedEntity.getLinkedEntityMatches().forEach(linkedEntityMatch -> System.out.printf("Text: %s, score: %f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getConfidenceScore())); }); } /** * Code snippet for extracting key phrases in a text. */ public void extractKeyPhrases() { String text = "My cat might need to see a veterinarian."; System.out.println("Extracted phrases:"); textAnalyticsClient.extractKeyPhrases(text).forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } }
There will be no "withResponse" for pagination. See the pagination example: https://azure.github.io/azure-sdk/java_design.html#pagination
/**
 * Code snippet for handling an {@link HttpResponseException} raised by a batch call.
 * <p>
 * Fixes the Spanish sample text: "document escrito" -> "documento escrito".
 */
public void handlingException() {
    // NOTE(review): both inputs share document id "1"; the service is expected to
    // reject duplicate ids, which is what this snippet demonstrates catching.
    List<DetectLanguageInput> inputs = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "us"),
        new DetectLanguageInput("1", "Este es un documento escrito en Español.", "es")
    );
    try {
        textAnalyticsClient.detectLanguageBatch(inputs, null, Context.NONE);
    } catch (HttpResponseException e) {
        System.out.println(e.getMessage());
    }
}
textAnalyticsClient.detectLanguageBatch(inputs, null, Context.NONE);
/**
 * Code snippet for handling an {@link HttpResponseException} raised by a batch call.
 */
public void handlingException() {
    // Two documents deliberately sharing id "1" to provoke a service error.
    final List<DetectLanguageInput> documents = Arrays.asList(
        new DetectLanguageInput("1", "This is written in English.", "us"),
        new DetectLanguageInput("1", "Este es un documento escrito en Español.", "es"));
    try {
        textAnalyticsClient.detectLanguageBatch(documents, null, Context.NONE);
    } catch (HttpResponseException e) {
        System.out.println(e.getMessage());
    }
}
class ReadmeSamples { private TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient(); /** * Code snippet for configuring http client. */ public void configureHttpClient() { HttpClient client = new NettyAsyncHttpClientBuilder() .port(8080) .wiretap(true) .build(); } /** * Code snippet for getting sync client using the API key authentication. */ public void useApiKeySyncClient() { TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildClient(); } /** * Code snippet for getting async client using API key authentication. */ public void useApiKeyAsyncClient() { TextAnalyticsAsyncClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildAsyncClient(); } /** * Code snippet for getting async client using AAD authentication. */ public void useAadAsyncClient() { TextAnalyticsAsyncClient textAnalyticsClient = new TextAnalyticsClientBuilder() .endpoint("{endpoint}") .credential(new DefaultAzureCredentialBuilder().build()) .buildAsyncClient(); } /** * Code snippet for rotating API key of the client */ public void rotatingApiKey() { TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential("{api_key}"); TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(credential) .endpoint("{endpoint}") .buildClient(); credential.updateCredential("{new_api_key}"); } /** * Code snippet for handling exception */ /** * Code snippet for analyzing sentiment of a text. */ public void analyzeSentiment() { String text = "The hotel was dark and unclean. 
I like microsoft."; DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(text); System.out.printf("Analyzed document sentiment: %s.%n", documentSentiment.getSentiment()); documentSentiment.getSentences().forEach(sentenceSentiment -> System.out.printf("Analyzed sentence sentiment: %s.%n", sentenceSentiment.getSentiment())); } /** * Code snippet for detecting language in a text. */ public void detectLanguages() { String inputText = "Bonjour tout le monde"; DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage(inputText); System.out.printf("Detected language name: %s, ISO 6391 name: %s, score: %.2f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } /** * Code snippet for recognizing category entity in a text. */ public void recognizeCategorizedEntity() { String text = "Satya Nadella is the CEO of Microsoft"; textAnalyticsClient.recognizeCategorizedEntities(text).forEach(entity -> System.out.printf("Recognized categorized entity: %s, category: %s, subCategory: %s, score: %.2f.%n", entity.getText(), entity.getCategory(), entity.getSubCategory(), entity.getScore())); } /** * Code snippet for recognizing Personally Identifiable Information entity in a text. */ public void recognizePiiEntity() { String text = "My SSN is 555-55-5555"; textAnalyticsClient.recognizePiiEntities(text).forEach(piiEntity -> System.out.printf("Recognized Personally Identifiable Information entity: %s, category: %s, subCategory: %s, score: %.2f.%n", piiEntity.getText(), piiEntity.getCategory(), piiEntity.getSubCategory(), piiEntity.getScore())); } /** * Code snippet for recognizing linked entity in a text. 
*/ public void recognizeLinkedEntity() { String text = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsClient.recognizeLinkedEntities(text).forEach(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); linkedEntity.getLinkedEntityMatches().forEach(linkedEntityMatch -> System.out.printf("Text: %s, offset: %s, length: %s, score: %.2f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getGraphemeOffset(), linkedEntityMatch.getGraphemeLength(), linkedEntityMatch.getScore())); }); } /** * Code snippet for extracting key phrases in a text. */ public void extractKeyPhrases() { String text = "My cat might need to see a veterinarian."; System.out.println("Extracted phrases:"); textAnalyticsClient.extractKeyPhrases(text).forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } }
class ReadmeSamples { private TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder().buildClient(); /** * Code snippet for configuring http client. */ public void configureHttpClient() { HttpClient client = new NettyAsyncHttpClientBuilder() .port(8080) .wiretap(true) .build(); } /** * Code snippet for getting sync client using the API key authentication. */ public void useApiKeySyncClient() { TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildClient(); } /** * Code snippet for getting async client using API key authentication. */ public void useApiKeyAsyncClient() { TextAnalyticsAsyncClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(new TextAnalyticsApiKeyCredential("{api_key}")) .endpoint("{endpoint}") .buildAsyncClient(); } /** * Code snippet for getting async client using AAD authentication. */ public void useAadAsyncClient() { TextAnalyticsAsyncClient textAnalyticsClient = new TextAnalyticsClientBuilder() .endpoint("{endpoint}") .credential(new DefaultAzureCredentialBuilder().build()) .buildAsyncClient(); } /** * Code snippet for rotating API key of the client */ public void rotatingApiKey() { TextAnalyticsApiKeyCredential credential = new TextAnalyticsApiKeyCredential("{api_key}"); TextAnalyticsClient textAnalyticsClient = new TextAnalyticsClientBuilder() .apiKey(credential) .endpoint("{endpoint}") .buildClient(); credential.updateCredential("{new_api_key}"); } /** * Code snippet for handling exception */ /** * Code snippet for analyzing sentiment of a text. */ public void analyzeSentiment() { String text = "The hotel was dark and unclean. 
I like microsoft."; DocumentSentiment documentSentiment = textAnalyticsClient.analyzeSentiment(text); System.out.printf("Analyzed document sentiment: %s.%n", documentSentiment.getSentiment()); documentSentiment.getSentences().forEach(sentenceSentiment -> System.out.printf("Analyzed sentence sentiment: %s.%n", sentenceSentiment.getSentiment())); } /** * Code snippet for detecting language in a text. */ public void detectLanguages() { String inputText = "Bonjour tout le monde"; DetectedLanguage detectedLanguage = textAnalyticsClient.detectLanguage(inputText); System.out.printf("Detected language name: %s, ISO 6391 name: %s, score: %f.%n", detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getScore()); } /** * Code snippet for recognizing category entity in a text. */ public void recognizeEntity() { String text = "Satya Nadella is the CEO of Microsoft"; textAnalyticsClient.recognizeEntities(text).forEach(entity -> System.out.printf("Recognized entity: %s, category: %s, subCategory: %s, score: %f.%n", entity.getText(), entity.getCategory(), entity.getSubCategory(), entity.getConfidenceScore())); } /** * Code snippet for recognizing Personally Identifiable Information entity in a text. */ public void recognizePiiEntity() { String text = "My SSN is 555-55-5555"; textAnalyticsClient.recognizePiiEntities(text).forEach(piiEntity -> System.out.printf("Recognized Personally Identifiable Information entity: %s, category: %s, subCategory: %s, score: %f.%n", piiEntity.getText(), piiEntity.getCategory(), piiEntity.getSubCategory(), piiEntity.getConfidenceScore())); } /** * Code snippet for recognizing linked entity in a text. 
*/ public void recognizeLinkedEntity() { String text = "Old Faithful is a geyser at Yellowstone Park."; textAnalyticsClient.recognizeLinkedEntities(text).forEach(linkedEntity -> { System.out.println("Linked Entities:"); System.out.printf("Name: %s, entity ID in data source: %s, URL: %s, data source: %s.%n", linkedEntity.getName(), linkedEntity.getDataSourceEntityId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); linkedEntity.getLinkedEntityMatches().forEach(linkedEntityMatch -> System.out.printf("Text: %s, score: %f.%n", linkedEntityMatch.getText(), linkedEntityMatch.getConfidenceScore())); }); } /** * Code snippet for extracting key phrases in a text. */ public void extractKeyPhrases() { String text = "My cat might need to see a veterinarian."; System.out.println("Extracted phrases:"); textAnalyticsClient.extractKeyPhrases(text).forEach(keyPhrase -> System.out.printf("%s.%n", keyPhrase)); } }
where are we doing the initialization blocking call now? If the global endpoint manager is not fully initialized with first databaseAccount fetch could this return null?
private void initializeGatewayConfigurationReader() { this.gatewayConfigurationReader = new GatewayServiceConfigurationReader(this.serviceEndpoint, this.globalEndpointManager); DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount(); this.useMultipleWriteLocations = this.connectionPolicy.getUsingMultipleWriteLocations() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); }
DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount();
private void initializeGatewayConfigurationReader() { this.gatewayConfigurationReader = new GatewayServiceConfigurationReader(this.globalEndpointManager); DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount(); assert(databaseAccount != null); this.useMultipleWriteLocations = this.connectionPolicy.getUsingMultipleWriteLocations() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); }
class RxDocumentClientImpl implements AsyncDocumentClient, IAuthorizationTokenProvider { private final static ObjectMapper mapper = Utils.getSimpleObjectMapper(); private final Logger logger = LoggerFactory.getLogger(RxDocumentClientImpl.class); private final String masterKeyOrResourceToken; private final URI serviceEndpoint; private final ConnectionPolicy connectionPolicy; private final ConsistencyLevel consistencyLevel; private final BaseAuthorizationTokenProvider authorizationTokenProvider; private final UserAgentContainer userAgentContainer; private final boolean hasAuthKeyResourceToken; private final Configs configs; private final boolean enableTransportClientSharing; private CosmosKeyCredential cosmosKeyCredential; private TokenResolver tokenResolver; private SessionContainer sessionContainer; private String firstResourceTokenFromPermissionFeed = StringUtils.EMPTY; private RxClientCollectionCache collectionCache; private RxStoreModel gatewayProxy; private RxStoreModel storeModel; private GlobalAddressResolver addressResolver; private RxPartitionKeyRangeCache partitionKeyRangeCache; private Map<String, List<PartitionKeyAndResourceTokenPair>> resourceTokensMap; private IRetryPolicyFactory resetSessionTokenRetryPolicy; /** * Compatibility mode: Allows to specify compatibility mode used by client when * making query requests. Should be removed when application/sql is no longer * supported. 
*/ private final QueryCompatibilityMode queryCompatibilityMode = QueryCompatibilityMode.Default; private final HttpClient reactorHttpClient; private final GlobalEndpointManager globalEndpointManager; private final RetryPolicy retryPolicy; private volatile boolean useMultipleWriteLocations; private StoreClientFactory storeClientFactory; private GatewayServiceConfigurationReader gatewayConfigurationReader; public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, TokenResolver tokenResolver, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverride, boolean enableTransportClientSharing) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, cosmosKeyCredential, sessionCapturingOverride, enableTransportClientSharing); this.tokenResolver = tokenResolver; } private RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverrideEnabled, boolean enableTransportClientSharing) { this(serviceEndpoint, masterKeyOrResourceToken, connectionPolicy, consistencyLevel, configs, cosmosKeyCredential, sessionCapturingOverrideEnabled, enableTransportClientSharing); if (permissionFeed != null && permissionFeed.size() > 0) { this.resourceTokensMap = new HashMap<>(); for (Permission permission : permissionFeed) { String[] segments = StringUtils.split(permission.getResourceLink(), Constants.Properties.PATH_SEPARATOR.charAt(0)); if (segments.length <= 0) { throw new IllegalArgumentException("resourceLink"); } List<PartitionKeyAndResourceTokenPair> partitionKeyAndResourceTokenPairs = null; PathInfo pathInfo = new PathInfo(false, StringUtils.EMPTY, StringUtils.EMPTY, false); if 
(!PathsHelper.tryParsePathSegments(permission.getResourceLink(), pathInfo, null)) { throw new IllegalArgumentException(permission.getResourceLink()); } partitionKeyAndResourceTokenPairs = resourceTokensMap.get(pathInfo.resourceIdOrFullName); if (partitionKeyAndResourceTokenPairs == null) { partitionKeyAndResourceTokenPairs = new ArrayList<>(); this.resourceTokensMap.put(pathInfo.resourceIdOrFullName, partitionKeyAndResourceTokenPairs); } PartitionKey partitionKey = permission.getResourcePartitionKey(); partitionKeyAndResourceTokenPairs.add(new PartitionKeyAndResourceTokenPair( partitionKey != null ? BridgeInternal.getPartitionKeyInternal(partitionKey) : PartitionKeyInternal.Empty, permission.getToken())); logger.debug("Initializing resource token map , with map key [{}] , partition key [{}] and resource token", pathInfo.resourceIdOrFullName, partitionKey != null ? partitionKey.toString() : null, permission.getToken()); } if(this.resourceTokensMap.isEmpty()) { throw new IllegalArgumentException("permissionFeed"); } String firstToken = permissionFeed.get(0).getToken(); if(ResourceTokenAuthorizationHelper.isResourceToken(firstToken)) { this.firstResourceTokenFromPermissionFeed = firstToken; } } } RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverrideEnabled, boolean enableTransportClientSharing) { logger.info( "Initializing DocumentClient with" + " serviceEndpoint [{}], connectionPolicy [{}], consistencyLevel [{}], directModeProtocol [{}]", serviceEndpoint, connectionPolicy, consistencyLevel, configs.getProtocol()); this.enableTransportClientSharing = enableTransportClientSharing; this.configs = configs; this.masterKeyOrResourceToken = masterKeyOrResourceToken; this.serviceEndpoint = serviceEndpoint; this.cosmosKeyCredential = cosmosKeyCredential; if (this.cosmosKeyCredential != null) { 
hasAuthKeyResourceToken = false; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential); } else if (masterKeyOrResourceToken != null && ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.authorizationTokenProvider = null; hasAuthKeyResourceToken = true; } else if(masterKeyOrResourceToken != null && !ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)){ this.cosmosKeyCredential = new CosmosKeyCredential(this.masterKeyOrResourceToken); hasAuthKeyResourceToken = false; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential); } else { hasAuthKeyResourceToken = false; this.authorizationTokenProvider = null; } if (connectionPolicy != null) { this.connectionPolicy = connectionPolicy; } else { this.connectionPolicy = new ConnectionPolicy(); } boolean disableSessionCapturing = (ConsistencyLevel.SESSION != consistencyLevel && !sessionCapturingOverrideEnabled); this.sessionContainer = new SessionContainer(this.serviceEndpoint.getHost(), disableSessionCapturing); this.consistencyLevel = consistencyLevel; this.userAgentContainer = new UserAgentContainer(); String userAgentSuffix = this.connectionPolicy.getUserAgentSuffix(); if (userAgentSuffix != null && userAgentSuffix.length() > 0) { userAgentContainer.setSuffix(userAgentSuffix); } this.reactorHttpClient = httpClient(); this.globalEndpointManager = new GlobalEndpointManager(asDatabaseAccountManagerInternal(), this.connectionPolicy, /**/configs); this.retryPolicy = new RetryPolicy(this.globalEndpointManager, this.connectionPolicy); this.resetSessionTokenRetryPolicy = retryPolicy; } public void init() { this.gatewayProxy = createRxGatewayProxy(this.sessionContainer, this.consistencyLevel, this.queryCompatibilityMode, this.userAgentContainer, this.globalEndpointManager, this.reactorHttpClient); this.globalEndpointManager.init(); this.initializeGatewayConfigurationReader(); this.collectionCache = 
new RxClientCollectionCache(this.sessionContainer, this.gatewayProxy, this, this.retryPolicy); this.resetSessionTokenRetryPolicy = new ResetSessionTokenRetryPolicyFactory(this.sessionContainer, this.collectionCache, this.retryPolicy); this.partitionKeyRangeCache = new RxPartitionKeyRangeCache(RxDocumentClientImpl.this, collectionCache); if (this.connectionPolicy.getConnectionMode() == ConnectionMode.GATEWAY) { this.storeModel = this.gatewayProxy; } else { this.initializeDirectConnectivity(); } } private void initializeDirectConnectivity() { this.storeClientFactory = new StoreClientFactory( this.configs, this.connectionPolicy.getRequestTimeoutInMillis() / 1000, 0, this.userAgentContainer, this.enableTransportClientSharing ); this.addressResolver = new GlobalAddressResolver( this.reactorHttpClient, this.globalEndpointManager, this.configs.getProtocol(), this, this.collectionCache, this.partitionKeyRangeCache, userAgentContainer, null, this.connectionPolicy); this.createStoreModel(true); } DatabaseAccountManagerInternal asDatabaseAccountManagerInternal() { return new DatabaseAccountManagerInternal() { @Override public URI getServiceEndpoint() { return RxDocumentClientImpl.this.getServiceEndpoint(); } @Override public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { logger.info("Getting database account endpoint from {}", endpoint); return RxDocumentClientImpl.this.getDatabaseAccountFromEndpoint(endpoint); } @Override public ConnectionPolicy getConnectionPolicy() { return RxDocumentClientImpl.this.getConnectionPolicy(); } }; } RxGatewayStoreModel createRxGatewayProxy(ISessionContainer sessionContainer, ConsistencyLevel consistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient) { return new RxGatewayStoreModel(sessionContainer, consistencyLevel, queryCompatibilityMode, userAgentContainer, globalEndpointManager, httpClient); } private 
HttpClient httpClient() { HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs) .withMaxIdleConnectionTimeoutInMillis(this.connectionPolicy.getIdleConnectionTimeoutInMillis()) .withPoolSize(this.connectionPolicy.getMaxPoolSize()) .withHttpProxy(this.connectionPolicy.getProxy()) .withRequestTimeoutInMillis(this.connectionPolicy.getRequestTimeoutInMillis()); return HttpClient.createFixed(httpClientConfig); } private void createStoreModel(boolean subscribeRntbdStatus) { StoreClient storeClient = this.storeClientFactory.createStoreClient( this.addressResolver, this.sessionContainer, this.gatewayConfigurationReader, this, false ); this.storeModel = new ServerStoreModel(storeClient); } @Override public URI getServiceEndpoint() { return this.serviceEndpoint; } @Override public URI getWriteEndpoint() { return globalEndpointManager.getWriteEndpoints().stream().findFirst().orElse(null); } @Override public URI getReadEndpoint() { return globalEndpointManager.getReadEndpoints().stream().findFirst().orElse(null); } @Override public ConnectionPolicy getConnectionPolicy() { return this.connectionPolicy; } @Override public Mono<ResourceResponse<Database>> createDatabase(Database database, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createDatabaseInternal(database, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> createDatabaseInternal(Database database, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (database == null) { throw new IllegalArgumentException("Database"); } logger.debug("Creating a Database. 
id: [{}]", database.getId()); validateResource(database); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create, ResourceType.Database, Paths.DATABASES_ROOT, database, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in creating a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> deleteDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> deleteDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Deleting a Database. databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in deleting a database. 
due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> readDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> readDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Reading a Database. databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in reading a database. 
due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Database>> readDatabases(FeedOptions options) { return readFeed(options, ResourceType.Database, Database.class, Paths.DATABASES_ROOT); } private String parentResourceLinkToQueryLink(String parentResouceLink, ResourceType resourceTypeEnum) { switch (resourceTypeEnum) { case Database: return Paths.DATABASES_ROOT; case DocumentCollection: return Utils.joinPath(parentResouceLink, Paths.COLLECTIONS_PATH_SEGMENT); case Document: return Utils.joinPath(parentResouceLink, Paths.DOCUMENTS_PATH_SEGMENT); case Offer: return Paths.OFFERS_ROOT; case User: return Utils.joinPath(parentResouceLink, Paths.USERS_PATH_SEGMENT); case Permission: return Utils.joinPath(parentResouceLink, Paths.PERMISSIONS_PATH_SEGMENT); case Attachment: return Utils.joinPath(parentResouceLink, Paths.ATTACHMENTS_PATH_SEGMENT); case StoredProcedure: return Utils.joinPath(parentResouceLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); case Trigger: return Utils.joinPath(parentResouceLink, Paths.TRIGGERS_PATH_SEGMENT); case UserDefinedFunction: return Utils.joinPath(parentResouceLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); default: throw new IllegalArgumentException("resource type not supported"); } } private <T extends Resource> Flux<FeedResponse<T>> createQuery( String parentResourceLink, SqlQuerySpec sqlQuery, FeedOptions options, Class<T> klass, ResourceType resourceTypeEnum) { String queryResourceLink = parentResourceLinkToQueryLink(parentResourceLink, resourceTypeEnum); UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = DocumentQueryClientImpl(RxDocumentClientImpl.this); Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createDocumentQueryExecutionContextAsync(queryClient, resourceTypeEnum, klass, sqlQuery , options, queryResourceLink, false, activityId); return executionContext.flatMap(IDocumentQueryExecutionContext::executeAsync); } @Override public Flux<FeedResponse<Database>> queryDatabases(String query, FeedOptions options) { return queryDatabases(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Database>> queryDatabases(SqlQuerySpec querySpec, FeedOptions options) { return createQuery(Paths.DATABASES_ROOT, querySpec, options, Database.class, ResourceType.Database); } @Override public Mono<ResourceResponse<DocumentCollection>> createCollection(String databaseLink, DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> this.createCollectionInternal(databaseLink, collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> createCollectionInternal(String databaseLink, DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Creating a Collection. 
databaseLink: [{}], Collection id: [{}]", databaseLink, collection.getId()); validateResource(collection); String path = Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create, ResourceType.DocumentCollection, path, collection, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); }); } catch (Exception e) { logger.debug("Failure in creating a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> replaceCollection(DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceCollectionInternal(collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> replaceCollectionInternal(DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Replacing a Collection. 
id: [{}]", collection.getId()); validateResource(collection); String path = Utils.joinPath(collection.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.DocumentCollection, path, collection, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { if (resourceResponse.getResource() != null) { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); } }); } catch (Exception e) { logger.debug("Failure in replacing a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> deleteCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> deleteCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting a Collection. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in deleting a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.DELETE); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); } private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.GET); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); } Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) { populateHeaders(request, RequestVerb.GET); return gatewayProxy.processMessage(request); } private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) { populateHeaders(request, RequestVerb.POST); return this.getStoreProxy(request).processMessage(request) .map(response -> { this.captureSessionToken(request, response); return response; } ); } @Override public 
// NOTE: the '@Override public' modifiers for this declaration sit at the end of the previous line.
// Reads a document collection, routing through the session-token-reset retry policy.
Mono<ResourceResponse<DocumentCollection>> readCollection(String collectionLink, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> readCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance);
}

// Builds and issues the READ request for a collection.
// Synchronous failures are converted to Mono.error so callers see a uniform reactive contract.
private Mono<ResourceResponse<DocumentCollection>> readCollectionInternal(String collectionLink, RequestOptions options,
        DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        logger.debug("Reading a Collection. collectionLink: [{}]", collectionLink);
        String path = Utils.joinPath(collectionLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
            ResourceType.DocumentCollection, path, requestHeaders, options);
        if (retryPolicyInstance != null){
            // Let the retry policy capture per-request state before the first attempt.
            retryPolicyInstance.onBeforeSendRequest(request);
        }
        return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class));
    } catch (Exception e) {
        logger.debug("Failure in reading a collection, due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// Lists all collections of a database as a paged feed.
@Override
public Flux<FeedResponse<DocumentCollection>> readCollections(String databaseLink, FeedOptions options) {
    if (StringUtils.isEmpty(databaseLink)) {
        throw new IllegalArgumentException("databaseLink");
    }
    return readFeed(options, ResourceType.DocumentCollection, DocumentCollection.class,
        Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT));
}

// Raw-SQL overload; wraps the text in a SqlQuerySpec and delegates.
@Override
public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, String query, FeedOptions options) {
    return createQuery(databaseLink, new SqlQuerySpec(query), options, DocumentCollection.class, ResourceType.DocumentCollection);
}

// Declaration continues on the next line with the SqlQuerySpec overload.
@Override
public Flux<FeedResponse<DocumentCollection>>
// Continuation of: '@Override public Flux<FeedResponse<DocumentCollection>>' from the previous line.
queryCollections(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) {
    return createQuery(databaseLink, querySpec, options, DocumentCollection.class, ResourceType.DocumentCollection);
}

/**
 * Serializes stored-procedure arguments into a single JSON array literal.
 * JsonSerializable values use their own serializer; everything else goes through Jackson.
 *
 * @throws IllegalArgumentException if a value cannot be serialized to JSON.
 */
private static String serializeProcedureParams(Object[] objectArray) {
    String[] stringArray = new String[objectArray.length];
    for (int i = 0; i < objectArray.length; ++i) {
        Object object = objectArray[i];
        if (object instanceof JsonSerializable) {
            stringArray[i] = ((JsonSerializable) object).toJson();
        } else {
            try {
                stringArray[i] = mapper.writeValueAsString(object);
            } catch (IOException e) {
                throw new IllegalArgumentException("Can't serialize the object into the json string", e);
            }
        }
    }
    return String.format("[%s]", StringUtils.join(stringArray, ","));
}

/**
 * Validates a user-supplied resource id: rejects characters that are illegal in
 * Cosmos DB resource ids ('/', '\', '?', '#') and ids ending with a space.
 */
private static void validateResource(Resource resource) {
    if (!StringUtils.isEmpty(resource.getId())) {
        // FIX: the fourth clause was truncated/garbled; restored the '#' check to match
        // the documented set of characters not allowed in Cosmos DB resource ids.
        if (resource.getId().indexOf('/') != -1 || resource.getId().indexOf('\\') != -1 ||
                resource.getId().indexOf('?') != -1 || resource.getId().indexOf('#') != -1) {
            throw new IllegalArgumentException("Id contains illegal chars.");
        }
        if (resource.getId().endsWith(" ")) {
            throw new IllegalArgumentException("Id ends with a space.");
        }
    }
}

// Translates RequestOptions into wire headers. Returns a mutable map; never null.
private Map<String, String> getRequestHeaders(RequestOptions options) {
    Map<String, String> headers = new HashMap<>();
    if (this.useMultipleWriteLocations) {
        headers.put(HttpConstants.HttpHeaders.ALLOW_TENTATIVE_WRITES, Boolean.TRUE.toString());
    }
    if (consistencyLevel != null) {
        // Client-level consistency is the default; per-request options below may override it.
        headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, consistencyLevel.toString());
    }
    if (options == null) {
        return headers;
    }
    Map<String, String> customOptions = options.getHeaders();
    if (customOptions != null) {
        headers.putAll(customOptions);
    }
    if (options.getAccessCondition() != null) {
        if (options.getAccessCondition().getType() == AccessConditionType.IF_MATCH) {
            headers.put(HttpConstants.HttpHeaders.IF_MATCH, options.getAccessCondition().getCondition());
        } else {
            // Statement continues on the next line with the condition value.
            headers.put(HttpConstants.HttpHeaders.IF_NONE_MATCH,
options.getAccessCondition().getCondition());
        }
    }
    if (options.getConsistencyLevel() != null) {
        // Per-request consistency overrides the client default set earlier.
        headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, options.getConsistencyLevel().toString());
    }
    if (options.getIndexingDirective() != null) {
        headers.put(HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, options.getIndexingDirective().toString());
    }
    if (options.getPostTriggerInclude() != null && options.getPostTriggerInclude().size() > 0) {
        // Trigger lists are sent as a comma-separated header value.
        String postTriggerInclude = StringUtils.join(options.getPostTriggerInclude(), ",");
        headers.put(HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, postTriggerInclude);
    }
    if (options.getPreTriggerInclude() != null && options.getPreTriggerInclude().size() > 0) {
        String preTriggerInclude = StringUtils.join(options.getPreTriggerInclude(), ",");
        headers.put(HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, preTriggerInclude);
    }
    if (!Strings.isNullOrEmpty(options.getSessionToken())) {
        headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, options.getSessionToken());
    }
    if (options.getResourceTokenExpirySeconds() != null) {
        headers.put(HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY,
            String.valueOf(options.getResourceTokenExpirySeconds()));
    }
    if (options.getOfferThroughput() != null && options.getOfferThroughput() >= 0) {
        // Explicit throughput wins over a named offer type.
        headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, options.getOfferThroughput().toString());
    } else if (options.getOfferType() != null) {
        headers.put(HttpConstants.HttpHeaders.OFFER_TYPE, options.getOfferType());
    }
    if (options.isPopulateQuotaInfo()) {
        headers.put(HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, String.valueOf(true));
    }
    if (options.isScriptLoggingEnabled()) {
        headers.put(HttpConstants.HttpHeaders.SCRIPT_ENABLE_LOGGING, String.valueOf(true));
    }
    return headers;
}

// Resolves the target collection asynchronously, then stamps partition-key info onto the request.
// Statement continues on the next line.
private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request,
        String contentAsString, Document document, RequestOptions options) {
    Mono<Utils.ValueHolder<DocumentCollection>> collectionObs =
this.collectionCache.resolveCollectionAsync(request);
    return collectionObs
        .map(collectionValueHolder -> {
            // Mutates the request in place; the same instance flows downstream.
            addPartitionKeyInformation(request, contentAsString, document, options, collectionValueHolder.v);
            return request;
        });
}

// Overload taking an already-resolving collection observable (avoids a second cache lookup).
private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request,
        String contentAsString, Object document, RequestOptions options,
        Mono<Utils.ValueHolder<DocumentCollection>> collectionObs) {
    return collectionObs.map(collectionValueHolder -> {
        addPartitionKeyInformation(request, contentAsString, document, options, collectionValueHolder.v);
        return request;
    });
}

// Determines the effective partition key for the operation, in priority order:
// explicit PartitionKey.NONE, explicit key from options, empty key for non-partitioned
// collections, or extraction from the document body. Throws if none of these apply.
private void addPartitionKeyInformation(RxDocumentServiceRequest request, String contentAsString,
        Object objectDoc, RequestOptions options, DocumentCollection collection) {
    PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey();
    PartitionKeyInternal partitionKeyInternal = null;
    if (options != null && options.getPartitionKey() != null && options.getPartitionKey().equals(PartitionKey.NONE)){
        partitionKeyInternal = BridgeInternal.getNonePartitionKey(partitionKeyDefinition);
    } else if (options != null && options.getPartitionKey() != null) {
        partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(options.getPartitionKey());
    } else if (partitionKeyDefinition == null || partitionKeyDefinition.getPaths().size() == 0) {
        // Migrated collections provided with a partitionKey definition but no paths count as non-partitioned here.
        partitionKeyInternal = PartitionKeyInternal.getEmpty();
    } else if (contentAsString != null) {
        CosmosItemProperties cosmosItemProperties;
        if (objectDoc instanceof CosmosItemProperties) {
            cosmosItemProperties = (CosmosItemProperties) objectDoc;
        } else {
            // Re-parse the serialized body so the partition-key path can be read from it.
            cosmosItemProperties = new CosmosItemProperties(contentAsString);
        }
        partitionKeyInternal = extractPartitionKeyValueFromDocument(cosmosItemProperties, partitionKeyDefinition);
    } else {
        throw new UnsupportedOperationException("PartitionKey value must be supplied for this operation.");
    }
    request.setPartitionKeyInternal(partitionKeyInternal);
request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson()));
}

// Extracts the partition-key value from a document via the first (and only supported)
// partition-key path. Missing values / ObjectNode placeholders map to the NONE key.
// Returns null when the definition itself is null.
private static PartitionKeyInternal extractPartitionKeyValueFromDocument(
    CosmosItemProperties document,
    PartitionKeyDefinition partitionKeyDefinition) {
    if (partitionKeyDefinition != null) {
        String path = partitionKeyDefinition.getPaths().iterator().next();
        List<String> parts = PathParser.getPathParts(path);
        if (parts.size() >= 1) {
            Object value = document.getObjectByPath(parts);
            if (value == null || value.getClass() == ObjectNode.class) {
                value = BridgeInternal.getNonePartitionKey(partitionKeyDefinition);
            }
            if (value instanceof PartitionKeyInternal) {
                return (PartitionKeyInternal) value;
            } else {
                return PartitionKeyInternal.fromObjectArray(Collections.singletonList(value), false);
            }
        }
    }
    return null;
}

// Builds a Create/Upsert document request with serialized body and partition-key headers.
// NOTE(review): disableAutomaticIdGeneration is accepted but not referenced in this body —
// presumably handled elsewhere (e.g. during serialization); verify against callers.
private Mono<RxDocumentServiceRequest> getCreateDocumentRequest(String documentCollectionLink, Object document,
        RequestOptions options, boolean disableAutomaticIdGeneration, OperationType operationType) {
    if (StringUtils.isEmpty(documentCollectionLink)) {
        throw new IllegalArgumentException("documentCollectionLink");
    }
    if (document == null) {
        throw new IllegalArgumentException("document");
    }
    String content = toJsonString(document, mapper);
    String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT);
    Map<String, String> requestHeaders = this.getRequestHeaders(options);
    RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Document, path,
        requestHeaders, options, content);
    Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request);
    return addPartitionKeyInformation(request, content, document, options, collectionObs);
}

// Stamps date, authorization, content-type and accept headers onto every outgoing request.
// Condition continues on the next line.
private void populateHeaders(RxDocumentServiceRequest request, RequestVerb httpMethod) {
    request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123());
    if (this.masterKeyOrResourceToken != null ||
this.resourceTokensMap != null || this.tokenResolver != null || this.cosmosKeyCredential != null) {
        String resourceName = request.getResourceAddress();
        String authorization = this.getUserAuthorizationToken(
            resourceName, request.getResourceType(), httpMethod, request.getHeaders(),
            AuthorizationTokenType.PrimaryMasterKey, request.properties);
        try {
            // The token is URL-encoded before being placed in the header.
            authorization = URLEncoder.encode(authorization, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is always available, so this is effectively unreachable.
            throw new IllegalStateException("Failed to encode authtoken.", e);
        }
        request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization);
    }
    if ((RequestVerb.POST.equals(httpMethod) || RequestVerb.PUT.equals(httpMethod))
            && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) {
        request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON);
    }
    if (!request.getHeaders().containsKey(HttpConstants.HttpHeaders.ACCEPT)) {
        request.getHeaders().put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON);
    }
}

// Resolves the authorization token for a request, trying the configured credential
// sources in priority order: custom token resolver, key credential, master key /
// single resource token, and finally the resource-tokens map.
// Ternary continues on the next line.
@Override
public String getUserAuthorizationToken(String resourceName,
                                        ResourceType resourceType,
                                        RequestVerb requestVerb,
                                        Map<String, String> headers,
                                        AuthorizationTokenType tokenType,
                                        Map<String, Object> properties) {

    if (this.tokenResolver != null) {
        return this.tokenResolver.getAuthorizationToken(requestVerb, resourceName, this.resolveCosmosResourceType(resourceType),
                properties != null ?
Collections.unmodifiableMap(properties) : null);
    } else if (cosmosKeyCredential != null) {
        return this.authorizationTokenProvider.generateKeyAuthorizationSignature(requestVerb, resourceName,
            resourceType, headers);
    } else if (masterKeyOrResourceToken != null && hasAuthKeyResourceToken && resourceTokensMap == null) {
        // A single resource token was supplied directly; use it as-is.
        return masterKeyOrResourceToken;
    } else {
        assert resourceTokensMap != null;
        if(resourceType.equals(ResourceType.DatabaseAccount)) {
            return this.firstResourceTokenFromPermissionFeed;
        }
        return ResourceTokenAuthorizationHelper.getAuthorizationTokenUsingResourceTokens(resourceTokensMap, requestVerb, resourceName, headers);
    }
}

// Maps the internal ResourceType enum onto the public CosmosResourceType;
// anything without a same-named constant falls back to System.
private CosmosResourceType resolveCosmosResourceType(ResourceType resourceType) {
    try {
        return CosmosResourceType.valueOf(resourceType.toString());
    } catch (IllegalArgumentException e) {
        return CosmosResourceType.System;
    }
}

// Records the session token from a response into the session container.
void captureSessionToken(RxDocumentServiceRequest request, RxDocumentServiceResponse response) {
    this.sessionContainer.setSessionToken(request, response.getResponseHeaders());
}

// POSTs a create request through the appropriate store proxy, updating retry context on retries.
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request, DocumentClientRetryPolicy retryPolicy) {
    populateHeaders(request, RequestVerb.POST);
    RxStoreModel storeProxy = this.getStoreProxy(request);
    if(request.requestContext != null && retryPolicy.getRetryCount() > 0) {
        retryPolicy.updateEndTime();
        request.requestContext.updateRetryContext(retryPolicy, true);
    }
    return storeProxy.processMessage(request);
}

// POSTs with the is-upsert header set; the return statement continues on the next line.
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) {
    populateHeaders(request, RequestVerb.POST);
    Map<String, String> headers = request.getHeaders();
    // headers can never be null, since it is initialized even when no request options are specified
    assert (headers != null);
    headers.put(HttpConstants.HttpHeaders.IS_UPSERT, "true");
    if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) {
        documentClientRetryPolicy.updateEndTime();
        request.requestContext.updateRetryContext(documentClientRetryPolicy, true);
    }
return getStoreProxy(request).processMessage(request)
        .map(response -> {
            // Upserts capture the session token so subsequent reads see their own writes.
            this.captureSessionToken(request, response);
            return response;
        }
    );
}

// PUTs a replace request, updating retry context on retries.
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) {
    populateHeaders(request, RequestVerb.PUT);
    if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) {
        documentClientRetryPolicy.updateEndTime();
        request.requestContext.updateRetryContext(documentClientRetryPolicy, true);
    }
    return getStoreProxy(request).processMessage(request);
}

// Creates a document. When no explicit partition key is given, wraps the retry policy
// so stale collection-cache entries (partition-key mismatch) are retried.
@Override
public Mono<ResourceResponse<Document>> createDocument(String collectionLink, Object document,
                                                       RequestOptions options, boolean disableAutomaticIdGeneration) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    if (options == null || options.getPartitionKey() == null) {
        requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options);
    }

    DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy;
    return ObservableHelper.inlineIfPossibleAsObs(() -> createDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), requestRetryPolicy);
}

// Builds the create request asynchronously and issues it; errors surface as Mono.error.
private Mono<ResourceResponse<Document>> createDocumentInternal(String collectionLink, Object document,
                                                                RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy requestRetryPolicy) {
    try {
        logger.debug("Creating a Document. collectionLink: [{}]", collectionLink);

        Mono<RxDocumentServiceRequest> requestObs = getCreateDocumentRequest(collectionLink, document,
            options, disableAutomaticIdGeneration, OperationType.Create);

        Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> {
            if (requestRetryPolicy != null) {
                requestRetryPolicy.onBeforeSendRequest(request);
            }
            return create(request, requestRetryPolicy);
        });

        return responseObservable
            .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));

    } catch (Exception e) {
        logger.debug("Failure in creating a document due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// Upserts a document; same partition-key-mismatch retry wrapping as createDocument.
@Override
public Mono<ResourceResponse<Document>> upsertDocument(String collectionLink, Object document,
                                                       RequestOptions options, boolean disableAutomaticIdGeneration) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    if (options == null || options.getPartitionKey() == null) {
        requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options);
    }
    DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy;
    return ObservableHelper.inlineIfPossibleAsObs(() -> upsertDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), finalRetryPolicyInstance);
}

// Builds the upsert request asynchronously and issues it.
private Mono<ResourceResponse<Document>> upsertDocumentInternal(String collectionLink, Object document,
                                                                RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        logger.debug("Upserting a Document. collectionLink: [{}]", collectionLink);

        Mono<RxDocumentServiceRequest> reqObs = getCreateDocumentRequest(collectionLink, document,
            options, disableAutomaticIdGeneration, OperationType.Upsert);

        Mono<RxDocumentServiceResponse> responseObservable = reqObs.flatMap(request -> {
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return upsert(request, retryPolicyInstance);
        });

        return responseObservable
            .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));
    } catch (Exception e) {
        logger.debug("Failure in upserting a document due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// Replaces a document addressed by link; derives the collection link for the
// partition-key-mismatch retry policy when no explicit key is supplied.
@Override
public Mono<ResourceResponse<Document>> replaceDocument(String documentLink, Object document,
                                                        RequestOptions options) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    if (options == null || options.getPartitionKey() == null) {
        String collectionLink = Utils.getCollectionName(documentLink);
        requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options);
    }
    DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy;
    return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(documentLink, document, options, finalRequestRetryPolicy), requestRetryPolicy);
}

// Validates args, converts the POJO to a typed Document, then delegates.
private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Object document,
                                                                 RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(documentLink)) {
            throw new IllegalArgumentException("documentLink");
        }

        if (document == null) {
            throw new IllegalArgumentException("document");
        }

        Document typedDocument = documentFromObject(document, mapper);

        return this.replaceDocumentInternal(documentLink, typedDocument, options, retryPolicyInstance);

    } catch (Exception e) {
        logger.debug("Failure in replacing a document due to [{}]", e.getMessage());
        return Mono.error(e);
    }
}
@Override
// Replaces a document using its own self-link for addressing and retry-policy setup.
public Mono<ResourceResponse<Document>> replaceDocument(Document document, RequestOptions options) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    if (options == null || options.getPartitionKey() == null) {
        String collectionLink = document.getSelfLink();
        requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options);
    }
    DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy;
    return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(document, options, finalRequestRetryPolicy), requestRetryPolicy);
}

// Null-checks and delegates to the link-based overload.
private Mono<ResourceResponse<Document>> replaceDocumentInternal(Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (document == null) {
            throw new IllegalArgumentException("document");
        }

        return this.replaceDocumentInternal(document.getSelfLink(), document, options, retryPolicyInstance);

    } catch (Exception e) {
        // NOTE(review): message says "database" for a document replace — likely a copy/paste slip in the log text.
        logger.debug("Failure in replacing a database due to [{}]", e.getMessage());
        return Mono.error(e);
    }
}

// Core replace path: serialize, resolve partition key, PUT through the store proxy.
private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Document document,
                                                                 RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {

    if (document == null) {
        throw new IllegalArgumentException("document");
    }

    logger.debug("Replacing a Document. documentLink: [{}]", documentLink);
    final String path = Utils.joinPath(documentLink, null);
    final Map<String, String> requestHeaders = getRequestHeaders(options);
    String content = toJsonString(document, mapper);
    final RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
        ResourceType.Document, path, requestHeaders, options, content);

    Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request);
    Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, content, document, options, collectionObs);

    return requestObs.flatMap(req -> {
        // NOTE(review): 'req' is ignored in favor of the captured 'request'; benign here because
        // addPartitionKeyInformation maps back to the very same request instance — confirm before refactoring.
        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }
        return replace(request, retryPolicyInstance)
            .map(resp -> toResourceResponse(resp, Document.class));}
    );
}

// Deletes a document addressed by link.
@Override
public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, RequestOptions options) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, options, requestRetryPolicy), requestRetryPolicy);
}

// Builds the delete request; partition key comes from options (no document body to extract from).
private Mono<ResourceResponse<Document>> deleteDocumentInternal(String documentLink, RequestOptions options,
                                                                DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(documentLink)) {
            throw new IllegalArgumentException("documentLink");
        }

        logger.debug("Deleting a Document. documentLink: [{}]", documentLink);
        String path = Utils.joinPath(documentLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
            ResourceType.Document, path, requestHeaders, options);

        Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request);

        Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs);

        return requestObs.flatMap(req -> {
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(req);
            }
            return this.delete(req, retryPolicyInstance)
                .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));});

    } catch (Exception e) {
        logger.debug("Failure in deleting a document due to [{}]", e.getMessage());
        return Mono.error(e);
    }
}

// Reads a single document by link.
@Override
public Mono<ResourceResponse<Document>> readDocument(String documentLink, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> readDocumentInternal(documentLink, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<Document>> readDocumentInternal(String documentLink, RequestOptions options,
                                                              DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(documentLink)) {
            throw new IllegalArgumentException("documentLink");
        }

        logger.debug("Reading a Document. documentLink: [{}]", documentLink);
        String path = Utils.joinPath(documentLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
            ResourceType.Document, path, requestHeaders, options);

        Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request);

        Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs);

        return requestObs.flatMap(req -> {
            // NOTE(review): 'req' unused, captured 'request' used instead — same instance in practice (see replace path).
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.read(request, retryPolicyInstance).map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));
        });

    } catch (Exception e) {
        logger.debug("Failure in reading a document due to [{}]", e.getMessage());
        return Mono.error(e);
    }
}

// Reads all documents of a collection by issuing a SELECT * query.
@Override
public Flux<FeedResponse<Document>> readDocuments(String collectionLink, FeedOptions options) {

    if (StringUtils.isEmpty(collectionLink)) {
        throw new IllegalArgumentException("collectionLink");
    }

    return queryDocuments(collectionLink, "SELECT * FROM r", options);
}

// Point-read-many: groups (id, partition key) pairs by their owning partition-key range,
// issues one query per range, and merges results into a single synthesized FeedResponse.
// The return statement continues on the next line.
@Override
public <T> Mono<FeedResponse<T>> readMany(
    List<Pair<String, PartitionKey>> itemKeyList,
    String collectionLink,
    FeedOptions options,
    Class<T> klass) {
    RxDocumentServiceRequest request = RxDocumentServiceRequest.create(
        OperationType.Query,
        ResourceType.Document,
        collectionLink, null
    );
    Mono<Utils.ValueHolder<DocumentCollection>> collectionObs =
        collectionCache.resolveCollectionAsync(request);
    return collectionObs
        .flatMap(documentCollectionResourceResponse -> {
                final DocumentCollection collection = documentCollectionResourceResponse.v;
                if (collection == null) {
                    throw new IllegalStateException("Collection cannot be null");
                }

                Mono<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = partitionKeyRangeCache
                    .tryLookupAsync(collection
                        .getResourceId(), null, null);
                return
valueHolderMono.flatMap(collectionRoutingMapValueHolder -> {
    // Buckets each (id, partition key) pair under the partition-key range that owns
    // its effective partition key, then fans out one query per range.
    Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap = new HashMap<>();
    CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v;
    if (routingMap == null) {
        throw new IllegalStateException("Failed to get routing map.");
    }
    itemKeyList
        .forEach(stringPartitionKeyPair -> {

            String effectivePartitionKeyString = PartitionKeyInternalHelper
                .getEffectivePartitionKeyString(BridgeInternal
                        .getPartitionKeyInternal(stringPartitionKeyPair
                            .getRight()),
                    collection
                        .getPartitionKey());

            // Find the partition key range and the pairs under it
            PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString);

            if (partitionRangeItemKeyMap.get(range) == null) {
                List<Pair<String, PartitionKey>> list = new ArrayList<>();
                list.add(stringPartitionKeyPair);
                partitionRangeItemKeyMap.put(range, list);
            } else {
                List<Pair<String, PartitionKey>> pairs =
                    partitionRangeItemKeyMap.get(range);
                pairs.add(stringPartitionKeyPair);
                partitionRangeItemKeyMap.put(range, pairs);
            }

        });

    Set<PartitionKeyRange> partitionKeyRanges = partitionRangeItemKeyMap.keySet();
    List<PartitionKeyRange> ranges = new ArrayList<>();
    ranges.addAll(partitionKeyRanges);

    // Create the range query map: one IN/OR query per partition-key range.
    Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap;
    rangeQueryMap = getRangeQueryMap(partitionRangeItemKeyMap, collection.getPartitionKey());

    // The outer query text is a placeholder; the per-range specs above carry the real queries.
    String sqlQuery = "this is dummy and only used in creating " +
        "ParallelDocumentQueryExecutioncontext, but not used";

    // Getting the readMany results and aggregating charge/results across all pages.
    return createReadManyQuery(collectionLink, new SqlQuerySpec(sqlQuery), options,
        Document.class, ResourceType.Document, collection,
        Collections.unmodifiableMap(rangeQueryMap))
        .collectList()
        .map(feedList -> {
            // Aggregating the result to construct a FeedResponse and aggregate RUs.
            List<T> finalList = new ArrayList<T>();
            HashMap<String, String> headers = new HashMap<>();
            double requestCharge = 0;
            for (FeedResponse<Document> page : feedList) {
                requestCharge += page.getRequestCharge();
                finalList.addAll(page.getResults().stream().map(document -> document.toObject(klass)).collect(Collectors.toList()));
            }
headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double
                .toString(requestCharge));
            FeedResponse<T> frp = BridgeInternal
                .createFeedResponse(finalList, headers);
            return frp;
        });
        });
            }
        );
}

// Builds one SqlQuerySpec per partition-key range. When the partition key IS the id,
// a compact "id IN (...)" query is used; otherwise a pairwise (id AND pk) OR-chain.
private Map<PartitionKeyRange, SqlQuerySpec> getRangeQueryMap(
    Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap,
    PartitionKeyDefinition partitionKeyDefinition) {
    Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap = new HashMap<>();
    String partitionKeySelector = createPkSelector(partitionKeyDefinition);

    for(Map.Entry<PartitionKeyRange, List<Pair<String, PartitionKey>>> entry: partitionRangeItemKeyMap.entrySet()) {

        SqlQuerySpec sqlQuerySpec;
        if (partitionKeySelector.equals("[\"id\"]")) {
            sqlQuerySpec = createReadManyQuerySpecPartitionKeyIdSame(entry.getValue(), partitionKeySelector);
        } else {
            sqlQuerySpec = createReadManyQuerySpec(entry.getValue(), partitionKeySelector);
        }
        // Add query for this partition to rangeQueryMap
        rangeQueryMap.put(entry.getKey(), sqlQuerySpec);

    }

    return rangeQueryMap;
}

// "SELECT * FROM c WHERE c.id IN (@param0, ...)" — only valid when the partition key path is /id.
// Pairs whose id and pk values differ cannot match any row and are skipped entirely.
private SqlQuerySpec createReadManyQuerySpecPartitionKeyIdSame(List<Pair<String, PartitionKey>> idPartitionKeyPairList,
                                                              String partitionKeySelector) {
    StringBuilder queryStringBuilder = new StringBuilder();
    SqlParameterList parameters = new SqlParameterList();

    queryStringBuilder.append("SELECT * FROM c WHERE c.id IN ( ");
    for (int i = 0; i < idPartitionKeyPairList.size(); i++) {
        Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i);

        String idValue = pair.getLeft();
        String idParamName = "@param" + i;

        PartitionKey pkValueAsPartitionKey = pair.getRight();
        Object pkValue = BridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey);
        if (!Objects.equals(idValue, pkValue)) {
            // this is sanity check to ensure id and pk are the same
            continue;
        }

        parameters.add(new SqlParameter(idParamName, idValue));
        queryStringBuilder.append(idParamName);

        // NOTE(review): the trailing-comma guard indexes the full list, so a skipped last
        // element can leave a dangling comma — confirm whether mismatched pairs can reach here.
        if (i < idPartitionKeyPairList.size() - 1) {
            queryStringBuilder.append(", ");
        }
    }
    queryStringBuilder.append(" )");

    return new SqlQuerySpec(queryStringBuilder.toString(), parameters);
}

// Declaration continues on the next line.
private SqlQuerySpec
createReadManyQuerySpec(List<Pair<String, PartitionKey>> idPartitionKeyPairList, String partitionKeySelector) {
    // Builds "SELECT * FROM c WHERE ((c.id = @p1 AND c[pk] = @p0) OR ...)" with two
    // parameters per pair: even indices for pk values, odd indices for ids.
    StringBuilder queryStringBuilder = new StringBuilder();
    SqlParameterList parameters = new SqlParameterList();

    queryStringBuilder.append("SELECT * FROM c WHERE ( ");
    for (int i = 0; i < idPartitionKeyPairList.size(); i++) {
        Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i);

        PartitionKey pkValueAsPartitionKey = pair.getRight();
        Object pkValue = BridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey);
        String pkParamName = "@param" + (2 * i);
        parameters.add(new SqlParameter(pkParamName, pkValue));

        String idValue = pair.getLeft();
        String idParamName = "@param" + (2 * i + 1);
        parameters.add(new SqlParameter(idParamName, idValue));

        queryStringBuilder.append("(");
        queryStringBuilder.append("c.id = ");
        queryStringBuilder.append(idParamName);
        queryStringBuilder.append(" AND ");
        queryStringBuilder.append(" c");
        // partition key def
        queryStringBuilder.append(partitionKeySelector);
        queryStringBuilder.append((" = "));
        queryStringBuilder.append(pkParamName);
        queryStringBuilder.append(" )");

        if (i < idPartitionKeyPairList.size() - 1) {
            queryStringBuilder.append(" OR ");
        }
    }
    queryStringBuilder.append(" )");

    return new SqlQuerySpec(queryStringBuilder.toString(), parameters);
}

// Turns a partition-key path (e.g. "/pk") into a bracketed selector (e.g. ["pk"]).
// NOTE(review): the replace of '"' with '\\' looks like quote-escaping but replaces
// rather than prefixes — verify intended behavior for paths containing quotes.
private String createPkSelector(PartitionKeyDefinition partitionKeyDefinition) {
    return partitionKeyDefinition.getPaths()
        .stream()
        .map(pathPart -> StringUtils.substring(pathPart, 1)) // skip starting /
        .map(pathPart -> StringUtils.replace(pathPart, "\"", "\\")) // escape quote
        .map(part -> "[\"" + part + "\"]")
        .collect(Collectors.joining());
}

// NOTE(review): name is misspelled ("Curent") and this helper appears unused in the
// visible code; kept as-is since it is private — candidate for removal after a usage check.
private String getCurentParamName(int paramCnt){
    return "@param" + paramCnt;
}

// Fans out the readMany per-range queries through the query execution pipeline.
// The first statement of the body continues on the next line.
private <T extends Resource> Flux<FeedResponse<T>> createReadManyQuery(
    String parentResourceLink,
    SqlQuerySpec sqlQuery,
    FeedOptions options,
    Class<T> klass,
    ResourceType resourceTypeEnum,
    DocumentCollection collection,
    Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap) {
    UUID activityId = Utils.randomUUID();
IDocumentQueryClient queryClient = DocumentQueryClientImpl(RxDocumentClientImpl.this);
    Flux<? extends IDocumentQueryExecutionContext<T>> executionContext =
        DocumentQueryExecutionContextFactory.createReadManyQueryAsync(queryClient, collection.getResourceId(),
            sqlQuery,
            rangeQueryMap,
            options,
            collection.getResourceId(),
            parentResourceLink,
            activityId,
            klass,
            resourceTypeEnum);
    return executionContext.flatMap(IDocumentQueryExecutionContext::executeAsync);
}

// Raw-SQL overload; wraps the text in a SqlQuerySpec and delegates.
@Override
public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, String query, FeedOptions options) {
    return queryDocuments(collectionLink, new SqlQuerySpec(query), options);
}

// Adapter exposing this client's caches, consistency settings and query pipeline
// to the query-execution machinery via the IDocumentQueryClient interface.
private IDocumentQueryClient DocumentQueryClientImpl(RxDocumentClientImpl rxDocumentClientImpl) {

    return new IDocumentQueryClient () {

        @Override
        public RxCollectionCache getCollectionCache() {
            return RxDocumentClientImpl.this.collectionCache;
        }

        @Override
        public RxPartitionKeyRangeCache getPartitionKeyRangeCache() {
            return RxDocumentClientImpl.this.partitionKeyRangeCache;
        }

        @Override
        public IRetryPolicyFactory getResetSessionTokenRetryPolicy() {
            return RxDocumentClientImpl.this.resetSessionTokenRetryPolicy;
        }

        @Override
        public ConsistencyLevel getDefaultConsistencyLevelAsync() {
            return RxDocumentClientImpl.this.gatewayConfigurationReader.getDefaultConsistencyLevel();
        }

        @Override
        public ConsistencyLevel getDesiredConsistencyLevelAsync() {
            // NOTE(review): should be settable on the client at some point; currently the client-level value.
            return RxDocumentClientImpl.this.consistencyLevel;
        }

        @Override
        public Mono<RxDocumentServiceResponse> executeQueryAsync(RxDocumentServiceRequest request) {
            return RxDocumentClientImpl.this.query(request).single();
        }

        @Override
        public QueryCompatibilityMode getQueryCompatibilityMode() {
            // TODO: application cannot change this yet.
            return QueryCompatibilityMode.Default;
        }

        @Override
        public Mono<RxDocumentServiceResponse> readFeedAsync(RxDocumentServiceRequest request) {
            // TODO: not implemented; callers must not rely on this path.
            return null;
        }
    };
}

// SqlQuerySpec overload; the return statement continues on the next line.
@Override
public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, SqlQuerySpec querySpec,
                                                   FeedOptions options) {
    return
createQuery(collectionLink, querySpec, options, Document.class, ResourceType.Document);
}

// Change-feed query over a collection's documents.
@Override
public Flux<FeedResponse<Document>> queryDocumentChangeFeed(final String collectionLink,
    final ChangeFeedOptions changeFeedOptions) {
    if (StringUtils.isEmpty(collectionLink)) {
        throw new IllegalArgumentException("collectionLink");
    }

    ChangeFeedQueryImpl<Document> changeFeedQueryImpl = new ChangeFeedQueryImpl<Document>(this,
        ResourceType.Document, Document.class, collectionLink, changeFeedOptions);
    return changeFeedQueryImpl.executeAsync();
}

// Reads the physical partition-key ranges of a collection (used for routing/fan-out).
@Override
public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRanges(final String collectionLink,
    FeedOptions options) {
    if (StringUtils.isEmpty(collectionLink)) {
        throw new IllegalArgumentException("collectionLink");
    }

    return readFeed(options, ResourceType.PartitionKeyRange, PartitionKeyRange.class,
        Utils.joinPath(collectionLink, Paths.PARTITION_KEY_RANGES_PATH_SEGMENT));
}

// Validates inputs and assembles the service request for stored-procedure create/upsert.
private RxDocumentServiceRequest getStoredProcedureRequest(String collectionLink,
    StoredProcedure storedProcedure, RequestOptions options, OperationType operationType) {
    if (StringUtils.isEmpty(collectionLink)) {
        throw new IllegalArgumentException("collectionLink");
    }
    if (storedProcedure == null) {
        throw new IllegalArgumentException("storedProcedure");
    }

    validateResource(storedProcedure);

    String path = Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT);
    Map<String, String> requestHeaders = this.getRequestHeaders(options);
    RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType,
        ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options);

    return request;
}

// Validates inputs and assembles the service request for UDF create/upsert.
private RxDocumentServiceRequest getUserDefinedFunctionRequest(String collectionLink, UserDefinedFunction udf,
    RequestOptions options, OperationType operationType) {
    if (StringUtils.isEmpty(collectionLink)) {
        throw new IllegalArgumentException("collectionLink");
    }
    if (udf == null) {
        throw new IllegalArgumentException("udf");
    }
    validateResource(udf);

    String path = Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT);
    Map<String, String> requestHeaders = this.getRequestHeaders(options);
    RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType,
        ResourceType.UserDefinedFunction, path, udf, requestHeaders, options);

    return request;
}

// Public entry point: create a stored procedure with session-token-reset retry semantics.
@Override
public Mono<ResourceResponse<StoredProcedure>> createStoredProcedure(String collectionLink,
    StoredProcedure storedProcedure, RequestOptions options) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> createStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy),
        requestRetryPolicy);
}

private Mono<ResourceResponse<StoredProcedure>> createStoredProcedureInternal(String collectionLink,
    StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    // Synchronous validation failures are surfaced as Mono.error rather than thrown.
    try {
        logger.debug("Creating a StoredProcedure. collectionLink: [{}], storedProcedure id [{}]",
            collectionLink, storedProcedure.getId());
        RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options,
            OperationType.Create);
        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.create(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, StoredProcedure.class));
    } catch (Exception e) {
        logger.debug("Failure in creating a StoredProcedure due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedure(String collectionLink,
    StoredProcedure storedProcedure, RequestOptions options) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> upsertStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy),
        requestRetryPolicy);
}

private Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedureInternal(String collectionLink,
    StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        logger.debug("Upserting a StoredProcedure. collectionLink: [{}], storedProcedure id [{}]",
            collectionLink, storedProcedure.getId());
        RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options,
            OperationType.Upsert);
        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.upsert(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, StoredProcedure.class));
    } catch (Exception e) {
        logger.debug("Failure in upserting a StoredProcedure due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedure(StoredProcedure storedProcedure,
    RequestOptions options) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> replaceStoredProcedureInternal(storedProcedure, options, requestRetryPolicy), requestRetryPolicy);
}

private Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedureInternal(StoredProcedure storedProcedure,
    RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (storedProcedure == null) {
            throw new IllegalArgumentException("storedProcedure");
        }
        logger.debug("Replacing a StoredProcedure. storedProcedure id [{}]", storedProcedure.getId());

        RxDocumentClientImpl.validateResource(storedProcedure);

        // Replace targets the sproc's self link rather than a collection-scoped path.
        String path = Utils.joinPath(storedProcedure.getSelfLink(), null);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
            ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.replace(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, StoredProcedure.class));
    } catch (Exception e) {
        logger.debug("Failure in replacing a StoredProcedure due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedure(String storedProcedureLink,
    RequestOptions options) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> deleteStoredProcedureInternal(storedProcedureLink, options, requestRetryPolicy),
        requestRetryPolicy);
}

private Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedureInternal(String storedProcedureLink,
    RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(storedProcedureLink)) {
            throw new IllegalArgumentException("storedProcedureLink");
        }

        logger.debug("Deleting a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink);
        String path = Utils.joinPath(storedProcedureLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
            ResourceType.StoredProcedure, path, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.delete(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, StoredProcedure.class));
    } catch (Exception e) {
        logger.debug("Failure in deleting a StoredProcedure due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<StoredProcedure>> readStoredProcedure(String storedProcedureLink,
    RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> readStoredProcedureInternal(storedProcedureLink, options, retryPolicyInstance),
        retryPolicyInstance);
}

private Mono<ResourceResponse<StoredProcedure>> readStoredProcedureInternal(String storedProcedureLink,
    RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(storedProcedureLink)) {
            throw new IllegalArgumentException("storedProcedureLink");
        }

        logger.debug("Reading a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink);
        String path = Utils.joinPath(storedProcedureLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
            ResourceType.StoredProcedure, path, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.read(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, StoredProcedure.class));
    } catch (Exception e) {
        logger.debug("Failure in reading a StoredProcedure due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// Reads all stored procedures of a collection as a paged feed.
@Override
public Flux<FeedResponse<StoredProcedure>> readStoredProcedures(String collectionLink, FeedOptions options) {
    if (StringUtils.isEmpty(collectionLink)) {
        throw new IllegalArgumentException("collectionLink");
    }

    return readFeed(options, ResourceType.StoredProcedure, StoredProcedure.class,
        Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT));
}

@Override
public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, String query,
    FeedOptions options) {
    return queryStoredProcedures(collectionLink, new SqlQuerySpec(query), options);
}

@Override
public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink,
    SqlQuerySpec querySpec, FeedOptions options) {
    return createQuery(collectionLink, querySpec, options, StoredProcedure.class, ResourceType.StoredProcedure);
}

// Execute overload without request options.
@Override
public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink,
    Object[] procedureParams) {
    return this.executeStoredProcedure(storedProcedureLink, null, procedureParams);
}

@Override
public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, RequestOptions options,
    Object[] procedureParams) {
    DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return
ObservableHelper.inlineIfPossibleAsObs(
    () -> executeStoredProcedureInternal(storedProcedureLink, options, procedureParams, documentClientRetryPolicy),
    documentClientRetryPolicy);
}

private Mono<StoredProcedureResponse> executeStoredProcedureInternal(String storedProcedureLink,
    RequestOptions options, Object[] procedureParams, DocumentClientRetryPolicy retryPolicy) {
    try {
        logger.debug("Executing a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink);
        String path = Utils.joinPath(storedProcedureLink, null);

        Map<String, String> requestHeaders = getRequestHeaders(options);
        requestHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON);
        // Parameters are serialized into the request body; null means "no arguments".
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ExecuteJavaScript,
            ResourceType.StoredProcedure, path,
            procedureParams != null ? RxDocumentClientImpl.serializeProcedureParams(procedureParams) : "",
            requestHeaders, options);

        Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options);
        // NOTE(review): the lambda ignores `req` and reuses `request`; this is only correct if
        // addPartitionKeyInformation mutates and returns the same request instance — confirm.
        return reqObs.flatMap(req -> create(request, retryPolicy)
            .map(response -> {
                this.captureSessionToken(request, response);
                return toStoredProcedureResponse(response);
            }));
    } catch (Exception e) {
        logger.debug("Failure in executing a StoredProcedure due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<Trigger>> createTrigger(String collectionLink, Trigger trigger,
    RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> createTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<Trigger>> createTriggerInternal(String collectionLink, Trigger trigger,
    RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        logger.debug("Creating a Trigger. collectionLink [{}], trigger id [{}]", collectionLink,
            trigger.getId());
        RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options,
            OperationType.Create);
        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.create(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, Trigger.class));
    } catch (Exception e) {
        logger.debug("Failure in creating a Trigger due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<Trigger>> upsertTrigger(String collectionLink, Trigger trigger,
    RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> upsertTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<Trigger>> upsertTriggerInternal(String collectionLink, Trigger trigger,
    RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        logger.debug("Upserting a Trigger. collectionLink [{}], trigger id [{}]", collectionLink,
            trigger.getId());
        RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options,
            OperationType.Upsert);
        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.upsert(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, Trigger.class));
    } catch (Exception e) {
        logger.debug("Failure in upserting a Trigger due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// Validates inputs and assembles the service request for trigger create/upsert.
private RxDocumentServiceRequest getTriggerRequest(String collectionLink, Trigger trigger,
    RequestOptions options, OperationType operationType) {
    if (StringUtils.isEmpty(collectionLink)) {
        throw new IllegalArgumentException("collectionLink");
    }
    if (trigger == null) {
        throw new IllegalArgumentException("trigger");
    }

    RxDocumentClientImpl.validateResource(trigger);

    String path = Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT);
    Map<String, String> requestHeaders = getRequestHeaders(options);
    RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Trigger,
        path, trigger, requestHeaders, options);

    return request;
}

@Override
public Mono<ResourceResponse<Trigger>> replaceTrigger(Trigger trigger, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> replaceTriggerInternal(trigger, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<Trigger>> replaceTriggerInternal(Trigger trigger, RequestOptions options,
    DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (trigger == null) {
            throw new IllegalArgumentException("trigger");
        }

        logger.debug("Replacing a Trigger. trigger id [{}]", trigger.getId());
        RxDocumentClientImpl.validateResource(trigger);

        // Replace targets the trigger's self link rather than a collection-scoped path.
        String path = Utils.joinPath(trigger.getSelfLink(), null);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
            ResourceType.Trigger, path, trigger, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.replace(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, Trigger.class));
    } catch (Exception e) {
        logger.debug("Failure in replacing a Trigger due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<Trigger>> deleteTrigger(String triggerLink, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> deleteTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<Trigger>> deleteTriggerInternal(String triggerLink, RequestOptions options,
    DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(triggerLink)) {
            throw new IllegalArgumentException("triggerLink");
        }

        logger.debug("Deleting a Trigger. triggerLink [{}]", triggerLink);
        String path = Utils.joinPath(triggerLink, null);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
            ResourceType.Trigger, path, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.delete(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, Trigger.class));
    } catch (Exception e) {
        logger.debug("Failure in deleting a Trigger due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<Trigger>> readTrigger(String triggerLink, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> readTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<Trigger>> readTriggerInternal(String triggerLink, RequestOptions options,
    DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(triggerLink)) {
            throw new IllegalArgumentException("triggerLink");
        }

        logger.debug("Reading a Trigger. triggerLink [{}]", triggerLink);
        String path = Utils.joinPath(triggerLink, null);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
            ResourceType.Trigger, path, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.read(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, Trigger.class));
    } catch (Exception e) {
        logger.debug("Failure in reading a Trigger due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// Reads all triggers of a collection as a paged feed.
@Override
public Flux<FeedResponse<Trigger>> readTriggers(String collectionLink, FeedOptions options) {
    if (StringUtils.isEmpty(collectionLink)) {
        throw new IllegalArgumentException("collectionLink");
    }

    return readFeed(options, ResourceType.Trigger, Trigger.class,
        Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT));
}

@Override
public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, String query, FeedOptions options) {
    return queryTriggers(collectionLink, new SqlQuerySpec(query), options);
}

@Override
public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, SqlQuerySpec querySpec,
    FeedOptions options) {
    return createQuery(collectionLink, querySpec, options, Trigger.class, ResourceType.Trigger);
}

@Override
public Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunction(String collectionLink,
    UserDefinedFunction udf, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> createUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance),
        retryPolicyInstance);
}

private Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunctionInternal(String collectionLink,
    UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        logger.debug("Creating a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink, udf.getId());
        RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options,
            OperationType.Create);
        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.create(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, UserDefinedFunction.class));
    } catch (Exception e) {
        logger.debug("Failure in creating a UserDefinedFunction due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunction(String collectionLink,
    UserDefinedFunction udf, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> upsertUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance),
        retryPolicyInstance);
}

private Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunctionInternal(String collectionLink,
    UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        logger.debug("Upserting a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink,
            udf.getId());
        RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options,
            OperationType.Upsert);
        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.upsert(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, UserDefinedFunction.class));
    } catch (Exception e) {
        logger.debug("Failure in upserting a UserDefinedFunction due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunction(UserDefinedFunction udf,
    RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> replaceUserDefinedFunctionInternal(udf, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunctionInternal(UserDefinedFunction udf,
    RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (udf == null) {
            throw new IllegalArgumentException("udf");
        }

        logger.debug("Replacing a UserDefinedFunction. udf id [{}]", udf.getId());
        validateResource(udf);

        // Replace targets the UDF's self link rather than a collection-scoped path.
        String path = Utils.joinPath(udf.getSelfLink(), null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
            ResourceType.UserDefinedFunction, path, udf, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.replace(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, UserDefinedFunction.class));
    } catch (Exception e) {
        logger.debug("Failure in replacing a UserDefinedFunction due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunction(String udfLink,
    RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> deleteUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunctionInternal(String udfLink,
    RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(udfLink)) {
            throw new IllegalArgumentException("udfLink");
        }

        logger.debug("Deleting a UserDefinedFunction. udfLink [{}]", udfLink);
        String path = Utils.joinPath(udfLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
            ResourceType.UserDefinedFunction, path, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.delete(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, UserDefinedFunction.class));
    } catch (Exception e) {
        logger.debug("Failure in deleting a UserDefinedFunction due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunction(String udfLink,
    RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> readUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunctionInternal(String udfLink,
    RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(udfLink)) {
            throw new IllegalArgumentException("udfLink");
        }

        logger.debug("Reading a UserDefinedFunction. udfLink [{}]", udfLink);
        String path = Utils.joinPath(udfLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
            ResourceType.UserDefinedFunction, path, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.read(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, UserDefinedFunction.class));
    } catch (Exception e) {
        logger.debug("Failure in reading a UserDefinedFunction due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// Reads all UDFs of a collection as a paged feed.
@Override
public Flux<FeedResponse<UserDefinedFunction>> readUserDefinedFunctions(String collectionLink,
    FeedOptions options) {
    if (StringUtils.isEmpty(collectionLink)) {
        throw new IllegalArgumentException("collectionLink");
    }

    return readFeed(options, ResourceType.UserDefinedFunction, UserDefinedFunction.class,
        Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT));
}

@Override
public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, String query,
    FeedOptions options) {
    return queryUserDefinedFunctions(collectionLink, new SqlQuerySpec(query), options);
}

@Override
public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink,
    SqlQuerySpec querySpec, FeedOptions options) {
    return createQuery(collectionLink, querySpec, options, UserDefinedFunction.class,
        ResourceType.UserDefinedFunction);
}

@Override
public Mono<ResourceResponse<Conflict>> readConflict(String conflictLink, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> readConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<Conflict>> readConflictInternal(String conflictLink, RequestOptions options,
    DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(conflictLink)) {
            throw new IllegalArgumentException("conflictLink");
        }

        logger.debug("Reading a Conflict. conflictLink [{}]", conflictLink);
        String path = Utils.joinPath(conflictLink, null);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
            ResourceType.Conflict, path, requestHeaders, options);

        // Conflicts are partitioned resources: resolve partition-key routing before sending.
        Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options);

        return reqObs.flatMap(req -> {
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.read(request, retryPolicyInstance)
                .map(response -> toResourceResponse(response, Conflict.class));
        });
    } catch (Exception e) {
        logger.debug("Failure in reading a Conflict due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// Reads all conflicts of a collection as a paged feed.
@Override
public Flux<FeedResponse<Conflict>> readConflicts(String collectionLink, FeedOptions options) {
    if (StringUtils.isEmpty(collectionLink)) {
        throw new IllegalArgumentException("collectionLink");
    }

    return readFeed(options, ResourceType.Conflict, Conflict.class,
        Utils.joinPath(collectionLink, Paths.CONFLICTS_PATH_SEGMENT));
}

@Override
public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, String query, FeedOptions options) {
    return queryConflicts(collectionLink, new SqlQuerySpec(query), options);
}

@Override
public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, SqlQuerySpec querySpec,
    FeedOptions options) {
    return createQuery(collectionLink, querySpec, options, Conflict.class, ResourceType.Conflict);
}

@Override
public Mono<ResourceResponse<Conflict>> deleteConflict(String conflictLink, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> deleteConflictInternal(conflictLink,
        options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<Conflict>> deleteConflictInternal(String conflictLink, RequestOptions options,
    DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(conflictLink)) {
            throw new IllegalArgumentException("conflictLink");
        }

        logger.debug("Deleting a Conflict. conflictLink [{}]", conflictLink);
        String path = Utils.joinPath(conflictLink, null);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
            ResourceType.Conflict, path, requestHeaders, options);

        // Conflicts are partitioned resources: resolve partition-key routing before sending.
        Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options);

        return reqObs.flatMap(req -> {
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }

            return this.delete(request, retryPolicyInstance)
                .map(response -> toResourceResponse(response, Conflict.class));
        });
    } catch (Exception e) {
        logger.debug("Failure in deleting a Conflict due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<User>> createUser(String databaseLink, User user, RequestOptions options) {
    DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> createUserInternal(databaseLink, user, options, documentClientRetryPolicy),
        documentClientRetryPolicy);
}

private Mono<ResourceResponse<User>> createUserInternal(String databaseLink, User user, RequestOptions options,
    DocumentClientRetryPolicy documentClientRetryPolicy) {
    try {
        logger.debug("Creating a User. databaseLink [{}], user id [{}]", databaseLink, user.getId());
        RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Create);
        // NOTE(review): unlike the sibling *Internal methods, this one never calls
        // onBeforeSendRequest(request) on the retry policy — confirm whether that is intentional.
        return this.create(request, documentClientRetryPolicy)
            .map(response -> toResourceResponse(response, User.class));
    } catch (Exception e) {
        logger.debug("Failure in creating a User due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<User>> upsertUser(String databaseLink, User user, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(
        () -> upsertUserInternal(databaseLink, user, options, retryPolicyInstance), retryPolicyInstance);
}

private Mono<ResourceResponse<User>> upsertUserInternal(String databaseLink, User user, RequestOptions options,
    DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        logger.debug("Upserting a User. databaseLink [{}], user id [{}]", databaseLink, user.getId());
        RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Upsert);
        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }

        return this.upsert(request, retryPolicyInstance)
            .map(response -> toResourceResponse(response, User.class));
    } catch (Exception e) {
        logger.debug("Failure in upserting a User due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// Validates inputs and assembles the service request for user create/upsert.
private RxDocumentServiceRequest getUserRequest(String databaseLink, User user, RequestOptions options,
    OperationType operationType) {
    if (StringUtils.isEmpty(databaseLink)) {
        throw new IllegalArgumentException("databaseLink");
    }
    if (user == null) {
        throw new IllegalArgumentException("user");
    }

    RxDocumentClientImpl.validateResource(user);

    String path = Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT);
    Map<String, String> requestHeaders = getRequestHeaders(options);
    RxDocumentServiceRequest request =
RxDocumentServiceRequest.create(operationType, ResourceType.User, path, user, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<User>> replaceUser(User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserInternal(user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> replaceUserInternal(User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (user == null) { throw new IllegalArgumentException("user"); } logger.debug("Replacing a User. user id [{}]", user.getId()); RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(user.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.User, path, user, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in replacing a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Mono<ResourceResponse<User>> deleteUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> deleteUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Deleting a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in deleting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> readUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> readUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Reading a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in reading a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<User>> readUsers(String databaseLink, FeedOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.User, User.class, Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, String query, FeedOptions options) { return queryUsers(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(databaseLink, querySpec, options, User.class, ResourceType.User); } @Override public Mono<ResourceResponse<Permission>> createPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createPermissionInternal(userLink, permission, options, documentClientRetryPolicy), this.resetSessionTokenRetryPolicy.getRequestPolicy()); } private Mono<ResourceResponse<Permission>> createPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Create); return this.create(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in creating a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> upsertPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertPermissionInternal(userLink, permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> upsertPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in upserting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getPermissionRequest(String userLink, Permission permission, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } if (permission == null) { throw new IllegalArgumentException("permission"); } RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Permission, path, permission, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Permission>> replacePermission(Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replacePermissionInternal(permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> replacePermissionInternal(Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (permission == null) { throw new IllegalArgumentException("permission"); } logger.debug("Replacing a Permission. 
permission id [{}]", permission.getId()); RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(permission.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Permission, path, permission, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in replacing a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> deletePermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deletePermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> deletePermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Deleting a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in deleting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> readPermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readPermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> readPermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance ) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Reading a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in reading a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Permission>> readPermissions(String userLink, FeedOptions options) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } return readFeed(options, ResourceType.Permission, Permission.class, Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, String query, FeedOptions options) { return queryPermissions(userLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(userLink, querySpec, options, Permission.class, ResourceType.Permission); } @Override public Mono<ResourceResponse<Offer>> replaceOffer(Offer offer) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceOfferInternal(offer, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Offer>> replaceOfferInternal(Offer offer, DocumentClientRetryPolicy documentClientRetryPolicy) { try { if (offer == null) { throw new IllegalArgumentException("offer"); } logger.debug("Replacing an Offer. 
offer id [{}]", offer.getId()); RxDocumentClientImpl.validateResource(offer); String path = Utils.joinPath(offer.getSelfLink(), null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Offer, path, offer, null, null); return this.replace(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in replacing an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Offer>> readOffer(String offerLink) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readOfferInternal(offerLink, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Offer>> readOfferInternal(String offerLink, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(offerLink)) { throw new IllegalArgumentException("offerLink"); } logger.debug("Reading an Offer. 
offerLink [{}]", offerLink); String path = Utils.joinPath(offerLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Offer, path, (HashMap<String, String>)null, null); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in reading an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Offer>> readOffers(FeedOptions options) { return readFeed(options, ResourceType.Offer, Offer.class, Utils.joinPath(Paths.OFFERS_PATH_SEGMENT, null)); } private <T extends Resource> Flux<FeedResponse<T>> readFeedCollectionChild(FeedOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new FeedOptions(); } int maxPageSize = options.maxItemCount() != null ? options.maxItemCount() : -1; final FeedOptions finalFeedOptions = options; RequestOptions requestOptions = new RequestOptions(); requestOptions.setPartitionKey(options.partitionKey()); BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalFeedOptions); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> { return ObservableHelper.inlineIfPossibleAsObs(() -> { Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request); 
Mono<RxDocumentServiceRequest> requestObs = this.addPartitionKeyInformation(request, null, null, requestOptions, collectionObs); return requestObs.flatMap(req -> this.readFeed(req) .map(response -> toFeedResponsePage(response, klass))); }, this.resetSessionTokenRetryPolicy.getRequestPolicy()); }; return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } private <T extends Resource> Flux<FeedResponse<T>> readFeed(FeedOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new FeedOptions(); } int maxPageSize = options.maxItemCount() != null ? options.maxItemCount() : -1; final FeedOptions finalFeedOptions = options; BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalFeedOptions); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> { return ObservableHelper.inlineIfPossibleAsObs(() -> readFeed(request).map(response -> toFeedResponsePage(response, klass)), this.resetSessionTokenRetryPolicy.getRequestPolicy()); }; return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } @Override public Flux<FeedResponse<Offer>> queryOffers(String query, FeedOptions options) { return queryOffers(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Offer>> queryOffers(SqlQuerySpec querySpec, FeedOptions options) { return createQuery(null, querySpec, options, Offer.class, ResourceType.Offer); } 
@Override public Mono<DatabaseAccount> getDatabaseAccount() { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> getDatabaseAccountInternal(documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<DatabaseAccount> getDatabaseAccountInternal(DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Getting Database Account"); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.DatabaseAccount, "", (HashMap<String, String>) null, null); return this.read(request, documentClientRetryPolicy).map(response -> toDatabaseAccount(response)); } catch (Exception e) { logger.debug("Failure in getting Database Account due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Object getSession() { return this.sessionContainer; } public void setSession(Object sessionContainer) { this.sessionContainer = (SessionContainer) sessionContainer; } public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return partitionKeyRangeCache; } public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { return Flux.defer(() -> { RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.DatabaseAccount, "", null, (Object) null); this.populateHeaders(request, RequestVerb.GET); request.setEndpointOverride(endpoint); return this.gatewayProxy.processMessage(request).doOnError(e -> { String message = String.format("Failed to retrieve database account information. %s", e.getCause() != null ? 
e.getCause().toString() : e.toString()); logger.warn(message); }).map(rsp -> rsp.getResource(DatabaseAccount.class)) .doOnNext(databaseAccount -> { this.useMultipleWriteLocations = this.connectionPolicy.getUsingMultipleWriteLocations() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); }); }); } /** * Certain requests must be routed through gateway even when the client connectivity mode is direct. * * @param request * @return RxStoreModel */ private RxStoreModel getStoreProxy(RxDocumentServiceRequest request) { if (request.UseGatewayMode) { return this.gatewayProxy; } ResourceType resourceType = request.getResourceType(); OperationType operationType = request.getOperationType(); if (resourceType == ResourceType.Offer || resourceType.isScript() && operationType != OperationType.ExecuteJavaScript || resourceType == ResourceType.PartitionKeyRange) { return this.gatewayProxy; } if (operationType == OperationType.Create || operationType == OperationType.Upsert) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection || resourceType == ResourceType.Permission) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Delete) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Replace) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Read) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else { if ((request.getOperationType() == OperationType.Query || request.getOperationType() == OperationType.SqlQuery) && Utils.isCollectionChild(request.getResourceType())) { if 
(request.getPartitionKeyRangeIdentity() == null) { return this.gatewayProxy; } } return this.storeModel; } } @Override public void close() { logger.info("Shutting down ..."); logger.info("Closing Global Endpoint Manager ..."); LifeCycleUtils.closeQuietly(this.globalEndpointManager); logger.info("Closing StoreClientFactory ..."); LifeCycleUtils.closeQuietly(this.storeClientFactory); logger.info("Shutting down reactorHttpClient ..."); try { this.reactorHttpClient.shutdown(); } catch (Exception e) { logger.warn("shutting down reactorHttpClient failed", e); } logger.info("Shutting down completed."); } }
class RxDocumentClientImpl implements AsyncDocumentClient, IAuthorizationTokenProvider {

    private final static ObjectMapper mapper = Utils.getSimpleObjectMapper();
    private final Logger logger = LoggerFactory.getLogger(RxDocumentClientImpl.class);
    private final String masterKeyOrResourceToken;
    private final URI serviceEndpoint;
    private final ConnectionPolicy connectionPolicy;
    private final ConsistencyLevel consistencyLevel;
    private final BaseAuthorizationTokenProvider authorizationTokenProvider;
    private final UserAgentContainer userAgentContainer;
    // true when masterKeyOrResourceToken holds a resource token rather than a master key
    private final boolean hasAuthKeyResourceToken;
    private final Configs configs;
    private final boolean connectionSharingAcrossClientsEnabled;
    private CosmosKeyCredential cosmosKeyCredential;
    private TokenResolver tokenResolver;
    private SessionContainer sessionContainer;
    private String firstResourceTokenFromPermissionFeed = StringUtils.EMPTY;
    private RxClientCollectionCache collectionCache;
    private RxStoreModel gatewayProxy;
    private RxStoreModel storeModel;
    private GlobalAddressResolver addressResolver;
    private RxPartitionKeyRangeCache partitionKeyRangeCache;
    // resource id/full-name -> resource tokens granted via a permission feed
    private Map<String, List<PartitionKeyAndResourceTokenPair>> resourceTokensMap;
    private IRetryPolicyFactory resetSessionTokenRetryPolicy;
    /**
     * Compatibility mode: Allows to specify compatibility mode used by client when
     * making query requests. Should be removed when application/sql is no longer
     * supported.
     */
    private final QueryCompatibilityMode queryCompatibilityMode = QueryCompatibilityMode.Default;
    private final HttpClient reactorHttpClient;
    private final GlobalEndpointManager globalEndpointManager;
    private final RetryPolicy retryPolicy;
    private volatile boolean useMultipleWriteLocations;
    private StoreClientFactory storeClientFactory;
    private GatewayServiceConfigurationReader gatewayConfigurationReader;

    // Constructor variant that additionally installs a custom token resolver.
    public RxDocumentClientImpl(URI serviceEndpoint,
                                String masterKeyOrResourceToken,
                                List<Permission> permissionFeed,
                                ConnectionPolicy connectionPolicy,
                                ConsistencyLevel consistencyLevel,
                                Configs configs,
                                TokenResolver tokenResolver,
                                CosmosKeyCredential cosmosKeyCredential,
                                boolean sessionCapturingOverride,
                                boolean connectionSharingAcrossClientsEnabled) {
        this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs,
            cosmosKeyCredential, sessionCapturingOverride, connectionSharingAcrossClientsEnabled);
        this.tokenResolver = tokenResolver;
    }

    // Constructor variant that derives per-resource auth tokens from a permission feed.
    private RxDocumentClientImpl(URI serviceEndpoint,
                                 String masterKeyOrResourceToken,
                                 List<Permission> permissionFeed,
                                 ConnectionPolicy connectionPolicy,
                                 ConsistencyLevel consistencyLevel,
                                 Configs configs,
                                 CosmosKeyCredential cosmosKeyCredential,
                                 boolean sessionCapturingOverrideEnabled,
                                 boolean connectionSharingAcrossClientsEnabled) {
        this(serviceEndpoint, masterKeyOrResourceToken, connectionPolicy, consistencyLevel, configs,
            cosmosKeyCredential, sessionCapturingOverrideEnabled, connectionSharingAcrossClientsEnabled);
        if (permissionFeed != null && permissionFeed.size() > 0) {
            this.resourceTokensMap = new HashMap<>();
            for (Permission permission : permissionFeed) {
                String[] segments = StringUtils.split(permission.getResourceLink(), Constants.Properties.PATH_SEPARATOR.charAt(0));
                if (segments.length <= 0) {
                    throw new IllegalArgumentException("resourceLink");
                }
                List<PartitionKeyAndResourceTokenPair> partitionKeyAndResourceTokenPairs = null;
                PathInfo pathInfo = new PathInfo(false, StringUtils.EMPTY, StringUtils.EMPTY, false);
                if (!PathsHelper.tryParsePathSegments(permission.getResourceLink(), pathInfo, null)) {
                    throw new IllegalArgumentException(permission.getResourceLink());
                }
                partitionKeyAndResourceTokenPairs = resourceTokensMap.get(pathInfo.resourceIdOrFullName);
                if (partitionKeyAndResourceTokenPairs == null) {
                    partitionKeyAndResourceTokenPairs = new ArrayList<>();
                    this.resourceTokensMap.put(pathInfo.resourceIdOrFullName, partitionKeyAndResourceTokenPairs);
                }
                PartitionKey partitionKey = permission.getResourcePartitionKey();
                partitionKeyAndResourceTokenPairs.add(new PartitionKeyAndResourceTokenPair(
                    partitionKey != null ? BridgeInternal.getPartitionKeyInternal(partitionKey) : PartitionKeyInternal.Empty,
                    permission.getToken()));
                logger.debug("Initializing resource token map , with map key [{}] , partition key [{}] and resource token",
                    pathInfo.resourceIdOrFullName,
                    partitionKey != null ? partitionKey.toString() : null, permission.getToken());
            }
            if(this.resourceTokensMap.isEmpty()) {
                throw new IllegalArgumentException("permissionFeed");
            }
            // Remember the first resource token; it is used as a fallback auth token elsewhere.
            String firstToken = permissionFeed.get(0).getToken();
            if(ResourceTokenAuthorizationHelper.isResourceToken(firstToken)) {
                this.firstResourceTokenFromPermissionFeed = firstToken;
            }
        }
    }

    // Core constructor: wires auth (key credential / master key / resource token), session container,
    // HTTP client, global endpoint manager and retry policies. Network-facing setup happens in init().
    RxDocumentClientImpl(URI serviceEndpoint,
                         String masterKeyOrResourceToken,
                         ConnectionPolicy connectionPolicy,
                         ConsistencyLevel consistencyLevel,
                         Configs configs,
                         CosmosKeyCredential cosmosKeyCredential,
                         boolean sessionCapturingOverrideEnabled,
                         boolean connectionSharingAcrossClientsEnabled) {
        logger.info(
            "Initializing DocumentClient with"
                + " serviceEndpoint [{}], connectionPolicy [{}], consistencyLevel [{}], directModeProtocol [{}]",
            serviceEndpoint, connectionPolicy, consistencyLevel, configs.getProtocol());
        this.connectionSharingAcrossClientsEnabled = connectionSharingAcrossClientsEnabled;
        this.configs = configs;
        this.masterKeyOrResourceToken = masterKeyOrResourceToken;
        this.serviceEndpoint = serviceEndpoint;
        this.cosmosKeyCredential = cosmosKeyCredential;
        if (this.cosmosKeyCredential != null) {
            hasAuthKeyResourceToken = false;
            this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential);
        } else if (masterKeyOrResourceToken != null && ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) {
            this.authorizationTokenProvider = null;
            hasAuthKeyResourceToken = true;
        } else if(masterKeyOrResourceToken != null && !ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)){
            // Master key supplied as a raw string: wrap it in a key credential.
            this.cosmosKeyCredential = new CosmosKeyCredential(this.masterKeyOrResourceToken);
            hasAuthKeyResourceToken = false;
            this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential);
        } else {
            hasAuthKeyResourceToken = false;
            this.authorizationTokenProvider = null;
        }
        if (connectionPolicy != null) {
            this.connectionPolicy = connectionPolicy;
        } else {
            this.connectionPolicy = new ConnectionPolicy();
        }
        // Session capturing only matters for SESSION consistency unless explicitly overridden.
        boolean disableSessionCapturing = (ConsistencyLevel.SESSION != consistencyLevel && !sessionCapturingOverrideEnabled);
        this.sessionContainer = new SessionContainer(this.serviceEndpoint.getHost(), disableSessionCapturing);
        this.consistencyLevel = consistencyLevel;
        this.userAgentContainer = new UserAgentContainer();
        String userAgentSuffix = this.connectionPolicy.getUserAgentSuffix();
        if (userAgentSuffix != null && userAgentSuffix.length() > 0) {
            userAgentContainer.setSuffix(userAgentSuffix);
        }
        this.reactorHttpClient = httpClient();
        this.globalEndpointManager = new GlobalEndpointManager(asDatabaseAccountManagerInternal(), this.connectionPolicy, /**/configs);
        this.retryPolicy = new RetryPolicy(this.globalEndpointManager, this.connectionPolicy);
        this.resetSessionTokenRetryPolicy = retryPolicy;
    }

    // Completes client setup that requires network access; must be called after construction.
    public void init() {
        this.gatewayProxy = createRxGatewayProxy(this.sessionContainer,
            this.consistencyLevel,
            this.queryCompatibilityMode,
            this.userAgentContainer,
            this.globalEndpointManager,
            this.reactorHttpClient);
this.globalEndpointManager.init(); this.initializeGatewayConfigurationReader(); this.collectionCache = new RxClientCollectionCache(this.sessionContainer, this.gatewayProxy, this, this.retryPolicy); this.resetSessionTokenRetryPolicy = new ResetSessionTokenRetryPolicyFactory(this.sessionContainer, this.collectionCache, this.retryPolicy); this.partitionKeyRangeCache = new RxPartitionKeyRangeCache(RxDocumentClientImpl.this, collectionCache); if (this.connectionPolicy.getConnectionMode() == ConnectionMode.GATEWAY) { this.storeModel = this.gatewayProxy; } else { this.initializeDirectConnectivity(); } } private void initializeDirectConnectivity() { this.storeClientFactory = new StoreClientFactory( this.configs, this.connectionPolicy.getRequestTimeoutInMillis() / 1000, 0, this.userAgentContainer, this.connectionSharingAcrossClientsEnabled ); this.addressResolver = new GlobalAddressResolver( this.reactorHttpClient, this.globalEndpointManager, this.configs.getProtocol(), this, this.collectionCache, this.partitionKeyRangeCache, userAgentContainer, null, this.connectionPolicy); this.createStoreModel(true); } DatabaseAccountManagerInternal asDatabaseAccountManagerInternal() { return new DatabaseAccountManagerInternal() { @Override public URI getServiceEndpoint() { return RxDocumentClientImpl.this.getServiceEndpoint(); } @Override public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { logger.info("Getting database account endpoint from {}", endpoint); return RxDocumentClientImpl.this.getDatabaseAccountFromEndpoint(endpoint); } @Override public ConnectionPolicy getConnectionPolicy() { return RxDocumentClientImpl.this.getConnectionPolicy(); } }; } RxGatewayStoreModel createRxGatewayProxy(ISessionContainer sessionContainer, ConsistencyLevel consistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient) { return new RxGatewayStoreModel(sessionContainer, 
        consistencyLevel, queryCompatibilityMode, userAgentContainer, globalEndpointManager, httpClient);
}

/**
 * Builds the gateway HTTP client from the connection policy (idle timeout,
 * pool size, proxy, request timeout). When connection sharing across clients
 * is enabled, a shared instance is reused instead of a dedicated one.
 */
private HttpClient httpClient() {
    HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs)
        .withMaxIdleConnectionTimeoutInMillis(this.connectionPolicy.getIdleConnectionTimeoutInMillis())
        .withPoolSize(this.connectionPolicy.getMaxPoolSize())
        .withHttpProxy(this.connectionPolicy.getProxy())
        .withRequestTimeoutInMillis(this.connectionPolicy.getRequestTimeoutInMillis());
    if (connectionSharingAcrossClientsEnabled) {
        return SharedGatewayHttpClient.getOrCreateInstance(httpClientConfig);
    } else {
        return HttpClient.createFixed(httpClientConfig);
    }
}

// Creates the direct-mode (server) store model.
// NOTE(review): the subscribeRntbdStatus parameter is not used in this body —
// confirm whether that is intentional.
private void createStoreModel(boolean subscribeRntbdStatus) {
    StoreClient storeClient = this.storeClientFactory.createStoreClient(
        this.addressResolver, this.sessionContainer, this.gatewayConfigurationReader, this, false);
    this.storeModel = new ServerStoreModel(storeClient);
}

@Override
public URI getServiceEndpoint() {
    return this.serviceEndpoint;
}

@Override
public URI getWriteEndpoint() {
    // First known write-region endpoint, or null when none is available yet.
    return globalEndpointManager.getWriteEndpoints().stream().findFirst().orElse(null);
}

@Override
public URI getReadEndpoint() {
    // First known read-region endpoint, or null when none is available yet.
    return globalEndpointManager.getReadEndpoints().stream().findFirst().orElse(null);
}

@Override
public ConnectionPolicy getConnectionPolicy() {
    return this.connectionPolicy;
}

@Override
public Mono<ResourceResponse<Database>> createDatabase(Database database, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> createDatabaseInternal(database, options, retryPolicyInstance), retryPolicyInstance);
}

/**
 * Issues the Create-Database request. Argument-validation failures surface as
 * an error Mono rather than a synchronous throw.
 */
private Mono<ResourceResponse<Database>> createDatabaseInternal(Database database, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (database == null) {
            throw new IllegalArgumentException("Database");
        }

        logger.debug("Creating a Database. id: [{}]", database.getId());
        validateResource(database);

        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create,
            ResourceType.Database, Paths.DATABASES_ROOT, database, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }
        return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class));
    } catch (Exception e) {
        logger.debug("Failure in creating a database. due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<Database>> deleteDatabase(String databaseLink, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance);
}

// Issues the Delete-Database request for the given database link.
private Mono<ResourceResponse<Database>> deleteDatabaseInternal(String databaseLink, RequestOptions options,
                                                                DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(databaseLink)) {
            throw new IllegalArgumentException("databaseLink");
        }

        logger.debug("Deleting a Database. databaseLink: [{}]", databaseLink);
        String path = Utils.joinPath(databaseLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
            ResourceType.Database, path, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }
        return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class));
    } catch (Exception e) {
        logger.debug("Failure in deleting a database. due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<Database>> readDatabase(String databaseLink, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> readDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance);
}

// Issues the Read-Database request for the given database link.
private Mono<ResourceResponse<Database>> readDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(databaseLink)) {
            throw new IllegalArgumentException("databaseLink");
        }

        logger.debug("Reading a Database. databaseLink: [{}]", databaseLink);
        String path = Utils.joinPath(databaseLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
            ResourceType.Database, path, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }
        return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class));
    } catch (Exception e) {
        logger.debug("Failure in reading a database. due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Flux<FeedResponse<Database>> readDatabases(FeedOptions options) {
    return readFeed(options, ResourceType.Database, Database.class, Paths.DATABASES_ROOT);
}

/**
 * Maps a parent resource link plus a target resource type to the feed/query
 * link used when executing queries against that resource type.
 */
private String parentResourceLinkToQueryLink(String parentResouceLink, ResourceType resourceTypeEnum) {
    switch (resourceTypeEnum) {
        case Database:
            return Paths.DATABASES_ROOT;
        case DocumentCollection:
            return Utils.joinPath(parentResouceLink, Paths.COLLECTIONS_PATH_SEGMENT);
        case Document:
            return Utils.joinPath(parentResouceLink, Paths.DOCUMENTS_PATH_SEGMENT);
        case Offer:
            return Paths.OFFERS_ROOT;
        case User:
            return Utils.joinPath(parentResouceLink, Paths.USERS_PATH_SEGMENT);
        case Permission:
            return Utils.joinPath(parentResouceLink, Paths.PERMISSIONS_PATH_SEGMENT);
        case Attachment:
            return Utils.joinPath(parentResouceLink, Paths.ATTACHMENTS_PATH_SEGMENT);
        case StoredProcedure:
            return Utils.joinPath(parentResouceLink, Paths.STORED_PROCEDURES_PATH_SEGMENT);
        case Trigger:
            return Utils.joinPath(parentResouceLink, Paths.TRIGGERS_PATH_SEGMENT);
        case UserDefinedFunction:
            return Utils.joinPath(parentResouceLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT);
        default:
            throw new IllegalArgumentException("resource type not supported");
    }
}

/**
 * Builds a query execution context for the given resource type and returns
 * the resulting pages as a Flux of FeedResponse.
 */
private <T extends Resource> Flux<FeedResponse<T>> createQuery(
    String parentResourceLink,
    SqlQuerySpec sqlQuery,
    FeedOptions options,
    Class<T> klass,
    ResourceType resourceTypeEnum) {

    String queryResourceLink = parentResourceLinkToQueryLink(parentResourceLink, resourceTypeEnum);
    UUID activityId = Utils.randomUUID();
    IDocumentQueryClient queryClient = DocumentQueryClientImpl(RxDocumentClientImpl.this);
    Flux<?
        extends IDocumentQueryExecutionContext<T>> executionContext =
        DocumentQueryExecutionContextFactory.createDocumentQueryExecutionContextAsync(queryClient,
            resourceTypeEnum, klass, sqlQuery, options, queryResourceLink, false, activityId);
    // Each execution context emits its pages asynchronously; flatten into one Flux.
    return executionContext.flatMap(IDocumentQueryExecutionContext::executeAsync);
}

@Override
public Flux<FeedResponse<Database>> queryDatabases(String query, FeedOptions options) {
    return queryDatabases(new SqlQuerySpec(query), options);
}

@Override
public Flux<FeedResponse<Database>> queryDatabases(SqlQuerySpec querySpec, FeedOptions options) {
    return createQuery(Paths.DATABASES_ROOT, querySpec, options, Database.class, ResourceType.Database);
}

@Override
public Mono<ResourceResponse<DocumentCollection>> createCollection(String databaseLink,
                                                                   DocumentCollection collection, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> this.createCollectionInternal(databaseLink, collection, options, retryPolicyInstance), retryPolicyInstance);
}

/**
 * Issues the Create-Collection request and records the returned session token
 * so subsequent session-consistent reads can observe this write.
 */
private Mono<ResourceResponse<DocumentCollection>> createCollectionInternal(String databaseLink,
                                                                            DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(databaseLink)) {
            throw new IllegalArgumentException("databaseLink");
        }
        if (collection == null) {
            throw new IllegalArgumentException("collection");
        }

        logger.debug("Creating a Collection. databaseLink: [{}], Collection id: [{}]", databaseLink, collection.getId());
        validateResource(collection);

        String path = Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create,
            ResourceType.DocumentCollection, path, collection, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }
        return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class))
            .doOnNext(resourceResponse -> {
                // Capture the session token for the newly created collection.
                this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(),
                    getAltLink(resourceResponse.getResource()),
                    resourceResponse.getResponseHeaders());
            });
    } catch (Exception e) {
        logger.debug("Failure in creating a collection. due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<DocumentCollection>> replaceCollection(DocumentCollection collection, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> replaceCollectionInternal(collection, options, retryPolicyInstance), retryPolicyInstance);
}

/**
 * Issues the Replace-Collection request (addressed via the collection's self
 * link) and records the returned session token.
 */
private Mono<ResourceResponse<DocumentCollection>> replaceCollectionInternal(DocumentCollection collection,
                                                                             RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (collection == null) {
            throw new IllegalArgumentException("collection");
        }

        logger.debug("Replacing a Collection. id: [{}]", collection.getId());
        validateResource(collection);

        String path = Utils.joinPath(collection.getSelfLink(), null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
            ResourceType.DocumentCollection, path, collection, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }
        return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class))
            .doOnNext(resourceResponse -> {
                if (resourceResponse.getResource() != null) {
                    // Capture the session token for the replaced collection.
                    this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(),
                        getAltLink(resourceResponse.getResource()),
                        resourceResponse.getResponseHeaders());
                }
            });
    } catch (Exception e) {
        logger.debug("Failure in replacing a collection. due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<DocumentCollection>> deleteCollection(String collectionLink, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> deleteCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance);
}

// Issues the Delete-Collection request for the given collection link.
private Mono<ResourceResponse<DocumentCollection>> deleteCollectionInternal(String collectionLink,
                                                                            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }

        logger.debug("Deleting a Collection. collectionLink: [{}]", collectionLink);
        String path = Utils.joinPath(collectionLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
            ResourceType.DocumentCollection, path, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }
        return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class));
    } catch (Exception e) {
        logger.debug("Failure in deleting a collection, due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

// DELETE verb dispatcher; records retry context on retried attempts.
private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) {
    populateHeaders(request, RequestVerb.DELETE);
    if (request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) {
        documentClientRetryPolicy.updateEndTime();
        request.requestContext.updateRetryContext(documentClientRetryPolicy, true);
    }
    return getStoreProxy(request).processMessage(request);
}

// GET verb dispatcher; records retry context on retried attempts.
private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) {
    populateHeaders(request, RequestVerb.GET);
    if (request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) {
        documentClientRetryPolicy.updateEndTime();
        request.requestContext.updateRetryContext(documentClientRetryPolicy, true);
    }
    return getStoreProxy(request).processMessage(request);
}

// Feed reads always go through the gateway proxy.
Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
    populateHeaders(request, RequestVerb.GET);
    return gatewayProxy.processMessage(request);
}

// POST (query) dispatcher; captures the response session token after dispatch.
private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
    populateHeaders(request, RequestVerb.POST);
    return this.getStoreProxy(request).processMessage(request)
        .map(response -> {
            this.captureSessionToken(request, response);
            return response;
        });
}

@Override
public
Mono<ResourceResponse<DocumentCollection>> readCollection(String collectionLink, RequestOptions options) {
    DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    return ObservableHelper.inlineIfPossibleAsObs(() -> readCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance);
}

// Issues the Read-Collection request for the given collection link.
private Mono<ResourceResponse<DocumentCollection>> readCollectionInternal(String collectionLink,
                                                                          RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }

        logger.debug("Reading a Collection. collectionLink: [{}]", collectionLink);
        String path = Utils.joinPath(collectionLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
            ResourceType.DocumentCollection, path, requestHeaders, options);

        if (retryPolicyInstance != null) {
            retryPolicyInstance.onBeforeSendRequest(request);
        }
        return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class));
    } catch (Exception e) {
        logger.debug("Failure in reading a collection, due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Flux<FeedResponse<DocumentCollection>> readCollections(String databaseLink, FeedOptions options) {
    if (StringUtils.isEmpty(databaseLink)) {
        throw new IllegalArgumentException("databaseLink");
    }
    return readFeed(options, ResourceType.DocumentCollection, DocumentCollection.class,
        Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT));
}

@Override
public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, String query, FeedOptions options) {
    return createQuery(databaseLink, new SqlQuerySpec(query), options, DocumentCollection.class, ResourceType.DocumentCollection);
}

@Override
public Flux<FeedResponse<DocumentCollection>>
queryCollections(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) {
    return createQuery(databaseLink, querySpec, options, DocumentCollection.class, ResourceType.DocumentCollection);
}

/**
 * Serializes stored-procedure arguments into a JSON array literal.
 * JsonSerializable values use their own serializer; everything else goes
 * through the shared Jackson mapper.
 *
 * @throws IllegalArgumentException if an argument cannot be serialized.
 */
private static String serializeProcedureParams(Object[] objectArray) {
    String[] stringArray = new String[objectArray.length];

    for (int i = 0; i < objectArray.length; ++i) {
        Object object = objectArray[i];
        if (object instanceof JsonSerializable) {
            stringArray[i] = ((JsonSerializable) object).toJson();
        } else {
            try {
                stringArray[i] = mapper.writeValueAsString(object);
            } catch (IOException e) {
                throw new IllegalArgumentException("Can't serialize the object into the json string", e);
            }
        }
    }

    return String.format("[%s]", StringUtils.join(stringArray, ","));
}

/**
 * Validates a user-supplied resource id: the service rejects ids containing
 * '/', '\', '?' or '#', or ending in a space.
 *
 * @throws IllegalArgumentException if the id violates those constraints.
 */
private static void validateResource(Resource resource) {
    if (!StringUtils.isEmpty(resource.getId())) {
        // FIX: restored the '#' check, which had been truncated out of this
        // condition; '#' is one of the service's documented illegal id characters.
        if (resource.getId().indexOf('/') != -1 || resource.getId().indexOf('\\') != -1
                || resource.getId().indexOf('?') != -1 || resource.getId().indexOf('#') != -1) {
            throw new IllegalArgumentException("Id contains illegal chars.");
        }
        if (resource.getId().endsWith(" ")) {
            throw new IllegalArgumentException("Id ends with a space.");
        }
    }
}

/**
 * Builds the request headers implied by client defaults plus the per-request
 * options: custom headers, access conditions, consistency, indexing directive,
 * pre/post triggers, session token, offer settings, quota/script-logging flags.
 */
private Map<String, String> getRequestHeaders(RequestOptions options) {
    Map<String, String> headers = new HashMap<>();

    if (this.useMultipleWriteLocations) {
        headers.put(HttpConstants.HttpHeaders.ALLOW_TENTATIVE_WRITES, Boolean.TRUE.toString());
    }
    if (consistencyLevel != null) {
        headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, consistencyLevel.toString());
    }
    if (options == null) {
        // No per-request options: only the client-level defaults apply.
        return headers;
    }

    Map<String, String> customOptions = options.getHeaders();
    if (customOptions != null) {
        headers.putAll(customOptions);
    }

    if (options.getAccessCondition() != null) {
        if (options.getAccessCondition().getType() == AccessConditionType.IF_MATCH) {
            headers.put(HttpConstants.HttpHeaders.IF_MATCH, options.getAccessCondition().getCondition());
        } else {
            headers.put(HttpConstants.HttpHeaders.IF_NONE_MATCH,
                options.getAccessCondition().getCondition());
        }
    }

    if (options.getConsistencyLevel() != null) {
        headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, options.getConsistencyLevel().toString());
    }

    if (options.getIndexingDirective() != null) {
        headers.put(HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, options.getIndexingDirective().toString());
    }

    if (options.getPostTriggerInclude() != null && options.getPostTriggerInclude().size() > 0) {
        String postTriggerInclude = StringUtils.join(options.getPostTriggerInclude(), ",");
        headers.put(HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, postTriggerInclude);
    }

    if (options.getPreTriggerInclude() != null && options.getPreTriggerInclude().size() > 0) {
        String preTriggerInclude = StringUtils.join(options.getPreTriggerInclude(), ",");
        headers.put(HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, preTriggerInclude);
    }

    if (!Strings.isNullOrEmpty(options.getSessionToken())) {
        headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, options.getSessionToken());
    }

    if (options.getResourceTokenExpirySeconds() != null) {
        headers.put(HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY,
            String.valueOf(options.getResourceTokenExpirySeconds()));
    }

    // Explicit throughput wins over a named offer type.
    if (options.getOfferThroughput() != null && options.getOfferThroughput() >= 0) {
        headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, options.getOfferThroughput().toString());
    } else if (options.getOfferType() != null) {
        headers.put(HttpConstants.HttpHeaders.OFFER_TYPE, options.getOfferType());
    }

    if (options.isPopulateQuotaInfo()) {
        headers.put(HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, String.valueOf(true));
    }

    if (options.isScriptLoggingEnabled()) {
        headers.put(HttpConstants.HttpHeaders.SCRIPT_ENABLE_LOGGING, String.valueOf(true));
    }

    return headers;
}

/**
 * Resolves the target collection for the request, then stamps the partition
 * key (header + internal value) derived from the document/options onto it.
 */
private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request,
                                                                  ByteBuffer contentAsByteBuffer,
                                                                  Document document,
                                                                  RequestOptions options) {
    Mono<Utils.ValueHolder<DocumentCollection>> collectionObs =
        this.collectionCache.resolveCollectionAsync(request);
    return collectionObs
        .map(collectionValueHolder -> {
            addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v);
            return request;
        });
}

// Overload that reuses an already-started collection resolution.
private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request,
                                                                  ByteBuffer contentAsByteBuffer,
                                                                  Object document,
                                                                  RequestOptions options,
                                                                  Mono<Utils.ValueHolder<DocumentCollection>> collectionObs) {
    return collectionObs.map(collectionValueHolder -> {
        addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v);
        return request;
    });
}

/**
 * Computes the effective partition key for the request, in precedence order:
 * explicit PartitionKey.NONE, explicit key from options, the empty key when
 * the collection has no partition key definition, otherwise the value
 * extracted from the serialized document. Throws when none can be determined.
 */
private void addPartitionKeyInformation(RxDocumentServiceRequest request,
                                        ByteBuffer contentAsByteBuffer,
                                        Object objectDoc,
                                        RequestOptions options,
                                        DocumentCollection collection) {
    PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey();

    PartitionKeyInternal partitionKeyInternal = null;
    if (options != null && options.getPartitionKey() != null && options.getPartitionKey().equals(PartitionKey.NONE)) {
        partitionKeyInternal = BridgeInternal.getNonePartitionKey(partitionKeyDefinition);
    } else if (options != null && options.getPartitionKey() != null) {
        partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(options.getPartitionKey());
    } else if (partitionKeyDefinition == null || partitionKeyDefinition.getPaths().size() == 0) {
        // Collection has no partition key definition: use the empty key.
        partitionKeyInternal = PartitionKeyInternal.getEmpty();
    } else if (contentAsByteBuffer != null) {
        CosmosItemProperties cosmosItemProperties;
        if (objectDoc instanceof CosmosItemProperties) {
            cosmosItemProperties = (CosmosItemProperties) objectDoc;
        } else {
            // Rewind: the buffer position may have been advanced by serialization.
            contentAsByteBuffer.rewind();
            cosmosItemProperties = new CosmosItemProperties(contentAsByteBuffer);
        }
        partitionKeyInternal = extractPartitionKeyValueFromDocument(cosmosItemProperties, partitionKeyDefinition);
    } else {
        throw new UnsupportedOperationException("PartitionKey value must be supplied for this operation.");
    }
    // Stamp both the internal partition key and its wire-format header.
    request.setPartitionKeyInternal(partitionKeyInternal);
    request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson()));
}

/**
 * Extracts the partition key value at the collection's partition key path
 * from the document. Missing values (or ObjectNode values) map to the "none"
 * partition key. Returns null when no definition/path applies.
 */
private static PartitionKeyInternal extractPartitionKeyValueFromDocument(
    CosmosItemProperties document,
    PartitionKeyDefinition partitionKeyDefinition) {

    if (partitionKeyDefinition != null) {
        // Only the first partition key path is consulted.
        String path = partitionKeyDefinition.getPaths().iterator().next();
        List<String> parts = PathParser.getPathParts(path);
        if (parts.size() >= 1) {
            Object value = document.getObjectByPath(parts);
            if (value == null || value.getClass() == ObjectNode.class) {
                value = BridgeInternal.getNonePartitionKey(partitionKeyDefinition);
            }

            if (value instanceof PartitionKeyInternal) {
                return (PartitionKeyInternal) value;
            } else {
                return PartitionKeyInternal.fromObjectArray(Collections.singletonList(value), false);
            }
        }
    }

    return null;
}

/**
 * Serializes the document, builds the Create/Upsert request for the target
 * collection, and attaches partition key information.
 * NOTE(review): disableAutomaticIdGeneration is not consulted in this body —
 * confirm where id generation is handled.
 */
private Mono<RxDocumentServiceRequest> getCreateDocumentRequest(String documentCollectionLink, Object document,
                                                                RequestOptions options, boolean disableAutomaticIdGeneration, OperationType operationType) {

    if (StringUtils.isEmpty(documentCollectionLink)) {
        throw new IllegalArgumentException("documentCollectionLink");
    }
    if (document == null) {
        throw new IllegalArgumentException("document");
    }

    ByteBuffer content = serializeJsonToByteBuffer(document, mapper);
    String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT);
    Map<String, String> requestHeaders = this.getRequestHeaders(options);

    RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Document, path,
        requestHeaders, options, content);

    Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request);
    return addPartitionKeyInformation(request, content, document, options, collectionObs);
}

// Adds date, authorization, content-type and accept headers before dispatch.
private void populateHeaders(RxDocumentServiceRequest request, RequestVerb httpMethod) {
    request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123());
    // Only sign the request when some credential source is configured.
    if (this.masterKeyOrResourceToken != null || this.resourceTokensMap != null
            || this.tokenResolver != null || this.cosmosKeyCredential != null) {
        String resourceName = request.getResourceAddress();

        String authorization = this.getUserAuthorizationToken(
            resourceName, request.getResourceType(), httpMethod, request.getHeaders(),
            AuthorizationTokenType.PrimaryMasterKey, request.properties);
        try {
            // The token is URL-encoded before being placed in the header.
            authorization = URLEncoder.encode(authorization, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new IllegalStateException("Failed to encode authtoken.", e);
        }
        request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization);
    }

    // Default JSON content-type on bodied verbs, unless the caller set one.
    if ((RequestVerb.POST.equals(httpMethod) || RequestVerb.PUT.equals(httpMethod))
            && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) {
        request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON);
    }

    if (!request.getHeaders().containsKey(HttpConstants.HttpHeaders.ACCEPT)) {
        request.getHeaders().put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON);
    }
}

/**
 * Resolves the authorization token for a request, trying in order: a custom
 * token resolver, the key-credential signer, a raw resource token used as the
 * master key, then the per-resource token map.
 */
@Override
public String getUserAuthorizationToken(String resourceName,
                                        ResourceType resourceType,
                                        RequestVerb requestVerb,
                                        Map<String, String> headers,
                                        AuthorizationTokenType tokenType,
                                        Map<String, Object> properties) {

    if (this.tokenResolver != null) {
        return this.tokenResolver.getAuthorizationToken(requestVerb, resourceName,
            this.resolveCosmosResourceType(resourceType),
            properties != null ?
            Collections.unmodifiableMap(properties) : null);
    } else if (cosmosKeyCredential != null) {
        return this.authorizationTokenProvider.generateKeyAuthorizationSignature(requestVerb, resourceName,
            resourceType, headers);
    } else if (masterKeyOrResourceToken != null && hasAuthKeyResourceToken && resourceTokensMap == null) {
        // The configured "master key" is actually a raw resource token: use it as-is.
        return masterKeyOrResourceToken;
    } else {
        assert resourceTokensMap != null;
        if (resourceType.equals(ResourceType.DatabaseAccount)) {
            return this.firstResourceTokenFromPermissionFeed;
        }
        return ResourceTokenAuthorizationHelper.getAuthorizationTokenUsingResourceTokens(resourceTokensMap,
            requestVerb, resourceName, headers);
    }
}

// Maps the internal ResourceType onto the public CosmosResourceType; values
// with no public counterpart fall back to System.
private CosmosResourceType resolveCosmosResourceType(ResourceType resourceType) {
    try {
        return CosmosResourceType.valueOf(resourceType.toString());
    } catch (IllegalArgumentException e) {
        return CosmosResourceType.System;
    }
}

// Records the response's session token against the request's resource.
void captureSessionToken(RxDocumentServiceRequest request, RxDocumentServiceResponse response) {
    this.sessionContainer.setSessionToken(request, response.getResponseHeaders());
}

// POST (create) dispatcher; records retry context on retried attempts.
private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request, DocumentClientRetryPolicy retryPolicy) {
    populateHeaders(request, RequestVerb.POST);
    RxStoreModel storeProxy = this.getStoreProxy(request);
    if (request.requestContext != null && retryPolicy.getRetryCount() > 0) {
        retryPolicy.updateEndTime();
        request.requestContext.updateRetryContext(retryPolicy, true);
    }
    return storeProxy.processMessage(request);
}

// POST with the is-upsert header; captures the session token on response.
private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) {
    populateHeaders(request, RequestVerb.POST);
    Map<String, String> headers = request.getHeaders();
    // Headers are always populated by populateHeaders above.
    assert (headers != null);
    headers.put(HttpConstants.HttpHeaders.IS_UPSERT, "true");
    if (request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) {
        documentClientRetryPolicy.updateEndTime();
        request.requestContext.updateRetryContext(documentClientRetryPolicy, true);
    }
    return getStoreProxy(request).processMessage(request)
        .map(response -> {
            this.captureSessionToken(request, response);
            return response;
        });
}

// PUT (replace) dispatcher; records retry context on retried attempts.
private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) {
    populateHeaders(request, RequestVerb.PUT);
    if (request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) {
        documentClientRetryPolicy.updateEndTime();
        request.requestContext.updateRetryContext(documentClientRetryPolicy, true);
    }
    return getStoreProxy(request).processMessage(request);
}

@Override
public Mono<ResourceResponse<Document>> createDocument(String collectionLink, Object document,
                                                       RequestOptions options, boolean disableAutomaticIdGeneration) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    if (options == null || options.getPartitionKey() == null) {
        // No explicit partition key: wrap with retry for partition-key mismatches.
        requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options);
    }

    DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy;
    return ObservableHelper.inlineIfPossibleAsObs(() -> createDocumentInternal(collectionLink, document,
        options, disableAutomaticIdGeneration, finalRetryPolicyInstance), requestRetryPolicy);
}

// Builds and dispatches the Create-Document request.
private Mono<ResourceResponse<Document>> createDocumentInternal(String collectionLink, Object document,
                                                                RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy requestRetryPolicy) {
    try {
        logger.debug("Creating a Document. collectionLink: [{}]", collectionLink);

        Mono<RxDocumentServiceRequest> requestObs = getCreateDocumentRequest(collectionLink, document,
            options, disableAutomaticIdGeneration, OperationType.Create);

        Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> {
            if (requestRetryPolicy != null) {
                requestRetryPolicy.onBeforeSendRequest(request);
            }
            return create(request, requestRetryPolicy);
        });

        return responseObservable
            .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));

    } catch (Exception e) {
        logger.debug("Failure in creating a document due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<Document>> upsertDocument(String collectionLink, Object document,
                                                       RequestOptions options, boolean disableAutomaticIdGeneration) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    if (options == null || options.getPartitionKey() == null) {
        requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options);
    }
    DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy;
    return ObservableHelper.inlineIfPossibleAsObs(() -> upsertDocumentInternal(collectionLink, document,
        options, disableAutomaticIdGeneration, finalRetryPolicyInstance), finalRetryPolicyInstance);
}

// Builds and dispatches the Upsert-Document request.
private Mono<ResourceResponse<Document>> upsertDocumentInternal(String collectionLink, Object document,
                                                                RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        logger.debug("Upserting a Document. collectionLink: [{}]", collectionLink);

        Mono<RxDocumentServiceRequest> reqObs = getCreateDocumentRequest(collectionLink, document,
            options, disableAutomaticIdGeneration, OperationType.Upsert);

        Mono<RxDocumentServiceResponse> responseObservable = reqObs.flatMap(request -> {
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return upsert(request, retryPolicyInstance);
        });

        return responseObservable
            .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));

    } catch (Exception e) {
        logger.debug("Failure in upserting a document due to [{}]", e.getMessage(), e);
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<Document>> replaceDocument(String documentLink, Object document,
                                                        RequestOptions options) {
    DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
    if (options == null || options.getPartitionKey() == null) {
        String collectionLink = Utils.getCollectionName(documentLink);
        requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy,
            collectionLink, options);
    }
    DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy;
    return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(documentLink, document,
        options, finalRequestRetryPolicy), requestRetryPolicy);
}

// Validates args, converts the payload to a Document, then delegates to the
// core replace path.
private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Object document,
                                                                 RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
    try {
        if (StringUtils.isEmpty(documentLink)) {
            throw new IllegalArgumentException("documentLink");
        }
        if (document == null) {
            throw new IllegalArgumentException("document");
        }

        Document typedDocument = documentFromObject(document, mapper);
        return this.replaceDocumentInternal(documentLink, typedDocument, options, retryPolicyInstance);

    } catch (Exception e) {
        // NOTE(review): unlike sibling catch blocks, the exception object is not
        // passed to the logger here, so the stack trace is not logged.
        logger.debug("Failure in replacing a document due to [{}]", e.getMessage());
        return Mono.error(e);
    }
}

@Override
public Mono<ResourceResponse<Document>> replaceDocument(Document document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = document.getSelfLink(); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (document == null) { throw new IllegalArgumentException("document"); } return this.replaceDocumentInternal(document.getSelfLink(), document, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a database due to [{}]", e.getMessage()); return Mono.error(e); } } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { if (document == null) { throw new IllegalArgumentException("document"); } logger.debug("Replacing a Document. 
documentLink: [{}]", documentLink);
        // Build the Replace request: serialize the replacement document and attach per-call headers.
        final String path = Utils.joinPath(documentLink, null);
        final Map<String, String> requestHeaders = getRequestHeaders(options);
        ByteBuffer content = serializeJsonToByteBuffer(document, mapper);
        final RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
            ResourceType.Document, path, requestHeaders, options, content);
        // Resolve the owning collection first so the partition key header can be derived
        // from its partition key definition.
        Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request);
        Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, content, document, options, collectionObs);
        return requestObs.flatMap(req -> {
            if (retryPolicyInstance != null) {
                // NOTE(review): the lambda parameter 'req' is ignored and the outer 'request' is used
                // below; presumably safe because addPartitionKeyInformation mutates and returns the
                // same request instance -- TODO confirm.
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return replace(request, retryPolicyInstance)
                .map(resp -> toResourceResponse(resp, Document.class));}
        );
    }

    // Deletes the document addressed by documentLink, retrying via the session-token-reset policy.
    @Override
    public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, RequestOptions options) {
        DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, options, requestRetryPolicy), requestRetryPolicy);
    }

    // Core delete path: validates the link, builds the Delete request, and resolves the partition
    // key before handing the request to the transport layer.
    private Mono<ResourceResponse<Document>> deleteDocumentInternal(String documentLink, RequestOptions options,
                                                                    DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(documentLink)) {
                throw new IllegalArgumentException("documentLink");
            }
            logger.debug("Deleting a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Document, path, requestHeaders, options); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(req); } return this.delete(req, retryPolicyInstance) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));}); } catch (Exception e) { logger.debug("Failure in deleting a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> readDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDocumentInternal(documentLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Document>> readDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Reading a Document. 
documentLink: [{}]", documentLink);
        String path = Utils.joinPath(documentLink, null);
        Map<String, String> requestHeaders = this.getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
            ResourceType.Document, path, requestHeaders, options);
        // Resolve the collection so the partition key header can be filled in from its definition.
        Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request);
        Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs);
        return requestObs.flatMap(req -> {
            if (retryPolicyInstance != null) {
                // NOTE(review): the outer 'request' is used here instead of the lambda's 'req';
                // presumably both reference the same mutated instance -- TODO confirm.
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.read(request, retryPolicyInstance).map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));
        });
        } catch (Exception e) {
            logger.debug("Failure in reading a document due to [{}]", e.getMessage());
            return Mono.error(e);
        }
    }

    // Reads all documents in a collection by issuing a full-scan query.
    @Override
    public Flux<FeedResponse<Document>> readDocuments(String collectionLink, FeedOptions options) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        return queryDocuments(collectionLink, "SELECT * FROM r", options);
    }

    // Point-reads a batch of (id, partition key) pairs in one call: resolves the collection and its
    // routing map, groups the keys per partition key range, then fans out one query per range and
    // aggregates the pages (body continues on the following lines of this file).
    @Override
    public <T> Mono<FeedResponse<T>> readMany(
        List<Pair<String, PartitionKey>> itemKeyList,
        String collectionLink,
        FeedOptions options,
        Class<T> klass) {
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(
            OperationType.Query,
            ResourceType.Document,
            collectionLink, null
        );
        Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request);
        return collectionObs
            .flatMap(documentCollectionResourceResponse -> {
                final DocumentCollection collection = documentCollectionResourceResponse.v;
                if (collection == null) {
                    throw new IllegalStateException("Collection cannot be null");
                }
                // Look up the routing map to translate each partition key into its owning range.
                Mono<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = partitionKeyRangeCache
                    .tryLookupAsync(collection
                        .getResourceId(), null, null);
                return 
valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap = new HashMap<>(); CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } itemKeyList .forEach(stringPartitionKeyPair -> { String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString(BridgeInternal .getPartitionKeyInternal(stringPartitionKeyPair .getRight()), collection .getPartitionKey()); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); if (partitionRangeItemKeyMap.get(range) == null) { List<Pair<String, PartitionKey>> list = new ArrayList<>(); list.add(stringPartitionKeyPair); partitionRangeItemKeyMap.put(range, list); } else { List<Pair<String, PartitionKey>> pairs = partitionRangeItemKeyMap.get(range); pairs.add(stringPartitionKeyPair); partitionRangeItemKeyMap.put(range, pairs); } }); Set<PartitionKeyRange> partitionKeyRanges = partitionRangeItemKeyMap.keySet(); List<PartitionKeyRange> ranges = new ArrayList<>(); ranges.addAll(partitionKeyRanges); Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap; rangeQueryMap = getRangeQueryMap(partitionRangeItemKeyMap, collection.getPartitionKey()); String sqlQuery = "this is dummy and only used in creating " + "ParallelDocumentQueryExecutioncontext, but not used"; return createReadManyQuery(collectionLink, new SqlQuerySpec(sqlQuery), options, Document.class, ResourceType.Document, collection, Collections.unmodifiableMap(rangeQueryMap)) .collectList() .map(feedList -> { List<T> finalList = new ArrayList<T>(); HashMap<String, String> headers = new HashMap<>(); double requestCharge = 0; for (FeedResponse<Document> page : feedList) { requestCharge += page.getRequestCharge(); finalList.addAll(page.getResults().stream().map(document -> document.toObject(klass)).collect(Collectors.toList())); } 
headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double
                            .toString(requestCharge));
                        // Merge all per-partition pages into one synthetic feed response that
                        // carries the aggregated request charge.
                        FeedResponse<T> frp = BridgeInternal
                            .createFeedResponse(finalList, headers);
                        return frp;
                    });
            });
        }
        );
    }

    // Builds one SQL query per partition key range covering the requested (id, pk) pairs.
    // When the partition key path is the document id itself, a simpler IN-list query is used.
    private Map<PartitionKeyRange, SqlQuerySpec> getRangeQueryMap(
        Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap,
        PartitionKeyDefinition partitionKeyDefinition) {
        Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap = new HashMap<>();
        String partitionKeySelector = createPkSelector(partitionKeyDefinition);
        for(Map.Entry<PartitionKeyRange, List<Pair<String, PartitionKey>>> entry: partitionRangeItemKeyMap.entrySet()) {
            SqlQuerySpec sqlQuerySpec;
            if (partitionKeySelector.equals("[\"id\"]")) {
                sqlQuerySpec = createReadManyQuerySpecPartitionKeyIdSame(entry.getValue(), partitionKeySelector);
            } else {
                sqlQuerySpec = createReadManyQuerySpec(entry.getValue(), partitionKeySelector);
            }
            rangeQueryMap.put(entry.getKey(), sqlQuerySpec);
        }
        return rangeQueryMap;
    }

    // Builds "SELECT * FROM c WHERE c.id IN ( @param0, ... )" for the case where the partition
    // key path is the id; pairs whose pk value differs from the id are skipped.
    private SqlQuerySpec createReadManyQuerySpecPartitionKeyIdSame(List<Pair<String, PartitionKey>> idPartitionKeyPairList,
                                                                   String partitionKeySelector) {
        StringBuilder queryStringBuilder = new StringBuilder();
        SqlParameterList parameters = new SqlParameterList();

        queryStringBuilder.append("SELECT * FROM c WHERE c.id IN ( ");
        for (int i = 0; i < idPartitionKeyPairList.size(); i++) {
            Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i);

            String idValue = pair.getLeft();
            String idParamName = "@param" + i;

            PartitionKey pkValueAsPartitionKey = pair.getRight();
            Object pkValue = BridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey);

            // Skip pairs where id and partition key disagree: with an id-based partition key such
            // documents cannot exist.
            if (!Objects.equals(idValue, pkValue)) {
                continue;
            }

            parameters.add(new SqlParameter(idParamName, idValue));
            queryStringBuilder.append(idParamName);
            // NOTE(review): if a later pair is skipped by the continue above, this can leave a
            // trailing comma before the closing ")" -- verify intended behavior.
            if (i < idPartitionKeyPairList.size() - 1) {
                queryStringBuilder.append(", ");
            }
        }
        queryStringBuilder.append(" )");

        return new SqlQuerySpec(queryStringBuilder.toString(), parameters);
    }

    // Builds "(c.id = @idParam AND c[<pk path>] = @pkParam) OR ..." for the general case
    // (signature continues on the following line of this file).
    private SqlQuerySpec 
createReadManyQuerySpec(List<Pair<String, PartitionKey>> idPartitionKeyPairList, String partitionKeySelector) {
        StringBuilder queryStringBuilder = new StringBuilder();
        SqlParameterList parameters = new SqlParameterList();

        queryStringBuilder.append("SELECT * FROM c WHERE ( ");
        for (int i = 0; i < idPartitionKeyPairList.size(); i++) {
            Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i);

            // Even parameter indices carry the partition key value, odd indices the document id.
            PartitionKey pkValueAsPartitionKey = pair.getRight();
            Object pkValue = BridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey);
            String pkParamName = "@param" + (2 * i);
            parameters.add(new SqlParameter(pkParamName, pkValue));

            String idValue = pair.getLeft();
            String idParamName = "@param" + (2 * i + 1);
            parameters.add(new SqlParameter(idParamName, idValue));

            // Emit one "(c.id = @idParam AND c[<pk path>] = @pkParam)" disjunct per pair.
            queryStringBuilder.append("(");
            queryStringBuilder.append("c.id = ");
            queryStringBuilder.append(idParamName);
            queryStringBuilder.append(" AND ");
            queryStringBuilder.append(" c");
            queryStringBuilder.append(partitionKeySelector);
            queryStringBuilder.append((" = "));
            queryStringBuilder.append(pkParamName);
            queryStringBuilder.append(" )");
            if (i < idPartitionKeyPairList.size() - 1) {
                queryStringBuilder.append(" OR ");
            }
        }
        queryStringBuilder.append(" )");

        return new SqlQuerySpec(queryStringBuilder.toString(), parameters);
    }

    // Renders the partition key definition paths as a bracketed selector, e.g. "/pk" -> ["pk"].
    private String createPkSelector(PartitionKeyDefinition partitionKeyDefinition) {
        return partitionKeyDefinition.getPaths()
            .stream()
            // Drop the leading '/' of each path segment.
            .map(pathPart -> StringUtils.substring(pathPart, 1))
            // NOTE(review): this replaces embedded double quotes with a bare backslash rather than
            // an escaped quote (\") -- looks suspicious, confirm intended escaping.
            .map(pathPart -> StringUtils.replace(pathPart, "\"", "\\"))
            .map(part -> "[\"" + part + "\"]")
            .collect(Collectors.joining());
    }

    // Helper for generating positional SQL parameter names ("@paramN").
    private String getCurentParamName(int paramCnt){
        return "@param" + paramCnt;
    }

    // Fans out the read-many query: one execution context per partition key range, using the
    // per-range SQL specs computed by getRangeQueryMap (body continues on the following line).
    private <T extends Resource> Flux<FeedResponse<T>> createReadManyQuery(
        String parentResourceLink,
        SqlQuerySpec sqlQuery,
        FeedOptions options,
        Class<T> klass,
        ResourceType resourceTypeEnum,
        DocumentCollection collection,
        Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap) {
        UUID activityId = Utils.randomUUID();
IDocumentQueryClient queryClient = DocumentQueryClientImpl(RxDocumentClientImpl.this);
        Flux<? extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createReadManyQueryAsync(queryClient, collection.getResourceId(),
            sqlQuery,
            rangeQueryMap,
            options,
            collection.getResourceId(),
            parentResourceLink,
            activityId,
            klass,
            resourceTypeEnum);
        return executionContext.flatMap(IDocumentQueryExecutionContext::executeAsync);
    }

    // Queries documents with a raw SQL string; delegates to the SqlQuerySpec overload.
    @Override
    public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, String query, FeedOptions options) {
        return queryDocuments(collectionLink, new SqlQuerySpec(query), options);
    }

    // Adapts this client to the IDocumentQueryClient interface consumed by the query execution
    // context factory. NOTE(review): the method name is UpperCamelCase like a constructor, and the
    // parameter is unused (the anonymous class captures RxDocumentClientImpl.this directly).
    private IDocumentQueryClient DocumentQueryClientImpl(RxDocumentClientImpl rxDocumentClientImpl) {

        return new IDocumentQueryClient () {

            @Override
            public RxCollectionCache getCollectionCache() {
                return RxDocumentClientImpl.this.collectionCache;
            }

            @Override
            public RxPartitionKeyRangeCache getPartitionKeyRangeCache() {
                return RxDocumentClientImpl.this.partitionKeyRangeCache;
            }

            @Override
            public IRetryPolicyFactory getResetSessionTokenRetryPolicy() {
                return RxDocumentClientImpl.this.resetSessionTokenRetryPolicy;
            }

            @Override
            public ConsistencyLevel getDefaultConsistencyLevelAsync() {
                return RxDocumentClientImpl.this.gatewayConfigurationReader.getDefaultConsistencyLevel();
            }

            @Override
            public ConsistencyLevel getDesiredConsistencyLevelAsync() {
                return RxDocumentClientImpl.this.consistencyLevel;
            }

            @Override
            public Mono<RxDocumentServiceResponse> executeQueryAsync(RxDocumentServiceRequest request) {
                return RxDocumentClientImpl.this.query(request).single();
            }

            @Override
            public QueryCompatibilityMode getQueryCompatibilityMode() {
                return QueryCompatibilityMode.Default;
            }

            // Not supported by this adapter; read-feed requests go through other code paths.
            @Override
            public Mono<RxDocumentServiceResponse> readFeedAsync(RxDocumentServiceRequest request) {
                return null;
            }
        };
    }

    // Queries documents with a parameterized SQL spec (body continues on the following line).
    @Override
    public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) {
        return 
createQuery(collectionLink, querySpec, options, Document.class, ResourceType.Document); } @Override public Flux<FeedResponse<Document>> queryDocumentChangeFeed(final String collectionLink, final ChangeFeedOptions changeFeedOptions) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } ChangeFeedQueryImpl<Document> changeFeedQueryImpl = new ChangeFeedQueryImpl<Document>(this, ResourceType.Document, Document.class, collectionLink, changeFeedOptions); return changeFeedQueryImpl.executeAsync(); } @Override public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRanges(final String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.PartitionKeyRange, PartitionKeyRange.class, Utils.joinPath(collectionLink, Paths.PARTITION_KEY_RANGES_PATH_SEGMENT)); } private RxDocumentServiceRequest getStoredProcedureRequest(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } validateResource(storedProcedure); String path = Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); return request; } private RxDocumentServiceRequest getUserDefinedFunctionRequest(String collectionLink, UserDefinedFunction udf, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (udf == null) { throw new IllegalArgumentException("udf"); } 
validateResource(udf); String path = Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<StoredProcedure>> createStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> createStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a StoredProcedure. 
collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Create); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in creating a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a StoredProcedure. 
collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in upserting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedure(StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceStoredProcedureInternal(storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedureInternal(StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } logger.debug("Replacing a StoredProcedure. 
storedProcedure id [{}]", storedProcedure.getId()); RxDocumentClientImpl.validateResource(storedProcedure); String path = Utils.joinPath(storedProcedure.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in replacing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteStoredProcedureInternal(storedProcedureLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Deleting a StoredProcedure. 
storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in deleting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> readStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readStoredProcedureInternal(storedProcedureLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<StoredProcedure>> readStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Reading a StoredProcedure. 
storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in reading a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<StoredProcedure>> readStoredProcedures(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.StoredProcedure, StoredProcedure.class, Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT)); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, String query, FeedOptions options) { return queryStoredProcedures(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, StoredProcedure.class, ResourceType.StoredProcedure); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, Object[] procedureParams) { return this.executeStoredProcedure(storedProcedureLink, null, procedureParams); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, RequestOptions options, Object[] procedureParams) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return 
ObservableHelper.inlineIfPossibleAsObs(() -> executeStoredProcedureInternal(storedProcedureLink, options, procedureParams, documentClientRetryPolicy), documentClientRetryPolicy);
    }

    // Executes a stored procedure: serializes the parameter array into the request payload,
    // resolves the partition key, and maps the raw response to a StoredProcedureResponse,
    // capturing the session token on the way out.
    private Mono<StoredProcedureResponse> executeStoredProcedureInternal(String storedProcedureLink,
                                                                         RequestOptions options, Object[] procedureParams,
                                                                         DocumentClientRetryPolicy retryPolicy) {
        try {
            logger.debug("Executing a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink);
            String path = Utils.joinPath(storedProcedureLink, null);

            Map<String, String> requestHeaders = getRequestHeaders(options);
            // Sproc execution returns an arbitrary JSON body, not a resource payload.
            requestHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ExecuteJavaScript,
                ResourceType.StoredProcedure, path,
                procedureParams != null ? RxDocumentClientImpl.serializeProcedureParams(procedureParams) : "",
                requestHeaders, options);

            Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options);
            // NOTE(review): the lambda parameter 'req' is unused; the outer 'request' is sent
            // instead -- presumably the same instance after partition-key mutation, TODO confirm.
            return reqObs.flatMap(req -> create(request, retryPolicy)
                .map(response -> {
                    this.captureSessionToken(request, response);
                    return toStoredProcedureResponse(response);
                }));
        } catch (Exception e) {
            logger.debug("Failure in executing a StoredProcedure due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Creates a trigger in the given collection, with session-token-reset retries.
    @Override
    public Mono<ResourceResponse<Trigger>> createTrigger(String collectionLink, Trigger trigger,
                                                         RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> createTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance);
    }

    // Core create-trigger path: builds the request and forwards it to the transport layer.
    private Mono<ResourceResponse<Trigger>> createTriggerInternal(String collectionLink, Trigger trigger,
                                                                  RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Creating a Trigger. 
collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in creating a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> upsertTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> upsertTriggerInternal(String collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Trigger. 
collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in upserting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getTriggerRequest(String collectionLink, Trigger trigger, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (trigger == null) { throw new IllegalArgumentException("trigger"); } RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Trigger, path, trigger, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Trigger>> replaceTrigger(Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceTriggerInternal(trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> replaceTriggerInternal(Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (trigger == null) { throw new IllegalArgumentException("trigger"); } logger.debug("Replacing a Trigger. 
trigger id [{}]", trigger.getId()); RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(trigger.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Trigger, path, trigger, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in replacing a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> deleteTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> deleteTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Deleting a Trigger. 
triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in deleting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> readTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> readTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Reading a Trigger. 
triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in reading a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Trigger>> readTriggers(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Trigger, Trigger.class, Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, String query, FeedOptions options) { return queryTriggers(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, Trigger.class, ResourceType.Trigger); } @Override public Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { 
logger.debug("Creating a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in creating a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a UserDefinedFunction. 
collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in upserting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunction(UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserDefinedFunctionInternal(udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunctionInternal(UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (udf == null) { throw new IllegalArgumentException("udf"); } logger.debug("Replacing a UserDefinedFunction. 
udf id [{}]", udf.getId()); validateResource(udf); String path = Utils.joinPath(udf.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in replacing a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Deleting a UserDefinedFunction. 
udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in deleting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Reading a UserDefinedFunction. 
udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in reading a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<UserDefinedFunction>> readUserDefinedFunctions(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.UserDefinedFunction, UserDefinedFunction.class, Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, String query, FeedOptions options) { return queryUserDefinedFunctions(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, UserDefinedFunction.class, ResourceType.UserDefinedFunction); } @Override public Mono<ResourceResponse<Conflict>> readConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> readConflictInternal(String conflictLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Reading a Conflict. conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in reading a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Conflict>> readConflicts(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Conflict, Conflict.class, Utils.joinPath(collectionLink, Paths.CONFLICTS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, String query, FeedOptions options) { return queryConflicts(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, Conflict.class, ResourceType.Conflict); } @Override public Mono<ResourceResponse<Conflict>> deleteConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteConflictInternal(conflictLink, 
options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> deleteConflictInternal(String conflictLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Deleting a Conflict. conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in deleting a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> createUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserInternal(databaseLink, user, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<User>> createUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a User. 
databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Create); return this.create(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in creating a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> upsertUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserInternal(databaseLink, user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> upsertUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a User. databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in upserting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getUserRequest(String databaseLink, User user, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (user == null) { throw new IllegalArgumentException("user"); } RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = 
RxDocumentServiceRequest.create(operationType, ResourceType.User, path, user, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<User>> replaceUser(User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserInternal(user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> replaceUserInternal(User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (user == null) { throw new IllegalArgumentException("user"); } logger.debug("Replacing a User. user id [{}]", user.getId()); RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(user.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.User, path, user, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in replacing a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Mono<ResourceResponse<User>> deleteUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> deleteUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Deleting a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in deleting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> readUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> readUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Reading a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in reading a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<User>> readUsers(String databaseLink, FeedOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.User, User.class, Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, String query, FeedOptions options) { return queryUsers(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(databaseLink, querySpec, options, User.class, ResourceType.User); } @Override public Mono<ResourceResponse<Permission>> createPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createPermissionInternal(userLink, permission, options, documentClientRetryPolicy), this.resetSessionTokenRetryPolicy.getRequestPolicy()); } private Mono<ResourceResponse<Permission>> createPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Create); return this.create(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in creating a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> upsertPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertPermissionInternal(userLink, permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> upsertPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in upserting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getPermissionRequest(String userLink, Permission permission, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } if (permission == null) { throw new IllegalArgumentException("permission"); } RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Permission, path, permission, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Permission>> replacePermission(Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replacePermissionInternal(permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> replacePermissionInternal(Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (permission == null) { throw new IllegalArgumentException("permission"); } logger.debug("Replacing a Permission. 
permission id [{}]", permission.getId()); RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(permission.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Permission, path, permission, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in replacing a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> deletePermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deletePermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> deletePermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Deleting a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in deleting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> readPermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readPermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> readPermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance ) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Reading a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in reading a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Permission>> readPermissions(String userLink, FeedOptions options) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } return readFeed(options, ResourceType.Permission, Permission.class, Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, String query, FeedOptions options) { return queryPermissions(userLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(userLink, querySpec, options, Permission.class, ResourceType.Permission); } @Override public Mono<ResourceResponse<Offer>> replaceOffer(Offer offer) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceOfferInternal(offer, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Offer>> replaceOfferInternal(Offer offer, DocumentClientRetryPolicy documentClientRetryPolicy) { try { if (offer == null) { throw new IllegalArgumentException("offer"); } logger.debug("Replacing an Offer. 
offer id [{}]", offer.getId()); RxDocumentClientImpl.validateResource(offer); String path = Utils.joinPath(offer.getSelfLink(), null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Offer, path, offer, null, null); return this.replace(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in replacing an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Offer>> readOffer(String offerLink) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readOfferInternal(offerLink, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Offer>> readOfferInternal(String offerLink, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(offerLink)) { throw new IllegalArgumentException("offerLink"); } logger.debug("Reading an Offer. 
offerLink [{}]", offerLink); String path = Utils.joinPath(offerLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Offer, path, (HashMap<String, String>)null, null); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in reading an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Offer>> readOffers(FeedOptions options) { return readFeed(options, ResourceType.Offer, Offer.class, Utils.joinPath(Paths.OFFERS_PATH_SEGMENT, null)); } private <T extends Resource> Flux<FeedResponse<T>> readFeedCollectionChild(FeedOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new FeedOptions(); } int maxPageSize = options.maxItemCount() != null ? options.maxItemCount() : -1; final FeedOptions finalFeedOptions = options; RequestOptions requestOptions = new RequestOptions(); requestOptions.setPartitionKey(options.partitionKey()); BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalFeedOptions); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> { return ObservableHelper.inlineIfPossibleAsObs(() -> { Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request); 
Mono<RxDocumentServiceRequest> requestObs = this.addPartitionKeyInformation(request, null, null, requestOptions, collectionObs); return requestObs.flatMap(req -> this.readFeed(req) .map(response -> toFeedResponsePage(response, klass))); }, this.resetSessionTokenRetryPolicy.getRequestPolicy()); }; return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } private <T extends Resource> Flux<FeedResponse<T>> readFeed(FeedOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new FeedOptions(); } int maxPageSize = options.maxItemCount() != null ? options.maxItemCount() : -1; final FeedOptions finalFeedOptions = options; BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalFeedOptions); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> { return ObservableHelper.inlineIfPossibleAsObs(() -> readFeed(request).map(response -> toFeedResponsePage(response, klass)), this.resetSessionTokenRetryPolicy.getRequestPolicy()); }; return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } @Override public Flux<FeedResponse<Offer>> queryOffers(String query, FeedOptions options) { return queryOffers(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Offer>> queryOffers(SqlQuerySpec querySpec, FeedOptions options) { return createQuery(null, querySpec, options, Offer.class, ResourceType.Offer); } 
@Override public Mono<DatabaseAccount> getDatabaseAccount() { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> getDatabaseAccountInternal(documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<DatabaseAccount> getDatabaseAccountInternal(DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Getting Database Account"); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.DatabaseAccount, "", (HashMap<String, String>) null, null); return this.read(request, documentClientRetryPolicy).map(response -> toDatabaseAccount(response)); } catch (Exception e) { logger.debug("Failure in getting Database Account due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Object getSession() { return this.sessionContainer; } public void setSession(Object sessionContainer) { this.sessionContainer = (SessionContainer) sessionContainer; } public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return partitionKeyRangeCache; } public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { return Flux.defer(() -> { RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.DatabaseAccount, "", null, (Object) null); this.populateHeaders(request, RequestVerb.GET); request.setEndpointOverride(endpoint); return this.gatewayProxy.processMessage(request).doOnError(e -> { String message = String.format("Failed to retrieve database account information. %s", e.getCause() != null ? 
e.getCause().toString() : e.toString()); logger.warn(message); }).map(rsp -> rsp.getResource(DatabaseAccount.class)) .doOnNext(databaseAccount -> { this.useMultipleWriteLocations = this.connectionPolicy.getUsingMultipleWriteLocations() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); }); }); } /** * Certain requests must be routed through gateway even when the client connectivity mode is direct. * * @param request * @return RxStoreModel */ private RxStoreModel getStoreProxy(RxDocumentServiceRequest request) { if (request.UseGatewayMode) { return this.gatewayProxy; } ResourceType resourceType = request.getResourceType(); OperationType operationType = request.getOperationType(); if (resourceType == ResourceType.Offer || resourceType.isScript() && operationType != OperationType.ExecuteJavaScript || resourceType == ResourceType.PartitionKeyRange) { return this.gatewayProxy; } if (operationType == OperationType.Create || operationType == OperationType.Upsert) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection || resourceType == ResourceType.Permission) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Delete) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Replace) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Read) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else { if ((request.getOperationType() == OperationType.Query || request.getOperationType() == OperationType.SqlQuery) && Utils.isCollectionChild(request.getResourceType())) { if 
(request.getPartitionKeyRangeIdentity() == null) { return this.gatewayProxy; } } return this.storeModel; } } @Override public void close() { logger.info("Shutting down ..."); logger.info("Closing Global Endpoint Manager ..."); LifeCycleUtils.closeQuietly(this.globalEndpointManager); logger.info("Closing StoreClientFactory ..."); LifeCycleUtils.closeQuietly(this.storeClientFactory); logger.info("Shutting down reactorHttpClient ..."); try { this.reactorHttpClient.shutdown(); } catch (Exception e) { logger.warn("shutting down reactorHttpClient failed", e); } logger.info("Shutting down completed."); } }
There is a time window after `GlobalEndpointManager` is instantiated but before it has fetched the `DatabaseAccount`. Within that window, how do we ensure this method never returns null?
// Returns the most recently fetched DatabaseAccount snapshot.
// NOTE(review): latestDatabaseAccount is a volatile field assigned only after a
// successful account fetch, so between construction and the first completed
// refresh this can return null — callers must either tolerate null or the
// manager must have completed init() first. TODO confirm the initialization
// ordering guarantees this is non-null for all call sites.
public DatabaseAccount getLatestDatabaseAccount() { return this.latestDatabaseAccount; }
// Target span referenced by the review comment above (the returned field read):
return this.latestDatabaseAccount;
// Method after the review — unchanged from the version above:
public DatabaseAccount getLatestDatabaseAccount() { return this.latestDatabaseAccount; }
/**
 * Tracks the service's regional read/write endpoints via a {@link LocationCache},
 * periodically re-fetches the {@code DatabaseAccount} on a single-threaded scheduler,
 * and memoizes the account in an {@code AsyncCache}. Thread-safety relies on
 * AtomicBoolean handshakes ({@code isRefreshing}, {@code refreshInBackground}) and
 * volatile fields; the refresh flow is order-sensitive reactive code.
 */
class GlobalEndpointManager implements AutoCloseable {
    private static final Logger logger = LoggerFactory.getLogger(GlobalEndpointManager.class);
    private final int backgroundRefreshLocationTimeIntervalInMS;
    private final LocationCache locationCache;
    private final URI defaultEndpoint;
    private final ConnectionPolicy connectionPolicy;
    private final DatabaseAccountManagerInternal owner;
    // Guards against overlapping refreshes (compareAndSet gate in refreshLocationAsync).
    private final AtomicBoolean isRefreshing;
    // True while a background refresh timer is registered; prevents double-registration.
    private final AtomicBoolean refreshInBackground;
    private final ExecutorService executor = Executors.newSingleThreadExecutor();
    private final Scheduler scheduler = Schedulers.fromExecutor(executor);
    private volatile boolean isClosed;
    // Single-entry cache keyed by the empty string; holds the latest fetched account.
    private final AsyncCache<String, DatabaseAccount> databaseAccountAsyncCache;
    // First call to getDatabaseAccountAsync returns the cached value as-is;
    // subsequent calls force a refresh using it as the obsolete value.
    private AtomicBoolean firstTimeDatabaseAccountInitialization = new AtomicBoolean(true);
    // NOTE(review): null until the first successful fetch — see getLatestDatabaseAccount callers.
    private volatile DatabaseAccount latestDatabaseAccount;

    public GlobalEndpointManager(DatabaseAccountManagerInternal owner, ConnectionPolicy connectionPolicy, Configs configs) {
        this.backgroundRefreshLocationTimeIntervalInMS = configs.getUnavailableLocationsExpirationTimeInSeconds() * 1000;
        this.databaseAccountAsyncCache = new AsyncCache<>();
        try {
            this.locationCache = new LocationCache(
                new ArrayList<>(connectionPolicy.getPreferredLocations() != null ?
                    connectionPolicy.getPreferredLocations(): Collections.emptyList()
                ),
                owner.getServiceEndpoint(),
                connectionPolicy.getEnableEndpointDiscovery(),
                BridgeInternal.getUseMultipleWriteLocations(connectionPolicy),
                configs);
            this.owner = owner;
            this.defaultEndpoint = owner.getServiceEndpoint();
            this.connectionPolicy = connectionPolicy;
            this.isRefreshing = new AtomicBoolean(false);
            this.refreshInBackground = new AtomicBoolean(false);
            this.isClosed = false;
        } catch (Exception e) {
            // Any constructor failure (e.g. LocationCache setup) surfaces as IllegalArgumentException.
            throw new IllegalArgumentException(e);
        }
    }

    // Blocks the caller until the first endpoint refresh completes (delay = 0 on init).
    public void init() {
        startRefreshLocationTimerAsync(true).block();
    }

    public UnmodifiableList<URI> getReadEndpoints() {
        return this.locationCache.getReadEndpoints();
    }

    public UnmodifiableList<URI> getWriteEndpoints() {
        return this.locationCache.getWriteEndpoints();
    }

    /**
     * Fetches the DatabaseAccount from the default endpoint first; on failure, tries each
     * preferred location in order (concatDelayError) and returns the first success.
     */
    public static Mono<DatabaseAccount> getDatabaseAccountFromAnyLocationsAsync(
            URI defaultEndpoint, List<String> locations, Function<URI, Mono<DatabaseAccount>> getDatabaseAccountFn) {
        return getDatabaseAccountFn.apply(defaultEndpoint).onErrorResume(
            e -> {
                logger.error("Fail to reach global gateway [{}], [{}]", defaultEndpoint, e.getMessage());
                if (locations.isEmpty()) {
                    return Mono.error(e);
                }
                Flux<Flux<DatabaseAccount>> obs = Flux.range(0, locations.size())
                    .map(index -> getDatabaseAccountFn.apply(LocationHelper.getLocationEndpoint(defaultEndpoint, locations.get(index))).flux());
                // take(1).single(): first location that answers wins.
                Mono<DatabaseAccount> res = Flux.concatDelayError(obs).take(1).single();
                return res.doOnError(
                    innerE -> logger.error("Fail to reach location any of locations {} {}", String.join(",", locations), innerE.getMessage()));
            });
    }

    public URI resolveServiceEndpoint(RxDocumentServiceRequest request) {
        return this.locationCache.resolveServiceEndpoint(request);
    }

    public void markEndpointUnavailableForRead(URI endpoint) {
        logger.debug("Marking endpoint {} unavailable for read",endpoint);
        // NOTE(review): stray second semicolon below (harmless empty statement).
        this.locationCache.markEndpointUnavailableForRead(endpoint);;
    }

    public void markEndpointUnavailableForWrite(URI endpoint) {
        logger.debug("Marking endpoint {} unavailable for Write",endpoint);
        this.locationCache.markEndpointUnavailableForWrite(endpoint);
    }

    // NOTE(review): method name should be lowerCamelCase (canUseMultipleWriteLocations);
    // kept as-is since callers depend on this signature.
    public boolean CanUseMultipleWriteLocations(RxDocumentServiceRequest request) {
        return this.locationCache.canUseMultipleWriteLocations(request);
    }

    public void close() {
        this.isClosed = true;
        this.executor.shutdown();
        logger.debug("GlobalEndpointManager closed.");
    }

    /**
     * Refreshes the location cache. With forceRefresh the account is re-fetched
     * unconditionally; otherwise a compareAndSet gate ensures only one refresh runs.
     * NOTE(review): on the forced path isRefreshing is neither checked nor reset — TODO confirm intended.
     */
    public Mono<Void> refreshLocationAsync(DatabaseAccount databaseAccount, boolean forceRefresh) {
        return Mono.defer(() -> {
            logger.debug("refreshLocationAsync() invoked");
            if (forceRefresh) {
                Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync(
                    this.defaultEndpoint,
                    new ArrayList<>(this.connectionPolicy.getPreferredLocations()),
                    this::getDatabaseAccountAsync);
                return databaseAccountObs.map(dbAccount -> {
                    this.locationCache.onDatabaseAccountRead(dbAccount);
                    return dbAccount;
                }).flatMap(dbAccount -> {
                    return Mono.empty();
                });
            }
            if (!isRefreshing.compareAndSet(false, true)) {
                logger.debug("in the middle of another refresh. Not invoking a new refresh.");
                return Mono.empty();
            }
            logger.debug("will refresh");
            // isRefreshing is reset inside refreshLocationPrivateAsync on success, here on error.
            return this.refreshLocationPrivateAsync(databaseAccount).doOnError(e -> this.isRefreshing.set(false));
        });
    }

    /**
     * Returns the cached DatabaseAccount, fetching it from the default endpoint on a cache miss.
     * NOTE(review): this constructor-adjacent path is what makes the first
     * GatewayServiceConfigurationReader lookup hit the backend when the cache is empty.
     * NOTE(review): refreshLocationAsync(...) below returns a Mono that is never
     * subscribed inside doOnSuccess, so the refresh it names is likely a no-op — TODO confirm.
     */
    public Mono<DatabaseAccount> getDatabaseAccountFromCache(URI defaultEndpoint) {
        return this.databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, null, () ->
            this.owner.getDatabaseAccountFromEndpoint(defaultEndpoint).single().doOnSuccess(databaseAccount -> {
                if(databaseAccount != null) {
                    this.latestDatabaseAccount = databaseAccount;
                }
                this.refreshLocationAsync(databaseAccount, false);
            }));
    }

    /**
     * Core refresh: feeds the (possibly null) account into the LocationCache, then decides
     * whether a foreground re-fetch is required or a background timer suffices.
     * Also responsible for clearing isRefreshing on every exit path.
     */
    private Mono<Void> refreshLocationPrivateAsync(DatabaseAccount databaseAccount) {
        return Mono.defer(() -> {
            logger.debug("refreshLocationPrivateAsync() refreshing locations");
            if (databaseAccount != null) {
                this.locationCache.onDatabaseAccountRead(databaseAccount);
            }
            Utils.ValueHolder<Boolean> canRefreshInBackground = new Utils.ValueHolder<>();
            if (this.locationCache.shouldRefreshEndpoints(canRefreshInBackground)) {
                logger.debug("shouldRefreshEndpoints: true");
                if (databaseAccount == null && !canRefreshInBackground.v) {
                    logger.debug("shouldRefreshEndpoints: can't be done in background");
                    // No account in hand and background refresh not allowed: fetch inline.
                    Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync(
                        this.defaultEndpoint,
                        new ArrayList<>(this.connectionPolicy.getPreferredLocations()),
                        this::getDatabaseAccountAsync);
                    return databaseAccountObs.map(dbAccount -> {
                        this.locationCache.onDatabaseAccountRead(dbAccount);
                        this.isRefreshing.set(false);
                        return dbAccount;
                    }).flatMap(dbAccount -> {
                        if (!this.refreshInBackground.get()) {
                            this.startRefreshLocationTimerAsync();
                        }
                        return Mono.empty();
                    });
                }
                if (!this.refreshInBackground.get()) {
                    this.startRefreshLocationTimerAsync();
                }
                this.isRefreshing.set(false);
                return Mono.empty();
            } else {
                logger.debug("shouldRefreshEndpoints: false, nothing to do.");
                this.isRefreshing.set(false);
                return Mono.empty();
            }
        });
    }

    // Fire-and-forget registration of a non-initialization background refresh.
    private void startRefreshLocationTimerAsync() {
        startRefreshLocationTimerAsync(false).subscribe();
    }

    /**
     * Schedules a refresh after the configured interval (immediately when
     * {@code initialization} is true). On failure it logs and re-registers itself.
     */
    private Mono<Void> startRefreshLocationTimerAsync(boolean initialization) {
        if (this.isClosed) {
            logger.debug("startRefreshLocationTimerAsync: nothing to do, it is closed");
            return Mono.empty();
        }
        logger.debug("registering a refresh in [{}] ms", this.backgroundRefreshLocationTimeIntervalInMS);
        LocalDateTime now = LocalDateTime.now();
        int delayInMillis = initialization ? 0: this.backgroundRefreshLocationTimeIntervalInMS;
        this.refreshInBackground.set(true);
        return Mono.delay(Duration.ofMillis(delayInMillis))
            .flatMap(
                t -> {
                    if (this.isClosed) {
                        logger.warn("client already closed");
                        return Mono.empty();
                    }
                    logger.debug("startRefreshLocationTimerAsync() - Invoking refresh, I was registered on [{}]", now);
                    Mono<DatabaseAccount> databaseAccountObs = GlobalEndpointManager.getDatabaseAccountFromAnyLocationsAsync(this.defaultEndpoint,
                        new ArrayList<>(this.connectionPolicy.getPreferredLocations()),
                        this::getDatabaseAccountAsync);
                    return databaseAccountObs.flatMap(dbAccount -> {
                        logger.debug("db account retrieved");
                        this.refreshInBackground.set(false);
                        return this.refreshLocationPrivateAsync(dbAccount);
                    });
                }).onErrorResume(ex -> {
                logger.error("startRefreshLocationTimerAsync() - Unable to refresh database account from any location. Exception: {}", ex.toString(), ex);
                this.startRefreshLocationTimerAsync();
                return Mono.empty();
            }).subscribeOn(scheduler);
    }

    /**
     * Fetches the DatabaseAccount through the AsyncCache: the very first call returns the
     * cached value directly; later calls pass it as the obsolete value to force a refresh,
     * restoring the old value into the cache if the refresh fails.
     */
    private Mono<DatabaseAccount> getDatabaseAccountAsync(URI serviceEndpoint) {
        final GlobalEndpointManager that = this;
        Callable<Mono<DatabaseAccount>> fetchDatabaseAccount = () -> {
            return that.owner.getDatabaseAccountFromEndpoint(serviceEndpoint).doOnNext(databaseAccount -> {
                if(databaseAccount != null) {
                    this.latestDatabaseAccount = databaseAccount;
                }
                logger.debug("account retrieved: {}", databaseAccount);
            }).single();
        };
        Mono<DatabaseAccount> obsoleteValueMono = databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, null, fetchDatabaseAccount);
        return obsoleteValueMono.flatMap(obsoleteValue -> {
            if (firstTimeDatabaseAccountInitialization.compareAndSet(true, false)) {
                return Mono.just(obsoleteValue);
            }
            return databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, obsoleteValue, fetchDatabaseAccount).doOnError(t -> {
                // Refresh failed: put the stale-but-valid value back so readers never see a gap.
                databaseAccountAsyncCache.set(StringUtils.EMPTY, obsoleteValue);
            });
        });
    }

    public boolean isClosed() {
        return this.isClosed;
    }
}
/**
 * Earlier variant of GlobalEndpointManager (no AsyncCache): tracks regional endpoints via
 * a LocationCache, refreshes the DatabaseAccount on a single-threaded scheduler, and
 * records the latest fetched account in a volatile field.
 */
class GlobalEndpointManager implements AutoCloseable {
    private static final Logger logger = LoggerFactory.getLogger(GlobalEndpointManager.class);
    private final int backgroundRefreshLocationTimeIntervalInMS;
    private final LocationCache locationCache;
    private final URI defaultEndpoint;
    private final ConnectionPolicy connectionPolicy;
    private final DatabaseAccountManagerInternal owner;
    // Guards against overlapping refreshes.
    private final AtomicBoolean isRefreshing;
    // True while a background refresh timer is registered.
    private final AtomicBoolean refreshInBackground;
    private final ExecutorService executor = Executors.newSingleThreadExecutor();
    private final Scheduler scheduler = Schedulers.fromExecutor(executor);
    private volatile boolean isClosed;
    // NOTE(review): declared but never read in this variant of the class — TODO confirm.
    private AtomicBoolean firstTimeDatabaseAccountInitialization = new AtomicBoolean(true);
    // NOTE(review): null until the first successful fetch.
    private volatile DatabaseAccount latestDatabaseAccount;

    public GlobalEndpointManager(DatabaseAccountManagerInternal owner, ConnectionPolicy connectionPolicy, Configs configs) {
        this.backgroundRefreshLocationTimeIntervalInMS = configs.getUnavailableLocationsExpirationTimeInSeconds() * 1000;
        try {
            this.locationCache = new LocationCache(
                new ArrayList<>(connectionPolicy.getPreferredLocations() != null ?
                    connectionPolicy.getPreferredLocations(): Collections.emptyList()
                ),
                owner.getServiceEndpoint(),
                connectionPolicy.getEnableEndpointDiscovery(),
                BridgeInternal.getUseMultipleWriteLocations(connectionPolicy),
                configs);
            this.owner = owner;
            this.defaultEndpoint = owner.getServiceEndpoint();
            this.connectionPolicy = connectionPolicy;
            this.isRefreshing = new AtomicBoolean(false);
            this.refreshInBackground = new AtomicBoolean(false);
            this.isClosed = false;
        } catch (Exception e) {
            throw new IllegalArgumentException(e);
        }
    }

    // Blocks the caller until the first endpoint refresh completes (delay = 0 on init).
    public void init() {
        startRefreshLocationTimerAsync(true).block();
    }

    public UnmodifiableList<URI> getReadEndpoints() {
        return this.locationCache.getReadEndpoints();
    }

    public UnmodifiableList<URI> getWriteEndpoints() {
        return this.locationCache.getWriteEndpoints();
    }

    /**
     * Fetches the DatabaseAccount from the default endpoint first; on failure, tries each
     * preferred location in order and returns the first success.
     */
    public static Mono<DatabaseAccount> getDatabaseAccountFromAnyLocationsAsync(
            URI defaultEndpoint, List<String> locations, Function<URI, Mono<DatabaseAccount>> getDatabaseAccountFn) {
        return getDatabaseAccountFn.apply(defaultEndpoint).onErrorResume(
            e -> {
                logger.error("Fail to reach global gateway [{}], [{}]", defaultEndpoint, e.getMessage());
                if (locations.isEmpty()) {
                    return Mono.error(e);
                }
                Flux<Flux<DatabaseAccount>> obs = Flux.range(0, locations.size())
                    .map(index -> getDatabaseAccountFn.apply(LocationHelper.getLocationEndpoint(defaultEndpoint, locations.get(index))).flux());
                Mono<DatabaseAccount> res = Flux.concatDelayError(obs).take(1).single();
                return res.doOnError(
                    innerE -> logger.error("Fail to reach location any of locations {} {}", String.join(",", locations), innerE.getMessage()));
            });
    }

    public URI resolveServiceEndpoint(RxDocumentServiceRequest request) {
        return this.locationCache.resolveServiceEndpoint(request);
    }

    public void markEndpointUnavailableForRead(URI endpoint) {
        logger.debug("Marking endpoint {} unavailable for read",endpoint);
        // NOTE(review): stray second semicolon below (harmless empty statement).
        this.locationCache.markEndpointUnavailableForRead(endpoint);;
    }

    public void markEndpointUnavailableForWrite(URI endpoint) {
        logger.debug("Marking endpoint {} unavailable for Write",endpoint);
        this.locationCache.markEndpointUnavailableForWrite(endpoint);
    }

    // NOTE(review): method name should be lowerCamelCase; kept for caller compatibility.
    public boolean CanUseMultipleWriteLocations(RxDocumentServiceRequest request) {
        return this.locationCache.canUseMultipleWriteLocations(request);
    }

    public void close() {
        this.isClosed = true;
        this.executor.shutdown();
        logger.debug("GlobalEndpointManager closed.");
    }

    /**
     * Refreshes the location cache. With forceRefresh the account is re-fetched
     * unconditionally; otherwise a compareAndSet gate ensures only one refresh runs.
     */
    public Mono<Void> refreshLocationAsync(DatabaseAccount databaseAccount, boolean forceRefresh) {
        return Mono.defer(() -> {
            logger.debug("refreshLocationAsync() invoked");
            if (forceRefresh) {
                Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync(
                    this.defaultEndpoint,
                    new ArrayList<>(this.connectionPolicy.getPreferredLocations()),
                    this::getDatabaseAccountAsync);
                return databaseAccountObs.map(dbAccount -> {
                    this.locationCache.onDatabaseAccountRead(dbAccount);
                    return dbAccount;
                }).flatMap(dbAccount -> {
                    return Mono.empty();
                });
            }
            if (!isRefreshing.compareAndSet(false, true)) {
                logger.debug("in the middle of another refresh. Not invoking a new refresh.");
                return Mono.empty();
            }
            logger.debug("will refresh");
            return this.refreshLocationPrivateAsync(databaseAccount).doOnError(e -> this.isRefreshing.set(false));
        });
    }

    /**
     * Core refresh: feeds the (possibly null) account into the LocationCache, then decides
     * whether a foreground re-fetch is required or a background timer suffices; clears
     * isRefreshing on every exit path.
     * NOTE(review): the original javadoc here described getLatestDatabaseAccount
     * ("provides the latest databaseAccount ... returns previous valid value") and
     * appeared to be misplaced; replaced with a description of this method.
     */
    private Mono<Void> refreshLocationPrivateAsync(DatabaseAccount databaseAccount) {
        return Mono.defer(() -> {
            logger.debug("refreshLocationPrivateAsync() refreshing locations");
            if (databaseAccount != null) {
                this.locationCache.onDatabaseAccountRead(databaseAccount);
            }
            Utils.ValueHolder<Boolean> canRefreshInBackground = new Utils.ValueHolder<>();
            if (this.locationCache.shouldRefreshEndpoints(canRefreshInBackground)) {
                logger.debug("shouldRefreshEndpoints: true");
                if (databaseAccount == null && !canRefreshInBackground.v) {
                    logger.debug("shouldRefreshEndpoints: can't be done in background");
                    Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync(
                        this.defaultEndpoint,
                        new ArrayList<>(this.connectionPolicy.getPreferredLocations()),
                        this::getDatabaseAccountAsync);
                    return databaseAccountObs.map(dbAccount -> {
                        this.locationCache.onDatabaseAccountRead(dbAccount);
                        this.isRefreshing.set(false);
                        return dbAccount;
                    }).flatMap(dbAccount -> {
                        if (!this.refreshInBackground.get()) {
                            this.startRefreshLocationTimerAsync();
                        }
                        return Mono.empty();
                    });
                }
                if (!this.refreshInBackground.get()) {
                    this.startRefreshLocationTimerAsync();
                }
                this.isRefreshing.set(false);
                return Mono.empty();
            } else {
                logger.debug("shouldRefreshEndpoints: false, nothing to do.");
                this.isRefreshing.set(false);
                return Mono.empty();
            }
        });
    }

    // Fire-and-forget registration of a non-initialization background refresh.
    private void startRefreshLocationTimerAsync() {
        startRefreshLocationTimerAsync(false).subscribe();
    }

    /**
     * Schedules a refresh after the configured interval (immediately when
     * {@code initialization} is true). On failure it logs and re-registers itself.
     */
    private Mono<Void> startRefreshLocationTimerAsync(boolean initialization) {
        if (this.isClosed) {
            logger.debug("startRefreshLocationTimerAsync: nothing to do, it is closed");
            return Mono.empty();
        }
        logger.debug("registering a refresh in [{}] ms", this.backgroundRefreshLocationTimeIntervalInMS);
        LocalDateTime now = LocalDateTime.now();
        int delayInMillis = initialization ? 0: this.backgroundRefreshLocationTimeIntervalInMS;
        this.refreshInBackground.set(true);
        return Mono.delay(Duration.ofMillis(delayInMillis))
            .flatMap(
                t -> {
                    if (this.isClosed) {
                        logger.warn("client already closed");
                        return Mono.empty();
                    }
                    logger.debug("startRefreshLocationTimerAsync() - Invoking refresh, I was registered on [{}]", now);
                    Mono<DatabaseAccount> databaseAccountObs = GlobalEndpointManager.getDatabaseAccountFromAnyLocationsAsync(this.defaultEndpoint,
                        new ArrayList<>(this.connectionPolicy.getPreferredLocations()),
                        this::getDatabaseAccountAsync);
                    return databaseAccountObs.flatMap(dbAccount -> {
                        logger.debug("db account retrieved");
                        this.refreshInBackground.set(false);
                        return this.refreshLocationPrivateAsync(dbAccount);
                    });
                }).onErrorResume(ex -> {
                logger.error("startRefreshLocationTimerAsync() - Unable to refresh database account from any location. Exception: {}", ex.toString(), ex);
                this.startRefreshLocationTimerAsync();
                return Mono.empty();
            }).subscribeOn(scheduler);
    }

    /**
     * Fetches the DatabaseAccount directly from the given endpoint, recording any
     * non-null result in latestDatabaseAccount as a side effect.
     */
    private Mono<DatabaseAccount> getDatabaseAccountAsync(URI serviceEndpoint) {
        return this.owner.getDatabaseAccountFromEndpoint(serviceEndpoint)
            .doOnNext(databaseAccount -> {
                if(databaseAccount != null) {
                    this.latestDatabaseAccount = databaseAccount;
                }
                logger.debug("account retrieved: {}", databaseAccount);
            }).single();
    }

    public boolean isClosed() {
        return this.isClosed;
    }
}
The `GatewayServiceConfigurationReader` constructor makes a blocking call on the async cache, so the first time — when the cache is empty — it will hit the backend.
// Builds the gateway configuration reader and derives the multi-write-location flag
// from the most recently fetched DatabaseAccount.
// NOTE(review): getLatestDatabaseAccount() may return null if no account fetch has
// completed yet; this method assumes globalEndpointManager.init() ran first — TODO confirm.
private void initializeGatewayConfigurationReader() {
    this.gatewayConfigurationReader = new GatewayServiceConfigurationReader(this.serviceEndpoint, this.globalEndpointManager);
    DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount();
    this.useMultipleWriteLocations = this.connectionPolicy.getUsingMultipleWriteLocations() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount);
}
// Target span referenced by the review comment (the account read the reviewer questioned):
DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount();
// Method after the review — reader now takes only the endpoint manager, and an assert
// documents the non-null expectation on the fetched account:
private void initializeGatewayConfigurationReader() {
    this.gatewayConfigurationReader = new GatewayServiceConfigurationReader(this.globalEndpointManager);
    DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount();
    // Assertions are disabled by default at runtime; this is a development-time check only.
    assert(databaseAccount != null);
    this.useMultipleWriteLocations = this.connectionPolicy.getUsingMultipleWriteLocations() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount);
}
class RxDocumentClientImpl implements AsyncDocumentClient, IAuthorizationTokenProvider { private final static ObjectMapper mapper = Utils.getSimpleObjectMapper(); private final Logger logger = LoggerFactory.getLogger(RxDocumentClientImpl.class); private final String masterKeyOrResourceToken; private final URI serviceEndpoint; private final ConnectionPolicy connectionPolicy; private final ConsistencyLevel consistencyLevel; private final BaseAuthorizationTokenProvider authorizationTokenProvider; private final UserAgentContainer userAgentContainer; private final boolean hasAuthKeyResourceToken; private final Configs configs; private final boolean enableTransportClientSharing; private CosmosKeyCredential cosmosKeyCredential; private TokenResolver tokenResolver; private SessionContainer sessionContainer; private String firstResourceTokenFromPermissionFeed = StringUtils.EMPTY; private RxClientCollectionCache collectionCache; private RxStoreModel gatewayProxy; private RxStoreModel storeModel; private GlobalAddressResolver addressResolver; private RxPartitionKeyRangeCache partitionKeyRangeCache; private Map<String, List<PartitionKeyAndResourceTokenPair>> resourceTokensMap; private IRetryPolicyFactory resetSessionTokenRetryPolicy; /** * Compatibility mode: Allows to specify compatibility mode used by client when * making query requests. Should be removed when application/sql is no longer * supported. 
*/ private final QueryCompatibilityMode queryCompatibilityMode = QueryCompatibilityMode.Default; private final HttpClient reactorHttpClient; private final GlobalEndpointManager globalEndpointManager; private final RetryPolicy retryPolicy; private volatile boolean useMultipleWriteLocations; private StoreClientFactory storeClientFactory; private GatewayServiceConfigurationReader gatewayConfigurationReader; public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, TokenResolver tokenResolver, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverride, boolean enableTransportClientSharing) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, cosmosKeyCredential, sessionCapturingOverride, enableTransportClientSharing); this.tokenResolver = tokenResolver; } private RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverrideEnabled, boolean enableTransportClientSharing) { this(serviceEndpoint, masterKeyOrResourceToken, connectionPolicy, consistencyLevel, configs, cosmosKeyCredential, sessionCapturingOverrideEnabled, enableTransportClientSharing); if (permissionFeed != null && permissionFeed.size() > 0) { this.resourceTokensMap = new HashMap<>(); for (Permission permission : permissionFeed) { String[] segments = StringUtils.split(permission.getResourceLink(), Constants.Properties.PATH_SEPARATOR.charAt(0)); if (segments.length <= 0) { throw new IllegalArgumentException("resourceLink"); } List<PartitionKeyAndResourceTokenPair> partitionKeyAndResourceTokenPairs = null; PathInfo pathInfo = new PathInfo(false, StringUtils.EMPTY, StringUtils.EMPTY, false); if 
(!PathsHelper.tryParsePathSegments(permission.getResourceLink(), pathInfo, null)) { throw new IllegalArgumentException(permission.getResourceLink()); } partitionKeyAndResourceTokenPairs = resourceTokensMap.get(pathInfo.resourceIdOrFullName); if (partitionKeyAndResourceTokenPairs == null) { partitionKeyAndResourceTokenPairs = new ArrayList<>(); this.resourceTokensMap.put(pathInfo.resourceIdOrFullName, partitionKeyAndResourceTokenPairs); } PartitionKey partitionKey = permission.getResourcePartitionKey(); partitionKeyAndResourceTokenPairs.add(new PartitionKeyAndResourceTokenPair( partitionKey != null ? BridgeInternal.getPartitionKeyInternal(partitionKey) : PartitionKeyInternal.Empty, permission.getToken())); logger.debug("Initializing resource token map , with map key [{}] , partition key [{}] and resource token", pathInfo.resourceIdOrFullName, partitionKey != null ? partitionKey.toString() : null, permission.getToken()); } if(this.resourceTokensMap.isEmpty()) { throw new IllegalArgumentException("permissionFeed"); } String firstToken = permissionFeed.get(0).getToken(); if(ResourceTokenAuthorizationHelper.isResourceToken(firstToken)) { this.firstResourceTokenFromPermissionFeed = firstToken; } } } RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverrideEnabled, boolean enableTransportClientSharing) { logger.info( "Initializing DocumentClient with" + " serviceEndpoint [{}], connectionPolicy [{}], consistencyLevel [{}], directModeProtocol [{}]", serviceEndpoint, connectionPolicy, consistencyLevel, configs.getProtocol()); this.enableTransportClientSharing = enableTransportClientSharing; this.configs = configs; this.masterKeyOrResourceToken = masterKeyOrResourceToken; this.serviceEndpoint = serviceEndpoint; this.cosmosKeyCredential = cosmosKeyCredential; if (this.cosmosKeyCredential != null) { 
hasAuthKeyResourceToken = false; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential); } else if (masterKeyOrResourceToken != null && ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.authorizationTokenProvider = null; hasAuthKeyResourceToken = true; } else if(masterKeyOrResourceToken != null && !ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)){ this.cosmosKeyCredential = new CosmosKeyCredential(this.masterKeyOrResourceToken); hasAuthKeyResourceToken = false; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential); } else { hasAuthKeyResourceToken = false; this.authorizationTokenProvider = null; } if (connectionPolicy != null) { this.connectionPolicy = connectionPolicy; } else { this.connectionPolicy = new ConnectionPolicy(); } boolean disableSessionCapturing = (ConsistencyLevel.SESSION != consistencyLevel && !sessionCapturingOverrideEnabled); this.sessionContainer = new SessionContainer(this.serviceEndpoint.getHost(), disableSessionCapturing); this.consistencyLevel = consistencyLevel; this.userAgentContainer = new UserAgentContainer(); String userAgentSuffix = this.connectionPolicy.getUserAgentSuffix(); if (userAgentSuffix != null && userAgentSuffix.length() > 0) { userAgentContainer.setSuffix(userAgentSuffix); } this.reactorHttpClient = httpClient(); this.globalEndpointManager = new GlobalEndpointManager(asDatabaseAccountManagerInternal(), this.connectionPolicy, /**/configs); this.retryPolicy = new RetryPolicy(this.globalEndpointManager, this.connectionPolicy); this.resetSessionTokenRetryPolicy = retryPolicy; } public void init() { this.gatewayProxy = createRxGatewayProxy(this.sessionContainer, this.consistencyLevel, this.queryCompatibilityMode, this.userAgentContainer, this.globalEndpointManager, this.reactorHttpClient); this.globalEndpointManager.init(); this.initializeGatewayConfigurationReader(); this.collectionCache = 
new RxClientCollectionCache(this.sessionContainer, this.gatewayProxy, this, this.retryPolicy); this.resetSessionTokenRetryPolicy = new ResetSessionTokenRetryPolicyFactory(this.sessionContainer, this.collectionCache, this.retryPolicy); this.partitionKeyRangeCache = new RxPartitionKeyRangeCache(RxDocumentClientImpl.this, collectionCache); if (this.connectionPolicy.getConnectionMode() == ConnectionMode.GATEWAY) { this.storeModel = this.gatewayProxy; } else { this.initializeDirectConnectivity(); } } private void initializeDirectConnectivity() { this.storeClientFactory = new StoreClientFactory( this.configs, this.connectionPolicy.getRequestTimeoutInMillis() / 1000, 0, this.userAgentContainer, this.enableTransportClientSharing ); this.addressResolver = new GlobalAddressResolver( this.reactorHttpClient, this.globalEndpointManager, this.configs.getProtocol(), this, this.collectionCache, this.partitionKeyRangeCache, userAgentContainer, null, this.connectionPolicy); this.createStoreModel(true); } DatabaseAccountManagerInternal asDatabaseAccountManagerInternal() { return new DatabaseAccountManagerInternal() { @Override public URI getServiceEndpoint() { return RxDocumentClientImpl.this.getServiceEndpoint(); } @Override public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { logger.info("Getting database account endpoint from {}", endpoint); return RxDocumentClientImpl.this.getDatabaseAccountFromEndpoint(endpoint); } @Override public ConnectionPolicy getConnectionPolicy() { return RxDocumentClientImpl.this.getConnectionPolicy(); } }; } RxGatewayStoreModel createRxGatewayProxy(ISessionContainer sessionContainer, ConsistencyLevel consistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient) { return new RxGatewayStoreModel(sessionContainer, consistencyLevel, queryCompatibilityMode, userAgentContainer, globalEndpointManager, httpClient); } private 
/* Builds a fixed HTTP client configured from the connection policy (idle timeout, pool size, proxy, request timeout). */ HttpClient httpClient() { HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs) .withMaxIdleConnectionTimeoutInMillis(this.connectionPolicy.getIdleConnectionTimeoutInMillis()) .withPoolSize(this.connectionPolicy.getMaxPoolSize()) .withHttpProxy(this.connectionPolicy.getProxy()) .withRequestTimeoutInMillis(this.connectionPolicy.getRequestTimeoutInMillis()); return HttpClient.createFixed(httpClientConfig); } /* Creates the direct-mode ServerStoreModel. NOTE(review): the subscribeRntbdStatus parameter is not referenced in this body — confirm whether it is still needed. */ private void createStoreModel(boolean subscribeRntbdStatus) { StoreClient storeClient = this.storeClientFactory.createStoreClient( this.addressResolver, this.sessionContainer, this.gatewayConfigurationReader, this, false ); this.storeModel = new ServerStoreModel(storeClient); } @Override public URI getServiceEndpoint() { return this.serviceEndpoint; } /* First available write/read endpoint from the global endpoint manager, or null when none is known. */ @Override public URI getWriteEndpoint() { return globalEndpointManager.getWriteEndpoints().stream().findFirst().orElse(null); } @Override public URI getReadEndpoint() { return globalEndpointManager.getReadEndpoints().stream().findFirst().orElse(null); } @Override public ConnectionPolicy getConnectionPolicy() { return this.connectionPolicy; } /* Creates a database; retries are driven by the session-token-reset retry policy. */ @Override public Mono<ResourceResponse<Database>> createDatabase(Database database, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createDatabaseInternal(database, options, retryPolicyInstance), retryPolicyInstance); } /* Validates input, builds the Create request against the databases root, and maps the raw response; synchronous failures become Mono.error. */ private Mono<ResourceResponse<Database>> createDatabaseInternal(Database database, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (database == null) { throw new IllegalArgumentException("Database"); } logger.debug("Creating a Database. 
id: [{}]", database.getId()); validateResource(database); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create, ResourceType.Database, Paths.DATABASES_ROOT, database, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in creating a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } /* Deletes a database addressed by its link; same retry wrapping as createDatabase. */ @Override public Mono<ResourceResponse<Database>> deleteDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> deleteDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Deleting a Database. databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in deleting a database. 
due to [{}]", e.getMessage(), e); return Mono.error(e); } } /* Reads a single database by link. */ @Override public Mono<ResourceResponse<Database>> readDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> readDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Reading a Database. databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in reading a database. 
due to [{}]", e.getMessage(), e); return Mono.error(e); } } /* Lists all databases as a feed. */ @Override public Flux<FeedResponse<Database>> readDatabases(FeedOptions options) { return readFeed(options, ResourceType.Database, Database.class, Paths.DATABASES_ROOT); } /* Maps a parent resource link + resource type to the child-collection path that queries target. Throws for resource types that cannot be queried this way. */ private String parentResourceLinkToQueryLink(String parentResouceLink, ResourceType resourceTypeEnum) { switch (resourceTypeEnum) { case Database: return Paths.DATABASES_ROOT; case DocumentCollection: return Utils.joinPath(parentResouceLink, Paths.COLLECTIONS_PATH_SEGMENT); case Document: return Utils.joinPath(parentResouceLink, Paths.DOCUMENTS_PATH_SEGMENT); case Offer: return Paths.OFFERS_ROOT; case User: return Utils.joinPath(parentResouceLink, Paths.USERS_PATH_SEGMENT); case Permission: return Utils.joinPath(parentResouceLink, Paths.PERMISSIONS_PATH_SEGMENT); case Attachment: return Utils.joinPath(parentResouceLink, Paths.ATTACHMENTS_PATH_SEGMENT); case StoredProcedure: return Utils.joinPath(parentResouceLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); case Trigger: return Utils.joinPath(parentResouceLink, Paths.TRIGGERS_PATH_SEGMENT); case UserDefinedFunction: return Utils.joinPath(parentResouceLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); default: throw new IllegalArgumentException("resource type not supported"); } } /* Generic query entry point: resolves the query link, creates an execution context asynchronously and flattens its pages into a Flux of feed responses. */ private <T extends Resource> Flux<FeedResponse<T>> createQuery( String parentResourceLink, SqlQuerySpec sqlQuery, FeedOptions options, Class<T> klass, ResourceType resourceTypeEnum) { String queryResourceLink = parentResourceLinkToQueryLink(parentResourceLink, resourceTypeEnum); UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = DocumentQueryClientImpl(RxDocumentClientImpl.this); Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createDocumentQueryExecutionContextAsync(queryClient, resourceTypeEnum, klass, sqlQuery , options, queryResourceLink, false, activityId); return executionContext.flatMap(IDocumentQueryExecutionContext::executeAsync); } /* String-query convenience overload delegating to the SqlQuerySpec variant. */ @Override public Flux<FeedResponse<Database>> queryDatabases(String query, FeedOptions options) { return queryDatabases(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Database>> queryDatabases(SqlQuerySpec querySpec, FeedOptions options) { return createQuery(Paths.DATABASES_ROOT, querySpec, options, Database.class, ResourceType.Database); } /* Creates a collection under the given database; retry via session-token-reset policy. */ @Override public Mono<ResourceResponse<DocumentCollection>> createCollection(String databaseLink, DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> this.createCollectionInternal(databaseLink, collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> createCollectionInternal(String databaseLink, DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Creating a Collection. 
databaseLink: [{}], Collection id: [{}]", databaseLink, collection.getId()); validateResource(collection); String path = Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create, ResourceType.DocumentCollection, path, collection, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } /* On success, record the returned session token for the new collection before handing the response to the caller. */ return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); }); } catch (Exception e) { logger.debug("Failure in creating a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } /* Replaces a collection addressed by its self link. */ @Override public Mono<ResourceResponse<DocumentCollection>> replaceCollection(DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceCollectionInternal(collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> replaceCollectionInternal(DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Replacing a Collection. 
id: [{}]", collection.getId()); validateResource(collection); String path = Utils.joinPath(collection.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.DocumentCollection, path, collection, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } /* Unlike create, the resource may be absent on the response here, so guard before capturing the session token. */ return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { if (resourceResponse.getResource() != null) { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); } }); } catch (Exception e) { logger.debug("Failure in replacing a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } /* Deletes a collection by link. */ @Override public Mono<ResourceResponse<DocumentCollection>> deleteCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> deleteCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting a Collection. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in deleting a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } /* Low-level DELETE: stamps headers, refreshes retry bookkeeping on retries, and dispatches through the store proxy. */ private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.DELETE); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); } /* Low-level GET; same retry-context handling as delete. */ private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.GET); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); } /* Feed reads always go through the gateway proxy, not the direct store. */ Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) { populateHeaders(request, RequestVerb.GET); return gatewayProxy.processMessage(request); } /* Low-level query (POST); captures the response's session token before returning it. */ private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) { populateHeaders(request, RequestVerb.POST); return this.getStoreProxy(request).processMessage(request) .map(response -> { this.captureSessionToken(request, response); return response; } ); } @Override public 
/* Reads a collection by link; retry via session-token-reset policy. */ Mono<ResourceResponse<DocumentCollection>> readCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> readCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Reading a Collection. collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in reading a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } /* Lists all collections of a database as a feed. */ @Override public Flux<FeedResponse<DocumentCollection>> readCollections(String databaseLink, FeedOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.DocumentCollection, DocumentCollection.class, Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT)); } /* String-query convenience overload. */ @Override public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, String query, FeedOptions options) { return createQuery(databaseLink, new SqlQuerySpec(query), options, DocumentCollection.class, ResourceType.DocumentCollection); } @Override public Flux<FeedResponse<DocumentCollection>> 
queryCollections(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(databaseLink, querySpec, options, DocumentCollection.class, ResourceType.DocumentCollection); } /* Serializes stored-procedure arguments to a JSON array string; JsonSerializable values use their own toJson, everything else goes through the shared object mapper. */ private static String serializeProcedureParams(Object[] objectArray) { String[] stringArray = new String[objectArray.length]; for (int i = 0; i < objectArray.length; ++i) { Object object = objectArray[i]; if (object instanceof JsonSerializable) { stringArray[i] = ((JsonSerializable) object).toJson(); } else { try { stringArray[i] = mapper.writeValueAsString(object); } catch (IOException e) { throw new IllegalArgumentException("Can't serialize the object into the json string", e); } } } return String.format("[%s]", StringUtils.join(stringArray, ",")); } /* Rejects resource ids containing path-breaking characters or a trailing space. NOTE(review): the condition below appears truncated in this source (the expression after the last indexOf(' is cut off) — verify against the upstream file before editing. */ private static void validateResource(Resource resource) { if (!StringUtils.isEmpty(resource.getId())) { if (resource.getId().indexOf('/') != -1 || resource.getId().indexOf('\\') != -1 || resource.getId().indexOf('?') != -1 || resource.getId().indexOf(' throw new IllegalArgumentException("Id contains illegal chars."); } if (resource.getId().endsWith(" ")) { throw new IllegalArgumentException("Id ends with a space."); } } } /* Builds the per-request header map: client-level defaults (tentative writes, consistency) first, then per-request options (custom headers, access conditions, triggers, session token, offer settings, quota/script flags). */ private Map<String, String> getRequestHeaders(RequestOptions options) { Map<String, String> headers = new HashMap<>(); if (this.useMultipleWriteLocations) { headers.put(HttpConstants.HttpHeaders.ALLOW_TENTATIVE_WRITES, Boolean.TRUE.toString()); } if (consistencyLevel != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, consistencyLevel.toString()); } if (options == null) { return headers; } Map<String, String> customOptions = options.getHeaders(); if (customOptions != null) { headers.putAll(customOptions); } if (options.getAccessCondition() != null) { if (options.getAccessCondition().getType() == AccessConditionType.IF_MATCH) { headers.put(HttpConstants.HttpHeaders.IF_MATCH, options.getAccessCondition().getCondition()); } else { headers.put(HttpConstants.HttpHeaders.IF_NONE_MATCH, 
options.getAccessCondition().getCondition()); } } if (options.getConsistencyLevel() != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, options.getConsistencyLevel().toString()); } if (options.getIndexingDirective() != null) { headers.put(HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, options.getIndexingDirective().toString()); } if (options.getPostTriggerInclude() != null && options.getPostTriggerInclude().size() > 0) { String postTriggerInclude = StringUtils.join(options.getPostTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, postTriggerInclude); } if (options.getPreTriggerInclude() != null && options.getPreTriggerInclude().size() > 0) { String preTriggerInclude = StringUtils.join(options.getPreTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, preTriggerInclude); } if (!Strings.isNullOrEmpty(options.getSessionToken())) { headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, options.getSessionToken()); } if (options.getResourceTokenExpirySeconds() != null) { headers.put(HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY, String.valueOf(options.getResourceTokenExpirySeconds())); } /* Explicit throughput wins over a named offer type. */ if (options.getOfferThroughput() != null && options.getOfferThroughput() >= 0) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, options.getOfferThroughput().toString()); } else if (options.getOfferType() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_TYPE, options.getOfferType()); } if (options.isPopulateQuotaInfo()) { headers.put(HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, String.valueOf(true)); } if (options.isScriptLoggingEnabled()) { headers.put(HttpConstants.HttpHeaders.SCRIPT_ENABLE_LOGGING, String.valueOf(true)); } return headers; } /* Resolves the target collection asynchronously, then stamps the partition key header onto the request. */ private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, String contentAsString, Document document, RequestOptions options) { Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = 
this.collectionCache.resolveCollectionAsync(request); return collectionObs .map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsString, document, options, collectionValueHolder.v); return request; }); } /* Overload taking an already-resolved (pending) collection lookup instead of resolving it here. */ private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, String contentAsString, Object document, RequestOptions options, Mono<Utils.ValueHolder<DocumentCollection>> collectionObs) { return collectionObs.map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsString, document, options, collectionValueHolder.v); return request; }); } /* Determines the effective partition key, in precedence order: explicit PartitionKey.NONE, explicit key from options, empty key for non-partitioned collections, or extraction from the document body; otherwise the caller must supply one. */ private void addPartitionKeyInformation(RxDocumentServiceRequest request, String contentAsString, Object objectDoc, RequestOptions options, DocumentCollection collection) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); PartitionKeyInternal partitionKeyInternal = null; if (options != null && options.getPartitionKey() != null && options.getPartitionKey().equals(PartitionKey.NONE)){ partitionKeyInternal = BridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else if (options != null && options.getPartitionKey() != null) { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(options.getPartitionKey()); } else if (partitionKeyDefinition == null || partitionKeyDefinition.getPaths().size() == 0) { partitionKeyInternal = PartitionKeyInternal.getEmpty(); } else if (contentAsString != null) { CosmosItemProperties cosmosItemProperties; if (objectDoc instanceof CosmosItemProperties) { cosmosItemProperties = (CosmosItemProperties) objectDoc; } else { cosmosItemProperties = new CosmosItemProperties(contentAsString); } partitionKeyInternal = extractPartitionKeyValueFromDocument(cosmosItemProperties, partitionKeyDefinition); } else { throw new UnsupportedOperationException("PartitionKey value must be supplied for this operation."); } request.setPartitionKeyInternal(partitionKeyInternal); 
request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } /* Extracts the partition key value at the definition's first path; a missing value or an ObjectNode maps to the "none" partition key. Returns null when no definition is given. */ private static PartitionKeyInternal extractPartitionKeyValueFromDocument( CosmosItemProperties document, PartitionKeyDefinition partitionKeyDefinition) { if (partitionKeyDefinition != null) { String path = partitionKeyDefinition.getPaths().iterator().next(); List<String> parts = PathParser.getPathParts(path); if (parts.size() >= 1) { Object value = document.getObjectByPath(parts); if (value == null || value.getClass() == ObjectNode.class) { value = BridgeInternal.getNonePartitionKey(partitionKeyDefinition); } if (value instanceof PartitionKeyInternal) { return (PartitionKeyInternal) value; } else { return PartitionKeyInternal.fromObjectArray(Collections.singletonList(value), false); } } } return null; } /* Builds a document Create/Upsert request: serializes the body, resolves the collection and attaches partition key info. NOTE(review): disableAutomaticIdGeneration is not referenced in this body — confirm it is handled elsewhere. */ private Mono<RxDocumentServiceRequest> getCreateDocumentRequest(String documentCollectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, OperationType operationType) { if (StringUtils.isEmpty(documentCollectionLink)) { throw new IllegalArgumentException("documentCollectionLink"); } if (document == null) { throw new IllegalArgumentException("document"); } String content = toJsonString(document, mapper); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Document, path, requestHeaders, options, content); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request); return addPartitionKeyInformation(request, content, document, options, collectionObs); } /* Stamps the x-date, authorization (URL-encoded), content-type and accept headers on every outgoing request. */ private void populateHeaders(RxDocumentServiceRequest request, RequestVerb httpMethod) { request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); if (this.masterKeyOrResourceToken != null || 
this.resourceTokensMap != null || this.tokenResolver != null || this.cosmosKeyCredential != null) { String resourceName = request.getResourceAddress(); String authorization = this.getUserAuthorizationToken( resourceName, request.getResourceType(), httpMethod, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, request.properties); try { authorization = URLEncoder.encode(authorization, "UTF-8"); } catch (UnsupportedEncodingException e) { throw new IllegalStateException("Failed to encode authtoken.", e); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); } /* Default JSON content-type on write verbs, and a JSON accept header, unless the caller set them. */ if ((RequestVerb.POST.equals(httpMethod) || RequestVerb.PUT.equals(httpMethod)) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); } if (!request.getHeaders().containsKey(HttpConstants.HttpHeaders.ACCEPT)) { request.getHeaders().put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); } } /* Resolves the auth token by credential precedence: token resolver, key credential, plain master key / single resource token, then the resource-tokens map. */ @Override public String getUserAuthorizationToken(String resourceName, ResourceType resourceType, RequestVerb requestVerb, Map<String, String> headers, AuthorizationTokenType tokenType, Map<String, Object> properties) { if (this.tokenResolver != null) { return this.tokenResolver.getAuthorizationToken(requestVerb, resourceName, this.resolveCosmosResourceType(resourceType), properties != null ? 
Collections.unmodifiableMap(properties) : null); } else if (cosmosKeyCredential != null) { return this.authorizationTokenProvider.generateKeyAuthorizationSignature(requestVerb, resourceName, resourceType, headers); } else if (masterKeyOrResourceToken != null && hasAuthKeyResourceToken && resourceTokensMap == null) { return masterKeyOrResourceToken; } else { assert resourceTokensMap != null; if(resourceType.equals(ResourceType.DatabaseAccount)) { return this.firstResourceTokenFromPermissionFeed; } return ResourceTokenAuthorizationHelper.getAuthorizationTokenUsingResourceTokens(resourceTokensMap, requestVerb, resourceName, headers); } } /* Maps the internal ResourceType to the public CosmosResourceType by name, falling back to System for unknown values. */ private CosmosResourceType resolveCosmosResourceType(ResourceType resourceType) { try { return CosmosResourceType.valueOf(resourceType.toString()); } catch (IllegalArgumentException e) { return CosmosResourceType.System; } } /* Records the response's session token against the originating request. */ void captureSessionToken(RxDocumentServiceRequest request, RxDocumentServiceResponse response) { this.sessionContainer.setSessionToken(request, response.getResponseHeaders()); } /* Low-level POST create; updates retry bookkeeping when this is a retry attempt. */ private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request, DocumentClientRetryPolicy retryPolicy) { populateHeaders(request, RequestVerb.POST); RxStoreModel storeProxy = this.getStoreProxy(request); if(request.requestContext != null && retryPolicy.getRetryCount() > 0) { retryPolicy.updateEndTime(); request.requestContext.updateRetryContext(retryPolicy, true); } return storeProxy.processMessage(request); } /* Low-level upsert: a POST flagged with the is-upsert header; captures the session token on completion. */ private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.POST); Map<String, String> headers = request.getHeaders(); assert (headers != null); headers.put(HttpConstants.HttpHeaders.IS_UPSERT, "true"); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } 
return getStoreProxy(request).processMessage(request) .map(response -> { this.captureSessionToken(request, response); return response; } ); } /* Low-level PUT replace; same retry-context handling as create/upsert. */ private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.PUT); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); } /* Creates a document. When no partition key is supplied, wraps the retry policy so partition-key-mismatch errors trigger a collection-cache refresh and retry. */ @Override public Mono<ResourceResponse<Document>> createDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> createDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), requestRetryPolicy); } private Mono<ResourceResponse<Document>> createDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy requestRetryPolicy) { try { logger.debug("Creating a Document. 
collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> requestObs = getCreateDocumentRequest(collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Create); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> { if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } return create(request, requestRetryPolicy); }); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in creating a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } /* Upserts a document; same partition-key-mismatch retry wrapping as createDocument. */ @Override public Mono<ResourceResponse<Document>> upsertDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> upsertDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), finalRetryPolicyInstance); } private Mono<ResourceResponse<Document>> upsertDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Document. 
collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> reqObs = getCreateDocumentRequest(collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Upsert); Mono<RxDocumentServiceResponse> responseObservable = reqObs.flatMap(request -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return upsert(request, retryPolicyInstance); }); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in upserting a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(String documentLink, Object document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = Utils.getCollectionName(documentLink); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(documentLink, document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Object document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Document typedDocument = documentFromObject(document, mapper); return this.replaceDocumentInternal(documentLink, typedDocument, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override 
public Mono<ResourceResponse<Document>> replaceDocument(Document document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = document.getSelfLink(); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (document == null) { throw new IllegalArgumentException("document"); } return this.replaceDocumentInternal(document.getSelfLink(), document, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a database due to [{}]", e.getMessage()); return Mono.error(e); } } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { if (document == null) { throw new IllegalArgumentException("document"); } logger.debug("Replacing a Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options); String content = toJsonString(document, mapper); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Document, path, requestHeaders, options, content); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, content, document, options, collectionObs); return requestObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return replace(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class));} ); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Deleting a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Document, path, requestHeaders, options); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(req); } return this.delete(req, retryPolicyInstance) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));}); } catch (Exception e) { logger.debug("Failure in deleting a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> readDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDocumentInternal(documentLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Document>> readDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Reading a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Document, path, requestHeaders, options); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); }); } catch (Exception e) { logger.debug("Failure in reading a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Flux<FeedResponse<Document>> readDocuments(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return queryDocuments(collectionLink, "SELECT * FROM r", options); } @Override public <T> Mono<FeedResponse<T>> readMany( List<Pair<String, PartitionKey>> itemKeyList, String collectionLink, FeedOptions options, Class<T> klass) { RxDocumentServiceRequest request = RxDocumentServiceRequest.create( OperationType.Query, ResourceType.Document, collectionLink, null ); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request); return collectionObs .flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } Mono<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = partitionKeyRangeCache .tryLookupAsync(collection .getResourceId(), null, null); return 
valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap = new HashMap<>(); CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } itemKeyList .forEach(stringPartitionKeyPair -> { String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString(BridgeInternal .getPartitionKeyInternal(stringPartitionKeyPair .getRight()), collection .getPartitionKey()); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); if (partitionRangeItemKeyMap.get(range) == null) { List<Pair<String, PartitionKey>> list = new ArrayList<>(); list.add(stringPartitionKeyPair); partitionRangeItemKeyMap.put(range, list); } else { List<Pair<String, PartitionKey>> pairs = partitionRangeItemKeyMap.get(range); pairs.add(stringPartitionKeyPair); partitionRangeItemKeyMap.put(range, pairs); } }); Set<PartitionKeyRange> partitionKeyRanges = partitionRangeItemKeyMap.keySet(); List<PartitionKeyRange> ranges = new ArrayList<>(); ranges.addAll(partitionKeyRanges); Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap; rangeQueryMap = getRangeQueryMap(partitionRangeItemKeyMap, collection.getPartitionKey()); String sqlQuery = "this is dummy and only used in creating " + "ParallelDocumentQueryExecutioncontext, but not used"; return createReadManyQuery(collectionLink, new SqlQuerySpec(sqlQuery), options, Document.class, ResourceType.Document, collection, Collections.unmodifiableMap(rangeQueryMap)) .collectList() .map(feedList -> { List<T> finalList = new ArrayList<T>(); HashMap<String, String> headers = new HashMap<>(); double requestCharge = 0; for (FeedResponse<Document> page : feedList) { requestCharge += page.getRequestCharge(); finalList.addAll(page.getResults().stream().map(document -> document.toObject(klass)).collect(Collectors.toList())); } 
headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double .toString(requestCharge)); FeedResponse<T> frp = BridgeInternal .createFeedResponse(finalList, headers); return frp; }); }); } ); } private Map<PartitionKeyRange, SqlQuerySpec> getRangeQueryMap( Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap, PartitionKeyDefinition partitionKeyDefinition) { Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap = new HashMap<>(); String partitionKeySelector = createPkSelector(partitionKeyDefinition); for(Map.Entry<PartitionKeyRange, List<Pair<String, PartitionKey>>> entry: partitionRangeItemKeyMap.entrySet()) { SqlQuerySpec sqlQuerySpec; if (partitionKeySelector.equals("[\"id\"]")) { sqlQuerySpec = createReadManyQuerySpecPartitionKeyIdSame(entry.getValue(), partitionKeySelector); } else { sqlQuerySpec = createReadManyQuerySpec(entry.getValue(), partitionKeySelector); } rangeQueryMap.put(entry.getKey(), sqlQuerySpec); } return rangeQueryMap; } private SqlQuerySpec createReadManyQuerySpecPartitionKeyIdSame(List<Pair<String, PartitionKey>> idPartitionKeyPairList, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); SqlParameterList parameters = new SqlParameterList(); queryStringBuilder.append("SELECT * FROM c WHERE c.id IN ( "); for (int i = 0; i < idPartitionKeyPairList.size(); i++) { Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i); String idValue = pair.getLeft(); String idParamName = "@param" + i; PartitionKey pkValueAsPartitionKey = pair.getRight(); Object pkValue = BridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); if (!Objects.equals(idValue, pkValue)) { continue; } parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append(idParamName); if (i < idPartitionKeyPairList.size() - 1) { queryStringBuilder.append(", "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private SqlQuerySpec 
createReadManyQuerySpec(List<Pair<String, PartitionKey>> idPartitionKeyPairList, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); SqlParameterList parameters = new SqlParameterList(); queryStringBuilder.append("SELECT * FROM c WHERE ( "); for (int i = 0; i < idPartitionKeyPairList.size(); i++) { Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i); PartitionKey pkValueAsPartitionKey = pair.getRight(); Object pkValue = BridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); String pkParamName = "@param" + (2 * i); parameters.add(new SqlParameter(pkParamName, pkValue)); String idValue = pair.getLeft(); String idParamName = "@param" + (2 * i + 1); parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append("("); queryStringBuilder.append("c.id = "); queryStringBuilder.append(idParamName); queryStringBuilder.append(" AND "); queryStringBuilder.append(" c"); queryStringBuilder.append(partitionKeySelector); queryStringBuilder.append((" = ")); queryStringBuilder.append(pkParamName); queryStringBuilder.append(" )"); if (i < idPartitionKeyPairList.size() - 1) { queryStringBuilder.append(" OR "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private String createPkSelector(PartitionKeyDefinition partitionKeyDefinition) { return partitionKeyDefinition.getPaths() .stream() .map(pathPart -> StringUtils.substring(pathPart, 1)) .map(pathPart -> StringUtils.replace(pathPart, "\"", "\\")) .map(part -> "[\"" + part + "\"]") .collect(Collectors.joining()); } private String getCurentParamName(int paramCnt){ return "@param" + paramCnt; } private <T extends Resource> Flux<FeedResponse<T>> createReadManyQuery( String parentResourceLink, SqlQuerySpec sqlQuery, FeedOptions options, Class<T> klass, ResourceType resourceTypeEnum, DocumentCollection collection, Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap) { UUID activityId = Utils.randomUUID(); 
IDocumentQueryClient queryClient = DocumentQueryClientImpl(RxDocumentClientImpl.this); Flux<? extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createReadManyQueryAsync(queryClient, collection.getResourceId(), sqlQuery, rangeQueryMap, options, collection.getResourceId(), parentResourceLink, activityId, klass, resourceTypeEnum); return executionContext.flatMap(IDocumentQueryExecutionContext::executeAsync); } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, String query, FeedOptions options) { return queryDocuments(collectionLink, new SqlQuerySpec(query), options); } private IDocumentQueryClient DocumentQueryClientImpl(RxDocumentClientImpl rxDocumentClientImpl) { return new IDocumentQueryClient () { @Override public RxCollectionCache getCollectionCache() { return RxDocumentClientImpl.this.collectionCache; } @Override public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return RxDocumentClientImpl.this.partitionKeyRangeCache; } @Override public IRetryPolicyFactory getResetSessionTokenRetryPolicy() { return RxDocumentClientImpl.this.resetSessionTokenRetryPolicy; } @Override public ConsistencyLevel getDefaultConsistencyLevelAsync() { return RxDocumentClientImpl.this.gatewayConfigurationReader.getDefaultConsistencyLevel(); } @Override public ConsistencyLevel getDesiredConsistencyLevelAsync() { return RxDocumentClientImpl.this.consistencyLevel; } @Override public Mono<RxDocumentServiceResponse> executeQueryAsync(RxDocumentServiceRequest request) { return RxDocumentClientImpl.this.query(request).single(); } @Override public QueryCompatibilityMode getQueryCompatibilityMode() { return QueryCompatibilityMode.Default; } @Override public Mono<RxDocumentServiceResponse> readFeedAsync(RxDocumentServiceRequest request) { return null; } }; } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return 
createQuery(collectionLink, querySpec, options, Document.class, ResourceType.Document); } @Override public Flux<FeedResponse<Document>> queryDocumentChangeFeed(final String collectionLink, final ChangeFeedOptions changeFeedOptions) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } ChangeFeedQueryImpl<Document> changeFeedQueryImpl = new ChangeFeedQueryImpl<Document>(this, ResourceType.Document, Document.class, collectionLink, changeFeedOptions); return changeFeedQueryImpl.executeAsync(); } @Override public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRanges(final String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.PartitionKeyRange, PartitionKeyRange.class, Utils.joinPath(collectionLink, Paths.PARTITION_KEY_RANGES_PATH_SEGMENT)); } private RxDocumentServiceRequest getStoredProcedureRequest(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } validateResource(storedProcedure); String path = Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); return request; } private RxDocumentServiceRequest getUserDefinedFunctionRequest(String collectionLink, UserDefinedFunction udf, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (udf == null) { throw new IllegalArgumentException("udf"); } 
validateResource(udf); String path = Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<StoredProcedure>> createStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> createStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a StoredProcedure. 
collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Create); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in creating a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a StoredProcedure. 
collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in upserting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedure(StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceStoredProcedureInternal(storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedureInternal(StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } logger.debug("Replacing a StoredProcedure. 
storedProcedure id [{}]", storedProcedure.getId()); RxDocumentClientImpl.validateResource(storedProcedure); String path = Utils.joinPath(storedProcedure.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in replacing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteStoredProcedureInternal(storedProcedureLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Deleting a StoredProcedure. 
storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in deleting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> readStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readStoredProcedureInternal(storedProcedureLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<StoredProcedure>> readStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Reading a StoredProcedure. 
storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in reading a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<StoredProcedure>> readStoredProcedures(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.StoredProcedure, StoredProcedure.class, Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT)); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, String query, FeedOptions options) { return queryStoredProcedures(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, StoredProcedure.class, ResourceType.StoredProcedure); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, Object[] procedureParams) { return this.executeStoredProcedure(storedProcedureLink, null, procedureParams); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, RequestOptions options, Object[] procedureParams) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return 
ObservableHelper.inlineIfPossibleAsObs(() -> executeStoredProcedureInternal(storedProcedureLink, options, procedureParams, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<StoredProcedureResponse> executeStoredProcedureInternal(String storedProcedureLink, RequestOptions options, Object[] procedureParams, DocumentClientRetryPolicy retryPolicy) { try { logger.debug("Executing a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); requestHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ExecuteJavaScript, ResourceType.StoredProcedure, path, procedureParams != null ? RxDocumentClientImpl.serializeProcedureParams(procedureParams) : "", requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> create(request, retryPolicy) .map(response -> { this.captureSessionToken(request, response); return toStoredProcedureResponse(response); })); } catch (Exception e) { logger.debug("Failure in executing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> createTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> createTriggerInternal(String collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a Trigger. 
collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in creating a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> upsertTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> upsertTriggerInternal(String collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Trigger. 
collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in upserting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getTriggerRequest(String collectionLink, Trigger trigger, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (trigger == null) { throw new IllegalArgumentException("trigger"); } RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Trigger, path, trigger, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Trigger>> replaceTrigger(Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceTriggerInternal(trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> replaceTriggerInternal(Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (trigger == null) { throw new IllegalArgumentException("trigger"); } logger.debug("Replacing a Trigger. 
trigger id [{}]", trigger.getId());
            RxDocumentClientImpl.validateResource(trigger);
            String path = Utils.joinPath(trigger.getSelfLink(), null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                ResourceType.Trigger, path, trigger, requestHeaders, options);
            if (retryPolicyInstance != null){
                // Let the retry policy observe the request before it goes on the wire.
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class));
        } catch (Exception e) {
            // Synchronous failures are funneled into the reactive stream instead of being thrown.
            logger.debug("Failure in replacing a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Deletes the trigger addressed by {@code triggerLink}, retrying per the session-token-reset policy. */
    @Override
    public Mono<ResourceResponse<Trigger>> deleteTrigger(String triggerLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    // Builds and issues the Delete request for a trigger; argument validation happens here so the
    // error surfaces through the returned Mono.
    private Mono<ResourceResponse<Trigger>> deleteTriggerInternal(String triggerLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(triggerLink)) {
                throw new IllegalArgumentException("triggerLink");
            }
            logger.debug("Deleting a Trigger. triggerLink [{}]", triggerLink);
            String path = Utils.joinPath(triggerLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                ResourceType.Trigger, path, requestHeaders, options);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class));
        } catch (Exception e) {
            logger.debug("Failure in deleting a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the trigger addressed by {@code triggerLink}. */
    @Override
    public Mono<ResourceResponse<Trigger>> readTrigger(String triggerLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<Trigger>> readTriggerInternal(String triggerLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(triggerLink)) {
                throw new IllegalArgumentException("triggerLink");
            }
            logger.debug("Reading a Trigger. triggerLink [{}]", triggerLink);
            String path = Utils.joinPath(triggerLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.Trigger, path, requestHeaders, options);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class));
        } catch (Exception e) {
            logger.debug("Failure in reading a Trigger due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the trigger feed of the collection addressed by {@code collectionLink}. */
    @Override
    public Flux<FeedResponse<Trigger>> readTriggers(String collectionLink, FeedOptions options) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        return readFeed(options, ResourceType.Trigger, Trigger.class,
            Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT));
    }

    /** Convenience overload: wraps the raw query text and delegates to the SqlQuerySpec overload. */
    @Override
    public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, String query, FeedOptions options) {
        return queryTriggers(collectionLink, new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) {
        return createQuery(collectionLink, querySpec, options, Trigger.class, ResourceType.Trigger);
    }

    /** Creates a user-defined function in the collection addressed by {@code collectionLink}. */
    @Override
    public Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunction(String collectionLink,
            UserDefinedFunction udf, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> createUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunctionInternal(String collectionLink,
            UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
logger.debug("Creating a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink, udf.getId());
            RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Create);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class));
        } catch (Exception e) {
            // Synchronous failures are funneled into the reactive stream instead of being thrown.
            logger.debug("Failure in creating a UserDefinedFunction due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Upserts a user-defined function in the collection addressed by {@code collectionLink}. */
    @Override
    public Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunction(String collectionLink,
            UserDefinedFunction udf, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunctionInternal(String collectionLink,
            UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Upserting a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink, udf.getId());
            RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Upsert);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class));
        } catch (Exception e) {
            logger.debug("Failure in upserting a UserDefinedFunction due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Replaces an existing user-defined function; the target is taken from {@code udf.getSelfLink()}. */
    @Override
    public Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunction(UserDefinedFunction udf, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserDefinedFunctionInternal(udf, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunctionInternal(UserDefinedFunction udf,
            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (udf == null) {
                throw new IllegalArgumentException("udf");
            }
            logger.debug("Replacing a UserDefinedFunction. udf id [{}]", udf.getId());
            // NOTE(review): this call is unqualified while sibling methods use
            // RxDocumentClientImpl.validateResource -- presumably the same static method; confirm.
            validateResource(udf);
            String path = Utils.joinPath(udf.getSelfLink(), null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                ResourceType.UserDefinedFunction, path, udf, requestHeaders, options);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class));
        } catch (Exception e) {
            logger.debug("Failure in replacing a UserDefinedFunction due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Deletes the user-defined function addressed by {@code udfLink}. */
    @Override
    public Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunction(String udfLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunctionInternal(String udfLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(udfLink)) {
                throw new IllegalArgumentException("udfLink");
            }
            logger.debug("Deleting a UserDefinedFunction. udfLink [{}]", udfLink);
            String path = Utils.joinPath(udfLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                ResourceType.UserDefinedFunction, path, requestHeaders, options);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class));
        } catch (Exception e) {
            logger.debug("Failure in deleting a UserDefinedFunction due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the user-defined function addressed by {@code udfLink}. */
    @Override
    public Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunction(String udfLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunctionInternal(String udfLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(udfLink)) {
                throw new IllegalArgumentException("udfLink");
            }
            logger.debug("Reading a UserDefinedFunction. udfLink [{}]", udfLink);
            String path = Utils.joinPath(udfLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.UserDefinedFunction, path, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class));
        } catch (Exception e) {
            logger.debug("Failure in reading a UserDefinedFunction due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the UDF feed of the collection addressed by {@code collectionLink}. */
    @Override
    public Flux<FeedResponse<UserDefinedFunction>> readUserDefinedFunctions(String collectionLink, FeedOptions options) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        return readFeed(options, ResourceType.UserDefinedFunction, UserDefinedFunction.class,
            Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT));
    }

    /** Convenience overload: wraps the raw query text and delegates to the SqlQuerySpec overload. */
    @Override
    public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, String query, FeedOptions options) {
        return queryUserDefinedFunctions(collectionLink, new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) {
        return createQuery(collectionLink, querySpec, options, UserDefinedFunction.class, ResourceType.UserDefinedFunction);
    }

    /** Reads the conflict addressed by {@code conflictLink}. */
    @Override
    public Mono<ResourceResponse<Conflict>> readConflict(String conflictLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<Conflict>> readConflictInternal(String conflictLink, RequestOptions options,
DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(conflictLink)) {
                throw new IllegalArgumentException("conflictLink");
            }
            logger.debug("Reading a Conflict. conflictLink [{}]", conflictLink);
            String path = Utils.joinPath(conflictLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.Conflict, path, requestHeaders, options);
            // Conflicts are partition scoped, so partition-key headers must be resolved first.
            Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options);
            return reqObs.flatMap(req -> {
                // NOTE(review): the lambda parameter 'req' is unused and the outer 'request' is reused.
                // This appears to rely on addPartitionKeyInformation mutating and returning the same
                // instance -- confirm before changing either reference.
                if (retryPolicyInstance != null) {
                    retryPolicyInstance.onBeforeSendRequest(request);
                }
                return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class));
            });
        } catch (Exception e) {
            logger.debug("Failure in reading a Conflict due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the conflict feed of the collection addressed by {@code collectionLink}. */
    @Override
    public Flux<FeedResponse<Conflict>> readConflicts(String collectionLink, FeedOptions options) {
        if (StringUtils.isEmpty(collectionLink)) {
            throw new IllegalArgumentException("collectionLink");
        }
        return readFeed(options, ResourceType.Conflict, Conflict.class,
            Utils.joinPath(collectionLink, Paths.CONFLICTS_PATH_SEGMENT));
    }

    /** Convenience overload: wraps the raw query text and delegates to the SqlQuerySpec overload. */
    @Override
    public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, String query, FeedOptions options) {
        return queryConflicts(collectionLink, new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) {
        return createQuery(collectionLink, querySpec, options, Conflict.class, ResourceType.Conflict);
    }

    /** Deletes the conflict addressed by {@code conflictLink}. */
    @Override
    public Mono<ResourceResponse<Conflict>> deleteConflict(String conflictLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteConflictInternal(conflictLink,
            options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<Conflict>> deleteConflictInternal(String conflictLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(conflictLink)) {
                throw new IllegalArgumentException("conflictLink");
            }
            logger.debug("Deleting a Conflict. conflictLink [{}]", conflictLink);
            String path = Utils.joinPath(conflictLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                ResourceType.Conflict, path, requestHeaders, options);
            Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options);
            return reqObs.flatMap(req -> {
                // NOTE(review): same pattern as readConflictInternal -- 'req' is unused and the outer
                // 'request' is reused; confirm addPartitionKeyInformation returns the same instance.
                if (retryPolicyInstance != null) {
                    retryPolicyInstance.onBeforeSendRequest(request);
                }
                return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class));
            });
        } catch (Exception e) {
            logger.debug("Failure in deleting a Conflict due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Creates a user in the database addressed by {@code databaseLink}. */
    @Override
    public Mono<ResourceResponse<User>> createUser(String databaseLink, User user, RequestOptions options) {
        DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> createUserInternal(databaseLink, user, options, documentClientRetryPolicy), documentClientRetryPolicy);
    }

    private Mono<ResourceResponse<User>> createUserInternal(String databaseLink, User user, RequestOptions options,
            DocumentClientRetryPolicy documentClientRetryPolicy) {
        try {
            logger.debug("Creating a User. databaseLink [{}], user id [{}]", databaseLink, user.getId());
            RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Create);
            // NOTE(review): unlike upsertUserInternal below, no onBeforeSendRequest call is made
            // here before dispatch -- confirm whether that is intentional.
            return this.create(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, User.class));
        } catch (Exception e) {
            logger.debug("Failure in creating a User due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Upserts a user in the database addressed by {@code databaseLink}. */
    @Override
    public Mono<ResourceResponse<User>> upsertUser(String databaseLink, User user, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserInternal(databaseLink, user, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<User>> upsertUserInternal(String databaseLink, User user, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Upserting a User. databaseLink [{}], user id [{}]", databaseLink, user.getId());
            RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Upsert);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class));
        } catch (Exception e) {
            logger.debug("Failure in upserting a User due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Validates arguments and builds the service request shared by the User create/upsert paths.
    private RxDocumentServiceRequest getUserRequest(String databaseLink, User user, RequestOptions options,
            OperationType operationType) {
        if (StringUtils.isEmpty(databaseLink)) {
            throw new IllegalArgumentException("databaseLink");
        }
        if (user == null) {
            throw new IllegalArgumentException("user");
        }
        RxDocumentClientImpl.validateResource(user);
        String path = Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request =
RxDocumentServiceRequest.create(operationType, ResourceType.User, path, user, requestHeaders, options);
        return request;
    }

    /** Replaces an existing user; the target is taken from {@code user.getSelfLink()}. */
    @Override
    public Mono<ResourceResponse<User>> replaceUser(User user, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserInternal(user, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<User>> replaceUserInternal(User user, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (user == null) {
                throw new IllegalArgumentException("user");
            }
            logger.debug("Replacing a User. user id [{}]", user.getId());
            RxDocumentClientImpl.validateResource(user);
            String path = Utils.joinPath(user.getSelfLink(), null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                ResourceType.User, path, user, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class));
        } catch (Exception e) {
            logger.debug("Failure in replacing a User due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /**
     * Deletes the user addressed by {@code userLink}.
     * NOTE(review): unlike the sibling CRUD methods this one lacks {@code @Override} -- confirm
     * whether it is declared on the implemented interface.
     */
    public Mono<ResourceResponse<User>> deleteUser(String userLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<User>> deleteUserInternal(String userLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(userLink)) {
                throw new IllegalArgumentException("userLink");
            }
            logger.debug("Deleting a User. userLink [{}]", userLink);
            String path = Utils.joinPath(userLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                ResourceType.User, path, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class));
        } catch (Exception e) {
            logger.debug("Failure in deleting a User due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the user addressed by {@code userLink}. */
    @Override
    public Mono<ResourceResponse<User>> readUser(String userLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<User>> readUserInternal(String userLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(userLink)) {
                throw new IllegalArgumentException("userLink");
            }
            logger.debug("Reading a User. userLink [{}]", userLink);
            String path = Utils.joinPath(userLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.User, path, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class));
        } catch (Exception e) {
            logger.debug("Failure in reading a User due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the user feed of the database addressed by {@code databaseLink}. */
    @Override
    public Flux<FeedResponse<User>> readUsers(String databaseLink, FeedOptions options) {
        if (StringUtils.isEmpty(databaseLink)) {
            throw new IllegalArgumentException("databaseLink");
        }
        return readFeed(options, ResourceType.User, User.class,
            Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT));
    }

    /** Convenience overload: wraps the raw query text and delegates to the SqlQuerySpec overload. */
    @Override
    public Flux<FeedResponse<User>> queryUsers(String databaseLink, String query, FeedOptions options) {
        return queryUsers(databaseLink, new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<User>> queryUsers(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) {
        return createQuery(databaseLink, querySpec, options, User.class, ResourceType.User);
    }

    /** Creates a permission under the user addressed by {@code userLink}. */
    @Override
    public Mono<ResourceResponse<Permission>> createPermission(String userLink, Permission permission, RequestOptions options) {
        DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        // NOTE(review): the retry wrapper is given a SECOND policy instance here instead of reusing
        // documentClientRetryPolicy, unlike every sibling method -- confirm whether this is a bug.
        return ObservableHelper.inlineIfPossibleAsObs(() -> createPermissionInternal(userLink, permission, options, documentClientRetryPolicy), this.resetSessionTokenRetryPolicy.getRequestPolicy());
    }

    private Mono<ResourceResponse<Permission>> createPermissionInternal(String userLink, Permission permission,
            RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) {
        try {
            logger.debug("Creating a Permission. userLink [{}], permission id [{}]", userLink, permission.getId());
            RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Create);
            return this.create(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Permission.class));
        } catch (Exception e) {
            logger.debug("Failure in creating a Permission due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Upserts a permission under the user addressed by {@code userLink}. */
    @Override
    public Mono<ResourceResponse<Permission>> upsertPermission(String userLink, Permission permission, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> upsertPermissionInternal(userLink, permission, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<Permission>> upsertPermissionInternal(String userLink, Permission permission,
            RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            logger.debug("Upserting a Permission. userLink [{}], permission id [{}]", userLink, permission.getId());
            RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Upsert);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class));
        } catch (Exception e) {
            logger.debug("Failure in upserting a Permission due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Validates arguments and builds the service request shared by the Permission create/upsert paths.
    private RxDocumentServiceRequest getPermissionRequest(String userLink, Permission permission, RequestOptions options,
            OperationType operationType) {
        if (StringUtils.isEmpty(userLink)) {
            throw new IllegalArgumentException("userLink");
        }
        if (permission == null) {
            throw new IllegalArgumentException("permission");
        }
        RxDocumentClientImpl.validateResource(permission);
        String path = Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT);
        Map<String, String> requestHeaders = getRequestHeaders(options);
        RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType,
            ResourceType.Permission, path, permission, requestHeaders, options);
        return request;
    }

    /** Replaces an existing permission; the target is taken from {@code permission.getSelfLink()}. */
    @Override
    public Mono<ResourceResponse<Permission>> replacePermission(Permission permission, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> replacePermissionInternal(permission, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<Permission>> replacePermissionInternal(Permission permission, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (permission == null) {
                throw new IllegalArgumentException("permission");
            }
            logger.debug("Replacing a Permission. permission id [{}]", permission.getId());
            RxDocumentClientImpl.validateResource(permission);
            String path = Utils.joinPath(permission.getSelfLink(), null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                ResourceType.Permission, path, permission, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class));
        } catch (Exception e) {
            logger.debug("Failure in replacing a Permission due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Deletes the permission addressed by {@code permissionLink}. */
    @Override
    public Mono<ResourceResponse<Permission>> deletePermission(String permissionLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deletePermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<Permission>> deletePermissionInternal(String permissionLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(permissionLink)) {
                throw new IllegalArgumentException("permissionLink");
            }
            logger.debug("Deleting a Permission. permissionLink [{}]", permissionLink);
            String path = Utils.joinPath(permissionLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                ResourceType.Permission, path, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class));
        } catch (Exception e) {
            logger.debug("Failure in deleting a Permission due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the permission addressed by {@code permissionLink}. */
    @Override
    public Mono<ResourceResponse<Permission>> readPermission(String permissionLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readPermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<Permission>> readPermissionInternal(String permissionLink, RequestOptions options,
            DocumentClientRetryPolicy retryPolicyInstance ) {
        try {
            if (StringUtils.isEmpty(permissionLink)) {
                throw new IllegalArgumentException("permissionLink");
            }
            logger.debug("Reading a Permission. permissionLink [{}]", permissionLink);
            String path = Utils.joinPath(permissionLink, null);
            Map<String, String> requestHeaders = getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.Permission, path, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class));
        } catch (Exception e) {
            logger.debug("Failure in reading a Permission due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the permission feed of the user addressed by {@code userLink}. */
    @Override
    public Flux<FeedResponse<Permission>> readPermissions(String userLink, FeedOptions options) {
        if (StringUtils.isEmpty(userLink)) {
            throw new IllegalArgumentException("userLink");
        }
        return readFeed(options, ResourceType.Permission, Permission.class,
            Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT));
    }

    /** Convenience overload: wraps the raw query text and delegates to the SqlQuerySpec overload. */
    @Override
    public Flux<FeedResponse<Permission>> queryPermissions(String userLink, String query, FeedOptions options) {
        return queryPermissions(userLink, new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<Permission>> queryPermissions(String userLink, SqlQuerySpec querySpec, FeedOptions options) {
        return createQuery(userLink, querySpec, options, Permission.class, ResourceType.Permission);
    }

    /** Replaces an offer (throughput settings); the target is taken from {@code offer.getSelfLink()}. */
    @Override
    public Mono<ResourceResponse<Offer>> replaceOffer(Offer offer) {
        DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> replaceOfferInternal(offer, documentClientRetryPolicy), documentClientRetryPolicy);
    }

    private Mono<ResourceResponse<Offer>> replaceOfferInternal(Offer offer, DocumentClientRetryPolicy documentClientRetryPolicy) {
        try {
            if (offer == null) {
                throw new IllegalArgumentException("offer");
            }
            logger.debug("Replacing an Offer. offer id [{}]", offer.getId());
            RxDocumentClientImpl.validateResource(offer);
            String path = Utils.joinPath(offer.getSelfLink(), null);
            // Offers take no per-request headers or options, hence the null arguments.
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                ResourceType.Offer, path, offer, null, null);
            return this.replace(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Offer.class));
        } catch (Exception e) {
            logger.debug("Failure in replacing an Offer due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the offer addressed by {@code offerLink}. */
    @Override
    public Mono<ResourceResponse<Offer>> readOffer(String offerLink) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readOfferInternal(offerLink, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<Offer>> readOfferInternal(String offerLink, DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(offerLink)) {
                throw new IllegalArgumentException("offerLink");
            }
            logger.debug("Reading an Offer. offerLink [{}]", offerLink);
            String path = Utils.joinPath(offerLink, null);
            // The cast disambiguates the overload taking a header map from the one taking options.
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.Offer, path, (HashMap<String, String>)null, null);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Offer.class));
        } catch (Exception e) {
            logger.debug("Failure in reading an Offer due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the account-wide offer feed. */
    @Override
    public Flux<FeedResponse<Offer>> readOffers(FeedOptions options) {
        return readFeed(options, ResourceType.Offer, Offer.class,
            Utils.joinPath(Paths.OFFERS_PATH_SEGMENT, null));
    }

    // Pages through a feed of collection-child resources; unlike readFeed below it resolves the
    // owning collection so partition-key information can be attached to each page request.
    private <T extends Resource> Flux<FeedResponse<T>> readFeedCollectionChild(FeedOptions options, ResourceType resourceType,
            Class<T> klass, String resourceLink) {
        if (options == null) {
            options = new FeedOptions();
        }
        // -1 lets the service choose the page size.
        int maxPageSize = options.maxItemCount() != null ? options.maxItemCount() : -1;
        final FeedOptions finalFeedOptions = options;
        RequestOptions requestOptions = new RequestOptions();
        requestOptions.setPartitionKey(options.partitionKey());
        BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> {
            Map<String, String> requestHeaders = new HashMap<>();
            if (continuationToken != null) {
                requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken);
            }
            requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize));
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalFeedOptions);
            return request;
        };
        Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> {
            return ObservableHelper.inlineIfPossibleAsObs(() -> {
                Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request);
Mono<RxDocumentServiceRequest> requestObs = this.addPartitionKeyInformation(request, null, null, requestOptions, collectionObs); return requestObs.flatMap(req -> this.readFeed(req) .map(response -> toFeedResponsePage(response, klass))); }, this.resetSessionTokenRetryPolicy.getRequestPolicy()); }; return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } private <T extends Resource> Flux<FeedResponse<T>> readFeed(FeedOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new FeedOptions(); } int maxPageSize = options.maxItemCount() != null ? options.maxItemCount() : -1; final FeedOptions finalFeedOptions = options; BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalFeedOptions); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> { return ObservableHelper.inlineIfPossibleAsObs(() -> readFeed(request).map(response -> toFeedResponsePage(response, klass)), this.resetSessionTokenRetryPolicy.getRequestPolicy()); }; return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } @Override public Flux<FeedResponse<Offer>> queryOffers(String query, FeedOptions options) { return queryOffers(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Offer>> queryOffers(SqlQuerySpec querySpec, FeedOptions options) { return createQuery(null, querySpec, options, Offer.class, ResourceType.Offer); } 
    /** Reads the service's DatabaseAccount under the session-token-reset retry policy. */
    @Override
    public Mono<DatabaseAccount> getDatabaseAccount() {
        DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> getDatabaseAccountInternal(documentClientRetryPolicy),
            documentClientRetryPolicy);
    }

    // Builds and dispatches the DatabaseAccount read; synchronous failures while building
    // the request are surfaced as an error Mono rather than thrown.
    private Mono<DatabaseAccount> getDatabaseAccountInternal(DocumentClientRetryPolicy documentClientRetryPolicy) {
        try {
            logger.debug("Getting Database Account");
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.DatabaseAccount, "", (HashMap<String, String>) null, null);
            return this.read(request, documentClientRetryPolicy).map(response -> toDatabaseAccount(response));
        } catch (Exception e) {
            logger.debug("Failure in getting Database Account due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Session container accessors; the setter narrows the Object parameter to SessionContainer.
    public Object getSession() {
        return this.sessionContainer;
    }

    public void setSession(Object sessionContainer) {
        this.sessionContainer = (SessionContainer) sessionContainer;
    }

    public RxPartitionKeyRangeCache getPartitionKeyRangeCache() {
        return partitionKeyRangeCache;
    }

    /**
     * Reads the DatabaseAccount from a specific endpoint (used by the global endpoint
     * manager during endpoint discovery) and, on success, records whether multi-region
     * writes are enabled for this client.
     */
    public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) {
        return Flux.defer(() -> {
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.DatabaseAccount, "", null, (Object) null);
            this.populateHeaders(request, RequestVerb.GET);
            request.setEndpointOverride(endpoint);
            return this.gatewayProxy.processMessage(request).doOnError(e -> {
                String message = String.format("Failed to retrieve database account information. %s",
                    e.getCause() != null ? e.getCause().toString() : e.toString());
                logger.warn(message);
            }).map(rsp -> rsp.getResource(DatabaseAccount.class))
                .doOnNext(databaseAccount -> {
                    this.useMultipleWriteLocations = this.connectionPolicy.getUsingMultipleWriteLocations()
                        && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount);
                });
        });
    }

    /**
     * Certain requests must be routed through gateway even when the client connectivity mode is direct.
     *
     * @param request the request being dispatched
     * @return the gateway proxy for control-plane/metadata operations, the direct store model otherwise
     */
    private RxStoreModel getStoreProxy(RxDocumentServiceRequest request) {
        // An explicit per-request override wins over everything else.
        if (request.UseGatewayMode) {
            return this.gatewayProxy;
        }
        ResourceType resourceType = request.getResourceType();
        OperationType operationType = request.getOperationType();
        // Offers, script resources (except script execution) and partition-key-range
        // reads are always served by the gateway.
        if (resourceType == ResourceType.Offer ||
            resourceType.isScript() && operationType != OperationType.ExecuteJavaScript ||
            resourceType == ResourceType.PartitionKeyRange) {
            return this.gatewayProxy;
        }
        // Metadata mutations (database/collection/user/permission) go via gateway;
        // data-plane operations go direct.
        if (operationType == OperationType.Create
                || operationType == OperationType.Upsert) {
            if (resourceType == ResourceType.Database ||
                resourceType == ResourceType.User ||
                resourceType == ResourceType.DocumentCollection ||
                resourceType == ResourceType.Permission) {
                return this.gatewayProxy;
            } else {
                return this.storeModel;
            }
        } else if (operationType == OperationType.Delete) {
            if (resourceType == ResourceType.Database ||
                resourceType == ResourceType.User ||
                resourceType == ResourceType.DocumentCollection) {
                return this.gatewayProxy;
            } else {
                return this.storeModel;
            }
        } else if (operationType == OperationType.Replace) {
            if (resourceType == ResourceType.DocumentCollection) {
                return this.gatewayProxy;
            } else {
                return this.storeModel;
            }
        } else if (operationType == OperationType.Read) {
            if (resourceType == ResourceType.DocumentCollection) {
                return this.gatewayProxy;
            } else {
                return this.storeModel;
            }
        } else {
            // Queries on collection children without a resolved partition-key-range
            // identity must fan out through the gateway.
            if ((request.getOperationType() == OperationType.Query || request.getOperationType() == OperationType.SqlQuery) &&
                    Utils.isCollectionChild(request.getResourceType())) {
                if (request.getPartitionKeyRangeIdentity() == null) {
                    return this.gatewayProxy;
                }
            }
            return this.storeModel;
        }
    }

    /**
     * Releases the global endpoint manager, the store client factory and the shared
     * HTTP client. Failures while shutting down the HTTP client are logged, not thrown.
     */
    @Override
    public void close() {
        logger.info("Shutting down ...");
        logger.info("Closing Global Endpoint Manager ...");
        LifeCycleUtils.closeQuietly(this.globalEndpointManager);
        logger.info("Closing StoreClientFactory ...");
        LifeCycleUtils.closeQuietly(this.storeClientFactory);
        logger.info("Shutting down reactorHttpClient ...");
        try {
            this.reactorHttpClient.shutdown();
        } catch (Exception e) {
            logger.warn("shutting down reactorHttpClient failed", e);
        }
        logger.info("Shutting down completed.");
    }
}
/**
 * Reactive document client implementation. Owns the collection/partition-key-range
 * caches, retry policies, global endpoint management and the transport used to talk
 * to the service (gateway proxy or direct store model, chosen per request).
 */
class RxDocumentClientImpl implements AsyncDocumentClient, IAuthorizationTokenProvider {
    private final static ObjectMapper mapper = Utils.getSimpleObjectMapper();
    private final Logger logger = LoggerFactory.getLogger(RxDocumentClientImpl.class);
    private final String masterKeyOrResourceToken;
    private final URI serviceEndpoint;
    private final ConnectionPolicy connectionPolicy;
    private final ConsistencyLevel consistencyLevel;
    private final BaseAuthorizationTokenProvider authorizationTokenProvider;
    private final UserAgentContainer userAgentContainer;
    private final boolean hasAuthKeyResourceToken;
    private final Configs configs;
    private final boolean connectionSharingAcrossClientsEnabled;
    private CosmosKeyCredential cosmosKeyCredential;
    private TokenResolver tokenResolver;
    private SessionContainer sessionContainer;
    private String firstResourceTokenFromPermissionFeed = StringUtils.EMPTY;
    private RxClientCollectionCache collectionCache;
    private RxStoreModel gatewayProxy;
    private RxStoreModel storeModel;
    private GlobalAddressResolver addressResolver;
    private RxPartitionKeyRangeCache partitionKeyRangeCache;
    // Keyed by resource id or full name; populated from a permission feed (below).
    private Map<String, List<PartitionKeyAndResourceTokenPair>> resourceTokensMap;
    private IRetryPolicyFactory resetSessionTokenRetryPolicy;
    /**
     * Compatibility mode: Allows to specify compatibility mode used by client when
     * making query requests. Should be removed when application/sql is no longer
     * supported.
     */
    private final QueryCompatibilityMode queryCompatibilityMode = QueryCompatibilityMode.Default;
    private final HttpClient reactorHttpClient;
    private final GlobalEndpointManager globalEndpointManager;
    private final RetryPolicy retryPolicy;
    private volatile boolean useMultipleWriteLocations;
    private StoreClientFactory storeClientFactory;
    private GatewayServiceConfigurationReader gatewayConfigurationReader;

    /** Full constructor: delegates to the permission-feed constructor, then records the token resolver. */
    public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed,
                                ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs,
                                TokenResolver tokenResolver, CosmosKeyCredential cosmosKeyCredential,
                                boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled) {
        this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs,
            cosmosKeyCredential, sessionCapturingOverride, connectionSharingAcrossClientsEnabled);
        this.tokenResolver = tokenResolver;
    }

    /**
     * Permission-feed constructor: builds resourceTokensMap, mapping each permission's
     * resource id/full-name to its (partition key, resource token) pairs.
     */
    private RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed,
                                 ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs,
                                 CosmosKeyCredential cosmosKeyCredential, boolean sessionCapturingOverrideEnabled,
                                 boolean connectionSharingAcrossClientsEnabled) {
        this(serviceEndpoint, masterKeyOrResourceToken, connectionPolicy, consistencyLevel, configs, cosmosKeyCredential,
            sessionCapturingOverrideEnabled, connectionSharingAcrossClientsEnabled);
        if (permissionFeed != null && permissionFeed.size() > 0) {
            this.resourceTokensMap = new HashMap<>();
            for (Permission permission : permissionFeed) {
                String[] segments = StringUtils.split(permission.getResourceLink(),
                        Constants.Properties.PATH_SEPARATOR.charAt(0));
                if (segments.length <= 0) {
                    throw new IllegalArgumentException("resourceLink");
                }
                List<PartitionKeyAndResourceTokenPair> partitionKeyAndResourceTokenPairs = null;
                PathInfo pathInfo = new PathInfo(false, StringUtils.EMPTY, StringUtils.EMPTY, false);
                if (!PathsHelper.tryParsePathSegments(permission.getResourceLink(), pathInfo, null)) {
                    throw new IllegalArgumentException(permission.getResourceLink());
                }
                partitionKeyAndResourceTokenPairs = resourceTokensMap.get(pathInfo.resourceIdOrFullName);
                if (partitionKeyAndResourceTokenPairs == null) {
                    partitionKeyAndResourceTokenPairs = new ArrayList<>();
                    this.resourceTokensMap.put(pathInfo.resourceIdOrFullName, partitionKeyAndResourceTokenPairs);
                }
                // A permission without a partition key is stored under PartitionKeyInternal.Empty.
                PartitionKey partitionKey = permission.getResourcePartitionKey();
                partitionKeyAndResourceTokenPairs.add(new PartitionKeyAndResourceTokenPair(
                    partitionKey != null ? BridgeInternal.getPartitionKeyInternal(partitionKey) : PartitionKeyInternal.Empty,
                    permission.getToken()));
                logger.debug("Initializing resource token map , with map key [{}] , partition key [{}] and resource token",
                    pathInfo.resourceIdOrFullName, partitionKey != null ? partitionKey.toString() : null, permission.getToken());
            }
            if(this.resourceTokensMap.isEmpty()) {
                throw new IllegalArgumentException("permissionFeed");
            }
            // Keep the first resource token as a fallback credential for requests that
            // cannot be matched against the map.
            String firstToken = permissionFeed.get(0).getToken();
            if(ResourceTokenAuthorizationHelper.isResourceToken(firstToken)) {
                this.firstResourceTokenFromPermissionFeed = firstToken;
            }
        }
    }

    /**
     * Base constructor: resolves the credential strategy (key credential, resource token,
     * or master key), session capturing, user agent and the gateway retry/endpoint machinery.
     */
    RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, ConnectionPolicy connectionPolicy,
                         ConsistencyLevel consistencyLevel, Configs configs, CosmosKeyCredential cosmosKeyCredential,
                         boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled) {
        logger.info(
            "Initializing DocumentClient with"
                + " serviceEndpoint [{}], connectionPolicy [{}], consistencyLevel [{}], directModeProtocol [{}]",
            serviceEndpoint, connectionPolicy, consistencyLevel, configs.getProtocol());
        this.connectionSharingAcrossClientsEnabled = connectionSharingAcrossClientsEnabled;
        this.configs = configs;
        this.masterKeyOrResourceToken = masterKeyOrResourceToken;
        this.serviceEndpoint = serviceEndpoint;
        this.cosmosKeyCredential = cosmosKeyCredential;
        // Credential precedence: explicit key credential > resource token > master key > none.
        if (this.cosmosKeyCredential != null) {
            hasAuthKeyResourceToken = false;
            this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential);
        } else if (masterKeyOrResourceToken != null && ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) {
            this.authorizationTokenProvider = null;
            hasAuthKeyResourceToken = true;
        } else if(masterKeyOrResourceToken != null && !ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)){
            this.cosmosKeyCredential = new CosmosKeyCredential(this.masterKeyOrResourceToken);
            hasAuthKeyResourceToken = false;
            this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.cosmosKeyCredential);
        } else {
            hasAuthKeyResourceToken = false;
            this.authorizationTokenProvider = null;
        }
        if (connectionPolicy != null) {
            this.connectionPolicy = connectionPolicy;
        } else {
            this.connectionPolicy = new ConnectionPolicy();
        }
        // Session capturing is only disabled when consistency is not SESSION and no override asked for it.
        boolean disableSessionCapturing = (ConsistencyLevel.SESSION != consistencyLevel && !sessionCapturingOverrideEnabled);
        this.sessionContainer = new SessionContainer(this.serviceEndpoint.getHost(), disableSessionCapturing);
        this.consistencyLevel = consistencyLevel;
        this.userAgentContainer = new UserAgentContainer();
        String userAgentSuffix = this.connectionPolicy.getUserAgentSuffix();
        if (userAgentSuffix != null && userAgentSuffix.length() > 0) {
            userAgentContainer.setSuffix(userAgentSuffix);
        }
        this.reactorHttpClient = httpClient();
        this.globalEndpointManager = new GlobalEndpointManager(asDatabaseAccountManagerInternal(), this.connectionPolicy, /**/configs);
        this.retryPolicy = new RetryPolicy(this.globalEndpointManager, this.connectionPolicy);
        // Replaced with a ResetSessionTokenRetryPolicyFactory once caches exist (see init()).
        this.resetSessionTokenRetryPolicy = retryPolicy;
    }

    /** Two-phase initialization: wires caches, retry policies and the store model. (Body continues below this span.) */
    public void init() {
        this.gatewayProxy = createRxGatewayProxy(this.sessionContainer, this.consistencyLevel, this.queryCompatibilityMode,
            this.userAgentContainer, this.globalEndpointManager, this.reactorHttpClient);
        this.globalEndpointManager.init();
        this.initializeGatewayConfigurationReader();
        this.collectionCache = new RxClientCollectionCache(this.sessionContainer, this.gatewayProxy, this, this.retryPolicy);
        // Upgrade the retry policy factory now that the collection cache exists.
        this.resetSessionTokenRetryPolicy = new ResetSessionTokenRetryPolicyFactory(this.sessionContainer, this.collectionCache, this.retryPolicy);
        this.partitionKeyRangeCache = new RxPartitionKeyRangeCache(RxDocumentClientImpl.this, collectionCache);
        if (this.connectionPolicy.getConnectionMode() == ConnectionMode.GATEWAY) {
            this.storeModel = this.gatewayProxy;
        } else {
            this.initializeDirectConnectivity();
        }
    }

    /** Builds the direct-mode transport: store client factory, address resolver, server store model. */
    private void initializeDirectConnectivity() {
        this.storeClientFactory = new StoreClientFactory(
            this.configs,
            this.connectionPolicy.getRequestTimeoutInMillis() / 1000,
            0,
            this.userAgentContainer,
            this.connectionSharingAcrossClientsEnabled
        );
        this.addressResolver = new GlobalAddressResolver(
            this.reactorHttpClient,
            this.globalEndpointManager,
            this.configs.getProtocol(),
            this,
            this.collectionCache,
            this.partitionKeyRangeCache,
            userAgentContainer,
            null,
            this.connectionPolicy);
        this.createStoreModel(true);
    }

    /** Adapter exposing this client's endpoint/account discovery to the GlobalEndpointManager. */
    DatabaseAccountManagerInternal asDatabaseAccountManagerInternal() {
        return new DatabaseAccountManagerInternal() {
            @Override
            public URI getServiceEndpoint() {
                return RxDocumentClientImpl.this.getServiceEndpoint();
            }

            @Override
            public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) {
                logger.info("Getting database account endpoint from {}", endpoint);
                return RxDocumentClientImpl.this.getDatabaseAccountFromEndpoint(endpoint);
            }

            @Override
            public ConnectionPolicy getConnectionPolicy() {
                return RxDocumentClientImpl.this.getConnectionPolicy();
            }
        };
    }

    /** Factory for the gateway store model; overridable for testing. */
    RxGatewayStoreModel createRxGatewayProxy(ISessionContainer sessionContainer,
                                             ConsistencyLevel consistencyLevel,
                                             QueryCompatibilityMode queryCompatibilityMode,
                                             UserAgentContainer userAgentContainer,
                                             GlobalEndpointManager globalEndpointManager,
                                             HttpClient httpClient) {
        return new RxGatewayStoreModel(sessionContainer, consistencyLevel, queryCompatibilityMode, userAgentContainer,
            globalEndpointManager, httpClient);
    }

    /** Builds the gateway HTTP client; shared across clients when connection sharing is enabled. */
    private HttpClient httpClient() {
        HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs)
            .withMaxIdleConnectionTimeoutInMillis(this.connectionPolicy.getIdleConnectionTimeoutInMillis())
            .withPoolSize(this.connectionPolicy.getMaxPoolSize())
            .withHttpProxy(this.connectionPolicy.getProxy())
            .withRequestTimeoutInMillis(this.connectionPolicy.getRequestTimeoutInMillis());
        if (connectionSharingAcrossClientsEnabled) {
            return SharedGatewayHttpClient.getOrCreateInstance(httpClientConfig);
        } else {
            return HttpClient.createFixed(httpClientConfig);
        }
    }

    // NOTE(review): the subscribeRntbdStatus parameter is currently unused here.
    private void createStoreModel(boolean subscribeRntbdStatus) {
        StoreClient storeClient = this.storeClientFactory.createStoreClient(
            this.addressResolver,
            this.sessionContainer,
            this.gatewayConfigurationReader,
            this,
            false
        );
        this.storeModel = new ServerStoreModel(storeClient);
    }

    @Override
    public URI getServiceEndpoint() {
        return this.serviceEndpoint;
    }

    /** First available write endpoint, or null if none has been discovered yet. */
    @Override
    public URI getWriteEndpoint() {
        return globalEndpointManager.getWriteEndpoints().stream().findFirst().orElse(null);
    }

    /** First available read endpoint, or null if none has been discovered yet. */
    @Override
    public URI getReadEndpoint() {
        return globalEndpointManager.getReadEndpoints().stream().findFirst().orElse(null);
    }

    @Override
    public ConnectionPolicy getConnectionPolicy() {
        return this.connectionPolicy;
    }

    /** Creates a database; retried under the session-token-reset policy. */
    @Override
    public Mono<ResourceResponse<Database>> createDatabase(Database database, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> createDatabaseInternal(database, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<Database>> createDatabaseInternal(Database database, RequestOptions options,
                                                                    DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (database == null) {
                throw new IllegalArgumentException("Database");
            }
            logger.debug("Creating a Database. 
id: [{}]", database.getId());
            validateResource(database);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create,
                ResourceType.Database, Paths.DATABASES_ROOT, database, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class));
        } catch (Exception e) {
            logger.debug("Failure in creating a database. due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Deletes a database by link; retried under the session-token-reset policy. */
    @Override
    public Mono<ResourceResponse<Database>> deleteDatabase(String databaseLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDatabaseInternal(databaseLink, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<Database>> deleteDatabaseInternal(String databaseLink, RequestOptions options,
                                                                    DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(databaseLink)) {
                throw new IllegalArgumentException("databaseLink");
            }
            logger.debug("Deleting a Database. databaseLink: [{}]", databaseLink);
            String path = Utils.joinPath(databaseLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                ResourceType.Database, path, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class));
        } catch (Exception e) {
            logger.debug("Failure in deleting a database. due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads a database by link; retried under the session-token-reset policy. */
    @Override
    public Mono<ResourceResponse<Database>> readDatabase(String databaseLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readDatabaseInternal(databaseLink, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<Database>> readDatabaseInternal(String databaseLink, RequestOptions options,
                                                                  DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(databaseLink)) {
                throw new IllegalArgumentException("databaseLink");
            }
            logger.debug("Reading a Database. databaseLink: [{}]", databaseLink);
            String path = Utils.joinPath(databaseLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.Database, path, requestHeaders, options);
            if (retryPolicyInstance != null) {
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class));
        } catch (Exception e) {
            logger.debug("Failure in reading a database. due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the feed of all databases in the account. */
    @Override
    public Flux<FeedResponse<Database>> readDatabases(FeedOptions options) {
        return readFeed(options, ResourceType.Database, Database.class, Paths.DATABASES_ROOT);
    }

    /** Maps a parent resource link + child resource type to the feed/query URI path. */
    private String parentResourceLinkToQueryLink(String parentResouceLink, ResourceType resourceTypeEnum) {
        switch (resourceTypeEnum) {
            case Database:
                return Paths.DATABASES_ROOT;
            case DocumentCollection:
                return Utils.joinPath(parentResouceLink, Paths.COLLECTIONS_PATH_SEGMENT);
            case Document:
                return Utils.joinPath(parentResouceLink, Paths.DOCUMENTS_PATH_SEGMENT);
            case Offer:
                return Paths.OFFERS_ROOT;
            case User:
                return Utils.joinPath(parentResouceLink, Paths.USERS_PATH_SEGMENT);
            case Permission:
                return Utils.joinPath(parentResouceLink, Paths.PERMISSIONS_PATH_SEGMENT);
            case Attachment:
                return Utils.joinPath(parentResouceLink, Paths.ATTACHMENTS_PATH_SEGMENT);
            case StoredProcedure:
                return Utils.joinPath(parentResouceLink, Paths.STORED_PROCEDURES_PATH_SEGMENT);
            case Trigger:
                return Utils.joinPath(parentResouceLink, Paths.TRIGGERS_PATH_SEGMENT);
            case UserDefinedFunction:
                return Utils.joinPath(parentResouceLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT);
            default:
                throw new IllegalArgumentException("resource type not supported");
        }
    }

    /**
     * Builds and runs a cross-partition-capable query execution context for the given
     * resource type and returns its pages. (Body continues below this span.)
     */
    private <T extends Resource> Flux<FeedResponse<T>> createQuery(
        String parentResourceLink,
        SqlQuerySpec sqlQuery,
        FeedOptions options,
        Class<T> klass,
        ResourceType resourceTypeEnum) {
        String queryResourceLink = parentResourceLinkToQueryLink(parentResourceLink, resourceTypeEnum);
        UUID activityId = Utils.randomUUID();
        // NOTE(review): this call reads as a constructor invocation missing `new` —
        // likely lost in extraction; confirm against the original source.
        IDocumentQueryClient queryClient = DocumentQueryClientImpl(RxDocumentClientImpl.this);
        Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext =
            DocumentQueryExecutionContextFactory.createDocumentQueryExecutionContextAsync(queryClient, resourceTypeEnum,
                klass, sqlQuery , options, queryResourceLink, false, activityId);
        return executionContext.flatMap(IDocumentQueryExecutionContext::executeAsync);
    }

    /** Queries databases from a raw SQL string; delegates to the SqlQuerySpec overload. */
    @Override
    public Flux<FeedResponse<Database>> queryDatabases(String query, FeedOptions options) {
        return queryDatabases(new SqlQuerySpec(query), options);
    }

    @Override
    public Flux<FeedResponse<Database>> queryDatabases(SqlQuerySpec querySpec, FeedOptions options) {
        return createQuery(Paths.DATABASES_ROOT, querySpec, options, Database.class, ResourceType.Database);
    }

    /** Creates a collection under a database; retried under the session-token-reset policy. */
    @Override
    public Mono<ResourceResponse<DocumentCollection>> createCollection(String databaseLink,
                                                                       DocumentCollection collection, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(
            () -> this.createCollectionInternal(databaseLink, collection, options, retryPolicyInstance), retryPolicyInstance);
    }

    private Mono<ResourceResponse<DocumentCollection>> createCollectionInternal(String databaseLink,
                                                                                DocumentCollection collection,
                                                                                RequestOptions options,
                                                                                DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(databaseLink)) {
                throw new IllegalArgumentException("databaseLink");
            }
            if (collection == null) {
                throw new IllegalArgumentException("collection");
            }
            logger.debug("Creating a Collection. databaseLink: [{}], Collection id: [{}]", databaseLink, collection.getId());
            validateResource(collection);
            String path = Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Create,
                ResourceType.DocumentCollection, path, collection, requestHeaders, options);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            // Record the session token of the newly created collection for session consistency.
            return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class))
                .doOnNext(resourceResponse -> {
                    this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(),
                        getAltLink(resourceResponse.getResource()),
                        resourceResponse.getResponseHeaders());
                });
        } catch (Exception e) {
            logger.debug("Failure in creating a collection. due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Replaces an existing collection; retried under the session-token-reset policy. */
    @Override
    public Mono<ResourceResponse<DocumentCollection>> replaceCollection(DocumentCollection collection, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> replaceCollectionInternal(collection, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<DocumentCollection>> replaceCollectionInternal(DocumentCollection collection,
                                                                                 RequestOptions options,
                                                                                 DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (collection == null) {
                throw new IllegalArgumentException("collection");
            }
            logger.debug("Replacing a Collection. id: [{}]", collection.getId());
            validateResource(collection);
            String path = Utils.joinPath(collection.getSelfLink(), null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace,
                ResourceType.DocumentCollection, path, collection, requestHeaders, options);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class))
                .doOnNext(resourceResponse -> {
                    if (resourceResponse.getResource() != null) {
                        this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(),
                            getAltLink(resourceResponse.getResource()),
                            resourceResponse.getResponseHeaders());
                    }
                });
        } catch (Exception e) {
            logger.debug("Failure in replacing a collection. due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Deletes a collection by link; retried under the session-token-reset policy. */
    @Override
    public Mono<ResourceResponse<DocumentCollection>> deleteCollection(String collectionLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> deleteCollectionInternal(collectionLink, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<DocumentCollection>> deleteCollectionInternal(String collectionLink,
                                                                                RequestOptions options,
                                                                                DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(collectionLink)) {
                throw new IllegalArgumentException("collectionLink");
            }
            logger.debug("Deleting a Collection. collectionLink: [{}]", collectionLink);
            String path = Utils.joinPath(collectionLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete,
                ResourceType.DocumentCollection, path, requestHeaders, options);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class));
        } catch (Exception e) {
            logger.debug("Failure in deleting a collection, due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    // Low-level dispatch helpers: populate common headers, record retry context, route
    // through getStoreProxy (gateway vs direct) and send.
    private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) {
        populateHeaders(request, RequestVerb.DELETE);
        if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) {
            documentClientRetryPolicy.updateEndTime();
            request.requestContext.updateRetryContext(documentClientRetryPolicy, true);
        }
        return getStoreProxy(request).processMessage(request);
    }

    private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) {
        populateHeaders(request, RequestVerb.GET);
        if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) {
            documentClientRetryPolicy.updateEndTime();
            request.requestContext.updateRetryContext(documentClientRetryPolicy, true);
        }
        return getStoreProxy(request).processMessage(request);
    }

    // Feed reads always go through the gateway proxy.
    Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) {
        populateHeaders(request, RequestVerb.GET);
        return gatewayProxy.processMessage(request);
    }

    // Queries capture the response session token for session consistency.
    private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) {
        populateHeaders(request, RequestVerb.POST);
        return this.getStoreProxy(request).processMessage(request)
            .map(response -> {
                    this.captureSessionToken(request, response);
                    return response;
                }
            );
    }

    @Override
    public 
Mono<ResourceResponse<DocumentCollection>> readCollection(String collectionLink, RequestOptions options) {
        DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy();
        return ObservableHelper.inlineIfPossibleAsObs(() -> readCollectionInternal(collectionLink, options, retryPolicyInstance),
            retryPolicyInstance);
    }

    private Mono<ResourceResponse<DocumentCollection>> readCollectionInternal(String collectionLink,
                                                                              RequestOptions options,
                                                                              DocumentClientRetryPolicy retryPolicyInstance) {
        try {
            if (StringUtils.isEmpty(collectionLink)) {
                throw new IllegalArgumentException("collectionLink");
            }
            logger.debug("Reading a Collection. collectionLink: [{}]", collectionLink);
            String path = Utils.joinPath(collectionLink, null);
            Map<String, String> requestHeaders = this.getRequestHeaders(options);
            RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read,
                ResourceType.DocumentCollection, path, requestHeaders, options);
            if (retryPolicyInstance != null){
                retryPolicyInstance.onBeforeSendRequest(request);
            }
            return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class));
        } catch (Exception e) {
            logger.debug("Failure in reading a collection, due to [{}]", e.getMessage(), e);
            return Mono.error(e);
        }
    }

    /** Reads the feed of collections under a database. */
    @Override
    public Flux<FeedResponse<DocumentCollection>> readCollections(String databaseLink, FeedOptions options) {
        if (StringUtils.isEmpty(databaseLink)) {
            throw new IllegalArgumentException("databaseLink");
        }
        return readFeed(options, ResourceType.DocumentCollection, DocumentCollection.class,
            Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT));
    }

    /** Queries collections from a raw SQL string; delegates to the SqlQuerySpec overload. */
    @Override
    public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, String query, FeedOptions options) {
        return createQuery(databaseLink, new SqlQuerySpec(query), options, DocumentCollection.class, ResourceType.DocumentCollection);
    }

    @Override
    public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) {
        return createQuery(databaseLink, querySpec, options, DocumentCollection.class, ResourceType.DocumentCollection);
    }

    /**
     * Serializes stored-procedure parameters as a JSON array string.
     * JsonSerializable values use their own toJson; everything else goes through Jackson.
     */
    private static String serializeProcedureParams(Object[] objectArray) {
        String[] stringArray = new String[objectArray.length];
        for (int i = 0; i < objectArray.length; ++i) {
            Object object = objectArray[i];
            if (object instanceof JsonSerializable) {
                stringArray[i] = ((JsonSerializable) object).toJson();
            } else {
                try {
                    stringArray[i] = mapper.writeValueAsString(object);
                } catch (IOException e) {
                    throw new IllegalArgumentException("Can't serialize the object into the json string", e);
                }
            }
        }
        return String.format("[%s]", StringUtils.join(stringArray, ","));
    }

    /** Rejects resource ids containing path/query-reserved characters or a trailing space. */
    private static void validateResource(Resource resource) {
        if (!StringUtils.isEmpty(resource.getId())) {
            // NOTE(review): the last indexOf argument below looks truncated by extraction —
            // the closing character literal, comparison and `) {` appear to be missing;
            // confirm against the original source.
            if (resource.getId().indexOf('/') != -1 || resource.getId().indexOf('\\') != -1 ||
                resource.getId().indexOf('?') != -1 || resource.getId().indexOf('
                throw new IllegalArgumentException("Id contains illegal chars.");
            }
            if (resource.getId().endsWith(" ")) {
                throw new IllegalArgumentException("Id ends with a space.");
            }
        }
    }

    /**
     * Translates RequestOptions into wire headers: consistency, tentative writes,
     * access conditions, triggers, session token, offer throughput, quota/logging flags.
     */
    private Map<String, String> getRequestHeaders(RequestOptions options) {
        Map<String, String> headers = new HashMap<>();
        if (this.useMultipleWriteLocations) {
            headers.put(HttpConstants.HttpHeaders.ALLOW_TENTATIVE_WRITES, Boolean.TRUE.toString());
        }
        if (consistencyLevel != null) {
            headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, consistencyLevel.toString());
        }
        if (options == null) {
            return headers;
        }
        // Custom headers first so the typed options below can override them.
        Map<String, String> customOptions = options.getHeaders();
        if (customOptions != null) {
            headers.putAll(customOptions);
        }
        if (options.getAccessCondition() != null) {
            if (options.getAccessCondition().getType() == AccessConditionType.IF_MATCH) {
                headers.put(HttpConstants.HttpHeaders.IF_MATCH, options.getAccessCondition().getCondition());
            } else {
                headers.put(HttpConstants.HttpHeaders.IF_NONE_MATCH, options.getAccessCondition().getCondition());
            }
        }
        if (options.getConsistencyLevel() != null) {
            headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, options.getConsistencyLevel().toString());
        }
        if (options.getIndexingDirective() != null) {
            headers.put(HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, options.getIndexingDirective().toString());
        }
        if (options.getPostTriggerInclude() != null && options.getPostTriggerInclude().size() > 0) {
            String postTriggerInclude = StringUtils.join(options.getPostTriggerInclude(), ",");
            headers.put(HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, postTriggerInclude);
        }
        if (options.getPreTriggerInclude() != null && options.getPreTriggerInclude().size() > 0) {
            String preTriggerInclude = StringUtils.join(options.getPreTriggerInclude(), ",");
            headers.put(HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, preTriggerInclude);
        }
        if (!Strings.isNullOrEmpty(options.getSessionToken())) {
            headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, options.getSessionToken());
        }
        if (options.getResourceTokenExpirySeconds() != null) {
            headers.put(HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY,
                String.valueOf(options.getResourceTokenExpirySeconds()));
        }
        // Explicit throughput takes precedence over a named offer type.
        if (options.getOfferThroughput() != null && options.getOfferThroughput() >= 0) {
            headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, options.getOfferThroughput().toString());
        } else if (options.getOfferType() != null) {
            headers.put(HttpConstants.HttpHeaders.OFFER_TYPE, options.getOfferType());
        }
        if (options.isPopulateQuotaInfo()) {
            headers.put(HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, String.valueOf(true));
        }
        if (options.isScriptLoggingEnabled()) {
            headers.put(HttpConstants.HttpHeaders.SCRIPT_ENABLE_LOGGING, String.valueOf(true));
        }
        return headers;
    }

    /**
     * Resolves the collection for the request and stamps partition-key information on it.
     * (Body continues below this span.)
     */
    private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request,
                                                                      ByteBuffer contentAsByteBuffer,
                                                                      Document document,
                                                                      RequestOptions options) {
        Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = 
this.collectionCache.resolveCollectionAsync(request); return collectionObs .map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object document, RequestOptions options, Mono<Utils.ValueHolder<DocumentCollection>> collectionObs) { return collectionObs.map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); } private void addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object objectDoc, RequestOptions options, DocumentCollection collection) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); PartitionKeyInternal partitionKeyInternal = null; if (options != null && options.getPartitionKey() != null && options.getPartitionKey().equals(PartitionKey.NONE)){ partitionKeyInternal = BridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else if (options != null && options.getPartitionKey() != null) { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(options.getPartitionKey()); } else if (partitionKeyDefinition == null || partitionKeyDefinition.getPaths().size() == 0) { partitionKeyInternal = PartitionKeyInternal.getEmpty(); } else if (contentAsByteBuffer != null) { CosmosItemProperties cosmosItemProperties; if (objectDoc instanceof CosmosItemProperties) { cosmosItemProperties = (CosmosItemProperties) objectDoc; } else { contentAsByteBuffer.rewind(); cosmosItemProperties = new CosmosItemProperties(contentAsByteBuffer); } partitionKeyInternal = extractPartitionKeyValueFromDocument(cosmosItemProperties, partitionKeyDefinition); } else { throw new UnsupportedOperationException("PartitionKey value must be supplied for this operation."); } 
request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } private static PartitionKeyInternal extractPartitionKeyValueFromDocument( CosmosItemProperties document, PartitionKeyDefinition partitionKeyDefinition) { if (partitionKeyDefinition != null) { String path = partitionKeyDefinition.getPaths().iterator().next(); List<String> parts = PathParser.getPathParts(path); if (parts.size() >= 1) { Object value = document.getObjectByPath(parts); if (value == null || value.getClass() == ObjectNode.class) { value = BridgeInternal.getNonePartitionKey(partitionKeyDefinition); } if (value instanceof PartitionKeyInternal) { return (PartitionKeyInternal) value; } else { return PartitionKeyInternal.fromObjectArray(Collections.singletonList(value), false); } } } return null; } private Mono<RxDocumentServiceRequest> getCreateDocumentRequest(String documentCollectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, OperationType operationType) { if (StringUtils.isEmpty(documentCollectionLink)) { throw new IllegalArgumentException("documentCollectionLink"); } if (document == null) { throw new IllegalArgumentException("document"); } ByteBuffer content = serializeJsonToByteBuffer(document, mapper); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Document, path, requestHeaders, options, content); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request); return addPartitionKeyInformation(request, content, document, options, collectionObs); } private void populateHeaders(RxDocumentServiceRequest request, RequestVerb httpMethod) { 
request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); if (this.masterKeyOrResourceToken != null || this.resourceTokensMap != null || this.tokenResolver != null || this.cosmosKeyCredential != null) { String resourceName = request.getResourceAddress(); String authorization = this.getUserAuthorizationToken( resourceName, request.getResourceType(), httpMethod, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, request.properties); try { authorization = URLEncoder.encode(authorization, "UTF-8"); } catch (UnsupportedEncodingException e) { throw new IllegalStateException("Failed to encode authtoken.", e); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); } if ((RequestVerb.POST.equals(httpMethod) || RequestVerb.PUT.equals(httpMethod)) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); } if (!request.getHeaders().containsKey(HttpConstants.HttpHeaders.ACCEPT)) { request.getHeaders().put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); } } @Override public String getUserAuthorizationToken(String resourceName, ResourceType resourceType, RequestVerb requestVerb, Map<String, String> headers, AuthorizationTokenType tokenType, Map<String, Object> properties) { if (this.tokenResolver != null) { return this.tokenResolver.getAuthorizationToken(requestVerb, resourceName, this.resolveCosmosResourceType(resourceType), properties != null ? 
Collections.unmodifiableMap(properties) : null); } else if (cosmosKeyCredential != null) { return this.authorizationTokenProvider.generateKeyAuthorizationSignature(requestVerb, resourceName, resourceType, headers); } else if (masterKeyOrResourceToken != null && hasAuthKeyResourceToken && resourceTokensMap == null) { return masterKeyOrResourceToken; } else { assert resourceTokensMap != null; if(resourceType.equals(ResourceType.DatabaseAccount)) { return this.firstResourceTokenFromPermissionFeed; } return ResourceTokenAuthorizationHelper.getAuthorizationTokenUsingResourceTokens(resourceTokensMap, requestVerb, resourceName, headers); } } private CosmosResourceType resolveCosmosResourceType(ResourceType resourceType) { try { return CosmosResourceType.valueOf(resourceType.toString()); } catch (IllegalArgumentException e) { return CosmosResourceType.System; } } void captureSessionToken(RxDocumentServiceRequest request, RxDocumentServiceResponse response) { this.sessionContainer.setSessionToken(request, response.getResponseHeaders()); } private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request, DocumentClientRetryPolicy retryPolicy) { populateHeaders(request, RequestVerb.POST); RxStoreModel storeProxy = this.getStoreProxy(request); if(request.requestContext != null && retryPolicy.getRetryCount() > 0) { retryPolicy.updateEndTime(); request.requestContext.updateRetryContext(retryPolicy, true); } return storeProxy.processMessage(request); } private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.POST); Map<String, String> headers = request.getHeaders(); assert (headers != null); headers.put(HttpConstants.HttpHeaders.IS_UPSERT, "true"); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } 
return getStoreProxy(request).processMessage(request) .map(response -> { this.captureSessionToken(request, response); return response; } ); } private Mono<RxDocumentServiceResponse> replace(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.PUT); if(request.requestContext != null && documentClientRetryPolicy.getRetryCount() > 0) { documentClientRetryPolicy.updateEndTime(); request.requestContext.updateRetryContext(documentClientRetryPolicy, true); } return getStoreProxy(request).processMessage(request); } @Override public Mono<ResourceResponse<Document>> createDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> createDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), requestRetryPolicy); } private Mono<ResourceResponse<Document>> createDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy requestRetryPolicy) { try { logger.debug("Creating a Document. 
collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> requestObs = getCreateDocumentRequest(collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Create); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> { if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } return create(request, requestRetryPolicy); }); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in creating a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> upsertDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> upsertDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), finalRetryPolicyInstance); } private Mono<ResourceResponse<Document>> upsertDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Document. 
collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> reqObs = getCreateDocumentRequest(collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Upsert); Mono<RxDocumentServiceResponse> responseObservable = reqObs.flatMap(request -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return upsert(request, retryPolicyInstance); }); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in upserting a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(String documentLink, Object document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = Utils.getCollectionName(documentLink); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(documentLink, document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Object document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Document typedDocument = documentFromObject(document, mapper); return this.replaceDocumentInternal(documentLink, typedDocument, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override 
public Mono<ResourceResponse<Document>> replaceDocument(Document document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = document.getSelfLink(); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (document == null) { throw new IllegalArgumentException("document"); } return this.replaceDocumentInternal(document.getSelfLink(), document, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a database due to [{}]", e.getMessage()); return Mono.error(e); } } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { if (document == null) { throw new IllegalArgumentException("document"); } logger.debug("Replacing a Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options); ByteBuffer content = serializeJsonToByteBuffer(document, mapper); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Document, path, requestHeaders, options, content); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, content, document, options, collectionObs); return requestObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return replace(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class));} ); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Deleting a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Document, path, requestHeaders, options); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(req); } return this.delete(req, retryPolicyInstance) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class));}); } catch (Exception e) { logger.debug("Failure in deleting a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> readDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDocumentInternal(documentLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Document>> readDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Reading a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Document, path, requestHeaders, options); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); }); } catch (Exception e) { logger.debug("Failure in reading a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Flux<FeedResponse<Document>> readDocuments(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return queryDocuments(collectionLink, "SELECT * FROM r", options); } @Override public <T> Mono<FeedResponse<T>> readMany( List<Pair<String, PartitionKey>> itemKeyList, String collectionLink, FeedOptions options, Class<T> klass) { RxDocumentServiceRequest request = RxDocumentServiceRequest.create( OperationType.Query, ResourceType.Document, collectionLink, null ); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(request); return collectionObs .flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } Mono<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = partitionKeyRangeCache .tryLookupAsync(collection .getResourceId(), null, null); return 
valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap = new HashMap<>(); CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } itemKeyList .forEach(stringPartitionKeyPair -> { String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString(BridgeInternal .getPartitionKeyInternal(stringPartitionKeyPair .getRight()), collection .getPartitionKey()); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); if (partitionRangeItemKeyMap.get(range) == null) { List<Pair<String, PartitionKey>> list = new ArrayList<>(); list.add(stringPartitionKeyPair); partitionRangeItemKeyMap.put(range, list); } else { List<Pair<String, PartitionKey>> pairs = partitionRangeItemKeyMap.get(range); pairs.add(stringPartitionKeyPair); partitionRangeItemKeyMap.put(range, pairs); } }); Set<PartitionKeyRange> partitionKeyRanges = partitionRangeItemKeyMap.keySet(); List<PartitionKeyRange> ranges = new ArrayList<>(); ranges.addAll(partitionKeyRanges); Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap; rangeQueryMap = getRangeQueryMap(partitionRangeItemKeyMap, collection.getPartitionKey()); String sqlQuery = "this is dummy and only used in creating " + "ParallelDocumentQueryExecutioncontext, but not used"; return createReadManyQuery(collectionLink, new SqlQuerySpec(sqlQuery), options, Document.class, ResourceType.Document, collection, Collections.unmodifiableMap(rangeQueryMap)) .collectList() .map(feedList -> { List<T> finalList = new ArrayList<T>(); HashMap<String, String> headers = new HashMap<>(); double requestCharge = 0; for (FeedResponse<Document> page : feedList) { requestCharge += page.getRequestCharge(); finalList.addAll(page.getResults().stream().map(document -> document.toObject(klass)).collect(Collectors.toList())); } 
headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double .toString(requestCharge)); FeedResponse<T> frp = BridgeInternal .createFeedResponse(finalList, headers); return frp; }); }); } ); } private Map<PartitionKeyRange, SqlQuerySpec> getRangeQueryMap( Map<PartitionKeyRange, List<Pair<String, PartitionKey>>> partitionRangeItemKeyMap, PartitionKeyDefinition partitionKeyDefinition) { Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap = new HashMap<>(); String partitionKeySelector = createPkSelector(partitionKeyDefinition); for(Map.Entry<PartitionKeyRange, List<Pair<String, PartitionKey>>> entry: partitionRangeItemKeyMap.entrySet()) { SqlQuerySpec sqlQuerySpec; if (partitionKeySelector.equals("[\"id\"]")) { sqlQuerySpec = createReadManyQuerySpecPartitionKeyIdSame(entry.getValue(), partitionKeySelector); } else { sqlQuerySpec = createReadManyQuerySpec(entry.getValue(), partitionKeySelector); } rangeQueryMap.put(entry.getKey(), sqlQuerySpec); } return rangeQueryMap; } private SqlQuerySpec createReadManyQuerySpecPartitionKeyIdSame(List<Pair<String, PartitionKey>> idPartitionKeyPairList, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); SqlParameterList parameters = new SqlParameterList(); queryStringBuilder.append("SELECT * FROM c WHERE c.id IN ( "); for (int i = 0; i < idPartitionKeyPairList.size(); i++) { Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i); String idValue = pair.getLeft(); String idParamName = "@param" + i; PartitionKey pkValueAsPartitionKey = pair.getRight(); Object pkValue = BridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); if (!Objects.equals(idValue, pkValue)) { continue; } parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append(idParamName); if (i < idPartitionKeyPairList.size() - 1) { queryStringBuilder.append(", "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private SqlQuerySpec 
createReadManyQuerySpec(List<Pair<String, PartitionKey>> idPartitionKeyPairList, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); SqlParameterList parameters = new SqlParameterList(); queryStringBuilder.append("SELECT * FROM c WHERE ( "); for (int i = 0; i < idPartitionKeyPairList.size(); i++) { Pair<String, PartitionKey> pair = idPartitionKeyPairList.get(i); PartitionKey pkValueAsPartitionKey = pair.getRight(); Object pkValue = BridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); String pkParamName = "@param" + (2 * i); parameters.add(new SqlParameter(pkParamName, pkValue)); String idValue = pair.getLeft(); String idParamName = "@param" + (2 * i + 1); parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append("("); queryStringBuilder.append("c.id = "); queryStringBuilder.append(idParamName); queryStringBuilder.append(" AND "); queryStringBuilder.append(" c"); queryStringBuilder.append(partitionKeySelector); queryStringBuilder.append((" = ")); queryStringBuilder.append(pkParamName); queryStringBuilder.append(" )"); if (i < idPartitionKeyPairList.size() - 1) { queryStringBuilder.append(" OR "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private String createPkSelector(PartitionKeyDefinition partitionKeyDefinition) { return partitionKeyDefinition.getPaths() .stream() .map(pathPart -> StringUtils.substring(pathPart, 1)) .map(pathPart -> StringUtils.replace(pathPart, "\"", "\\")) .map(part -> "[\"" + part + "\"]") .collect(Collectors.joining()); } private String getCurentParamName(int paramCnt){ return "@param" + paramCnt; } private <T extends Resource> Flux<FeedResponse<T>> createReadManyQuery( String parentResourceLink, SqlQuerySpec sqlQuery, FeedOptions options, Class<T> klass, ResourceType resourceTypeEnum, DocumentCollection collection, Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap) { UUID activityId = Utils.randomUUID(); 
IDocumentQueryClient queryClient = DocumentQueryClientImpl(RxDocumentClientImpl.this); Flux<? extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createReadManyQueryAsync(queryClient, collection.getResourceId(), sqlQuery, rangeQueryMap, options, collection.getResourceId(), parentResourceLink, activityId, klass, resourceTypeEnum); return executionContext.flatMap(IDocumentQueryExecutionContext::executeAsync); } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, String query, FeedOptions options) { return queryDocuments(collectionLink, new SqlQuerySpec(query), options); } private IDocumentQueryClient DocumentQueryClientImpl(RxDocumentClientImpl rxDocumentClientImpl) { return new IDocumentQueryClient () { @Override public RxCollectionCache getCollectionCache() { return RxDocumentClientImpl.this.collectionCache; } @Override public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return RxDocumentClientImpl.this.partitionKeyRangeCache; } @Override public IRetryPolicyFactory getResetSessionTokenRetryPolicy() { return RxDocumentClientImpl.this.resetSessionTokenRetryPolicy; } @Override public ConsistencyLevel getDefaultConsistencyLevelAsync() { return RxDocumentClientImpl.this.gatewayConfigurationReader.getDefaultConsistencyLevel(); } @Override public ConsistencyLevel getDesiredConsistencyLevelAsync() { return RxDocumentClientImpl.this.consistencyLevel; } @Override public Mono<RxDocumentServiceResponse> executeQueryAsync(RxDocumentServiceRequest request) { return RxDocumentClientImpl.this.query(request).single(); } @Override public QueryCompatibilityMode getQueryCompatibilityMode() { return QueryCompatibilityMode.Default; } @Override public Mono<RxDocumentServiceResponse> readFeedAsync(RxDocumentServiceRequest request) { return null; } }; } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return 
createQuery(collectionLink, querySpec, options, Document.class, ResourceType.Document); } @Override public Flux<FeedResponse<Document>> queryDocumentChangeFeed(final String collectionLink, final ChangeFeedOptions changeFeedOptions) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } ChangeFeedQueryImpl<Document> changeFeedQueryImpl = new ChangeFeedQueryImpl<Document>(this, ResourceType.Document, Document.class, collectionLink, changeFeedOptions); return changeFeedQueryImpl.executeAsync(); } @Override public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRanges(final String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.PartitionKeyRange, PartitionKeyRange.class, Utils.joinPath(collectionLink, Paths.PARTITION_KEY_RANGES_PATH_SEGMENT)); } private RxDocumentServiceRequest getStoredProcedureRequest(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } validateResource(storedProcedure); String path = Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); return request; } private RxDocumentServiceRequest getUserDefinedFunctionRequest(String collectionLink, UserDefinedFunction udf, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (udf == null) { throw new IllegalArgumentException("udf"); } 
validateResource(udf); String path = Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<StoredProcedure>> createStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> createStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a StoredProcedure. 
collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Create); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in creating a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a StoredProcedure. 
collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in upserting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedure(StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceStoredProcedureInternal(storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedureInternal(StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } logger.debug("Replacing a StoredProcedure. 
storedProcedure id [{}]", storedProcedure.getId()); RxDocumentClientImpl.validateResource(storedProcedure); String path = Utils.joinPath(storedProcedure.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in replacing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteStoredProcedureInternal(storedProcedureLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Deleting a StoredProcedure. 
storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in deleting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> readStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readStoredProcedureInternal(storedProcedureLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<StoredProcedure>> readStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Reading a StoredProcedure. 
storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in reading a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<StoredProcedure>> readStoredProcedures(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.StoredProcedure, StoredProcedure.class, Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT)); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, String query, FeedOptions options) { return queryStoredProcedures(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, StoredProcedure.class, ResourceType.StoredProcedure); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, Object[] procedureParams) { return this.executeStoredProcedure(storedProcedureLink, null, procedureParams); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, RequestOptions options, Object[] procedureParams) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return 
ObservableHelper.inlineIfPossibleAsObs(() -> executeStoredProcedureInternal(storedProcedureLink, options, procedureParams, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<StoredProcedureResponse> executeStoredProcedureInternal(String storedProcedureLink, RequestOptions options, Object[] procedureParams, DocumentClientRetryPolicy retryPolicy) { try { logger.debug("Executing a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); requestHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ExecuteJavaScript, ResourceType.StoredProcedure, path, procedureParams != null ? RxDocumentClientImpl.serializeProcedureParams(procedureParams) : "", requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> create(request, retryPolicy) .map(response -> { this.captureSessionToken(request, response); return toStoredProcedureResponse(response); })); } catch (Exception e) { logger.debug("Failure in executing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> createTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> createTriggerInternal(String collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a Trigger. 
collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in creating a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> upsertTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> upsertTriggerInternal(String collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Trigger. 
collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in upserting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getTriggerRequest(String collectionLink, Trigger trigger, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (trigger == null) { throw new IllegalArgumentException("trigger"); } RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Trigger, path, trigger, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Trigger>> replaceTrigger(Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceTriggerInternal(trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> replaceTriggerInternal(Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (trigger == null) { throw new IllegalArgumentException("trigger"); } logger.debug("Replacing a Trigger. 
trigger id [{}]", trigger.getId()); RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(trigger.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Trigger, path, trigger, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in replacing a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> deleteTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> deleteTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Deleting a Trigger. 
triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in deleting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> readTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> readTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Reading a Trigger. 
triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in reading a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Trigger>> readTriggers(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Trigger, Trigger.class, Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, String query, FeedOptions options) { return queryTriggers(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, Trigger.class, ResourceType.Trigger); } @Override public Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { 
logger.debug("Creating a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in creating a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a UserDefinedFunction. 
collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in upserting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunction(UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserDefinedFunctionInternal(udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunctionInternal(UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (udf == null) { throw new IllegalArgumentException("udf"); } logger.debug("Replacing a UserDefinedFunction. 
udf id [{}]", udf.getId()); validateResource(udf); String path = Utils.joinPath(udf.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in replacing a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Deleting a UserDefinedFunction. 
udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in deleting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Reading a UserDefinedFunction. 
udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in reading a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<UserDefinedFunction>> readUserDefinedFunctions(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.UserDefinedFunction, UserDefinedFunction.class, Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, String query, FeedOptions options) { return queryUserDefinedFunctions(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, UserDefinedFunction.class, ResourceType.UserDefinedFunction); } @Override public Mono<ResourceResponse<Conflict>> readConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> readConflictInternal(String conflictLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Reading a Conflict. conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in reading a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Conflict>> readConflicts(String collectionLink, FeedOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Conflict, Conflict.class, Utils.joinPath(collectionLink, Paths.CONFLICTS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, String query, FeedOptions options) { return queryConflicts(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(collectionLink, querySpec, options, Conflict.class, ResourceType.Conflict); } @Override public Mono<ResourceResponse<Conflict>> deleteConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteConflictInternal(conflictLink, 
options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> deleteConflictInternal(String conflictLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Deleting a Conflict. conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in deleting a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> createUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserInternal(databaseLink, user, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<User>> createUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a User. 
databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Create); return this.create(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in creating a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> upsertUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserInternal(databaseLink, user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> upsertUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a User. databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in upserting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getUserRequest(String databaseLink, User user, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (user == null) { throw new IllegalArgumentException("user"); } RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = 
RxDocumentServiceRequest.create(operationType, ResourceType.User, path, user, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<User>> replaceUser(User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserInternal(user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> replaceUserInternal(User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (user == null) { throw new IllegalArgumentException("user"); } logger.debug("Replacing a User. user id [{}]", user.getId()); RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(user.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.User, path, user, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in replacing a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Mono<ResourceResponse<User>> deleteUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> deleteUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Deleting a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in deleting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> readUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> readUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Reading a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in reading a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<User>> readUsers(String databaseLink, FeedOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.User, User.class, Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, String query, FeedOptions options) { return queryUsers(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(databaseLink, querySpec, options, User.class, ResourceType.User); } @Override public Mono<ResourceResponse<Permission>> createPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createPermissionInternal(userLink, permission, options, documentClientRetryPolicy), this.resetSessionTokenRetryPolicy.getRequestPolicy()); } private Mono<ResourceResponse<Permission>> createPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Create); return this.create(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in creating a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> upsertPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertPermissionInternal(userLink, permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> upsertPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in upserting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getPermissionRequest(String userLink, Permission permission, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } if (permission == null) { throw new IllegalArgumentException("permission"); } RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(operationType, ResourceType.Permission, path, permission, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Permission>> replacePermission(Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replacePermissionInternal(permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> replacePermissionInternal(Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (permission == null) { throw new IllegalArgumentException("permission"); } logger.debug("Replacing a Permission. 
permission id [{}]", permission.getId()); RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(permission.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Permission, path, permission, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in replacing a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> deletePermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deletePermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> deletePermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Deleting a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Delete, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in deleting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> readPermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readPermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> readPermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance ) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Reading a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in reading a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Permission>> readPermissions(String userLink, FeedOptions options) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } return readFeed(options, ResourceType.Permission, Permission.class, Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, String query, FeedOptions options) { return queryPermissions(userLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, SqlQuerySpec querySpec, FeedOptions options) { return createQuery(userLink, querySpec, options, Permission.class, ResourceType.Permission); } @Override public Mono<ResourceResponse<Offer>> replaceOffer(Offer offer) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceOfferInternal(offer, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Offer>> replaceOfferInternal(Offer offer, DocumentClientRetryPolicy documentClientRetryPolicy) { try { if (offer == null) { throw new IllegalArgumentException("offer"); } logger.debug("Replacing an Offer. 
offer id [{}]", offer.getId()); RxDocumentClientImpl.validateResource(offer); String path = Utils.joinPath(offer.getSelfLink(), null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Replace, ResourceType.Offer, path, offer, null, null); return this.replace(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in replacing an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Offer>> readOffer(String offerLink) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readOfferInternal(offerLink, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Offer>> readOfferInternal(String offerLink, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(offerLink)) { throw new IllegalArgumentException("offerLink"); } logger.debug("Reading an Offer. 
offerLink [{}]", offerLink); String path = Utils.joinPath(offerLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.Offer, path, (HashMap<String, String>)null, null); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in reading an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Offer>> readOffers(FeedOptions options) { return readFeed(options, ResourceType.Offer, Offer.class, Utils.joinPath(Paths.OFFERS_PATH_SEGMENT, null)); } private <T extends Resource> Flux<FeedResponse<T>> readFeedCollectionChild(FeedOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new FeedOptions(); } int maxPageSize = options.maxItemCount() != null ? options.maxItemCount() : -1; final FeedOptions finalFeedOptions = options; RequestOptions requestOptions = new RequestOptions(); requestOptions.setPartitionKey(options.partitionKey()); BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalFeedOptions); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> { return ObservableHelper.inlineIfPossibleAsObs(() -> { Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(request); 
Mono<RxDocumentServiceRequest> requestObs = this.addPartitionKeyInformation(request, null, null, requestOptions, collectionObs); return requestObs.flatMap(req -> this.readFeed(req) .map(response -> toFeedResponsePage(response, klass))); }, this.resetSessionTokenRetryPolicy.getRequestPolicy()); }; return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } private <T extends Resource> Flux<FeedResponse<T>> readFeed(FeedOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new FeedOptions(); } int maxPageSize = options.maxItemCount() != null ? options.maxItemCount() : -1; final FeedOptions finalFeedOptions = options; BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalFeedOptions); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> { return ObservableHelper.inlineIfPossibleAsObs(() -> readFeed(request).map(response -> toFeedResponsePage(response, klass)), this.resetSessionTokenRetryPolicy.getRequestPolicy()); }; return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } @Override public Flux<FeedResponse<Offer>> queryOffers(String query, FeedOptions options) { return queryOffers(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Offer>> queryOffers(SqlQuerySpec querySpec, FeedOptions options) { return createQuery(null, querySpec, options, Offer.class, ResourceType.Offer); } 
@Override public Mono<DatabaseAccount> getDatabaseAccount() { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> getDatabaseAccountInternal(documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<DatabaseAccount> getDatabaseAccountInternal(DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Getting Database Account"); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.DatabaseAccount, "", (HashMap<String, String>) null, null); return this.read(request, documentClientRetryPolicy).map(response -> toDatabaseAccount(response)); } catch (Exception e) { logger.debug("Failure in getting Database Account due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Object getSession() { return this.sessionContainer; } public void setSession(Object sessionContainer) { this.sessionContainer = (SessionContainer) sessionContainer; } public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return partitionKeyRangeCache; } public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { return Flux.defer(() -> { RxDocumentServiceRequest request = RxDocumentServiceRequest.create(OperationType.Read, ResourceType.DatabaseAccount, "", null, (Object) null); this.populateHeaders(request, RequestVerb.GET); request.setEndpointOverride(endpoint); return this.gatewayProxy.processMessage(request).doOnError(e -> { String message = String.format("Failed to retrieve database account information. %s", e.getCause() != null ? 
e.getCause().toString() : e.toString()); logger.warn(message); }).map(rsp -> rsp.getResource(DatabaseAccount.class)) .doOnNext(databaseAccount -> { this.useMultipleWriteLocations = this.connectionPolicy.getUsingMultipleWriteLocations() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); }); }); } /** * Certain requests must be routed through gateway even when the client connectivity mode is direct. * * @param request * @return RxStoreModel */ private RxStoreModel getStoreProxy(RxDocumentServiceRequest request) { if (request.UseGatewayMode) { return this.gatewayProxy; } ResourceType resourceType = request.getResourceType(); OperationType operationType = request.getOperationType(); if (resourceType == ResourceType.Offer || resourceType.isScript() && operationType != OperationType.ExecuteJavaScript || resourceType == ResourceType.PartitionKeyRange) { return this.gatewayProxy; } if (operationType == OperationType.Create || operationType == OperationType.Upsert) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection || resourceType == ResourceType.Permission) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Delete) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Replace) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Read) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else { if ((request.getOperationType() == OperationType.Query || request.getOperationType() == OperationType.SqlQuery) && Utils.isCollectionChild(request.getResourceType())) { if 
(request.getPartitionKeyRangeIdentity() == null) { return this.gatewayProxy; } } return this.storeModel; } } @Override public void close() { logger.info("Shutting down ..."); logger.info("Closing Global Endpoint Manager ..."); LifeCycleUtils.closeQuietly(this.globalEndpointManager); logger.info("Closing StoreClientFactory ..."); LifeCycleUtils.closeQuietly(this.storeClientFactory); logger.info("Shutting down reactorHttpClient ..."); try { this.reactorHttpClient.shutdown(); } catch (Exception e) { logger.warn("shutting down reactorHttpClient failed", e); } logger.info("Shutting down completed."); } }
We call init on GlobalEndpointManager from RxDocumentClientImpl, which updates this value. If any future call returns null due to an exception or error, we will not update latestDatabaseAccount or the async cache; they will keep the previous non-null value.
/**
 * Returns the most recently retrieved {@link DatabaseAccount}.
 *
 * <p>{@code init()} is invoked on GlobalEndpointManager from RxDocumentClientImpl, which
 * populates this value. If a later retrieval yields {@code null} (for example due to an
 * exception or error), {@code latestDatabaseAccount} is not overwritten, so this method
 * keeps returning the previous valid (non-null) value.
 *
 * @return the latest known database account; {@code null} only if no retrieval has ever
 *         succeeded
 */
public DatabaseAccount getLatestDatabaseAccount() { return this.latestDatabaseAccount; }
return this.latestDatabaseAccount;
/**
 * Returns the most recently retrieved {@link DatabaseAccount}.
 *
 * <p>{@code init()} is invoked on GlobalEndpointManager from RxDocumentClientImpl, which
 * populates this value. If a later retrieval yields {@code null} (for example due to an
 * exception or error), {@code latestDatabaseAccount} is not overwritten, so this method
 * keeps returning the previous valid (non-null) value.
 *
 * @return the latest known database account; {@code null} only if no retrieval has ever
 *         succeeded
 */
public DatabaseAccount getLatestDatabaseAccount() { return this.latestDatabaseAccount; }
class GlobalEndpointManager implements AutoCloseable { private static final Logger logger = LoggerFactory.getLogger(GlobalEndpointManager.class); private final int backgroundRefreshLocationTimeIntervalInMS; private final LocationCache locationCache; private final URI defaultEndpoint; private final ConnectionPolicy connectionPolicy; private final DatabaseAccountManagerInternal owner; private final AtomicBoolean isRefreshing; private final AtomicBoolean refreshInBackground; private final ExecutorService executor = Executors.newSingleThreadExecutor(); private final Scheduler scheduler = Schedulers.fromExecutor(executor); private volatile boolean isClosed; private final AsyncCache<String, DatabaseAccount> databaseAccountAsyncCache; private AtomicBoolean firstTimeDatabaseAccountInitialization = new AtomicBoolean(true); private volatile DatabaseAccount latestDatabaseAccount; public GlobalEndpointManager(DatabaseAccountManagerInternal owner, ConnectionPolicy connectionPolicy, Configs configs) { this.backgroundRefreshLocationTimeIntervalInMS = configs.getUnavailableLocationsExpirationTimeInSeconds() * 1000; this.databaseAccountAsyncCache = new AsyncCache<>(); try { this.locationCache = new LocationCache( new ArrayList<>(connectionPolicy.getPreferredLocations() != null ? 
connectionPolicy.getPreferredLocations(): Collections.emptyList() ), owner.getServiceEndpoint(), connectionPolicy.getEnableEndpointDiscovery(), BridgeInternal.getUseMultipleWriteLocations(connectionPolicy), configs); this.owner = owner; this.defaultEndpoint = owner.getServiceEndpoint(); this.connectionPolicy = connectionPolicy; this.isRefreshing = new AtomicBoolean(false); this.refreshInBackground = new AtomicBoolean(false); this.isClosed = false; } catch (Exception e) { throw new IllegalArgumentException(e); } } public void init() { startRefreshLocationTimerAsync(true).block(); } public UnmodifiableList<URI> getReadEndpoints() { return this.locationCache.getReadEndpoints(); } public UnmodifiableList<URI> getWriteEndpoints() { return this.locationCache.getWriteEndpoints(); } public static Mono<DatabaseAccount> getDatabaseAccountFromAnyLocationsAsync( URI defaultEndpoint, List<String> locations, Function<URI, Mono<DatabaseAccount>> getDatabaseAccountFn) { return getDatabaseAccountFn.apply(defaultEndpoint).onErrorResume( e -> { logger.error("Fail to reach global gateway [{}], [{}]", defaultEndpoint, e.getMessage()); if (locations.isEmpty()) { return Mono.error(e); } Flux<Flux<DatabaseAccount>> obs = Flux.range(0, locations.size()) .map(index -> getDatabaseAccountFn.apply(LocationHelper.getLocationEndpoint(defaultEndpoint, locations.get(index))).flux()); Mono<DatabaseAccount> res = Flux.concatDelayError(obs).take(1).single(); return res.doOnError( innerE -> logger.error("Fail to reach location any of locations {} {}", String.join(",", locations), innerE.getMessage())); }); } public URI resolveServiceEndpoint(RxDocumentServiceRequest request) { return this.locationCache.resolveServiceEndpoint(request); } public void markEndpointUnavailableForRead(URI endpoint) { logger.debug("Marking endpoint {} unavailable for read",endpoint); this.locationCache.markEndpointUnavailableForRead(endpoint);; } public void markEndpointUnavailableForWrite(URI endpoint) { 
logger.debug("Marking endpoint {} unavailable for Write",endpoint); this.locationCache.markEndpointUnavailableForWrite(endpoint); } public boolean CanUseMultipleWriteLocations(RxDocumentServiceRequest request) { return this.locationCache.canUseMultipleWriteLocations(request); } public void close() { this.isClosed = true; this.executor.shutdown(); logger.debug("GlobalEndpointManager closed."); } public Mono<Void> refreshLocationAsync(DatabaseAccount databaseAccount, boolean forceRefresh) { return Mono.defer(() -> { logger.debug("refreshLocationAsync() invoked"); if (forceRefresh) { Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); return dbAccount; }).flatMap(dbAccount -> { return Mono.empty(); }); } if (!isRefreshing.compareAndSet(false, true)) { logger.debug("in the middle of another refresh. 
Not invoking a new refresh."); return Mono.empty(); } logger.debug("will refresh"); return this.refreshLocationPrivateAsync(databaseAccount).doOnError(e -> this.isRefreshing.set(false)); }); } public Mono<DatabaseAccount> getDatabaseAccountFromCache(URI defaultEndpoint) { return this.databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, null, () -> this.owner.getDatabaseAccountFromEndpoint(defaultEndpoint).single().doOnSuccess(databaseAccount -> { if(databaseAccount != null) { this.latestDatabaseAccount = databaseAccount; } this.refreshLocationAsync(databaseAccount, false); })); } private Mono<Void> refreshLocationPrivateAsync(DatabaseAccount databaseAccount) { return Mono.defer(() -> { logger.debug("refreshLocationPrivateAsync() refreshing locations"); if (databaseAccount != null) { this.locationCache.onDatabaseAccountRead(databaseAccount); } Utils.ValueHolder<Boolean> canRefreshInBackground = new Utils.ValueHolder<>(); if (this.locationCache.shouldRefreshEndpoints(canRefreshInBackground)) { logger.debug("shouldRefreshEndpoints: true"); if (databaseAccount == null && !canRefreshInBackground.v) { logger.debug("shouldRefreshEndpoints: can't be done in background"); Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); this.isRefreshing.set(false); return dbAccount; }).flatMap(dbAccount -> { if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } return Mono.empty(); }); } if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } this.isRefreshing.set(false); return Mono.empty(); } else { logger.debug("shouldRefreshEndpoints: false, nothing to do."); this.isRefreshing.set(false); return Mono.empty(); } }); } private void startRefreshLocationTimerAsync() { 
startRefreshLocationTimerAsync(false).subscribe(); } private Mono<Void> startRefreshLocationTimerAsync(boolean initialization) { if (this.isClosed) { logger.debug("startRefreshLocationTimerAsync: nothing to do, it is closed"); return Mono.empty(); } logger.debug("registering a refresh in [{}] ms", this.backgroundRefreshLocationTimeIntervalInMS); LocalDateTime now = LocalDateTime.now(); int delayInMillis = initialization ? 0: this.backgroundRefreshLocationTimeIntervalInMS; this.refreshInBackground.set(true); return Mono.delay(Duration.ofMillis(delayInMillis)) .flatMap( t -> { if (this.isClosed) { logger.warn("client already closed"); return Mono.empty(); } logger.debug("startRefreshLocationTimerAsync() - Invoking refresh, I was registered on [{}]", now); Mono<DatabaseAccount> databaseAccountObs = GlobalEndpointManager.getDatabaseAccountFromAnyLocationsAsync(this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.flatMap(dbAccount -> { logger.debug("db account retrieved"); this.refreshInBackground.set(false); return this.refreshLocationPrivateAsync(dbAccount); }); }).onErrorResume(ex -> { logger.error("startRefreshLocationTimerAsync() - Unable to refresh database account from any location. 
Exception: {}", ex.toString(), ex); this.startRefreshLocationTimerAsync(); return Mono.empty(); }).subscribeOn(scheduler); } private Mono<DatabaseAccount> getDatabaseAccountAsync(URI serviceEndpoint) { final GlobalEndpointManager that = this; Callable<Mono<DatabaseAccount>> fetchDatabaseAccount = () -> { return that.owner.getDatabaseAccountFromEndpoint(serviceEndpoint).doOnNext(databaseAccount -> { if(databaseAccount != null) { this.latestDatabaseAccount = databaseAccount; } logger.debug("account retrieved: {}", databaseAccount); }).single(); }; Mono<DatabaseAccount> obsoleteValueMono = databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, null, fetchDatabaseAccount); return obsoleteValueMono.flatMap(obsoleteValue -> { if (firstTimeDatabaseAccountInitialization.compareAndSet(true, false)) { return Mono.just(obsoleteValue); } return databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, obsoleteValue, fetchDatabaseAccount).doOnError(t -> { databaseAccountAsyncCache.set(StringUtils.EMPTY, obsoleteValue); }); }); } public boolean isClosed() { return this.isClosed; } }
class GlobalEndpointManager implements AutoCloseable { private static final Logger logger = LoggerFactory.getLogger(GlobalEndpointManager.class); private final int backgroundRefreshLocationTimeIntervalInMS; private final LocationCache locationCache; private final URI defaultEndpoint; private final ConnectionPolicy connectionPolicy; private final DatabaseAccountManagerInternal owner; private final AtomicBoolean isRefreshing; private final AtomicBoolean refreshInBackground; private final ExecutorService executor = Executors.newSingleThreadExecutor(); private final Scheduler scheduler = Schedulers.fromExecutor(executor); private volatile boolean isClosed; private AtomicBoolean firstTimeDatabaseAccountInitialization = new AtomicBoolean(true); private volatile DatabaseAccount latestDatabaseAccount; public GlobalEndpointManager(DatabaseAccountManagerInternal owner, ConnectionPolicy connectionPolicy, Configs configs) { this.backgroundRefreshLocationTimeIntervalInMS = configs.getUnavailableLocationsExpirationTimeInSeconds() * 1000; try { this.locationCache = new LocationCache( new ArrayList<>(connectionPolicy.getPreferredLocations() != null ? 
connectionPolicy.getPreferredLocations(): Collections.emptyList() ), owner.getServiceEndpoint(), connectionPolicy.getEnableEndpointDiscovery(), BridgeInternal.getUseMultipleWriteLocations(connectionPolicy), configs); this.owner = owner; this.defaultEndpoint = owner.getServiceEndpoint(); this.connectionPolicy = connectionPolicy; this.isRefreshing = new AtomicBoolean(false); this.refreshInBackground = new AtomicBoolean(false); this.isClosed = false; } catch (Exception e) { throw new IllegalArgumentException(e); } } public void init() { startRefreshLocationTimerAsync(true).block(); } public UnmodifiableList<URI> getReadEndpoints() { return this.locationCache.getReadEndpoints(); } public UnmodifiableList<URI> getWriteEndpoints() { return this.locationCache.getWriteEndpoints(); } public static Mono<DatabaseAccount> getDatabaseAccountFromAnyLocationsAsync( URI defaultEndpoint, List<String> locations, Function<URI, Mono<DatabaseAccount>> getDatabaseAccountFn) { return getDatabaseAccountFn.apply(defaultEndpoint).onErrorResume( e -> { logger.error("Fail to reach global gateway [{}], [{}]", defaultEndpoint, e.getMessage()); if (locations.isEmpty()) { return Mono.error(e); } Flux<Flux<DatabaseAccount>> obs = Flux.range(0, locations.size()) .map(index -> getDatabaseAccountFn.apply(LocationHelper.getLocationEndpoint(defaultEndpoint, locations.get(index))).flux()); Mono<DatabaseAccount> res = Flux.concatDelayError(obs).take(1).single(); return res.doOnError( innerE -> logger.error("Fail to reach location any of locations {} {}", String.join(",", locations), innerE.getMessage())); }); } public URI resolveServiceEndpoint(RxDocumentServiceRequest request) { return this.locationCache.resolveServiceEndpoint(request); } public void markEndpointUnavailableForRead(URI endpoint) { logger.debug("Marking endpoint {} unavailable for read",endpoint); this.locationCache.markEndpointUnavailableForRead(endpoint);; } public void markEndpointUnavailableForWrite(URI endpoint) { 
logger.debug("Marking endpoint {} unavailable for Write",endpoint); this.locationCache.markEndpointUnavailableForWrite(endpoint); } public boolean CanUseMultipleWriteLocations(RxDocumentServiceRequest request) { return this.locationCache.canUseMultipleWriteLocations(request); } public void close() { this.isClosed = true; this.executor.shutdown(); logger.debug("GlobalEndpointManager closed."); } public Mono<Void> refreshLocationAsync(DatabaseAccount databaseAccount, boolean forceRefresh) { return Mono.defer(() -> { logger.debug("refreshLocationAsync() invoked"); if (forceRefresh) { Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); return dbAccount; }).flatMap(dbAccount -> { return Mono.empty(); }); } if (!isRefreshing.compareAndSet(false, true)) { logger.debug("in the middle of another refresh. Not invoking a new refresh."); return Mono.empty(); } logger.debug("will refresh"); return this.refreshLocationPrivateAsync(databaseAccount).doOnError(e -> this.isRefreshing.set(false)); }); } /** * This will provide the latest databaseAccount. 
* If due to some reason last databaseAccount update was null, * this method will return previous valid value * @return DatabaseAccount */ private Mono<Void> refreshLocationPrivateAsync(DatabaseAccount databaseAccount) { return Mono.defer(() -> { logger.debug("refreshLocationPrivateAsync() refreshing locations"); if (databaseAccount != null) { this.locationCache.onDatabaseAccountRead(databaseAccount); } Utils.ValueHolder<Boolean> canRefreshInBackground = new Utils.ValueHolder<>(); if (this.locationCache.shouldRefreshEndpoints(canRefreshInBackground)) { logger.debug("shouldRefreshEndpoints: true"); if (databaseAccount == null && !canRefreshInBackground.v) { logger.debug("shouldRefreshEndpoints: can't be done in background"); Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); this.isRefreshing.set(false); return dbAccount; }).flatMap(dbAccount -> { if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } return Mono.empty(); }); } if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } this.isRefreshing.set(false); return Mono.empty(); } else { logger.debug("shouldRefreshEndpoints: false, nothing to do."); this.isRefreshing.set(false); return Mono.empty(); } }); } private void startRefreshLocationTimerAsync() { startRefreshLocationTimerAsync(false).subscribe(); } private Mono<Void> startRefreshLocationTimerAsync(boolean initialization) { if (this.isClosed) { logger.debug("startRefreshLocationTimerAsync: nothing to do, it is closed"); return Mono.empty(); } logger.debug("registering a refresh in [{}] ms", this.backgroundRefreshLocationTimeIntervalInMS); LocalDateTime now = LocalDateTime.now(); int delayInMillis = initialization ? 
0: this.backgroundRefreshLocationTimeIntervalInMS; this.refreshInBackground.set(true); return Mono.delay(Duration.ofMillis(delayInMillis)) .flatMap( t -> { if (this.isClosed) { logger.warn("client already closed"); return Mono.empty(); } logger.debug("startRefreshLocationTimerAsync() - Invoking refresh, I was registered on [{}]", now); Mono<DatabaseAccount> databaseAccountObs = GlobalEndpointManager.getDatabaseAccountFromAnyLocationsAsync(this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.flatMap(dbAccount -> { logger.debug("db account retrieved"); this.refreshInBackground.set(false); return this.refreshLocationPrivateAsync(dbAccount); }); }).onErrorResume(ex -> { logger.error("startRefreshLocationTimerAsync() - Unable to refresh database account from any location. Exception: {}", ex.toString(), ex); this.startRefreshLocationTimerAsync(); return Mono.empty(); }).subscribeOn(scheduler); } private Mono<DatabaseAccount> getDatabaseAccountAsync(URI serviceEndpoint) { return this.owner.getDatabaseAccountFromEndpoint(serviceEndpoint) .doOnNext(databaseAccount -> { if(databaseAccount != null) { this.latestDatabaseAccount = databaseAccount; } logger.debug("account retrieved: {}", databaseAccount); }).single(); } public boolean isClosed() { return this.isClosed; } }
Please add Javadoc to the new API capturing the explanation you gave above (i.e., that a failed or null refresh leaves the previous valid value in place).
public DatabaseAccount getLatestDatabaseAccount() { return this.latestDatabaseAccount; }
return this.latestDatabaseAccount;
public DatabaseAccount getLatestDatabaseAccount() { return this.latestDatabaseAccount; }
class GlobalEndpointManager implements AutoCloseable { private static final Logger logger = LoggerFactory.getLogger(GlobalEndpointManager.class); private final int backgroundRefreshLocationTimeIntervalInMS; private final LocationCache locationCache; private final URI defaultEndpoint; private final ConnectionPolicy connectionPolicy; private final DatabaseAccountManagerInternal owner; private final AtomicBoolean isRefreshing; private final AtomicBoolean refreshInBackground; private final ExecutorService executor = Executors.newSingleThreadExecutor(); private final Scheduler scheduler = Schedulers.fromExecutor(executor); private volatile boolean isClosed; private final AsyncCache<String, DatabaseAccount> databaseAccountAsyncCache; private AtomicBoolean firstTimeDatabaseAccountInitialization = new AtomicBoolean(true); private volatile DatabaseAccount latestDatabaseAccount; public GlobalEndpointManager(DatabaseAccountManagerInternal owner, ConnectionPolicy connectionPolicy, Configs configs) { this.backgroundRefreshLocationTimeIntervalInMS = configs.getUnavailableLocationsExpirationTimeInSeconds() * 1000; this.databaseAccountAsyncCache = new AsyncCache<>(); try { this.locationCache = new LocationCache( new ArrayList<>(connectionPolicy.getPreferredLocations() != null ? 
connectionPolicy.getPreferredLocations(): Collections.emptyList() ), owner.getServiceEndpoint(), connectionPolicy.getEnableEndpointDiscovery(), BridgeInternal.getUseMultipleWriteLocations(connectionPolicy), configs); this.owner = owner; this.defaultEndpoint = owner.getServiceEndpoint(); this.connectionPolicy = connectionPolicy; this.isRefreshing = new AtomicBoolean(false); this.refreshInBackground = new AtomicBoolean(false); this.isClosed = false; } catch (Exception e) { throw new IllegalArgumentException(e); } } public void init() { startRefreshLocationTimerAsync(true).block(); } public UnmodifiableList<URI> getReadEndpoints() { return this.locationCache.getReadEndpoints(); } public UnmodifiableList<URI> getWriteEndpoints() { return this.locationCache.getWriteEndpoints(); } public static Mono<DatabaseAccount> getDatabaseAccountFromAnyLocationsAsync( URI defaultEndpoint, List<String> locations, Function<URI, Mono<DatabaseAccount>> getDatabaseAccountFn) { return getDatabaseAccountFn.apply(defaultEndpoint).onErrorResume( e -> { logger.error("Fail to reach global gateway [{}], [{}]", defaultEndpoint, e.getMessage()); if (locations.isEmpty()) { return Mono.error(e); } Flux<Flux<DatabaseAccount>> obs = Flux.range(0, locations.size()) .map(index -> getDatabaseAccountFn.apply(LocationHelper.getLocationEndpoint(defaultEndpoint, locations.get(index))).flux()); Mono<DatabaseAccount> res = Flux.concatDelayError(obs).take(1).single(); return res.doOnError( innerE -> logger.error("Fail to reach location any of locations {} {}", String.join(",", locations), innerE.getMessage())); }); } public URI resolveServiceEndpoint(RxDocumentServiceRequest request) { return this.locationCache.resolveServiceEndpoint(request); } public void markEndpointUnavailableForRead(URI endpoint) { logger.debug("Marking endpoint {} unavailable for read",endpoint); this.locationCache.markEndpointUnavailableForRead(endpoint);; } public void markEndpointUnavailableForWrite(URI endpoint) { 
logger.debug("Marking endpoint {} unavailable for Write",endpoint); this.locationCache.markEndpointUnavailableForWrite(endpoint); } public boolean CanUseMultipleWriteLocations(RxDocumentServiceRequest request) { return this.locationCache.canUseMultipleWriteLocations(request); } public void close() { this.isClosed = true; this.executor.shutdown(); logger.debug("GlobalEndpointManager closed."); } public Mono<Void> refreshLocationAsync(DatabaseAccount databaseAccount, boolean forceRefresh) { return Mono.defer(() -> { logger.debug("refreshLocationAsync() invoked"); if (forceRefresh) { Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); return dbAccount; }).flatMap(dbAccount -> { return Mono.empty(); }); } if (!isRefreshing.compareAndSet(false, true)) { logger.debug("in the middle of another refresh. 
Not invoking a new refresh."); return Mono.empty(); } logger.debug("will refresh"); return this.refreshLocationPrivateAsync(databaseAccount).doOnError(e -> this.isRefreshing.set(false)); }); } public Mono<DatabaseAccount> getDatabaseAccountFromCache(URI defaultEndpoint) { return this.databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, null, () -> this.owner.getDatabaseAccountFromEndpoint(defaultEndpoint).single().doOnSuccess(databaseAccount -> { if(databaseAccount != null) { this.latestDatabaseAccount = databaseAccount; } this.refreshLocationAsync(databaseAccount, false); })); } private Mono<Void> refreshLocationPrivateAsync(DatabaseAccount databaseAccount) { return Mono.defer(() -> { logger.debug("refreshLocationPrivateAsync() refreshing locations"); if (databaseAccount != null) { this.locationCache.onDatabaseAccountRead(databaseAccount); } Utils.ValueHolder<Boolean> canRefreshInBackground = new Utils.ValueHolder<>(); if (this.locationCache.shouldRefreshEndpoints(canRefreshInBackground)) { logger.debug("shouldRefreshEndpoints: true"); if (databaseAccount == null && !canRefreshInBackground.v) { logger.debug("shouldRefreshEndpoints: can't be done in background"); Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); this.isRefreshing.set(false); return dbAccount; }).flatMap(dbAccount -> { if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } return Mono.empty(); }); } if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } this.isRefreshing.set(false); return Mono.empty(); } else { logger.debug("shouldRefreshEndpoints: false, nothing to do."); this.isRefreshing.set(false); return Mono.empty(); } }); } private void startRefreshLocationTimerAsync() { 
startRefreshLocationTimerAsync(false).subscribe(); } private Mono<Void> startRefreshLocationTimerAsync(boolean initialization) { if (this.isClosed) { logger.debug("startRefreshLocationTimerAsync: nothing to do, it is closed"); return Mono.empty(); } logger.debug("registering a refresh in [{}] ms", this.backgroundRefreshLocationTimeIntervalInMS); LocalDateTime now = LocalDateTime.now(); int delayInMillis = initialization ? 0: this.backgroundRefreshLocationTimeIntervalInMS; this.refreshInBackground.set(true); return Mono.delay(Duration.ofMillis(delayInMillis)) .flatMap( t -> { if (this.isClosed) { logger.warn("client already closed"); return Mono.empty(); } logger.debug("startRefreshLocationTimerAsync() - Invoking refresh, I was registered on [{}]", now); Mono<DatabaseAccount> databaseAccountObs = GlobalEndpointManager.getDatabaseAccountFromAnyLocationsAsync(this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.flatMap(dbAccount -> { logger.debug("db account retrieved"); this.refreshInBackground.set(false); return this.refreshLocationPrivateAsync(dbAccount); }); }).onErrorResume(ex -> { logger.error("startRefreshLocationTimerAsync() - Unable to refresh database account from any location. 
Exception: {}", ex.toString(), ex); this.startRefreshLocationTimerAsync(); return Mono.empty(); }).subscribeOn(scheduler); } private Mono<DatabaseAccount> getDatabaseAccountAsync(URI serviceEndpoint) { final GlobalEndpointManager that = this; Callable<Mono<DatabaseAccount>> fetchDatabaseAccount = () -> { return that.owner.getDatabaseAccountFromEndpoint(serviceEndpoint).doOnNext(databaseAccount -> { if(databaseAccount != null) { this.latestDatabaseAccount = databaseAccount; } logger.debug("account retrieved: {}", databaseAccount); }).single(); }; Mono<DatabaseAccount> obsoleteValueMono = databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, null, fetchDatabaseAccount); return obsoleteValueMono.flatMap(obsoleteValue -> { if (firstTimeDatabaseAccountInitialization.compareAndSet(true, false)) { return Mono.just(obsoleteValue); } return databaseAccountAsyncCache.getAsync(StringUtils.EMPTY, obsoleteValue, fetchDatabaseAccount).doOnError(t -> { databaseAccountAsyncCache.set(StringUtils.EMPTY, obsoleteValue); }); }); } public boolean isClosed() { return this.isClosed; } }
class GlobalEndpointManager implements AutoCloseable { private static final Logger logger = LoggerFactory.getLogger(GlobalEndpointManager.class); private final int backgroundRefreshLocationTimeIntervalInMS; private final LocationCache locationCache; private final URI defaultEndpoint; private final ConnectionPolicy connectionPolicy; private final DatabaseAccountManagerInternal owner; private final AtomicBoolean isRefreshing; private final AtomicBoolean refreshInBackground; private final ExecutorService executor = Executors.newSingleThreadExecutor(); private final Scheduler scheduler = Schedulers.fromExecutor(executor); private volatile boolean isClosed; private AtomicBoolean firstTimeDatabaseAccountInitialization = new AtomicBoolean(true); private volatile DatabaseAccount latestDatabaseAccount; public GlobalEndpointManager(DatabaseAccountManagerInternal owner, ConnectionPolicy connectionPolicy, Configs configs) { this.backgroundRefreshLocationTimeIntervalInMS = configs.getUnavailableLocationsExpirationTimeInSeconds() * 1000; try { this.locationCache = new LocationCache( new ArrayList<>(connectionPolicy.getPreferredLocations() != null ? 
connectionPolicy.getPreferredLocations(): Collections.emptyList() ), owner.getServiceEndpoint(), connectionPolicy.getEnableEndpointDiscovery(), BridgeInternal.getUseMultipleWriteLocations(connectionPolicy), configs); this.owner = owner; this.defaultEndpoint = owner.getServiceEndpoint(); this.connectionPolicy = connectionPolicy; this.isRefreshing = new AtomicBoolean(false); this.refreshInBackground = new AtomicBoolean(false); this.isClosed = false; } catch (Exception e) { throw new IllegalArgumentException(e); } } public void init() { startRefreshLocationTimerAsync(true).block(); } public UnmodifiableList<URI> getReadEndpoints() { return this.locationCache.getReadEndpoints(); } public UnmodifiableList<URI> getWriteEndpoints() { return this.locationCache.getWriteEndpoints(); } public static Mono<DatabaseAccount> getDatabaseAccountFromAnyLocationsAsync( URI defaultEndpoint, List<String> locations, Function<URI, Mono<DatabaseAccount>> getDatabaseAccountFn) { return getDatabaseAccountFn.apply(defaultEndpoint).onErrorResume( e -> { logger.error("Fail to reach global gateway [{}], [{}]", defaultEndpoint, e.getMessage()); if (locations.isEmpty()) { return Mono.error(e); } Flux<Flux<DatabaseAccount>> obs = Flux.range(0, locations.size()) .map(index -> getDatabaseAccountFn.apply(LocationHelper.getLocationEndpoint(defaultEndpoint, locations.get(index))).flux()); Mono<DatabaseAccount> res = Flux.concatDelayError(obs).take(1).single(); return res.doOnError( innerE -> logger.error("Fail to reach location any of locations {} {}", String.join(",", locations), innerE.getMessage())); }); } public URI resolveServiceEndpoint(RxDocumentServiceRequest request) { return this.locationCache.resolveServiceEndpoint(request); } public void markEndpointUnavailableForRead(URI endpoint) { logger.debug("Marking endpoint {} unavailable for read",endpoint); this.locationCache.markEndpointUnavailableForRead(endpoint);; } public void markEndpointUnavailableForWrite(URI endpoint) { 
logger.debug("Marking endpoint {} unavailable for Write",endpoint); this.locationCache.markEndpointUnavailableForWrite(endpoint); } public boolean CanUseMultipleWriteLocations(RxDocumentServiceRequest request) { return this.locationCache.canUseMultipleWriteLocations(request); } public void close() { this.isClosed = true; this.executor.shutdown(); logger.debug("GlobalEndpointManager closed."); } public Mono<Void> refreshLocationAsync(DatabaseAccount databaseAccount, boolean forceRefresh) { return Mono.defer(() -> { logger.debug("refreshLocationAsync() invoked"); if (forceRefresh) { Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); return dbAccount; }).flatMap(dbAccount -> { return Mono.empty(); }); } if (!isRefreshing.compareAndSet(false, true)) { logger.debug("in the middle of another refresh. Not invoking a new refresh."); return Mono.empty(); } logger.debug("will refresh"); return this.refreshLocationPrivateAsync(databaseAccount).doOnError(e -> this.isRefreshing.set(false)); }); } /** * This will provide the latest databaseAccount. 
* If due to some reason last databaseAccount update was null, * this method will return previous valid value * @return DatabaseAccount */ private Mono<Void> refreshLocationPrivateAsync(DatabaseAccount databaseAccount) { return Mono.defer(() -> { logger.debug("refreshLocationPrivateAsync() refreshing locations"); if (databaseAccount != null) { this.locationCache.onDatabaseAccountRead(databaseAccount); } Utils.ValueHolder<Boolean> canRefreshInBackground = new Utils.ValueHolder<>(); if (this.locationCache.shouldRefreshEndpoints(canRefreshInBackground)) { logger.debug("shouldRefreshEndpoints: true"); if (databaseAccount == null && !canRefreshInBackground.v) { logger.debug("shouldRefreshEndpoints: can't be done in background"); Mono<DatabaseAccount> databaseAccountObs = getDatabaseAccountFromAnyLocationsAsync( this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.map(dbAccount -> { this.locationCache.onDatabaseAccountRead(dbAccount); this.isRefreshing.set(false); return dbAccount; }).flatMap(dbAccount -> { if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } return Mono.empty(); }); } if (!this.refreshInBackground.get()) { this.startRefreshLocationTimerAsync(); } this.isRefreshing.set(false); return Mono.empty(); } else { logger.debug("shouldRefreshEndpoints: false, nothing to do."); this.isRefreshing.set(false); return Mono.empty(); } }); } private void startRefreshLocationTimerAsync() { startRefreshLocationTimerAsync(false).subscribe(); } private Mono<Void> startRefreshLocationTimerAsync(boolean initialization) { if (this.isClosed) { logger.debug("startRefreshLocationTimerAsync: nothing to do, it is closed"); return Mono.empty(); } logger.debug("registering a refresh in [{}] ms", this.backgroundRefreshLocationTimeIntervalInMS); LocalDateTime now = LocalDateTime.now(); int delayInMillis = initialization ? 
0: this.backgroundRefreshLocationTimeIntervalInMS; this.refreshInBackground.set(true); return Mono.delay(Duration.ofMillis(delayInMillis)) .flatMap( t -> { if (this.isClosed) { logger.warn("client already closed"); return Mono.empty(); } logger.debug("startRefreshLocationTimerAsync() - Invoking refresh, I was registered on [{}]", now); Mono<DatabaseAccount> databaseAccountObs = GlobalEndpointManager.getDatabaseAccountFromAnyLocationsAsync(this.defaultEndpoint, new ArrayList<>(this.connectionPolicy.getPreferredLocations()), this::getDatabaseAccountAsync); return databaseAccountObs.flatMap(dbAccount -> { logger.debug("db account retrieved"); this.refreshInBackground.set(false); return this.refreshLocationPrivateAsync(dbAccount); }); }).onErrorResume(ex -> { logger.error("startRefreshLocationTimerAsync() - Unable to refresh database account from any location. Exception: {}", ex.toString(), ex); this.startRefreshLocationTimerAsync(); return Mono.empty(); }).subscribeOn(scheduler); } private Mono<DatabaseAccount> getDatabaseAccountAsync(URI serviceEndpoint) { return this.owner.getDatabaseAccountFromEndpoint(serviceEndpoint) .doOnNext(databaseAccount -> { if(databaseAccount != null) { this.latestDatabaseAccount = databaseAccount; } logger.debug("account retrieved: {}", databaseAccount); }).single(); } public boolean isClosed() { return this.isClosed; } }