comment
stringlengths
1
45k
method_body
stringlengths
23
281k
target_code
stringlengths
0
5.16k
method_body_after
stringlengths
12
281k
context_before
stringlengths
8
543k
context_after
stringlengths
8
543k
No logic for this case?
public BinaryData getBodyAsBinaryData() { return body; }
return body;
public BinaryData getBodyAsBinaryData() { return BinaryData.fromBytes(annotatedMessage.getBody().getFirstData()); }
class EventData { private static final int MAX_MESSAGE_ID_LENGTH = 128; private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final int MAX_SESSION_ID_LENGTH = 128; private final BinaryData body; private final AmqpAnnotatedMessage amqpAnnotatedMessage; private final ClientLogger logger = new ClientLogger(EventData.class); private Context context; /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(byte[] body) { this(BinaryData.fromBytes(Objects.requireNonNull(body, "'body' cannot be null."))); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(ByteBuffer body) { this(Objects.requireNonNull(body, "'body' cannot be null.").array()); } /** * Creates an event by encoding the {@code body} using UTF-8 charset. * * @param body The string that will be UTF-8 encoded to create an event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(String body) { this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8)); } /** * Creates an event with the provided {@link BinaryData} as payload. * * @param body The {@link BinaryData} payload for this event. */ public EventData(BinaryData body) { this(body, Context.NONE); } /** * Creates an event with the given {@code body}, system properties and context. * * @param body The data to set for this event. * @param context A specified key-value pair of type {@link Context}. * @throws NullPointerException if {@code body}, {@code systemProperties}, or {@code context} is {@code null}. 
*/ EventData(BinaryData body, Context context) { this.body = Objects.requireNonNull(body, "'body' cannot be null."); this.context = Objects.requireNonNull(context, "'context' cannot be null."); this.amqpAnnotatedMessage = new AmqpAnnotatedMessage(AmqpMessageBody.fromData(body.toBytes())); } /** * Gets the set of free-form event properties which may be used for passing metadata associated with the event with * the event body during Event Hubs operations. A common use-case for {@code properties()} is to associate * serialization hints for the {@link * * <p><strong>Adding serialization hint using {@code getProperties()}</strong></p> * <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p> * * {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties} * * @return Application properties associated with this {@link EventData}. */ public Map<String, Object> getProperties() { return amqpAnnotatedMessage.getApplicationProperties(); } /** * Gets the actual payload/data wrapped by EventData. * * <p> * If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of * {@link * wish to deserialize the binary data. * </p> * * @return A byte array representing the data. */ public byte[] getBody() { final AmqpMessageBodyType type = amqpAnnotatedMessage.getBody().getBodyType(); switch (type) { case DATA: return amqpAnnotatedMessage.getBody().getFirstData(); case SEQUENCE: case VALUE: throw logger.logExceptionAsError(new UnsupportedOperationException("Not supported AmqpBodyType: " + type.toString())); default: throw logger.logExceptionAsError(new IllegalArgumentException("Unknown AmqpBodyType: " + type.toString())); } } /** * Returns event data as UTF-8 decoded string. * * @return UTF-8 decoded string representation of the event data. 
*/ public String getBodyAsString() { return new String(body.toBytes(), UTF_8); } /** * Returns the {@link BinaryData} payload associated with this event. * * @return the {@link BinaryData} payload associated with this event. */ /** * Gets the offset of the event when it was received from the associated Event Hub partition. This is only present * on a <b>received</b> {@link EventData}. * * @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Long getOffset() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(OFFSET_ANNOTATION_NAME.getValue()); return value != null ? (Long) value : null; } /** * Sets the offset of the event when it was received from the associated Event Hub partition. * * @param offset Offset value of this message * * @return The updated {@link EventData}. * @see */ public EventData setOffset(Long offset) { amqpAnnotatedMessage.getMessageAnnotations().put(OFFSET_ANNOTATION_NAME.getValue(), offset); return this; } /** * Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was * used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b> * {@link EventData}. * * @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event * Hubs service or there was no partition key set when the event was sent to the Event Hub. */ public String getPartitionKey() { return (String) amqpAnnotatedMessage.getMessageAnnotations().get(PARTITION_KEY_ANNOTATION_NAME.getValue()); } /** * Sets the instant, in UTC, of when the event was enqueued in the Event Hub partition. * * @param enqueuedTime Enqueued time of this message * * @return The updated {@link EventData}. 
* @see */ public EventData setEnqueuedTime(Instant enqueuedTime) { amqpAnnotatedMessage.getMessageAnnotations().put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), enqueuedTime); return this; } /** * Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a * <b>received</b> {@link EventData}. * * @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Instant getEnqueuedTime() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); return value != null ? ((Date) value).toInstant() : null; } /** * Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This * is unique for every message received in the Event Hub partition. This is only present on a <b>received</b> * {@link EventData}. * * @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event * Hubs service. */ public Long getSequenceNumber() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); return value != null ? (Long) value : null; } /** * Sets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. * * @param sequenceNumber Sequence number of this message * * @return The updated {@link EventData}. 
* @see */ public EventData setSequenceNumber(Long sequenceNumber) { amqpAnnotatedMessage.getMessageAnnotations().put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), sequenceNumber); return this; } /** * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } EventData eventData = (EventData) o; return Arrays.equals(body.toBytes(), eventData.body.toBytes()); } /** * {@inheritDoc} */ @Override public int hashCode() { return Arrays.hashCode(body.toBytes()); } /** * A specified key-value pair of type {@link Context} to set additional information on the event. * * @return the {@link Context} object set on the event */ Context getContext() { return context; } /** * Adds a new key value pair to the existing context on Event Data. * * @param key The key for this context object * @param value The value for this context object. * @throws NullPointerException if {@code key} or {@code value} is null. * @return The updated {@link EventData}. */ public EventData addContext(String key, Object value) { Objects.requireNonNull(key, "The 'key' parameter cannot be null."); Objects.requireNonNull(value, "The 'value' parameter cannot be null."); this.context = context.addData(key, value); return this; } /** * Gets the content type of the message. * * <p> * Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5, * for example "application/json". * </p> * @return The content type of the {@link EventData}. */ public String getContentType() { return amqpAnnotatedMessage.getProperties().getContentType(); } /** * Sets the content type of the {@link EventData}. * * <p> * Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5, * for example "application/json". * </p> * * @param contentType RFC2045 Content-Type descriptor of the message. * * @return The updated {@link EventData}. 
*/ public EventData setContentType(String contentType) { amqpAnnotatedMessage.getProperties().setContentType(contentType); return this; } /** * Gets a correlation identifier. * <p> * Allows an application to specify a context for the message for the purposes of correlation, for example * reflecting the MessageId of a message that is being replied to. * </p> * * @return The correlation id of this message. */ public String getCorrelationId() { String correlationId = null; AmqpMessageId amqpCorrelationId = amqpAnnotatedMessage.getProperties().getCorrelationId(); if (amqpCorrelationId != null) { correlationId = amqpCorrelationId.toString(); } return correlationId; } /** * Sets a correlation identifier. * * @param correlationId correlation id of this message * * @return The updated {@link EventData}. * @see */ public EventData setCorrelationId(String correlationId) { AmqpMessageId id = null; if (correlationId != null) { id = new AmqpMessageId(correlationId); } amqpAnnotatedMessage.getProperties().setCorrelationId(id); return this; } /** * Gets the message id. * * <p> * The message identifier is an application-defined value that uniquely identifies the message and its payload. The * identifier is a free-form string and can reflect a GUID or an identifier derived from the application context. * </p> * * @return Id of the {@link EventData}. */ public byte[] getUserId() { return amqpAnnotatedMessage.getProperties().getUserId(); } /** * Sets the message id. * * @param userId The message id to be set. * * @return The updated {@link EventData}. * @throws IllegalArgumentException if {@code messageId} is too long. */ public EventData setUserId(byte[] userId) { amqpAnnotatedMessage.getProperties().setUserId(userId); return this; } /** * Gets the message id. * * <p> * The message identifier is an application-defined value that uniquely identifies the message and its payload. 
The * identifier is a free-form string and can reflect a GUID or an identifier derived from the application context. * </p> * * @return Id of the {@link EventData}. */ public String getMessageId() { String messageId = null; AmqpMessageId amqpMessageId = amqpAnnotatedMessage.getProperties().getMessageId(); if (amqpMessageId != null) { messageId = amqpMessageId.toString(); } return messageId; } /** * Sets the message id. * * @param messageId The message id to be set. * * @return The updated {@link EventData}. * @throws IllegalArgumentException if {@code messageId} is too long. */ public EventData setMessageId(String messageId) { checkIdLength("messageId", messageId, MAX_MESSAGE_ID_LENGTH); AmqpMessageId id = null; if (messageId != null) { id = new AmqpMessageId(messageId); } amqpAnnotatedMessage.getProperties().setMessageId(id); return this; } /** * Gets the subject for the message. * * <p> * This property enables the application to indicate the purpose of the message to the receiver in a standardized * fashion, similar to an email subject line. The mapped AMQP property is "subject". * </p> * * @return The subject for the message. */ public String getSubject() { return amqpAnnotatedMessage.getProperties().getSubject(); } /** * Sets the subject for the message. * * @param subject The application specific subject. * * @return The updated {@link EventData} object. */ public EventData setSubject(String subject) { amqpAnnotatedMessage.getProperties().setSubject(subject); return this; } /** * Gets the "to" address. * * <p> * This property is reserved for future use in routing scenarios and presently ignored by the broker itself. * Applications can use this value in rule-driven * auto-forward scenarios to indicate the intended logical destination of the message. 
* </p> * * @return "To" property value of this message */ public String getTo() { String to = null; AmqpAddress amqpAddress = amqpAnnotatedMessage.getProperties().getTo(); if (amqpAddress != null) { to = amqpAddress.toString(); } return to; } /** * Sets the "to" address. * * <p> * This property is reserved for future use in routing scenarios and presently ignored by the broker itself. * Applications can use this value in rule-driven * auto-forward chaining scenarios to indicate the intended logical destination of the message. * </p> * * @param to To property value of this message. * * @return The updated {@link EventData}. */ public EventData setTo(String to) { AmqpAddress toAddress = null; if (to != null) { toAddress = new AmqpAddress(to); } amqpAnnotatedMessage.getProperties().setTo(toAddress); return this; } /** * Gets the address of an entity to send replies to. * <p> * This optional and application-defined value is a standard way to express a reply path to the receiver of the * message. When a sender expects a reply, it sets the value to the absolute or relative path of the queue or topic * it expects the reply to be sent to. * * @return ReplyTo property value of this message */ public String getReplyTo() { String replyTo = null; AmqpAddress amqpAddress = amqpAnnotatedMessage.getProperties().getReplyTo(); if (amqpAddress != null) { replyTo = amqpAddress.toString(); } return replyTo; } /** * Sets the address of an entity to send replies to. * * @param replyTo ReplyTo property value of this message * * @return The updated {@link EventData}. * @see */ public EventData setReplyTo(String replyTo) { AmqpAddress replyToAddress = null; if (replyTo != null) { replyToAddress = new AmqpAddress(replyTo); } amqpAnnotatedMessage.getProperties().setReplyTo(replyToAddress); return this; } /** * Gets the duration before this message expires. 
* <p> * This value is the relative duration after which the message expires, starting from the instant the message has * been accepted and stored by the broker, as captured in {@link * explicitly, the assumed value is the DefaultTimeToLive set for the respective queue or topic. A message-level * TimeToLive value cannot be longer than the entity's DefaultTimeToLive setting and it is silently adjusted if it * does. * * @return Time to live duration of this message */ public Duration getTimeToLive() { return amqpAnnotatedMessage.getHeader().getTimeToLive(); } /** * Sets the duration of time before this message expires. * * @param timeToLive Time to Live duration of this message * * @return The updated {@link EventData}. * @see */ public EventData setTimeToLive(Duration timeToLive) { amqpAnnotatedMessage.getHeader().setTimeToLive(timeToLive); return this; } /** * Gets the session identifier for a session-aware entity. * * <p> * For session-aware entities, this application-defined value specifies the session affiliation of the message. * Messages with the same session identifier are subject to summary locking and enable exact in-order processing and * demultiplexing. For session-unaware entities, this value is ignored. * </p> * * @return The session id of the {@link EventData}. * @see <a href="https: */ public String getSessionId() { return amqpAnnotatedMessage.getProperties().getGroupId(); } /** * Sets the session identifier for a session-aware entity. * * @param sessionId The session identifier to be set. * * @return The updated {@link EventData}. * @throws IllegalArgumentException if {@code sessionId} is too long or if the {@code sessionId} does not match * the {@code partitionKey}. */ public EventData setSessionId(String sessionId) { checkIdLength("sessionId", sessionId, MAX_SESSION_ID_LENGTH); checkSessionId(sessionId); amqpAnnotatedMessage.getProperties().setGroupId(sessionId); return this; } /** * Gets the scheduled enqueue time of this message. 
* <p> * This value is used for delayed message availability. The message is safely added to the queue, but is not * considered active and therefore not retrievable until the scheduled enqueue time. Mind that the message may not * be activated (enqueued) at the exact given datetime; the actual activation time depends on the queue's workload * and its state. * </p> * * @return the datetime at which the message will be enqueued in Azure Service Bus */ public OffsetDateTime getScheduledEnqueueTime() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(SCHEDULED_ENQUEUE_UTC_TIME_NAME.getValue()); return value != null ? ((OffsetDateTime) value).toInstant().atOffset(ZoneOffset.UTC) : null; } /** * Sets the scheduled enqueue time of this message. A {@code null} will not be set. If this value needs to be unset * it could be done by value removing from {@link AmqpAnnotatedMessage * AmqpMessageConstant * * @param scheduledEnqueueTime the datetime at which this message should be enqueued in Azure Service Bus. * * @return The updated {@link EventData}. * @see */ public EventData setScheduledEnqueueTime(OffsetDateTime scheduledEnqueueTime) { if (scheduledEnqueueTime != null) { amqpAnnotatedMessage.getMessageAnnotations().put(SCHEDULED_ENQUEUE_UTC_TIME_NAME.getValue(), scheduledEnqueueTime); } return this; } /** * Sets a partition key for sending a message to a partitioned entity * * @param partitionKey The partition key of this message. * * @return The updated {@link EventData}. * @throws IllegalArgumentException if {@code partitionKey} is too long or if the {@code partitionKey} does not * match the {@code sessionId}. 
* @see */ public EventData setPartitionKey(String partitionKey) { checkIdLength("partitionKey", partitionKey, MAX_PARTITION_KEY_LENGTH); checkPartitionKey(partitionKey); amqpAnnotatedMessage.getMessageAnnotations().put(PARTITION_KEY_ANNOTATION_NAME.getValue(), partitionKey); return this; } /** * Gets or sets a session identifier augmenting the {@link * <p> * This value augments the {@link * be set for the reply when sent to the reply entity. * * @return The {@code getReplyToGroupId} property value of this message. */ public String getReplyToSessionId() { return amqpAnnotatedMessage.getProperties().getReplyToGroupId(); } /** * Gets or sets a session identifier augmenting the {@link * * @param replyToSessionId The ReplyToGroupId property value of this message. * * @return The updated {@link EventData}. */ public EventData setReplyToSessionId(String replyToSessionId) { amqpAnnotatedMessage.getProperties().setReplyToGroupId(replyToSessionId); return this; } /** * Gets the {@link AmqpAnnotatedMessage}. * * @return The raw AMQP message. */ public AmqpAnnotatedMessage getRawAmqpMessage() { return amqpAnnotatedMessage; } /** * Validates that the user can't set the partitionKey to a different value than the session ID. (this will * eventually migrate to a service-side check) */ private void checkSessionId(String proposedSessionId) { if (proposedSessionId == null) { return; } if (this.getPartitionKey() != null && this.getPartitionKey().compareTo(proposedSessionId) != 0) { final String message = String.format( "sessionId:%s cannot be set to a different value than partitionKey:%s.", proposedSessionId, this.getPartitionKey()); throw logger.logExceptionAsError(new IllegalArgumentException(message)); } } /** * Checks the length of ID fields. * * Some fields within the message will cause a failure in the service without enough context information. 
*/ private void checkIdLength(String fieldName, String value, int maxLength) { if (value != null && value.length() > maxLength) { final String message = String.format("%s cannot be longer than %d characters.", fieldName, maxLength); throw logger.logExceptionAsError(new IllegalArgumentException(message)); } } /** * Validates that the user can't set the partitionKey to a different value than the session ID. (this will * eventually migrate to a service-side check) */ private void checkPartitionKey(String proposedPartitionKey) { if (proposedPartitionKey == null) { return; } if (this.getSessionId() != null && this.getSessionId().compareTo(proposedPartitionKey) != 0) { final String message = String.format( "partitionKey:%s cannot be set to a different value than sessionId:%s.", proposedPartitionKey, this.getSessionId()); throw logger.logExceptionAsError(new IllegalArgumentException(message)); } } }
class EventData { /* * These are properties owned by the service and set when a message is received. */ static final Set<String> RESERVED_SYSTEM_PROPERTIES; private final Map<String, Object> properties; private final SystemProperties systemProperties; private final AmqpAnnotatedMessage annotatedMessage; private Context context; static { final Set<String> properties = new HashSet<>(); properties.add(OFFSET_ANNOTATION_NAME.getValue()); properties.add(PARTITION_KEY_ANNOTATION_NAME.getValue()); properties.add(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); properties.add(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); properties.add(PUBLISHER_ANNOTATION_NAME.getValue()); RESERVED_SYSTEM_PROPERTIES = Collections.unmodifiableSet(properties); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(byte[] body) { this.context = Context.NONE; final AmqpMessageBody messageBody = AmqpMessageBody.fromData( Objects.requireNonNull(body, "'body' cannot be null.")); this.annotatedMessage = new AmqpAnnotatedMessage(messageBody); this.properties = annotatedMessage.getApplicationProperties(); this.systemProperties = new SystemProperties(); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(ByteBuffer body) { this(Objects.requireNonNull(body, "'body' cannot be null.").array()); } /** * Creates an event by encoding the {@code body} using UTF-8 charset. * * @param body The string that will be UTF-8 encoded to create an event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(String body) { this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8)); } /** * Creates an event with the provided {@link BinaryData} as payload. 
* * @param body The {@link BinaryData} payload for this event. */ public EventData(BinaryData body) { this(Objects.requireNonNull(body, "'body' cannot be null.").toBytes()); } /** * Creates an event with the given {@code body}, system properties and context. Used in the case where a message * is received from the service. * * @param context A specified key-value pair of type {@link Context}. * @param amqpAnnotatedMessage Backing annotated message. * * @throws NullPointerException if {@code amqpAnnotatedMessage} or {@code context} is {@code null}. * @throws IllegalArgumentException if {@code amqpAnnotatedMessage}'s body type is unknown. */ EventData(AmqpAnnotatedMessage amqpAnnotatedMessage, SystemProperties systemProperties, Context context) { this.context = Objects.requireNonNull(context, "'context' cannot be null."); this.properties = Collections.unmodifiableMap(amqpAnnotatedMessage.getApplicationProperties()); this.annotatedMessage = Objects.requireNonNull(amqpAnnotatedMessage, "'amqpAnnotatedMessage' cannot be null."); this.systemProperties = systemProperties; switch (annotatedMessage.getBody().getBodyType()) { case DATA: break; case SEQUENCE: case VALUE: new ClientLogger(EventData.class).warning("Message body type '{}' is not supported in EH. " + " Getting contents of body may throw.", annotatedMessage.getBody().getBodyType()); break; default: throw new ClientLogger(EventData.class).logExceptionAsError(new IllegalArgumentException( "Body type not valid " + annotatedMessage.getBody().getBodyType())); } } /** * Gets the set of free-form event properties which may be used for passing metadata associated with the event with * the event body during Event Hubs operations. 
A common use-case for {@code properties()} is to associate * serialization hints for the {@link * * <p><strong>Adding serialization hint using {@code getProperties()}</strong></p> * <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p> * * {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties} * * @return Application properties associated with this {@link EventData}. For received {@link EventData}, the map is * a read-only view. */ public Map<String, Object> getProperties() { return properties; } /** * Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are * only present on a <b>received</b> {@link EventData}. * * @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}. {@code * null} if the {@link EventData} is not received from the Event Hubs service. */ public Map<String, Object> getSystemProperties() { return systemProperties; } /** * Gets the actual payload/data wrapped by EventData. * * <p> * If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of * {@link * wish to deserialize the binary data. * </p> * * @return A byte array representing the data. */ public byte[] getBody() { return annotatedMessage.getBody().getFirstData(); } /** * Returns event data as UTF-8 decoded string. * * @return UTF-8 decoded string representation of the event data. */ public String getBodyAsString() { return new String(annotatedMessage.getBody().getFirstData(), UTF_8); } /** * Returns the {@link BinaryData} payload associated with this event. * * @return the {@link BinaryData} payload associated with this event. */ /** * Gets the offset of the event when it was received from the associated Event Hub partition. This is only present * on a <b>received</b> {@link EventData}. * * @return The offset within the Event Hub partition of the received event. 
{@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Long getOffset() { return systemProperties.getOffset(); } /** * Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was * used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b> * {@link EventData}. * * @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event * Hubs service or there was no partition key set when the event was sent to the Event Hub. */ public String getPartitionKey() { return systemProperties.getPartitionKey(); } /** * Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a * <b>received</b> {@link EventData}. * * @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Instant getEnqueuedTime() { return systemProperties.getEnqueuedTime(); } /** * Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This * is unique for every message received in the Event Hub partition. This is only present on a <b>received</b> {@link * EventData}. * * @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event * Hubs service. */ public Long getSequenceNumber() { return systemProperties.getSequenceNumber(); } /** * Gets the underlying AMQP message. * * @return The underlying AMQP message. */ public AmqpAnnotatedMessage getRawAmqpMessage() { return annotatedMessage; } /** * Gets the content type. * * @return The content type. */ public String getContentType() { return annotatedMessage.getProperties().getContentType(); } /** * Sets the content type. * * @param contentType The content type. * * @return The updated {@link EventData}. 
*/ public EventData setContentType(String contentType) { annotatedMessage.getProperties().setContentType(contentType); return this; } /** * Gets the correlation id. * * @return The correlation id. {@code null} if there is none set. */ public String getCorrelationId() { final AmqpMessageId messageId = annotatedMessage.getProperties().getCorrelationId(); return messageId != null ? messageId.toString() : null; } /** * Sets the correlation id. * * @param correlationId The correlation id. * * @return The updated {@link EventData}. */ public EventData setCorrelationId(String correlationId) { final AmqpMessageId id = correlationId != null ? new AmqpMessageId(correlationId) : null; annotatedMessage.getProperties().setCorrelationId(id); return this; } /** * Gets the message id. * * @return The message id. {@code null} if there is none set. */ public String getMessageId() { final AmqpMessageId messageId = annotatedMessage.getProperties().getMessageId(); return messageId != null ? messageId.toString() : null; } /** * Sets the message id. * * @param messageId The message id. * * @return The updated {@link EventData}. */ public EventData setMessageId(String messageId) { final AmqpMessageId id = messageId != null ? new AmqpMessageId(messageId) : null; annotatedMessage.getProperties().setMessageId(id); return this; } /** * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } EventData eventData = (EventData) o; return Arrays.equals(annotatedMessage.getBody().getFirstData(), eventData.annotatedMessage.getBody().getFirstData()); } /** * {@inheritDoc} */ @Override public int hashCode() { return Arrays.hashCode(annotatedMessage.getBody().getFirstData()); } /** * A specified key-value pair of type {@link Context} to set additional information on the event. 
* * @return the {@link Context} object set on the event */ Context getContext() { return context; } /** * Adds a new key value pair to the existing context on Event Data. * * @param key The key for this context object * @param value The value for this context object. * * @return The updated {@link EventData}. * * @throws NullPointerException if {@code key} or {@code value} is null. */ public EventData addContext(String key, Object value) { Objects.requireNonNull(key, "The 'key' parameter cannot be null."); Objects.requireNonNull(value, "The 'value' parameter cannot be null."); this.context = context.addData(key, value); return this; } }
According to your comment, fixed within new version.
public byte[] getBody() { final AmqpMessageBodyType type = amqpAnnotatedMessage.getBody().getBodyType(); switch (type) { case DATA: return amqpAnnotatedMessage.getBody().getFirstData(); case SEQUENCE: case VALUE: throw logger.logExceptionAsError(new UnsupportedOperationException("Not supported AmqpBodyType: " + type.toString())); default: throw logger.logExceptionAsError(new IllegalArgumentException("Unknown AmqpBodyType: " + type.toString())); } }
case DATA:
public byte[] getBody() { return annotatedMessage.getBody().getFirstData(); }
class EventData { private static final int MAX_MESSAGE_ID_LENGTH = 128; private static final int MAX_PARTITION_KEY_LENGTH = 128; private static final int MAX_SESSION_ID_LENGTH = 128; private final BinaryData body; private final AmqpAnnotatedMessage amqpAnnotatedMessage; private final ClientLogger logger = new ClientLogger(EventData.class); private Context context; /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(byte[] body) { this(BinaryData.fromBytes(Objects.requireNonNull(body, "'body' cannot be null."))); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(ByteBuffer body) { this(Objects.requireNonNull(body, "'body' cannot be null.").array()); } /** * Creates an event by encoding the {@code body} using UTF-8 charset. * * @param body The string that will be UTF-8 encoded to create an event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(String body) { this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8)); } /** * Creates an event with the provided {@link BinaryData} as payload. * * @param body The {@link BinaryData} payload for this event. */ public EventData(BinaryData body) { this(body, Context.NONE); } /** * Creates an event with the given {@code body}, system properties and context. * * @param body The data to set for this event. * @param context A specified key-value pair of type {@link Context}. * @throws NullPointerException if {@code body}, {@code systemProperties}, or {@code context} is {@code null}. 
*/ EventData(BinaryData body, Context context) { this.body = Objects.requireNonNull(body, "'body' cannot be null."); this.context = Objects.requireNonNull(context, "'context' cannot be null."); this.amqpAnnotatedMessage = new AmqpAnnotatedMessage(AmqpMessageBody.fromData(body.toBytes())); } /** * Gets the set of free-form event properties which may be used for passing metadata associated with the event with * the event body during Event Hubs operations. A common use-case for {@code properties()} is to associate * serialization hints for the {@link * * <p><strong>Adding serialization hint using {@code getProperties()}</strong></p> * <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p> * * {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties} * * @return Application properties associated with this {@link EventData}. */ public Map<String, Object> getProperties() { return amqpAnnotatedMessage.getApplicationProperties(); } /** * Gets the actual payload/data wrapped by EventData. * * <p> * If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of * {@link * wish to deserialize the binary data. * </p> * * @return A byte array representing the data. */ /** * Returns event data as UTF-8 decoded string. * * @return UTF-8 decoded string representation of the event data. */ public String getBodyAsString() { return new String(body.toBytes(), UTF_8); } /** * Returns the {@link BinaryData} payload associated with this event. * * @return the {@link BinaryData} payload associated with this event. */ public BinaryData getBodyAsBinaryData() { return body; } /** * Gets the offset of the event when it was received from the associated Event Hub partition. This is only present * on a <b>received</b> {@link EventData}. * * @return The offset within the Event Hub partition of the received event. 
{@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Long getOffset() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(OFFSET_ANNOTATION_NAME.getValue()); return value != null ? (Long) value : null; } /** * Sets the offset of the event when it was received from the associated Event Hub partition. * * @param offset Offset value of this message * * @return The updated {@link EventData}. * @see */ public EventData setOffset(Long offset) { amqpAnnotatedMessage.getMessageAnnotations().put(OFFSET_ANNOTATION_NAME.getValue(), offset); return this; } /** * Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was * used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b> * {@link EventData}. * * @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event * Hubs service or there was no partition key set when the event was sent to the Event Hub. */ public String getPartitionKey() { return (String) amqpAnnotatedMessage.getMessageAnnotations().get(PARTITION_KEY_ANNOTATION_NAME.getValue()); } /** * Sets the instant, in UTC, of when the event was enqueued in the Event Hub partition. * * @param enqueuedTime Enqueued time of this message * * @return The updated {@link EventData}. * @see */ public EventData setEnqueuedTime(Instant enqueuedTime) { amqpAnnotatedMessage.getMessageAnnotations().put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), enqueuedTime); return this; } /** * Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a * <b>received</b> {@link EventData}. * * @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData} * was not received from Event Hubs service. 
*/ public Instant getEnqueuedTime() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); return value != null ? ((Date) value).toInstant() : null; } /** * Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This * is unique for every message received in the Event Hub partition. This is only present on a <b>received</b> * {@link EventData}. * * @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event * Hubs service. */ public Long getSequenceNumber() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); return value != null ? (Long) value : null; } /** * Sets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. * * @param sequenceNumber Sequence number of this message * * @return The updated {@link EventData}. * @see */ public EventData setSequenceNumber(Long sequenceNumber) { amqpAnnotatedMessage.getMessageAnnotations().put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), sequenceNumber); return this; } /** * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } EventData eventData = (EventData) o; return Arrays.equals(body.toBytes(), eventData.body.toBytes()); } /** * {@inheritDoc} */ @Override public int hashCode() { return Arrays.hashCode(body.toBytes()); } /** * A specified key-value pair of type {@link Context} to set additional information on the event. * * @return the {@link Context} object set on the event */ Context getContext() { return context; } /** * Adds a new key value pair to the existing context on Event Data. * * @param key The key for this context object * @param value The value for this context object. * @throws NullPointerException if {@code key} or {@code value} is null. 
* @return The updated {@link EventData}. */ public EventData addContext(String key, Object value) { Objects.requireNonNull(key, "The 'key' parameter cannot be null."); Objects.requireNonNull(value, "The 'value' parameter cannot be null."); this.context = context.addData(key, value); return this; } /** * Gets the content type of the message. * * <p> * Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5, * for example "application/json". * </p> * @return The content type of the {@link EventData}. */ public String getContentType() { return amqpAnnotatedMessage.getProperties().getContentType(); } /** * Sets the content type of the {@link EventData}. * * <p> * Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5, * for example "application/json". * </p> * * @param contentType RFC2045 Content-Type descriptor of the message. * * @return The updated {@link EventData}. */ public EventData setContentType(String contentType) { amqpAnnotatedMessage.getProperties().setContentType(contentType); return this; } /** * Gets a correlation identifier. * <p> * Allows an application to specify a context for the message for the purposes of correlation, for example * reflecting the MessageId of a message that is being replied to. * </p> * * @return The correlation id of this message. */ public String getCorrelationId() { String correlationId = null; AmqpMessageId amqpCorrelationId = amqpAnnotatedMessage.getProperties().getCorrelationId(); if (amqpCorrelationId != null) { correlationId = amqpCorrelationId.toString(); } return correlationId; } /** * Sets a correlation identifier. * * @param correlationId correlation id of this message * * @return The updated {@link EventData}. 
* @see */ public EventData setCorrelationId(String correlationId) { AmqpMessageId id = null; if (correlationId != null) { id = new AmqpMessageId(correlationId); } amqpAnnotatedMessage.getProperties().setCorrelationId(id); return this; } /** * Gets the message id. * * <p> * The message identifier is an application-defined value that uniquely identifies the message and its payload. The * identifier is a free-form string and can reflect a GUID or an identifier derived from the application context. * </p> * * @return Id of the {@link EventData}. */ public byte[] getUserId() { return amqpAnnotatedMessage.getProperties().getUserId(); } /** * Sets the message id. * * @param userId The message id to be set. * * @return The updated {@link EventData}. * @throws IllegalArgumentException if {@code messageId} is too long. */ public EventData setUserId(byte[] userId) { amqpAnnotatedMessage.getProperties().setUserId(userId); return this; } /** * Gets the message id. * * <p> * The message identifier is an application-defined value that uniquely identifies the message and its payload. The * identifier is a free-form string and can reflect a GUID or an identifier derived from the application context. * </p> * * @return Id of the {@link EventData}. */ public String getMessageId() { String messageId = null; AmqpMessageId amqpMessageId = amqpAnnotatedMessage.getProperties().getMessageId(); if (amqpMessageId != null) { messageId = amqpMessageId.toString(); } return messageId; } /** * Sets the message id. * * @param messageId The message id to be set. * * @return The updated {@link EventData}. * @throws IllegalArgumentException if {@code messageId} is too long. */ public EventData setMessageId(String messageId) { checkIdLength("messageId", messageId, MAX_MESSAGE_ID_LENGTH); AmqpMessageId id = null; if (messageId != null) { id = new AmqpMessageId(messageId); } amqpAnnotatedMessage.getProperties().setMessageId(id); return this; } /** * Gets the subject for the message. 
* * <p> * This property enables the application to indicate the purpose of the message to the receiver in a standardized * fashion, similar to an email subject line. The mapped AMQP property is "subject". * </p> * * @return The subject for the message. */ public String getSubject() { return amqpAnnotatedMessage.getProperties().getSubject(); } /** * Sets the subject for the message. * * @param subject The application specific subject. * * @return The updated {@link EventData} object. */ public EventData setSubject(String subject) { amqpAnnotatedMessage.getProperties().setSubject(subject); return this; } /** * Gets the "to" address. * * <p> * This property is reserved for future use in routing scenarios and presently ignored by the broker itself. * Applications can use this value in rule-driven * auto-forward scenarios to indicate the intended logical destination of the message. * </p> * * @return "To" property value of this message */ public String getTo() { String to = null; AmqpAddress amqpAddress = amqpAnnotatedMessage.getProperties().getTo(); if (amqpAddress != null) { to = amqpAddress.toString(); } return to; } /** * Sets the "to" address. * * <p> * This property is reserved for future use in routing scenarios and presently ignored by the broker itself. * Applications can use this value in rule-driven * auto-forward chaining scenarios to indicate the intended logical destination of the message. * </p> * * @param to To property value of this message. * * @return The updated {@link EventData}. */ public EventData setTo(String to) { AmqpAddress toAddress = null; if (to != null) { toAddress = new AmqpAddress(to); } amqpAnnotatedMessage.getProperties().setTo(toAddress); return this; } /** * Gets the address of an entity to send replies to. * <p> * This optional and application-defined value is a standard way to express a reply path to the receiver of the * message. 
When a sender expects a reply, it sets the value to the absolute or relative path of the queue or topic * it expects the reply to be sent to. * * @return ReplyTo property value of this message */ public String getReplyTo() { String replyTo = null; AmqpAddress amqpAddress = amqpAnnotatedMessage.getProperties().getReplyTo(); if (amqpAddress != null) { replyTo = amqpAddress.toString(); } return replyTo; } /** * Sets the address of an entity to send replies to. * * @param replyTo ReplyTo property value of this message * * @return The updated {@link EventData}. * @see */ public EventData setReplyTo(String replyTo) { AmqpAddress replyToAddress = null; if (replyTo != null) { replyToAddress = new AmqpAddress(replyTo); } amqpAnnotatedMessage.getProperties().setReplyTo(replyToAddress); return this; } /** * Gets the duration before this message expires. * <p> * This value is the relative duration after which the message expires, starting from the instant the message has * been accepted and stored by the broker, as captured in {@link * explicitly, the assumed value is the DefaultTimeToLive set for the respective queue or topic. A message-level * TimeToLive value cannot be longer than the entity's DefaultTimeToLive setting and it is silently adjusted if it * does. * * @return Time to live duration of this message */ public Duration getTimeToLive() { return amqpAnnotatedMessage.getHeader().getTimeToLive(); } /** * Sets the duration of time before this message expires. * * @param timeToLive Time to Live duration of this message * * @return The updated {@link EventData}. * @see */ public EventData setTimeToLive(Duration timeToLive) { amqpAnnotatedMessage.getHeader().setTimeToLive(timeToLive); return this; } /** * Gets the session identifier for a session-aware entity. * * <p> * For session-aware entities, this application-defined value specifies the session affiliation of the message. 
* Messages with the same session identifier are subject to summary locking and enable exact in-order processing and * demultiplexing. For session-unaware entities, this value is ignored. * </p> * * @return The session id of the {@link EventData}. * @see <a href="https: */ public String getSessionId() { return amqpAnnotatedMessage.getProperties().getGroupId(); } /** * Sets the session identifier for a session-aware entity. * * @param sessionId The session identifier to be set. * * @return The updated {@link EventData}. * @throws IllegalArgumentException if {@code sessionId} is too long or if the {@code sessionId} does not match * the {@code partitionKey}. */ public EventData setSessionId(String sessionId) { checkIdLength("sessionId", sessionId, MAX_SESSION_ID_LENGTH); checkSessionId(sessionId); amqpAnnotatedMessage.getProperties().setGroupId(sessionId); return this; } /** * Gets the scheduled enqueue time of this message. * <p> * This value is used for delayed message availability. The message is safely added to the queue, but is not * considered active and therefore not retrievable until the scheduled enqueue time. Mind that the message may not * be activated (enqueued) at the exact given datetime; the actual activation time depends on the queue's workload * and its state. * </p> * * @return the datetime at which the message will be enqueued in Azure Service Bus */ public OffsetDateTime getScheduledEnqueueTime() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(SCHEDULED_ENQUEUE_UTC_TIME_NAME.getValue()); return value != null ? ((OffsetDateTime) value).toInstant().atOffset(ZoneOffset.UTC) : null; } /** * Sets the scheduled enqueue time of this message. A {@code null} will not be set. If this value needs to be unset * it could be done by value removing from {@link AmqpAnnotatedMessage * AmqpMessageConstant * * @param scheduledEnqueueTime the datetime at which this message should be enqueued in Azure Service Bus. 
* * @return The updated {@link EventData}. * @see */ public EventData setScheduledEnqueueTime(OffsetDateTime scheduledEnqueueTime) { if (scheduledEnqueueTime != null) { amqpAnnotatedMessage.getMessageAnnotations().put(SCHEDULED_ENQUEUE_UTC_TIME_NAME.getValue(), scheduledEnqueueTime); } return this; } /** * Sets a partition key for sending a message to a partitioned entity * * @param partitionKey The partition key of this message. * * @return The updated {@link EventData}. * @throws IllegalArgumentException if {@code partitionKey} is too long or if the {@code partitionKey} does not * match the {@code sessionId}. * @see */ public EventData setPartitionKey(String partitionKey) { checkIdLength("partitionKey", partitionKey, MAX_PARTITION_KEY_LENGTH); checkPartitionKey(partitionKey); amqpAnnotatedMessage.getMessageAnnotations().put(PARTITION_KEY_ANNOTATION_NAME.getValue(), partitionKey); return this; } /** * Gets or sets a session identifier augmenting the {@link * <p> * This value augments the {@link * be set for the reply when sent to the reply entity. * * @return The {@code getReplyToGroupId} property value of this message. */ public String getReplyToSessionId() { return amqpAnnotatedMessage.getProperties().getReplyToGroupId(); } /** * Gets or sets a session identifier augmenting the {@link * * @param replyToSessionId The ReplyToGroupId property value of this message. * * @return The updated {@link EventData}. */ public EventData setReplyToSessionId(String replyToSessionId) { amqpAnnotatedMessage.getProperties().setReplyToGroupId(replyToSessionId); return this; } /** * Gets the {@link AmqpAnnotatedMessage}. * * @return The raw AMQP message. */ public AmqpAnnotatedMessage getRawAmqpMessage() { return amqpAnnotatedMessage; } /** * Validates that the user can't set the partitionKey to a different value than the session ID. 
(this will * eventually migrate to a service-side check) */ private void checkSessionId(String proposedSessionId) { if (proposedSessionId == null) { return; } if (this.getPartitionKey() != null && this.getPartitionKey().compareTo(proposedSessionId) != 0) { final String message = String.format( "sessionId:%s cannot be set to a different value than partitionKey:%s.", proposedSessionId, this.getPartitionKey()); throw logger.logExceptionAsError(new IllegalArgumentException(message)); } } /** * Checks the length of ID fields. * * Some fields within the message will cause a failure in the service without enough context information. */ private void checkIdLength(String fieldName, String value, int maxLength) { if (value != null && value.length() > maxLength) { final String message = String.format("%s cannot be longer than %d characters.", fieldName, maxLength); throw logger.logExceptionAsError(new IllegalArgumentException(message)); } } /** * Validates that the user can't set the partitionKey to a different value than the session ID. (this will * eventually migrate to a service-side check) */ private void checkPartitionKey(String proposedPartitionKey) { if (proposedPartitionKey == null) { return; } if (this.getSessionId() != null && this.getSessionId().compareTo(proposedPartitionKey) != 0) { final String message = String.format( "partitionKey:%s cannot be set to a different value than sessionId:%s.", proposedPartitionKey, this.getSessionId()); throw logger.logExceptionAsError(new IllegalArgumentException(message)); } } }
class EventData { /* * These are properties owned by the service and set when a message is received. */ static final Set<String> RESERVED_SYSTEM_PROPERTIES; private final Map<String, Object> properties; private final SystemProperties systemProperties; private final AmqpAnnotatedMessage annotatedMessage; private Context context; static { final Set<String> properties = new HashSet<>(); properties.add(OFFSET_ANNOTATION_NAME.getValue()); properties.add(PARTITION_KEY_ANNOTATION_NAME.getValue()); properties.add(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); properties.add(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); properties.add(PUBLISHER_ANNOTATION_NAME.getValue()); RESERVED_SYSTEM_PROPERTIES = Collections.unmodifiableSet(properties); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(byte[] body) { this.context = Context.NONE; final AmqpMessageBody messageBody = AmqpMessageBody.fromData( Objects.requireNonNull(body, "'body' cannot be null.")); this.annotatedMessage = new AmqpAnnotatedMessage(messageBody); this.properties = annotatedMessage.getApplicationProperties(); this.systemProperties = new SystemProperties(); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(ByteBuffer body) { this(Objects.requireNonNull(body, "'body' cannot be null.").array()); } /** * Creates an event by encoding the {@code body} using UTF-8 charset. * * @param body The string that will be UTF-8 encoded to create an event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(String body) { this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8)); } /** * Creates an event with the provided {@link BinaryData} as payload. 
* * @param body The {@link BinaryData} payload for this event. */ public EventData(BinaryData body) { this(Objects.requireNonNull(body, "'body' cannot be null.").toBytes()); } /** * Creates an event with the given {@code body}, system properties and context. Used in the case where a message * is received from the service. * * @param context A specified key-value pair of type {@link Context}. * @param amqpAnnotatedMessage Backing annotated message. * * @throws NullPointerException if {@code amqpAnnotatedMessage} or {@code context} is {@code null}. * @throws IllegalArgumentException if {@code amqpAnnotatedMessage}'s body type is unknown. */ EventData(AmqpAnnotatedMessage amqpAnnotatedMessage, SystemProperties systemProperties, Context context) { this.context = Objects.requireNonNull(context, "'context' cannot be null."); this.properties = Collections.unmodifiableMap(amqpAnnotatedMessage.getApplicationProperties()); this.annotatedMessage = Objects.requireNonNull(amqpAnnotatedMessage, "'amqpAnnotatedMessage' cannot be null."); this.systemProperties = systemProperties; switch (annotatedMessage.getBody().getBodyType()) { case DATA: break; case SEQUENCE: case VALUE: new ClientLogger(EventData.class).warning("Message body type '{}' is not supported in EH. " + " Getting contents of body may throw.", annotatedMessage.getBody().getBodyType()); break; default: throw new ClientLogger(EventData.class).logExceptionAsError(new IllegalArgumentException( "Body type not valid " + annotatedMessage.getBody().getBodyType())); } } /** * Gets the set of free-form event properties which may be used for passing metadata associated with the event with * the event body during Event Hubs operations. 
A common use-case for {@code properties()} is to associate * serialization hints for the {@link * * <p><strong>Adding serialization hint using {@code getProperties()}</strong></p> * <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p> * * {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties} * * @return Application properties associated with this {@link EventData}. For received {@link EventData}, the map is * a read-only view. */ public Map<String, Object> getProperties() { return properties; } /** * Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are * only present on a <b>received</b> {@link EventData}. * * @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}. {@code * null} if the {@link EventData} is not received from the Event Hubs service. */ public Map<String, Object> getSystemProperties() { return systemProperties; } /** * Gets the actual payload/data wrapped by EventData. * * <p> * If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of * {@link * wish to deserialize the binary data. * </p> * * @return A byte array representing the data. */ /** * Returns event data as UTF-8 decoded string. * * @return UTF-8 decoded string representation of the event data. */ public String getBodyAsString() { return new String(annotatedMessage.getBody().getFirstData(), UTF_8); } /** * Returns the {@link BinaryData} payload associated with this event. * * @return the {@link BinaryData} payload associated with this event. */ public BinaryData getBodyAsBinaryData() { return BinaryData.fromBytes(annotatedMessage.getBody().getFirstData()); } /** * Gets the offset of the event when it was received from the associated Event Hub partition. This is only present * on a <b>received</b> {@link EventData}. 
* * @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Long getOffset() { return systemProperties.getOffset(); } /** * Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was * used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b> * {@link EventData}. * * @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event * Hubs service or there was no partition key set when the event was sent to the Event Hub. */ public String getPartitionKey() { return systemProperties.getPartitionKey(); } /** * Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a * <b>received</b> {@link EventData}. * * @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Instant getEnqueuedTime() { return systemProperties.getEnqueuedTime(); } /** * Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This * is unique for every message received in the Event Hub partition. This is only present on a <b>received</b> {@link * EventData}. * * @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event * Hubs service. */ public Long getSequenceNumber() { return systemProperties.getSequenceNumber(); } /** * Gets the underlying AMQP message. * * @return The underlying AMQP message. */ public AmqpAnnotatedMessage getRawAmqpMessage() { return annotatedMessage; } /** * Gets the content type. * * @return The content type. */ public String getContentType() { return annotatedMessage.getProperties().getContentType(); } /** * Sets the content type. 
* * @param contentType The content type. * * @return The updated {@link EventData}. */ public EventData setContentType(String contentType) { annotatedMessage.getProperties().setContentType(contentType); return this; } /** * Gets the correlation id. * * @return The correlation id. {@code null} if there is none set. */ public String getCorrelationId() { final AmqpMessageId messageId = annotatedMessage.getProperties().getCorrelationId(); return messageId != null ? messageId.toString() : null; } /** * Sets the correlation id. * * @param correlationId The correlation id. * * @return The updated {@link EventData}. */ public EventData setCorrelationId(String correlationId) { final AmqpMessageId id = correlationId != null ? new AmqpMessageId(correlationId) : null; annotatedMessage.getProperties().setCorrelationId(id); return this; } /** * Gets the message id. * * @return The message id. {@code null} if there is none set. */ public String getMessageId() { final AmqpMessageId messageId = annotatedMessage.getProperties().getMessageId(); return messageId != null ? messageId.toString() : null; } /** * Sets the message id. * * @param messageId The message id. * * @return The updated {@link EventData}. */ public EventData setMessageId(String messageId) { final AmqpMessageId id = messageId != null ? new AmqpMessageId(messageId) : null; annotatedMessage.getProperties().setMessageId(id); return this; } /** * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } EventData eventData = (EventData) o; return Arrays.equals(annotatedMessage.getBody().getFirstData(), eventData.annotatedMessage.getBody().getFirstData()); } /** * {@inheritDoc} */ @Override public int hashCode() { return Arrays.hashCode(annotatedMessage.getBody().getFirstData()); } /** * A specified key-value pair of type {@link Context} to set additional information on the event. 
* * @return the {@link Context} object set on the event */ Context getContext() { return context; } /** * Adds a new key value pair to the existing context on Event Data. * * @param key The key for this context object * @param value The value for this context object. * * @return The updated {@link EventData}. * * @throws NullPointerException if {@code key} or {@code value} is null. */ public EventData addContext(String key, Object value) { Objects.requireNonNull(key, "The 'key' parameter cannot be null."); Objects.requireNonNull(value, "The 'value' parameter cannot be null."); this.context = context.addData(key, value); return this; } }
I believe we should not be performing this validation, as mentioned in an earlier comment.
/**
 * Sets the message id.
 *
 * <p>No client-side length validation is performed: message-id limits are service-specific
 * and are enforced by the service, as noted in review feedback.</p>
 *
 * @param messageId The message id to be set. May be {@code null} to clear the id.
 *
 * @return The updated {@link EventData}.
 */
public EventData setMessageId(String messageId) {
    final AmqpMessageId id = messageId != null ? new AmqpMessageId(messageId) : null;
    amqpAnnotatedMessage.getProperties().setMessageId(id);
    return this;
}
checkIdLength("messageId", messageId, MAX_MESSAGE_ID_LENGTH);
/**
 * Sets the message id on the underlying AMQP message properties.
 *
 * @param messageId The message id; {@code null} clears the current id.
 *
 * @return The updated {@link EventData}.
 */
public EventData setMessageId(String messageId) {
    AmqpMessageId id = null;
    if (messageId != null) {
        id = new AmqpMessageId(messageId);
    }
    annotatedMessage.getProperties().setMessageId(id);
    return this;
}
class EventData { private static final int MAX_MESSAGE_ID_LENGTH = 128; private static final int MAX_PARTITION_KEY_LENGTH = 128; private final BinaryData body; private final AmqpAnnotatedMessage amqpAnnotatedMessage; private final ClientLogger logger = new ClientLogger(EventData.class); private final SystemProperties systemProperties; private Context context; /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(byte[] body) { this(BinaryData.fromBytes(Objects.requireNonNull(body, "'body' cannot be null."))); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(ByteBuffer body) { this(Objects.requireNonNull(body, "'body' cannot be null.").array()); } /** * Creates an event by encoding the {@code body} using UTF-8 charset. * * @param body The string that will be UTF-8 encoded to create an event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(String body) { this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8)); } /** * Creates an event with the provided {@link BinaryData} as payload. * * @param body The {@link BinaryData} payload for this event. */ public EventData(BinaryData body) { this(body, new SystemProperties(), Context.NONE); } /** * Creates an event with the given {@code body}, system properties and context. * * @param body The data to set for this event. * @param systemProperties System properties set by message broker for this event. * @param context A specified key-value pair of type {@link Context}. * @throws NullPointerException if {@code body}, {@code systemProperties}, or {@code context} is {@code null}. 
*/ EventData(BinaryData body, SystemProperties systemProperties, Context context) { this.body = Objects.requireNonNull(body, "'body' cannot be null."); this.context = Objects.requireNonNull(context, "'context' cannot be null."); this.systemProperties = Objects.requireNonNull(systemProperties, "'systemProperties' cannot be null."); this.amqpAnnotatedMessage = new AmqpAnnotatedMessage(AmqpMessageBody.fromData(body.toBytes())); if (Objects.nonNull(this.systemProperties.getOffset())) { amqpAnnotatedMessage.getMessageAnnotations().put(OFFSET_ANNOTATION_NAME.getValue(), this.systemProperties.getOffset()); } if (Objects.nonNull(this.systemProperties.getSequenceNumber())) { amqpAnnotatedMessage.getMessageAnnotations().put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), this.systemProperties.getSequenceNumber()); } if (Objects.nonNull(this.systemProperties.getPartitionKey())) { String partitionKey = this.systemProperties.getPartitionKey(); checkIdLength("partitionKey", partitionKey, MAX_PARTITION_KEY_LENGTH); checkPartitionKey(partitionKey); amqpAnnotatedMessage.getMessageAnnotations().put(PARTITION_KEY_ANNOTATION_NAME.getValue(), partitionKey); } if (Objects.nonNull(this.systemProperties.getEnqueuedTime())) { amqpAnnotatedMessage.getMessageAnnotations().put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), this.systemProperties.getEnqueuedTime()); } } /** * Gets the set of free-form event properties which may be used for passing metadata associated with the event with * the event body during Event Hubs operations. A common use-case for {@code properties()} is to associate * serialization hints for the {@link * * <p><strong>Adding serialization hint using {@code getProperties()}</strong></p> * <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p> * * {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties} * * @return Application properties associated with this {@link EventData}. 
*/ public Map<String, Object> getProperties() { return amqpAnnotatedMessage.getApplicationProperties(); } /** * Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are * only present on a <b>received</b> {@link EventData}. * * @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}. * {@code null} if the {@link EventData} is not received from the Event Hubs service. */ public Map<String, Object> getSystemProperties() { return systemProperties; } /** * Gets the actual payload/data wrapped by EventData. * * <p> * If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of * {@link * wish to deserialize the binary data. * </p> * * @return A byte array representing the data. */ public byte[] getBody() { final AmqpMessageBodyType type = amqpAnnotatedMessage.getBody().getBodyType(); switch (type) { case DATA: return amqpAnnotatedMessage.getBody().getFirstData(); case SEQUENCE: case VALUE: throw logger.logExceptionAsError(new UnsupportedOperationException("Not supported AmqpBodyType: " + type.toString())); default: throw logger.logExceptionAsError(new IllegalArgumentException("Unknown AmqpBodyType: " + type.toString())); } } /** * Returns event data as UTF-8 decoded string. * * @return UTF-8 decoded string representation of the event data. */ public String getBodyAsString() { return new String(body.toBytes(), UTF_8); } /** * Returns the {@link BinaryData} payload associated with this event. * * @return the {@link BinaryData} payload associated with this event. */ public BinaryData getBodyAsBinaryData() { return body; } /** * Gets the content type of the message. * * <p> * Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5, * for example "application/json". * </p> * @return The content type of the {@link EventData}. 
*/ public String getContentType() { return amqpAnnotatedMessage.getProperties().getContentType(); } /** * Sets the content type of the {@link EventData}. * * <p> * Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5, * for example "application/json". * </p> * * @param contentType RFC2045 Content-Type descriptor of the message. * * @return The updated {@link EventData}. */ public EventData setContentType(String contentType) { amqpAnnotatedMessage.getProperties().setContentType(contentType); return this; } /** * Gets a correlation identifier. * <p> * Allows an application to specify a context for the message for the purposes of correlation, for example * reflecting the MessageId of a message that is being replied to. * </p> * * @return The correlation id of this message. */ public String getCorrelationId() { String correlationId = null; AmqpMessageId amqpCorrelationId = amqpAnnotatedMessage.getProperties().getCorrelationId(); if (amqpCorrelationId != null) { correlationId = amqpCorrelationId.toString(); } return correlationId; } /** * Sets a correlation identifier. * * @param correlationId correlation id of this message * * @return The updated {@link EventData}. * @see */ public EventData setCorrelationId(String correlationId) { AmqpMessageId id = null; if (correlationId != null) { id = new AmqpMessageId(correlationId); } amqpAnnotatedMessage.getProperties().setCorrelationId(id); return this; } /** * Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a * <b>received</b> {@link EventData}. * * @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Instant getEnqueuedTime() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); return value != null ? 
((Date) value).toInstant() : null; } /** * Gets the message id. * * <p> * The message identifier is an application-defined value that uniquely identifies the message and its payload. The * identifier is a free-form string and can reflect a GUID or an identifier derived from the application context. * </p> * * @return Id of the {@link EventData}. */ public String getMessageId() { String messageId = null; AmqpMessageId amqpMessageId = amqpAnnotatedMessage.getProperties().getMessageId(); if (amqpMessageId != null) { messageId = amqpMessageId.toString(); } return messageId; } /** * Sets the message id. * * @param messageId The message id to be set. * * @return The updated {@link EventData}. * @throws IllegalArgumentException if {@code messageId} is too long. */ /** * Gets the offset of the event when it was received from the associated Event Hub partition. This is only present * on a <b>received</b> {@link EventData}. * * @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Long getOffset() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(OFFSET_ANNOTATION_NAME.getValue()); return value != null ? (Long) value : null; } /** * Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was * used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b> * {@link EventData}. * * @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event * Hubs service or there was no partition key set when the event was sent to the Event Hub. */ public String getPartitionKey() { return (String) amqpAnnotatedMessage.getMessageAnnotations().get(PARTITION_KEY_ANNOTATION_NAME.getValue()); } /** * Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. 
This * is unique for every message received in the Event Hub partition. This is only present on a <b>received</b> * {@link EventData}. * * @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event * Hubs service. */ public Long getSequenceNumber() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); return value != null ? (Long) value : null; } /** * {@inheritDoc} */ @Override public int hashCode() { return Arrays.hashCode(body.toBytes()); } /** * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } EventData eventData = (EventData) o; return Arrays.equals(body.toBytes(), eventData.body.toBytes()); } /** * Gets the {@link AmqpAnnotatedMessage}. * * @return The raw AMQP message. */ public AmqpAnnotatedMessage getRawAmqpMessage() { return amqpAnnotatedMessage; } /** * A specified key-value pair of type {@link Context} to set additional information on the event. * * @return the {@link Context} object set on the event */ Context getContext() { return context; } /** * Adds a new key value pair to the existing context on Event Data. * * @param key The key for this context object * @param value The value for this context object. * @throws NullPointerException if {@code key} or {@code value} is null. * @return The updated {@link EventData}. */ EventData addContext(String key, Object value) { Objects.requireNonNull(key, "The 'key' parameter cannot be null."); Objects.requireNonNull(value, "The 'value' parameter cannot be null."); this.context = context.addData(key, value); return this; } /** * Checks the length of ID fields. * * Some fields within the message will cause a failure in the service without enough context information. 
*/ private void checkIdLength(String fieldName, String value, int maxLength) { if (value != null && value.length() > maxLength) { final String message = String.format("%s cannot be longer than %d characters.", fieldName, maxLength); throw logger.logExceptionAsError(new IllegalArgumentException(message)); } } /** * Validates that the user can't set the partitionKey to a different value than the session ID. (this will * eventually migrate to a service-side check) */ private void checkPartitionKey(String proposedPartitionKey) { if (proposedPartitionKey == null) { return; } if (amqpAnnotatedMessage.getProperties().getGroupId() != null && amqpAnnotatedMessage.getProperties().getGroupId().compareTo(proposedPartitionKey) != 0) { final String message = String.format( "partitionKey:%s cannot be set to a different value than sessionId:%s.", proposedPartitionKey, amqpAnnotatedMessage.getProperties().getGroupId()); throw logger.logExceptionAsError(new IllegalArgumentException(message)); } } /** * A collection of properties populated by Azure Event Hubs service. 
*/ static class SystemProperties extends HashMap<String, Object> { private static final long serialVersionUID = -2827050124966993723L; private final Long offset; private final String partitionKey; private final Instant enqueuedTime; private final Long sequenceNumber; SystemProperties() { super(); offset = null; partitionKey = null; enqueuedTime = null; sequenceNumber = null; } SystemProperties(final Map<String, Object> map) { super(map); this.partitionKey = removeSystemProperty(PARTITION_KEY_ANNOTATION_NAME.getValue()); final Long offset = removeSystemProperty(OFFSET_ANNOTATION_NAME.getValue()); if (offset == null) { throw new IllegalStateException(String.format(Locale.US, "offset: %s should always be in map.", OFFSET_ANNOTATION_NAME.getValue())); } this.offset = offset; put(OFFSET_ANNOTATION_NAME.getValue(), this.offset); final Date enqueuedTimeValue = removeSystemProperty(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); if (enqueuedTimeValue == null) { throw new IllegalStateException(String.format(Locale.US, "enqueuedTime: %s should always be in map.", ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue())); } this.enqueuedTime = enqueuedTimeValue.toInstant(); put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), this.enqueuedTime); final Long sequenceNumber = removeSystemProperty(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); if (sequenceNumber == null) { throw new IllegalStateException(String.format(Locale.US, "sequenceNumber: %s should always be in map.", SEQUENCE_NUMBER_ANNOTATION_NAME.getValue())); } this.sequenceNumber = sequenceNumber; put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), this.sequenceNumber); } /** * Gets the offset within the Event Hubs stream. * * @return The offset within the Event Hubs stream. */ private Long getOffset() { return offset; } /** * Gets a partition key used for message partitioning. If it exists, this value was used to compute a hash to * select a partition to send the message to. * * @return A partition key for this Event Data. 
*/ private String getPartitionKey() { return partitionKey; } /** * Gets the time this event was enqueued in the Event Hub. * * @return The time this was enqueued in the service. */ private Instant getEnqueuedTime() { return enqueuedTime; } /** * Gets the sequence number in the event stream for this event. This is unique for every message received in the * Event Hub. * * @return Sequence number for this event. * @throws IllegalStateException if {@link SystemProperties} does not contain the sequence number in a retrieved * event. */ private Long getSequenceNumber() { return sequenceNumber; } @SuppressWarnings("unchecked") private <T> T removeSystemProperty(final String key) { if (this.containsKey(key)) { return (T) (this.remove(key)); } return null; } } }
class EventData { /* * These are properties owned by the service and set when a message is received. */ static final Set<String> RESERVED_SYSTEM_PROPERTIES; private final Map<String, Object> properties; private final SystemProperties systemProperties; private final AmqpAnnotatedMessage annotatedMessage; private Context context; static { final Set<String> properties = new HashSet<>(); properties.add(OFFSET_ANNOTATION_NAME.getValue()); properties.add(PARTITION_KEY_ANNOTATION_NAME.getValue()); properties.add(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); properties.add(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); properties.add(PUBLISHER_ANNOTATION_NAME.getValue()); RESERVED_SYSTEM_PROPERTIES = Collections.unmodifiableSet(properties); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(byte[] body) { this.context = Context.NONE; final AmqpMessageBody messageBody = AmqpMessageBody.fromData( Objects.requireNonNull(body, "'body' cannot be null.")); this.annotatedMessage = new AmqpAnnotatedMessage(messageBody); this.properties = annotatedMessage.getApplicationProperties(); this.systemProperties = new SystemProperties(); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(ByteBuffer body) { this(Objects.requireNonNull(body, "'body' cannot be null.").array()); } /** * Creates an event by encoding the {@code body} using UTF-8 charset. * * @param body The string that will be UTF-8 encoded to create an event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(String body) { this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8)); } /** * Creates an event with the provided {@link BinaryData} as payload. 
* * @param body The {@link BinaryData} payload for this event. */ public EventData(BinaryData body) { this(Objects.requireNonNull(body, "'body' cannot be null.").toBytes()); } /** * Creates an event with the given {@code body}, system properties and context. Used in the case where a message * is received from the service. * * @param context A specified key-value pair of type {@link Context}. * @param amqpAnnotatedMessage Backing annotated message. * * @throws NullPointerException if {@code amqpAnnotatedMessage} or {@code context} is {@code null}. * @throws IllegalArgumentException if {@code amqpAnnotatedMessage}'s body type is unknown. */ EventData(AmqpAnnotatedMessage amqpAnnotatedMessage, SystemProperties systemProperties, Context context) { this.context = Objects.requireNonNull(context, "'context' cannot be null."); this.properties = Collections.unmodifiableMap(amqpAnnotatedMessage.getApplicationProperties()); this.annotatedMessage = Objects.requireNonNull(amqpAnnotatedMessage, "'amqpAnnotatedMessage' cannot be null."); this.systemProperties = systemProperties; switch (annotatedMessage.getBody().getBodyType()) { case DATA: break; case SEQUENCE: case VALUE: new ClientLogger(EventData.class).warning("Message body type '{}' is not supported in EH. " + " Getting contents of body may throw.", annotatedMessage.getBody().getBodyType()); break; default: throw new ClientLogger(EventData.class).logExceptionAsError(new IllegalArgumentException( "Body type not valid " + annotatedMessage.getBody().getBodyType())); } } /** * Gets the set of free-form event properties which may be used for passing metadata associated with the event with * the event body during Event Hubs operations. 
A common use-case for {@code properties()} is to associate * serialization hints for the {@link * * <p><strong>Adding serialization hint using {@code getProperties()}</strong></p> * <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p> * * {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties} * * @return Application properties associated with this {@link EventData}. For received {@link EventData}, the map is * a read-only view. */ public Map<String, Object> getProperties() { return properties; } /** * Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are * only present on a <b>received</b> {@link EventData}. * * @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}. {@code * null} if the {@link EventData} is not received from the Event Hubs service. */ public Map<String, Object> getSystemProperties() { return systemProperties; } /** * Gets the actual payload/data wrapped by EventData. * * <p> * If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of * {@link * wish to deserialize the binary data. * </p> * * @return A byte array representing the data. */ public byte[] getBody() { return annotatedMessage.getBody().getFirstData(); } /** * Returns event data as UTF-8 decoded string. * * @return UTF-8 decoded string representation of the event data. */ public String getBodyAsString() { return new String(annotatedMessage.getBody().getFirstData(), UTF_8); } /** * Returns the {@link BinaryData} payload associated with this event. * * @return the {@link BinaryData} payload associated with this event. */ public BinaryData getBodyAsBinaryData() { return BinaryData.fromBytes(annotatedMessage.getBody().getFirstData()); } /** * Gets the offset of the event when it was received from the associated Event Hub partition. 
This is only present * on a <b>received</b> {@link EventData}. * * @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Long getOffset() { return systemProperties.getOffset(); } /** * Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was * used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b> * {@link EventData}. * * @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event * Hubs service or there was no partition key set when the event was sent to the Event Hub. */ public String getPartitionKey() { return systemProperties.getPartitionKey(); } /** * Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a * <b>received</b> {@link EventData}. * * @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Instant getEnqueuedTime() { return systemProperties.getEnqueuedTime(); } /** * Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This * is unique for every message received in the Event Hub partition. This is only present on a <b>received</b> {@link * EventData}. * * @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event * Hubs service. */ public Long getSequenceNumber() { return systemProperties.getSequenceNumber(); } /** * Gets the underlying AMQP message. * * @return The underlying AMQP message. */ public AmqpAnnotatedMessage getRawAmqpMessage() { return annotatedMessage; } /** * Gets the content type. * * @return The content type. 
*/ public String getContentType() { return annotatedMessage.getProperties().getContentType(); } /** * Sets the content type. * * @param contentType The content type. * * @return The updated {@link EventData}. */ public EventData setContentType(String contentType) { annotatedMessage.getProperties().setContentType(contentType); return this; } /** * Gets the correlation id. * * @return The correlation id. {@code null} if there is none set. */ public String getCorrelationId() { final AmqpMessageId messageId = annotatedMessage.getProperties().getCorrelationId(); return messageId != null ? messageId.toString() : null; } /** * Sets the correlation id. * * @param correlationId The correlation id. * * @return The updated {@link EventData}. */ public EventData setCorrelationId(String correlationId) { final AmqpMessageId id = correlationId != null ? new AmqpMessageId(correlationId) : null; annotatedMessage.getProperties().setCorrelationId(id); return this; } /** * Gets the message id. * * @return The message id. {@code null} if there is none set. */ public String getMessageId() { final AmqpMessageId messageId = annotatedMessage.getProperties().getMessageId(); return messageId != null ? messageId.toString() : null; } /** * Sets the message id. * * @param messageId The message id. * * @return The updated {@link EventData}. */ /** * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } EventData eventData = (EventData) o; return Arrays.equals(annotatedMessage.getBody().getFirstData(), eventData.annotatedMessage.getBody().getFirstData()); } /** * {@inheritDoc} */ @Override public int hashCode() { return Arrays.hashCode(annotatedMessage.getBody().getFirstData()); } /** * A specified key-value pair of type {@link Context} to set additional information on the event. 
* * @return the {@link Context} object set on the event */ Context getContext() { return context; } /** * Adds a new key value pair to the existing context on Event Data. * * @param key The key for this context object * @param value The value for this context object. * * @return The updated {@link EventData}. * * @throws NullPointerException if {@code key} or {@code value} is null. */ public EventData addContext(String key, Object value) { Objects.requireNonNull(key, "The 'key' parameter cannot be null."); Objects.requireNonNull(value, "The 'value' parameter cannot be null."); this.context = context.addData(key, value); return this; } }
According to your instructions, fix in new version.
public EventData setMessageId(String messageId) { checkIdLength("messageId", messageId, MAX_MESSAGE_ID_LENGTH); AmqpMessageId id = null; if (messageId != null) { id = new AmqpMessageId(messageId); } amqpAnnotatedMessage.getProperties().setMessageId(id); return this; }
checkIdLength("messageId", messageId, MAX_MESSAGE_ID_LENGTH);
public EventData setMessageId(String messageId) { final AmqpMessageId id = messageId != null ? new AmqpMessageId(messageId) : null; annotatedMessage.getProperties().setMessageId(id); return this; }
class EventData { private static final int MAX_MESSAGE_ID_LENGTH = 128; private static final int MAX_PARTITION_KEY_LENGTH = 128; private final BinaryData body; private final AmqpAnnotatedMessage amqpAnnotatedMessage; private final ClientLogger logger = new ClientLogger(EventData.class); private final SystemProperties systemProperties; private Context context; /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(byte[] body) { this(BinaryData.fromBytes(Objects.requireNonNull(body, "'body' cannot be null."))); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(ByteBuffer body) { this(Objects.requireNonNull(body, "'body' cannot be null.").array()); } /** * Creates an event by encoding the {@code body} using UTF-8 charset. * * @param body The string that will be UTF-8 encoded to create an event. * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(String body) { this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8)); } /** * Creates an event with the provided {@link BinaryData} as payload. * * @param body The {@link BinaryData} payload for this event. */ public EventData(BinaryData body) { this(body, new SystemProperties(), Context.NONE); } /** * Creates an event with the given {@code body}, system properties and context. * * @param body The data to set for this event. * @param systemProperties System properties set by message broker for this event. * @param context A specified key-value pair of type {@link Context}. * @throws NullPointerException if {@code body}, {@code systemProperties}, or {@code context} is {@code null}. 
*/ EventData(BinaryData body, SystemProperties systemProperties, Context context) { this.body = Objects.requireNonNull(body, "'body' cannot be null."); this.context = Objects.requireNonNull(context, "'context' cannot be null."); this.systemProperties = Objects.requireNonNull(systemProperties, "'systemProperties' cannot be null."); this.amqpAnnotatedMessage = new AmqpAnnotatedMessage(AmqpMessageBody.fromData(body.toBytes())); if (Objects.nonNull(this.systemProperties.getOffset())) { amqpAnnotatedMessage.getMessageAnnotations().put(OFFSET_ANNOTATION_NAME.getValue(), this.systemProperties.getOffset()); } if (Objects.nonNull(this.systemProperties.getSequenceNumber())) { amqpAnnotatedMessage.getMessageAnnotations().put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), this.systemProperties.getSequenceNumber()); } if (Objects.nonNull(this.systemProperties.getPartitionKey())) { String partitionKey = this.systemProperties.getPartitionKey(); checkIdLength("partitionKey", partitionKey, MAX_PARTITION_KEY_LENGTH); checkPartitionKey(partitionKey); amqpAnnotatedMessage.getMessageAnnotations().put(PARTITION_KEY_ANNOTATION_NAME.getValue(), partitionKey); } if (Objects.nonNull(this.systemProperties.getEnqueuedTime())) { amqpAnnotatedMessage.getMessageAnnotations().put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), this.systemProperties.getEnqueuedTime()); } } /** * Gets the set of free-form event properties which may be used for passing metadata associated with the event with * the event body during Event Hubs operations. A common use-case for {@code properties()} is to associate * serialization hints for the {@link * * <p><strong>Adding serialization hint using {@code getProperties()}</strong></p> * <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p> * * {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties} * * @return Application properties associated with this {@link EventData}. 
*/ public Map<String, Object> getProperties() { return amqpAnnotatedMessage.getApplicationProperties(); } /** * Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are * only present on a <b>received</b> {@link EventData}. * * @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}. * {@code null} if the {@link EventData} is not received from the Event Hubs service. */ public Map<String, Object> getSystemProperties() { return systemProperties; } /** * Gets the actual payload/data wrapped by EventData. * * <p> * If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of * {@link * wish to deserialize the binary data. * </p> * * @return A byte array representing the data. */ public byte[] getBody() { final AmqpMessageBodyType type = amqpAnnotatedMessage.getBody().getBodyType(); switch (type) { case DATA: return amqpAnnotatedMessage.getBody().getFirstData(); case SEQUENCE: case VALUE: throw logger.logExceptionAsError(new UnsupportedOperationException("Not supported AmqpBodyType: " + type.toString())); default: throw logger.logExceptionAsError(new IllegalArgumentException("Unknown AmqpBodyType: " + type.toString())); } } /** * Returns event data as UTF-8 decoded string. * * @return UTF-8 decoded string representation of the event data. */ public String getBodyAsString() { return new String(body.toBytes(), UTF_8); } /** * Returns the {@link BinaryData} payload associated with this event. * * @return the {@link BinaryData} payload associated with this event. */ public BinaryData getBodyAsBinaryData() { return body; } /** * Gets the content type of the message. * * <p> * Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5, * for example "application/json". * </p> * @return The content type of the {@link EventData}. 
*/ public String getContentType() { return amqpAnnotatedMessage.getProperties().getContentType(); } /** * Sets the content type of the {@link EventData}. * * <p> * Optionally describes the payload of the message, with a descriptor following the format of RFC2045, Section 5, * for example "application/json". * </p> * * @param contentType RFC2045 Content-Type descriptor of the message. * * @return The updated {@link EventData}. */ public EventData setContentType(String contentType) { amqpAnnotatedMessage.getProperties().setContentType(contentType); return this; } /** * Gets a correlation identifier. * <p> * Allows an application to specify a context for the message for the purposes of correlation, for example * reflecting the MessageId of a message that is being replied to. * </p> * * @return The correlation id of this message. */ public String getCorrelationId() { String correlationId = null; AmqpMessageId amqpCorrelationId = amqpAnnotatedMessage.getProperties().getCorrelationId(); if (amqpCorrelationId != null) { correlationId = amqpCorrelationId.toString(); } return correlationId; } /** * Sets a correlation identifier. * * @param correlationId correlation id of this message * * @return The updated {@link EventData}. * @see */ public EventData setCorrelationId(String correlationId) { AmqpMessageId id = null; if (correlationId != null) { id = new AmqpMessageId(correlationId); } amqpAnnotatedMessage.getProperties().setCorrelationId(id); return this; } /** * Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a * <b>received</b> {@link EventData}. * * @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Instant getEnqueuedTime() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); return value != null ? 
((Date) value).toInstant() : null; } /** * Gets the message id. * * <p> * The message identifier is an application-defined value that uniquely identifies the message and its payload. The * identifier is a free-form string and can reflect a GUID or an identifier derived from the application context. * </p> * * @return Id of the {@link EventData}. */ public String getMessageId() { String messageId = null; AmqpMessageId amqpMessageId = amqpAnnotatedMessage.getProperties().getMessageId(); if (amqpMessageId != null) { messageId = amqpMessageId.toString(); } return messageId; } /** * Sets the message id. * * @param messageId The message id to be set. * * @return The updated {@link EventData}. * @throws IllegalArgumentException if {@code messageId} is too long. */ /** * Gets the offset of the event when it was received from the associated Event Hub partition. This is only present * on a <b>received</b> {@link EventData}. * * @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Long getOffset() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(OFFSET_ANNOTATION_NAME.getValue()); return value != null ? (Long) value : null; } /** * Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was * used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b> * {@link EventData}. * * @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event * Hubs service or there was no partition key set when the event was sent to the Event Hub. */ public String getPartitionKey() { return (String) amqpAnnotatedMessage.getMessageAnnotations().get(PARTITION_KEY_ANNOTATION_NAME.getValue()); } /** * Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. 
This * is unique for every message received in the Event Hub partition. This is only present on a <b>received</b> * {@link EventData}. * * @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event * Hubs service. */ public Long getSequenceNumber() { Object value = amqpAnnotatedMessage.getMessageAnnotations().get(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); return value != null ? (Long) value : null; } /** * {@inheritDoc} */ @Override public int hashCode() { return Arrays.hashCode(body.toBytes()); } /** * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } EventData eventData = (EventData) o; return Arrays.equals(body.toBytes(), eventData.body.toBytes()); } /** * Gets the {@link AmqpAnnotatedMessage}. * * @return The raw AMQP message. */ public AmqpAnnotatedMessage getRawAmqpMessage() { return amqpAnnotatedMessage; } /** * A specified key-value pair of type {@link Context} to set additional information on the event. * * @return the {@link Context} object set on the event */ Context getContext() { return context; } /** * Adds a new key value pair to the existing context on Event Data. * * @param key The key for this context object * @param value The value for this context object. * @throws NullPointerException if {@code key} or {@code value} is null. * @return The updated {@link EventData}. */ EventData addContext(String key, Object value) { Objects.requireNonNull(key, "The 'key' parameter cannot be null."); Objects.requireNonNull(value, "The 'value' parameter cannot be null."); this.context = context.addData(key, value); return this; } /** * Checks the length of ID fields. * * Some fields within the message will cause a failure in the service without enough context information. 
*/ private void checkIdLength(String fieldName, String value, int maxLength) { if (value != null && value.length() > maxLength) { final String message = String.format("%s cannot be longer than %d characters.", fieldName, maxLength); throw logger.logExceptionAsError(new IllegalArgumentException(message)); } } /** * Validates that the user can't set the partitionKey to a different value than the session ID. (this will * eventually migrate to a service-side check) */ private void checkPartitionKey(String proposedPartitionKey) { if (proposedPartitionKey == null) { return; } if (amqpAnnotatedMessage.getProperties().getGroupId() != null && amqpAnnotatedMessage.getProperties().getGroupId().compareTo(proposedPartitionKey) != 0) { final String message = String.format( "partitionKey:%s cannot be set to a different value than sessionId:%s.", proposedPartitionKey, amqpAnnotatedMessage.getProperties().getGroupId()); throw logger.logExceptionAsError(new IllegalArgumentException(message)); } } /** * A collection of properties populated by Azure Event Hubs service. 
*/ static class SystemProperties extends HashMap<String, Object> { private static final long serialVersionUID = -2827050124966993723L; private final Long offset; private final String partitionKey; private final Instant enqueuedTime; private final Long sequenceNumber; SystemProperties() { super(); offset = null; partitionKey = null; enqueuedTime = null; sequenceNumber = null; } SystemProperties(final Map<String, Object> map) { super(map); this.partitionKey = removeSystemProperty(PARTITION_KEY_ANNOTATION_NAME.getValue()); final Long offset = removeSystemProperty(OFFSET_ANNOTATION_NAME.getValue()); if (offset == null) { throw new IllegalStateException(String.format(Locale.US, "offset: %s should always be in map.", OFFSET_ANNOTATION_NAME.getValue())); } this.offset = offset; put(OFFSET_ANNOTATION_NAME.getValue(), this.offset); final Date enqueuedTimeValue = removeSystemProperty(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); if (enqueuedTimeValue == null) { throw new IllegalStateException(String.format(Locale.US, "enqueuedTime: %s should always be in map.", ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue())); } this.enqueuedTime = enqueuedTimeValue.toInstant(); put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), this.enqueuedTime); final Long sequenceNumber = removeSystemProperty(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); if (sequenceNumber == null) { throw new IllegalStateException(String.format(Locale.US, "sequenceNumber: %s should always be in map.", SEQUENCE_NUMBER_ANNOTATION_NAME.getValue())); } this.sequenceNumber = sequenceNumber; put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), this.sequenceNumber); } /** * Gets the offset within the Event Hubs stream. * * @return The offset within the Event Hubs stream. */ private Long getOffset() { return offset; } /** * Gets a partition key used for message partitioning. If it exists, this value was used to compute a hash to * select a partition to send the message to. * * @return A partition key for this Event Data. 
*/ private String getPartitionKey() { return partitionKey; } /** * Gets the time this event was enqueued in the Event Hub. * * @return The time this was enqueued in the service. */ private Instant getEnqueuedTime() { return enqueuedTime; } /** * Gets the sequence number in the event stream for this event. This is unique for every message received in the * Event Hub. * * @return Sequence number for this event. * @throws IllegalStateException if {@link SystemProperties} does not contain the sequence number in a retrieved * event. */ private Long getSequenceNumber() { return sequenceNumber; } @SuppressWarnings("unchecked") private <T> T removeSystemProperty(final String key) { if (this.containsKey(key)) { return (T) (this.remove(key)); } return null; } } }
class EventData { /* * These are properties owned by the service and set when a message is received. */ static final Set<String> RESERVED_SYSTEM_PROPERTIES; private final Map<String, Object> properties; private final SystemProperties systemProperties; private final AmqpAnnotatedMessage annotatedMessage; private Context context; static { final Set<String> properties = new HashSet<>(); properties.add(OFFSET_ANNOTATION_NAME.getValue()); properties.add(PARTITION_KEY_ANNOTATION_NAME.getValue()); properties.add(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); properties.add(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); properties.add(PUBLISHER_ANNOTATION_NAME.getValue()); RESERVED_SYSTEM_PROPERTIES = Collections.unmodifiableSet(properties); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(byte[] body) { this.context = Context.NONE; final AmqpMessageBody messageBody = AmqpMessageBody.fromData( Objects.requireNonNull(body, "'body' cannot be null.")); this.annotatedMessage = new AmqpAnnotatedMessage(messageBody); this.properties = annotatedMessage.getApplicationProperties(); this.systemProperties = new SystemProperties(); } /** * Creates an event containing the {@code body}. * * @param body The data to set for this event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(ByteBuffer body) { this(Objects.requireNonNull(body, "'body' cannot be null.").array()); } /** * Creates an event by encoding the {@code body} using UTF-8 charset. * * @param body The string that will be UTF-8 encoded to create an event. * * @throws NullPointerException if {@code body} is {@code null}. */ public EventData(String body) { this(Objects.requireNonNull(body, "'body' cannot be null.").getBytes(UTF_8)); } /** * Creates an event with the provided {@link BinaryData} as payload. 
* * @param body The {@link BinaryData} payload for this event. */ public EventData(BinaryData body) { this(Objects.requireNonNull(body, "'body' cannot be null.").toBytes()); } /** * Creates an event with the given {@code body}, system properties and context. Used in the case where a message * is received from the service. * * @param context A specified key-value pair of type {@link Context}. * @param amqpAnnotatedMessage Backing annotated message. * * @throws NullPointerException if {@code amqpAnnotatedMessage} or {@code context} is {@code null}. * @throws IllegalArgumentException if {@code amqpAnnotatedMessage}'s body type is unknown. */ EventData(AmqpAnnotatedMessage amqpAnnotatedMessage, SystemProperties systemProperties, Context context) { this.context = Objects.requireNonNull(context, "'context' cannot be null."); this.properties = Collections.unmodifiableMap(amqpAnnotatedMessage.getApplicationProperties()); this.annotatedMessage = Objects.requireNonNull(amqpAnnotatedMessage, "'amqpAnnotatedMessage' cannot be null."); this.systemProperties = systemProperties; switch (annotatedMessage.getBody().getBodyType()) { case DATA: break; case SEQUENCE: case VALUE: new ClientLogger(EventData.class).warning("Message body type '{}' is not supported in EH. " + " Getting contents of body may throw.", annotatedMessage.getBody().getBodyType()); break; default: throw new ClientLogger(EventData.class).logExceptionAsError(new IllegalArgumentException( "Body type not valid " + annotatedMessage.getBody().getBodyType())); } } /** * Gets the set of free-form event properties which may be used for passing metadata associated with the event with * the event body during Event Hubs operations. 
A common use-case for {@code properties()} is to associate * serialization hints for the {@link * * <p><strong>Adding serialization hint using {@code getProperties()}</strong></p> * <p>In the sample, the type of telemetry is indicated by adding an application property with key "eventType".</p> * * {@codesnippet com.azure.messaging.eventhubs.eventdata.getProperties} * * @return Application properties associated with this {@link EventData}. For received {@link EventData}, the map is * a read-only view. */ public Map<String, Object> getProperties() { return properties; } /** * Properties that are populated by Event Hubs service. As these are populated by the Event Hubs service, they are * only present on a <b>received</b> {@link EventData}. * * @return An encapsulation of all system properties appended by EventHubs service into {@link EventData}. {@code * null} if the {@link EventData} is not received from the Event Hubs service. */ public Map<String, Object> getSystemProperties() { return systemProperties; } /** * Gets the actual payload/data wrapped by EventData. * * <p> * If the means for deserializing the raw data is not apparent to consumers, a common technique is to make use of * {@link * wish to deserialize the binary data. * </p> * * @return A byte array representing the data. */ public byte[] getBody() { return annotatedMessage.getBody().getFirstData(); } /** * Returns event data as UTF-8 decoded string. * * @return UTF-8 decoded string representation of the event data. */ public String getBodyAsString() { return new String(annotatedMessage.getBody().getFirstData(), UTF_8); } /** * Returns the {@link BinaryData} payload associated with this event. * * @return the {@link BinaryData} payload associated with this event. */ public BinaryData getBodyAsBinaryData() { return BinaryData.fromBytes(annotatedMessage.getBody().getFirstData()); } /** * Gets the offset of the event when it was received from the associated Event Hub partition. 
This is only present * on a <b>received</b> {@link EventData}. * * @return The offset within the Event Hub partition of the received event. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Long getOffset() { return systemProperties.getOffset(); } /** * Gets the partition hashing key if it was set when originally publishing the event. If it exists, this value was * used to compute a hash to select a partition to send the message to. This is only present on a <b>received</b> * {@link EventData}. * * @return A partition key for this Event Data. {@code null} if the {@link EventData} was not received from Event * Hubs service or there was no partition key set when the event was sent to the Event Hub. */ public String getPartitionKey() { return systemProperties.getPartitionKey(); } /** * Gets the instant, in UTC, of when the event was enqueued in the Event Hub partition. This is only present on a * <b>received</b> {@link EventData}. * * @return The instant, in UTC, this was enqueued in the Event Hub partition. {@code null} if the {@link EventData} * was not received from Event Hubs service. */ public Instant getEnqueuedTime() { return systemProperties.getEnqueuedTime(); } /** * Gets the sequence number assigned to the event when it was enqueued in the associated Event Hub partition. This * is unique for every message received in the Event Hub partition. This is only present on a <b>received</b> {@link * EventData}. * * @return The sequence number for this event. {@code null} if the {@link EventData} was not received from Event * Hubs service. */ public Long getSequenceNumber() { return systemProperties.getSequenceNumber(); } /** * Gets the underlying AMQP message. * * @return The underlying AMQP message. */ public AmqpAnnotatedMessage getRawAmqpMessage() { return annotatedMessage; } /** * Gets the content type. * * @return The content type. 
*/ public String getContentType() { return annotatedMessage.getProperties().getContentType(); } /** * Sets the content type. * * @param contentType The content type. * * @return The updated {@link EventData}. */ public EventData setContentType(String contentType) { annotatedMessage.getProperties().setContentType(contentType); return this; } /** * Gets the correlation id. * * @return The correlation id. {@code null} if there is none set. */ public String getCorrelationId() { final AmqpMessageId messageId = annotatedMessage.getProperties().getCorrelationId(); return messageId != null ? messageId.toString() : null; } /** * Sets the correlation id. * * @param correlationId The correlation id. * * @return The updated {@link EventData}. */ public EventData setCorrelationId(String correlationId) { final AmqpMessageId id = correlationId != null ? new AmqpMessageId(correlationId) : null; annotatedMessage.getProperties().setCorrelationId(id); return this; } /** * Gets the message id. * * @return The message id. {@code null} if there is none set. */ public String getMessageId() { final AmqpMessageId messageId = annotatedMessage.getProperties().getMessageId(); return messageId != null ? messageId.toString() : null; } /** * Sets the message id. * * @param messageId The message id. * * @return The updated {@link EventData}. */ /** * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } EventData eventData = (EventData) o; return Arrays.equals(annotatedMessage.getBody().getFirstData(), eventData.annotatedMessage.getBody().getFirstData()); } /** * {@inheritDoc} */ @Override public int hashCode() { return Arrays.hashCode(annotatedMessage.getBody().getFirstData()); } /** * A specified key-value pair of type {@link Context} to set additional information on the event. 
* * @return the {@link Context} object set on the event */ Context getContext() { return context; } /** * Adds a new key value pair to the existing context on Event Data. * * @param key The key for this context object * @param value The value for this context object. * * @return The updated {@link EventData}. * * @throws NullPointerException if {@code key} or {@code value} is null. */ public EventData addContext(String key, Object value) { Objects.requireNonNull(key, "The 'key' parameter cannot be null."); Objects.requireNonNull(value, "The 'value' parameter cannot be null."); this.context = context.addData(key, value); return this; } }
Do we need `.then(Mono.empty())`? If so, then use `then()`?
private Mono<Void> updateDispositionInternal(String lockToken, DeliveryState deliveryState) { final Delivery unsettled = unsettledDeliveries.get(lockToken); if (unsettled == null) { logger.warning("entityPath[{}], linkName[{}], deliveryTag[{}]. Delivery not found to update disposition.", getEntityPath(), getLinkName(), lockToken); return monoError(logger, Exceptions.propagate(new IllegalArgumentException( "Delivery not on receive link."))); } final UpdateDispositionWorkItem workItem = new UpdateDispositionWorkItem(lockToken, deliveryState, timeout); final Mono<Void> result = Mono.create(sink -> { workItem.start(sink); try { provider.getReactorDispatcher().invoke(() -> { unsettled.disposition(deliveryState); pendingUpdates.put(lockToken, workItem); }); } catch (IOException error) { sink.error(new AmqpException(false, "updateDisposition failed while dispatching to Reactor.", error, handler.getErrorContext(receiver))); } }).cache().then(Mono.empty()); workItem.setMono(result); return result; }
}).cache().then(Mono.empty());
private Mono<Void> updateDispositionInternal(String lockToken, DeliveryState deliveryState) { final Delivery unsettled = unsettledDeliveries.get(lockToken); if (unsettled == null) { logger.warning("entityPath[{}], linkName[{}], deliveryTag[{}]. Delivery not found to update disposition.", getEntityPath(), getLinkName(), lockToken); return monoError(logger, Exceptions.propagate(new IllegalArgumentException( "Delivery not on receive link."))); } final UpdateDispositionWorkItem workItem = new UpdateDispositionWorkItem(lockToken, deliveryState, timeout); final Mono<Void> result = Mono.<Void>create(sink -> { workItem.start(sink); try { provider.getReactorDispatcher().invoke(() -> { unsettled.disposition(deliveryState); pendingUpdates.put(lockToken, workItem); }); } catch (IOException error) { sink.error(new AmqpException(false, "updateDisposition failed while dispatching to Reactor.", error, handler.getErrorContext(receiver))); } }).cache(); workItem.setMono(result); return result; }
class ServiceBusReactorReceiver extends ReactorReceiver implements ServiceBusReceiveLink { private static final Message EMPTY_MESSAGE = Proton.message(); private final ClientLogger logger = new ClientLogger(ServiceBusReactorReceiver.class); private final ConcurrentHashMap<String, Delivery> unsettledDeliveries = new ConcurrentHashMap<>(); private final ConcurrentHashMap<String, UpdateDispositionWorkItem> pendingUpdates = new ConcurrentHashMap<>(); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final Disposable subscription; private final Receiver receiver; /** * Indicates whether the message has already been settled from the sender side. This is the case when {@link * ServiceBusReceiveMode */ private final boolean isSettled; private final Duration timeout; private final AmqpRetryPolicy retryPolicy; private final ReceiveLinkHandler handler; private final ReactorProvider provider; private final Mono<String> sessionIdMono; private final Mono<OffsetDateTime> sessionLockedUntil; public ServiceBusReactorReceiver(AmqpConnection connection, String entityPath, Receiver receiver, ReceiveLinkHandler handler, TokenManager tokenManager, ReactorProvider provider, Duration timeout, AmqpRetryPolicy retryPolicy) { super(connection, entityPath, receiver, handler, tokenManager, provider.getReactorDispatcher(), retryPolicy.getRetryOptions()); this.receiver = receiver; this.handler = handler; this.provider = provider; this.isSettled = receiver.getSenderSettleMode() == SenderSettleMode.SETTLED; this.timeout = timeout; this.retryPolicy = retryPolicy; this.subscription = Flux.interval(timeout).subscribe(i -> cleanupWorkItems()); this.sessionIdMono = getEndpointStates().filter(x -> x == AmqpEndpointState.ACTIVE) .next() .flatMap(state -> { @SuppressWarnings("unchecked") final Map<Symbol, Object> remoteSource = ((Source) receiver.getRemoteSource()).getFilter(); final Object value = remoteSource.get(SESSION_FILTER); if (value == null) { logger.info("entityPath[{}], 
linkName[{}]. There is no session id.", entityPath, getLinkName()); return Mono.empty(); } final String actualSessionId = String.valueOf(value); return Mono.just(actualSessionId); }) .cache(value -> Duration.ofMillis(Long.MAX_VALUE), error -> Duration.ZERO, () -> Duration.ZERO); this.sessionLockedUntil = getEndpointStates().filter(x -> x == AmqpEndpointState.ACTIVE) .next() .map(state -> { if (receiver.getRemoteProperties() != null && receiver.getRemoteProperties().containsKey(LOCKED_UNTIL_UTC)) { final long ticks = (long) receiver.getRemoteProperties().get(LOCKED_UNTIL_UTC); return MessageUtils.convertDotNetTicksToOffsetDateTime(ticks); } else { logger.info("entityPath[{}], linkName[{}]. Locked until not set.", entityPath, getLinkName()); return Instant.EPOCH.atOffset(ZoneOffset.UTC); } }) .cache(value -> Duration.ofMillis(Long.MAX_VALUE), error -> Duration.ZERO, () -> Duration.ZERO); } @Override public Mono<Void> updateDisposition(String lockToken, DeliveryState deliveryState) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException("Cannot perform operations on a disposed receiver.")); } return updateDispositionInternal(lockToken, deliveryState); } @Override public Flux<Message> receive() { return super.receive() .filter(message -> message != EMPTY_MESSAGE) .publishOn(Schedulers.boundedElastic()); } @Override public Mono<String> getSessionId() { return sessionIdMono; } @Override public Mono<OffsetDateTime> getSessionLockedUntil() { return sessionLockedUntil; } @Override public Mono<Void> closeAsync() { if (isDisposed.getAndSet(true)) { return super.closeAsync(); } cleanupWorkItems(); final Mono<Void> disposeMono; if (!pendingUpdates.isEmpty()) { final List<Mono<Void>> pending = new ArrayList<>(); final StringJoiner builder = new StringJoiner(", "); for (UpdateDispositionWorkItem workItem : pendingUpdates.values()) { if (workItem.hasTimedout()) { continue; } if (workItem.getDeliveryState() instanceof TransactionalState) { 
pending.add(updateDispositionInternal(workItem.getLockToken(), Released.getInstance())); } else { pending.add(workItem.getMono()); } builder.add(workItem.getLockToken()); } logger.info("Waiting for pending updates to complete. Locks: {}", builder.toString()); disposeMono = Mono.when(pending); } else { disposeMono = Mono.empty(); } return disposeMono.onErrorResume(error -> { logger.info("There was an exception while disposing of all links.", error); return Mono.empty(); }).doFinally(signal -> subscription.dispose()).then(super.closeAsync()); } @Override protected Message decodeDelivery(Delivery delivery) { final byte[] deliveryTag = delivery.getTag(); final UUID lockToken; if (deliveryTag != null && deliveryTag.length == LOCK_TOKEN_SIZE) { lockToken = MessageUtils.convertDotNetBytesToUUID(deliveryTag); } else { lockToken = MessageUtils.ZERO_LOCK_TOKEN; } final String lockTokenString = lockToken.toString(); if (lockToken == MessageUtils.ZERO_LOCK_TOKEN || !unsettledDeliveries.containsKey(lockTokenString)) { final int messageSize = delivery.pending(); final byte[] buffer = new byte[messageSize]; final int read = receiver.recv(buffer, 0, messageSize); final Message message = Proton.message(); message.decode(buffer, 0, read); if (isSettled) { delivery.disposition(Accepted.getInstance()); delivery.settle(); } else { unsettledDeliveries.putIfAbsent(lockToken.toString(), delivery); receiver.advance(); } return new MessageWithLockToken(message, lockToken); } else { updateOutcome(lockTokenString, delivery); return EMPTY_MESSAGE; } } /** * Updates the outcome of a delivery. This occurs when a message is being settled from the receiver side. * @param delivery Delivery to update. 
*/ private void updateOutcome(String lockToken, Delivery delivery) { final DeliveryState remoteState = delivery.getRemoteState(); logger.verbose("entityPath[{}], linkName[{}], deliveryTag[{}], state[{}] Received update disposition delivery.", getEntityPath(), getLinkName(), lockToken, remoteState); final Outcome remoteOutcome; if (remoteState instanceof Outcome) { remoteOutcome = (Outcome) remoteState; } else if (remoteState instanceof TransactionalState) { remoteOutcome = ((TransactionalState) remoteState).getOutcome(); } else { remoteOutcome = null; } if (remoteOutcome == null) { logger.warning("linkName[{}], deliveryTag[{}]. No outcome associated with delivery. Delivery: {}", getLinkName(), lockToken, delivery); return; } final UpdateDispositionWorkItem workItem = pendingUpdates.get(lockToken); if (workItem == null) { logger.warning("linkName[{}], deliveryTag[{}]. No pending update for delivery. Delivery: {}", getLinkName(), lockToken, delivery); return; } if (remoteState.getType() == workItem.getDeliveryState().getType()) { completeWorkItem(lockToken, delivery, workItem.getSink(), null); return; } logger.info("Received delivery '{}' state '{}' doesn't match expected state '{}'", lockToken, remoteState, workItem.getDeliveryState()); switch (remoteState.getType()) { case Rejected: final Rejected rejected = (Rejected) remoteOutcome; final ErrorCondition errorCondition = rejected.getError(); final Throwable exception = ExceptionUtil.toException(errorCondition.getCondition().toString(), errorCondition.getDescription(), handler.getErrorContext(receiver)); final Duration retry = retryPolicy.calculateRetryDelay(exception, workItem.incrementRetry()); if (retry == null) { logger.info("deliveryTag[{}], state[{}]. 
Retry attempts exhausted.", lockToken, exception); completeWorkItem(lockToken, delivery, workItem.getSink(), exception); } else { workItem.setLastException(exception); workItem.resetStartTime(); try { provider.getReactorDispatcher().invoke(() -> delivery.disposition(workItem.getDeliveryState())); } catch (IOException error) { final Throwable amqpException = logger.logExceptionAsError(new AmqpException(false, "linkName[%s], deliveryTag[%s]. Retrying updateDisposition failed to dispatch to Reactor.", error, handler.getErrorContext(receiver))); completeWorkItem(lockToken, delivery, workItem.getSink(), amqpException); } } break; case Released: final Throwable cancelled = new AmqpException(false, AmqpErrorCondition.OPERATION_CANCELLED, "AMQP layer unexpectedly aborted or disconnected.", handler.getErrorContext(receiver)); logger.info("deliveryTag[{}], state[{}]. Completing pending updateState operation with exception.", lockToken, remoteState.getType(), cancelled); completeWorkItem(lockToken, delivery, workItem.getSink(), cancelled); break; default: final AmqpException error = new AmqpException(false, remoteOutcome.toString(), handler.getErrorContext(receiver)); logger.info("deliveryTag[{}], state[{}] Completing pending updateState operation with exception.", lockToken, remoteState.getType(), error); completeWorkItem(lockToken, delivery, workItem.getSink(), error); break; } } private void cleanupWorkItems() { logger.verbose("linkName[{}]: Cleaning timed out update work tasks.", getLinkName()); pendingUpdates.forEach((key, value) -> { if (value == null || !value.hasTimedout()) { return; } pendingUpdates.remove(key); final Throwable error = value.getLastException() != null ? 
value.getLastException() : new AmqpException(true, AmqpErrorCondition.TIMEOUT_ERROR, "Update disposition request timed out.", handler.getErrorContext(receiver)); completeWorkItem(key, null, value.getSink(), error); }); } private void completeWorkItem(String lockToken, Delivery delivery, MonoSink<Object> sink, Throwable error) { final boolean isSettled = delivery != null && delivery.remotelySettled(); if (isSettled) { delivery.settle(); } if (error != null) { final Throwable loggedError = error instanceof RuntimeException ? logger.logExceptionAsError((RuntimeException) error) : error; sink.error(loggedError); } else { sink.success(); } if (isSettled) { pendingUpdates.remove(lockToken); unsettledDeliveries.remove(lockToken); } } private static final class UpdateDispositionWorkItem { private final String lockToken; private final DeliveryState state; private final Duration timeout; private final AtomicInteger retryAttempts = new AtomicInteger(); private final AtomicBoolean isDisposed = new AtomicBoolean(); private Mono<Void> mono; private Instant expirationTime; private MonoSink<Object> sink; private Throwable throwable; private UpdateDispositionWorkItem(String lockToken, DeliveryState state, Duration timeout) { this.lockToken = lockToken; this.state = state; this.timeout = timeout; } private boolean hasTimedout() { return expirationTime.isBefore(Instant.now()); } private void resetStartTime() { this.expirationTime = Instant.now().plus(timeout); } private int incrementRetry() { return retryAttempts.incrementAndGet(); } private Throwable getLastException() { return throwable; } private void setLastException(Throwable throwable) { this.throwable = throwable; } private void setMono(Mono<Void> mono) { this.mono = mono; } private Mono<Void> getMono() { return mono; } private MonoSink<Object> getSink() { return sink; } private void start(MonoSink<Object> sink) { Objects.requireNonNull(sink, "'sink' cannot be null."); this.sink = sink; this.sink.onDispose(() -> 
isDisposed.set(true)); this.sink.onCancel(() -> isDisposed.set(true)); resetStartTime(); } private DeliveryState getDeliveryState() { return state; } public String getLockToken() { return lockToken; } } }
class ServiceBusReactorReceiver extends ReactorReceiver implements ServiceBusReceiveLink { private static final Message EMPTY_MESSAGE = Proton.message(); private final ClientLogger logger = new ClientLogger(ServiceBusReactorReceiver.class); private final ConcurrentHashMap<String, Delivery> unsettledDeliveries = new ConcurrentHashMap<>(); private final ConcurrentHashMap<String, UpdateDispositionWorkItem> pendingUpdates = new ConcurrentHashMap<>(); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final Disposable subscription; private final Receiver receiver; /** * Indicates whether the message has already been settled from the sender side. This is the case when {@link * ServiceBusReceiveMode */ private final boolean isSettled; private final Duration timeout; private final AmqpRetryPolicy retryPolicy; private final ReceiveLinkHandler handler; private final ReactorProvider provider; private final Mono<String> sessionIdMono; private final Mono<OffsetDateTime> sessionLockedUntil; public ServiceBusReactorReceiver(AmqpConnection connection, String entityPath, Receiver receiver, ReceiveLinkHandler handler, TokenManager tokenManager, ReactorProvider provider, Duration timeout, AmqpRetryPolicy retryPolicy) { super(connection, entityPath, receiver, handler, tokenManager, provider.getReactorDispatcher(), retryPolicy.getRetryOptions()); this.receiver = receiver; this.handler = handler; this.provider = provider; this.isSettled = receiver.getSenderSettleMode() == SenderSettleMode.SETTLED; this.timeout = timeout; this.retryPolicy = retryPolicy; this.subscription = Flux.interval(timeout).subscribe(i -> cleanupWorkItems()); this.sessionIdMono = getEndpointStates().filter(x -> x == AmqpEndpointState.ACTIVE) .next() .flatMap(state -> { @SuppressWarnings("unchecked") final Map<Symbol, Object> remoteSource = ((Source) receiver.getRemoteSource()).getFilter(); final Object value = remoteSource.get(SESSION_FILTER); if (value == null) { logger.info("entityPath[{}], 
linkName[{}]. There is no session id.", entityPath, getLinkName()); return Mono.empty(); } final String actualSessionId = String.valueOf(value); return Mono.just(actualSessionId); }) .cache(value -> Duration.ofMillis(Long.MAX_VALUE), error -> Duration.ZERO, () -> Duration.ZERO); this.sessionLockedUntil = getEndpointStates().filter(x -> x == AmqpEndpointState.ACTIVE) .next() .map(state -> { if (receiver.getRemoteProperties() != null && receiver.getRemoteProperties().containsKey(LOCKED_UNTIL_UTC)) { final long ticks = (long) receiver.getRemoteProperties().get(LOCKED_UNTIL_UTC); return MessageUtils.convertDotNetTicksToOffsetDateTime(ticks); } else { logger.info("entityPath[{}], linkName[{}]. Locked until not set.", entityPath, getLinkName()); return Instant.EPOCH.atOffset(ZoneOffset.UTC); } }) .cache(value -> Duration.ofMillis(Long.MAX_VALUE), error -> Duration.ZERO, () -> Duration.ZERO); } @Override public Mono<Void> updateDisposition(String lockToken, DeliveryState deliveryState) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException("Cannot perform operations on a disposed receiver.")); } return updateDispositionInternal(lockToken, deliveryState); } @Override public Flux<Message> receive() { return super.receive() .filter(message -> message != EMPTY_MESSAGE) .publishOn(Schedulers.boundedElastic()); } @Override public Mono<String> getSessionId() { return sessionIdMono; } @Override public Mono<OffsetDateTime> getSessionLockedUntil() { return sessionLockedUntil; } @Override public Mono<Void> closeAsync() { if (isDisposed.getAndSet(true)) { return super.closeAsync(); } cleanupWorkItems(); final Mono<Void> disposeMono; if (!pendingUpdates.isEmpty()) { final List<Mono<Void>> pending = new ArrayList<>(); final StringJoiner builder = new StringJoiner(", "); for (UpdateDispositionWorkItem workItem : pendingUpdates.values()) { if (workItem.hasTimedout()) { continue; } if (workItem.getDeliveryState() instanceof TransactionalState) { 
pending.add(updateDispositionInternal(workItem.getLockToken(), Released.getInstance())); } else { pending.add(workItem.getMono()); } builder.add(workItem.getLockToken()); } logger.info("Waiting for pending updates to complete. Locks: {}", builder.toString()); disposeMono = Mono.when(pending); } else { disposeMono = Mono.empty(); } return disposeMono.onErrorResume(error -> { logger.info("There was an exception while disposing of all links.", error); return Mono.empty(); }).doFinally(signal -> subscription.dispose()).then(super.closeAsync()); } @Override protected Message decodeDelivery(Delivery delivery) { final byte[] deliveryTag = delivery.getTag(); final UUID lockToken; if (deliveryTag != null && deliveryTag.length == LOCK_TOKEN_SIZE) { lockToken = MessageUtils.convertDotNetBytesToUUID(deliveryTag); } else { lockToken = MessageUtils.ZERO_LOCK_TOKEN; } final String lockTokenString = lockToken.toString(); if (lockToken == MessageUtils.ZERO_LOCK_TOKEN || !unsettledDeliveries.containsKey(lockTokenString)) { final int messageSize = delivery.pending(); final byte[] buffer = new byte[messageSize]; final int read = receiver.recv(buffer, 0, messageSize); final Message message = Proton.message(); message.decode(buffer, 0, read); if (isSettled) { delivery.disposition(Accepted.getInstance()); delivery.settle(); } else { unsettledDeliveries.putIfAbsent(lockToken.toString(), delivery); receiver.advance(); } return new MessageWithLockToken(message, lockToken); } else { updateOutcome(lockTokenString, delivery); return EMPTY_MESSAGE; } } /** * Updates the outcome of a delivery. This occurs when a message is being settled from the receiver side. * @param delivery Delivery to update. 
*/ private void updateOutcome(String lockToken, Delivery delivery) { final DeliveryState remoteState = delivery.getRemoteState(); logger.verbose("entityPath[{}], linkName[{}], deliveryTag[{}], state[{}] Received update disposition delivery.", getEntityPath(), getLinkName(), lockToken, remoteState); final Outcome remoteOutcome; if (remoteState instanceof Outcome) { remoteOutcome = (Outcome) remoteState; } else if (remoteState instanceof TransactionalState) { remoteOutcome = ((TransactionalState) remoteState).getOutcome(); } else { remoteOutcome = null; } if (remoteOutcome == null) { logger.warning("linkName[{}], deliveryTag[{}]. No outcome associated with delivery. Delivery: {}", getLinkName(), lockToken, delivery); return; } final UpdateDispositionWorkItem workItem = pendingUpdates.get(lockToken); if (workItem == null) { logger.warning("linkName[{}], deliveryTag[{}]. No pending update for delivery. Delivery: {}", getLinkName(), lockToken, delivery); return; } if (remoteState.getType() == workItem.getDeliveryState().getType()) { completeWorkItem(lockToken, delivery, workItem.getSink(), null); return; } logger.info("Received delivery '{}' state '{}' doesn't match expected state '{}'", lockToken, remoteState, workItem.getDeliveryState()); switch (remoteState.getType()) { case Rejected: final Rejected rejected = (Rejected) remoteOutcome; final ErrorCondition errorCondition = rejected.getError(); final Throwable exception = ExceptionUtil.toException(errorCondition.getCondition().toString(), errorCondition.getDescription(), handler.getErrorContext(receiver)); final Duration retry = retryPolicy.calculateRetryDelay(exception, workItem.incrementRetry()); if (retry == null) { logger.info("deliveryTag[{}], state[{}]. 
Retry attempts exhausted.", lockToken, exception); completeWorkItem(lockToken, delivery, workItem.getSink(), exception); } else { workItem.setLastException(exception); workItem.resetStartTime(); try { provider.getReactorDispatcher().invoke(() -> delivery.disposition(workItem.getDeliveryState())); } catch (IOException error) { final Throwable amqpException = logger.logExceptionAsError(new AmqpException(false, "linkName[%s], deliveryTag[%s]. Retrying updateDisposition failed to dispatch to Reactor.", error, handler.getErrorContext(receiver))); completeWorkItem(lockToken, delivery, workItem.getSink(), amqpException); } } break; case Released: final Throwable cancelled = new AmqpException(false, AmqpErrorCondition.OPERATION_CANCELLED, "AMQP layer unexpectedly aborted or disconnected.", handler.getErrorContext(receiver)); logger.info("deliveryTag[{}], state[{}]. Completing pending updateState operation with exception.", lockToken, remoteState.getType(), cancelled); completeWorkItem(lockToken, delivery, workItem.getSink(), cancelled); break; default: final AmqpException error = new AmqpException(false, remoteOutcome.toString(), handler.getErrorContext(receiver)); logger.info("deliveryTag[{}], state[{}] Completing pending updateState operation with exception.", lockToken, remoteState.getType(), error); completeWorkItem(lockToken, delivery, workItem.getSink(), error); break; } } private void cleanupWorkItems() { logger.verbose("linkName[{}]: Cleaning timed out update work tasks.", getLinkName()); pendingUpdates.forEach((key, value) -> { if (value == null || !value.hasTimedout()) { return; } pendingUpdates.remove(key); final Throwable error = value.getLastException() != null ? 
value.getLastException() : new AmqpException(true, AmqpErrorCondition.TIMEOUT_ERROR, "Update disposition request timed out.", handler.getErrorContext(receiver)); completeWorkItem(key, null, value.getSink(), error); }); } private void completeWorkItem(String lockToken, Delivery delivery, MonoSink<Void> sink, Throwable error) { final boolean isSettled = delivery != null && delivery.remotelySettled(); if (isSettled) { delivery.settle(); } if (error != null) { final Throwable loggedError = error instanceof RuntimeException ? logger.logExceptionAsError((RuntimeException) error) : error; sink.error(loggedError); } else { sink.success(); } if (isSettled) { pendingUpdates.remove(lockToken); unsettledDeliveries.remove(lockToken); } } private static final class UpdateDispositionWorkItem { private final String lockToken; private final DeliveryState state; private final Duration timeout; private final AtomicInteger retryAttempts = new AtomicInteger(); private final AtomicBoolean isDisposed = new AtomicBoolean(); private Mono<Void> mono; private Instant expirationTime; private MonoSink<Void> sink; private Throwable throwable; private UpdateDispositionWorkItem(String lockToken, DeliveryState state, Duration timeout) { this.lockToken = lockToken; this.state = state; this.timeout = timeout; } private boolean hasTimedout() { return expirationTime.isBefore(Instant.now()); } private void resetStartTime() { this.expirationTime = Instant.now().plus(timeout); } private int incrementRetry() { return retryAttempts.incrementAndGet(); } private Throwable getLastException() { return throwable; } private void setLastException(Throwable throwable) { this.throwable = throwable; } private void setMono(Mono<Void> mono) { this.mono = mono; } private Mono<Void> getMono() { return mono; } private MonoSink<Void> getSink() { return sink; } private void start(MonoSink<Void> sink) { Objects.requireNonNull(sink, "'sink' cannot be null."); this.sink = sink; this.sink.onDispose(() -> isDisposed.set(true)); 
this.sink.onCancel(() -> isDisposed.set(true)); resetStartTime(); } private DeliveryState getDeliveryState() { return state; } public String getLockToken() { return lockToken; } } }
Good catch. Mono.empty() is not required here.
private Mono<Void> updateDispositionInternal(String lockToken, DeliveryState deliveryState) { final Delivery unsettled = unsettledDeliveries.get(lockToken); if (unsettled == null) { logger.warning("entityPath[{}], linkName[{}], deliveryTag[{}]. Delivery not found to update disposition.", getEntityPath(), getLinkName(), lockToken); return monoError(logger, Exceptions.propagate(new IllegalArgumentException( "Delivery not on receive link."))); } final UpdateDispositionWorkItem workItem = new UpdateDispositionWorkItem(lockToken, deliveryState, timeout); final Mono<Void> result = Mono.create(sink -> { workItem.start(sink); try { provider.getReactorDispatcher().invoke(() -> { unsettled.disposition(deliveryState); pendingUpdates.put(lockToken, workItem); }); } catch (IOException error) { sink.error(new AmqpException(false, "updateDisposition failed while dispatching to Reactor.", error, handler.getErrorContext(receiver))); } }).cache().then(Mono.empty()); workItem.setMono(result); return result; }
}).cache().then(Mono.empty());
private Mono<Void> updateDispositionInternal(String lockToken, DeliveryState deliveryState) { final Delivery unsettled = unsettledDeliveries.get(lockToken); if (unsettled == null) { logger.warning("entityPath[{}], linkName[{}], deliveryTag[{}]. Delivery not found to update disposition.", getEntityPath(), getLinkName(), lockToken); return monoError(logger, Exceptions.propagate(new IllegalArgumentException( "Delivery not on receive link."))); } final UpdateDispositionWorkItem workItem = new UpdateDispositionWorkItem(lockToken, deliveryState, timeout); final Mono<Void> result = Mono.<Void>create(sink -> { workItem.start(sink); try { provider.getReactorDispatcher().invoke(() -> { unsettled.disposition(deliveryState); pendingUpdates.put(lockToken, workItem); }); } catch (IOException error) { sink.error(new AmqpException(false, "updateDisposition failed while dispatching to Reactor.", error, handler.getErrorContext(receiver))); } }).cache(); workItem.setMono(result); return result; }
class ServiceBusReactorReceiver extends ReactorReceiver implements ServiceBusReceiveLink { private static final Message EMPTY_MESSAGE = Proton.message(); private final ClientLogger logger = new ClientLogger(ServiceBusReactorReceiver.class); private final ConcurrentHashMap<String, Delivery> unsettledDeliveries = new ConcurrentHashMap<>(); private final ConcurrentHashMap<String, UpdateDispositionWorkItem> pendingUpdates = new ConcurrentHashMap<>(); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final Disposable subscription; private final Receiver receiver; /** * Indicates whether the message has already been settled from the sender side. This is the case when {@link * ServiceBusReceiveMode */ private final boolean isSettled; private final Duration timeout; private final AmqpRetryPolicy retryPolicy; private final ReceiveLinkHandler handler; private final ReactorProvider provider; private final Mono<String> sessionIdMono; private final Mono<OffsetDateTime> sessionLockedUntil; public ServiceBusReactorReceiver(AmqpConnection connection, String entityPath, Receiver receiver, ReceiveLinkHandler handler, TokenManager tokenManager, ReactorProvider provider, Duration timeout, AmqpRetryPolicy retryPolicy) { super(connection, entityPath, receiver, handler, tokenManager, provider.getReactorDispatcher(), retryPolicy.getRetryOptions()); this.receiver = receiver; this.handler = handler; this.provider = provider; this.isSettled = receiver.getSenderSettleMode() == SenderSettleMode.SETTLED; this.timeout = timeout; this.retryPolicy = retryPolicy; this.subscription = Flux.interval(timeout).subscribe(i -> cleanupWorkItems()); this.sessionIdMono = getEndpointStates().filter(x -> x == AmqpEndpointState.ACTIVE) .next() .flatMap(state -> { @SuppressWarnings("unchecked") final Map<Symbol, Object> remoteSource = ((Source) receiver.getRemoteSource()).getFilter(); final Object value = remoteSource.get(SESSION_FILTER); if (value == null) { logger.info("entityPath[{}], 
linkName[{}]. There is no session id.", entityPath, getLinkName()); return Mono.empty(); } final String actualSessionId = String.valueOf(value); return Mono.just(actualSessionId); }) .cache(value -> Duration.ofMillis(Long.MAX_VALUE), error -> Duration.ZERO, () -> Duration.ZERO); this.sessionLockedUntil = getEndpointStates().filter(x -> x == AmqpEndpointState.ACTIVE) .next() .map(state -> { if (receiver.getRemoteProperties() != null && receiver.getRemoteProperties().containsKey(LOCKED_UNTIL_UTC)) { final long ticks = (long) receiver.getRemoteProperties().get(LOCKED_UNTIL_UTC); return MessageUtils.convertDotNetTicksToOffsetDateTime(ticks); } else { logger.info("entityPath[{}], linkName[{}]. Locked until not set.", entityPath, getLinkName()); return Instant.EPOCH.atOffset(ZoneOffset.UTC); } }) .cache(value -> Duration.ofMillis(Long.MAX_VALUE), error -> Duration.ZERO, () -> Duration.ZERO); } @Override public Mono<Void> updateDisposition(String lockToken, DeliveryState deliveryState) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException("Cannot perform operations on a disposed receiver.")); } return updateDispositionInternal(lockToken, deliveryState); } @Override public Flux<Message> receive() { return super.receive() .filter(message -> message != EMPTY_MESSAGE) .publishOn(Schedulers.boundedElastic()); } @Override public Mono<String> getSessionId() { return sessionIdMono; } @Override public Mono<OffsetDateTime> getSessionLockedUntil() { return sessionLockedUntil; } @Override public Mono<Void> closeAsync() { if (isDisposed.getAndSet(true)) { return super.closeAsync(); } cleanupWorkItems(); final Mono<Void> disposeMono; if (!pendingUpdates.isEmpty()) { final List<Mono<Void>> pending = new ArrayList<>(); final StringJoiner builder = new StringJoiner(", "); for (UpdateDispositionWorkItem workItem : pendingUpdates.values()) { if (workItem.hasTimedout()) { continue; } if (workItem.getDeliveryState() instanceof TransactionalState) { 
pending.add(updateDispositionInternal(workItem.getLockToken(), Released.getInstance())); } else { pending.add(workItem.getMono()); } builder.add(workItem.getLockToken()); } logger.info("Waiting for pending updates to complete. Locks: {}", builder.toString()); disposeMono = Mono.when(pending); } else { disposeMono = Mono.empty(); } return disposeMono.onErrorResume(error -> { logger.info("There was an exception while disposing of all links.", error); return Mono.empty(); }).doFinally(signal -> subscription.dispose()).then(super.closeAsync()); } @Override protected Message decodeDelivery(Delivery delivery) { final byte[] deliveryTag = delivery.getTag(); final UUID lockToken; if (deliveryTag != null && deliveryTag.length == LOCK_TOKEN_SIZE) { lockToken = MessageUtils.convertDotNetBytesToUUID(deliveryTag); } else { lockToken = MessageUtils.ZERO_LOCK_TOKEN; } final String lockTokenString = lockToken.toString(); if (lockToken == MessageUtils.ZERO_LOCK_TOKEN || !unsettledDeliveries.containsKey(lockTokenString)) { final int messageSize = delivery.pending(); final byte[] buffer = new byte[messageSize]; final int read = receiver.recv(buffer, 0, messageSize); final Message message = Proton.message(); message.decode(buffer, 0, read); if (isSettled) { delivery.disposition(Accepted.getInstance()); delivery.settle(); } else { unsettledDeliveries.putIfAbsent(lockToken.toString(), delivery); receiver.advance(); } return new MessageWithLockToken(message, lockToken); } else { updateOutcome(lockTokenString, delivery); return EMPTY_MESSAGE; } } /** * Updates the outcome of a delivery. This occurs when a message is being settled from the receiver side. * @param delivery Delivery to update. 
*/ private void updateOutcome(String lockToken, Delivery delivery) { final DeliveryState remoteState = delivery.getRemoteState(); logger.verbose("entityPath[{}], linkName[{}], deliveryTag[{}], state[{}] Received update disposition delivery.", getEntityPath(), getLinkName(), lockToken, remoteState); final Outcome remoteOutcome; if (remoteState instanceof Outcome) { remoteOutcome = (Outcome) remoteState; } else if (remoteState instanceof TransactionalState) { remoteOutcome = ((TransactionalState) remoteState).getOutcome(); } else { remoteOutcome = null; } if (remoteOutcome == null) { logger.warning("linkName[{}], deliveryTag[{}]. No outcome associated with delivery. Delivery: {}", getLinkName(), lockToken, delivery); return; } final UpdateDispositionWorkItem workItem = pendingUpdates.get(lockToken); if (workItem == null) { logger.warning("linkName[{}], deliveryTag[{}]. No pending update for delivery. Delivery: {}", getLinkName(), lockToken, delivery); return; } if (remoteState.getType() == workItem.getDeliveryState().getType()) { completeWorkItem(lockToken, delivery, workItem.getSink(), null); return; } logger.info("Received delivery '{}' state '{}' doesn't match expected state '{}'", lockToken, remoteState, workItem.getDeliveryState()); switch (remoteState.getType()) { case Rejected: final Rejected rejected = (Rejected) remoteOutcome; final ErrorCondition errorCondition = rejected.getError(); final Throwable exception = ExceptionUtil.toException(errorCondition.getCondition().toString(), errorCondition.getDescription(), handler.getErrorContext(receiver)); final Duration retry = retryPolicy.calculateRetryDelay(exception, workItem.incrementRetry()); if (retry == null) { logger.info("deliveryTag[{}], state[{}]. 
Retry attempts exhausted.", lockToken, exception); completeWorkItem(lockToken, delivery, workItem.getSink(), exception); } else { workItem.setLastException(exception); workItem.resetStartTime(); try { provider.getReactorDispatcher().invoke(() -> delivery.disposition(workItem.getDeliveryState())); } catch (IOException error) { final Throwable amqpException = logger.logExceptionAsError(new AmqpException(false, "linkName[%s], deliveryTag[%s]. Retrying updateDisposition failed to dispatch to Reactor.", error, handler.getErrorContext(receiver))); completeWorkItem(lockToken, delivery, workItem.getSink(), amqpException); } } break; case Released: final Throwable cancelled = new AmqpException(false, AmqpErrorCondition.OPERATION_CANCELLED, "AMQP layer unexpectedly aborted or disconnected.", handler.getErrorContext(receiver)); logger.info("deliveryTag[{}], state[{}]. Completing pending updateState operation with exception.", lockToken, remoteState.getType(), cancelled); completeWorkItem(lockToken, delivery, workItem.getSink(), cancelled); break; default: final AmqpException error = new AmqpException(false, remoteOutcome.toString(), handler.getErrorContext(receiver)); logger.info("deliveryTag[{}], state[{}] Completing pending updateState operation with exception.", lockToken, remoteState.getType(), error); completeWorkItem(lockToken, delivery, workItem.getSink(), error); break; } } private void cleanupWorkItems() { logger.verbose("linkName[{}]: Cleaning timed out update work tasks.", getLinkName()); pendingUpdates.forEach((key, value) -> { if (value == null || !value.hasTimedout()) { return; } pendingUpdates.remove(key); final Throwable error = value.getLastException() != null ? 
value.getLastException() : new AmqpException(true, AmqpErrorCondition.TIMEOUT_ERROR, "Update disposition request timed out.", handler.getErrorContext(receiver)); completeWorkItem(key, null, value.getSink(), error); }); } private void completeWorkItem(String lockToken, Delivery delivery, MonoSink<Object> sink, Throwable error) { final boolean isSettled = delivery != null && delivery.remotelySettled(); if (isSettled) { delivery.settle(); } if (error != null) { final Throwable loggedError = error instanceof RuntimeException ? logger.logExceptionAsError((RuntimeException) error) : error; sink.error(loggedError); } else { sink.success(); } if (isSettled) { pendingUpdates.remove(lockToken); unsettledDeliveries.remove(lockToken); } } private static final class UpdateDispositionWorkItem { private final String lockToken; private final DeliveryState state; private final Duration timeout; private final AtomicInteger retryAttempts = new AtomicInteger(); private final AtomicBoolean isDisposed = new AtomicBoolean(); private Mono<Void> mono; private Instant expirationTime; private MonoSink<Object> sink; private Throwable throwable; private UpdateDispositionWorkItem(String lockToken, DeliveryState state, Duration timeout) { this.lockToken = lockToken; this.state = state; this.timeout = timeout; } private boolean hasTimedout() { return expirationTime.isBefore(Instant.now()); } private void resetStartTime() { this.expirationTime = Instant.now().plus(timeout); } private int incrementRetry() { return retryAttempts.incrementAndGet(); } private Throwable getLastException() { return throwable; } private void setLastException(Throwable throwable) { this.throwable = throwable; } private void setMono(Mono<Void> mono) { this.mono = mono; } private Mono<Void> getMono() { return mono; } private MonoSink<Object> getSink() { return sink; } private void start(MonoSink<Object> sink) { Objects.requireNonNull(sink, "'sink' cannot be null."); this.sink = sink; this.sink.onDispose(() -> 
isDisposed.set(true)); this.sink.onCancel(() -> isDisposed.set(true)); resetStartTime(); } private DeliveryState getDeliveryState() { return state; } public String getLockToken() { return lockToken; } } }
class ServiceBusReactorReceiver extends ReactorReceiver implements ServiceBusReceiveLink { private static final Message EMPTY_MESSAGE = Proton.message(); private final ClientLogger logger = new ClientLogger(ServiceBusReactorReceiver.class); private final ConcurrentHashMap<String, Delivery> unsettledDeliveries = new ConcurrentHashMap<>(); private final ConcurrentHashMap<String, UpdateDispositionWorkItem> pendingUpdates = new ConcurrentHashMap<>(); private final AtomicBoolean isDisposed = new AtomicBoolean(); private final Disposable subscription; private final Receiver receiver; /** * Indicates whether the message has already been settled from the sender side. This is the case when {@link * ServiceBusReceiveMode */ private final boolean isSettled; private final Duration timeout; private final AmqpRetryPolicy retryPolicy; private final ReceiveLinkHandler handler; private final ReactorProvider provider; private final Mono<String> sessionIdMono; private final Mono<OffsetDateTime> sessionLockedUntil; public ServiceBusReactorReceiver(AmqpConnection connection, String entityPath, Receiver receiver, ReceiveLinkHandler handler, TokenManager tokenManager, ReactorProvider provider, Duration timeout, AmqpRetryPolicy retryPolicy) { super(connection, entityPath, receiver, handler, tokenManager, provider.getReactorDispatcher(), retryPolicy.getRetryOptions()); this.receiver = receiver; this.handler = handler; this.provider = provider; this.isSettled = receiver.getSenderSettleMode() == SenderSettleMode.SETTLED; this.timeout = timeout; this.retryPolicy = retryPolicy; this.subscription = Flux.interval(timeout).subscribe(i -> cleanupWorkItems()); this.sessionIdMono = getEndpointStates().filter(x -> x == AmqpEndpointState.ACTIVE) .next() .flatMap(state -> { @SuppressWarnings("unchecked") final Map<Symbol, Object> remoteSource = ((Source) receiver.getRemoteSource()).getFilter(); final Object value = remoteSource.get(SESSION_FILTER); if (value == null) { logger.info("entityPath[{}], 
linkName[{}]. There is no session id.", entityPath, getLinkName()); return Mono.empty(); } final String actualSessionId = String.valueOf(value); return Mono.just(actualSessionId); }) .cache(value -> Duration.ofMillis(Long.MAX_VALUE), error -> Duration.ZERO, () -> Duration.ZERO); this.sessionLockedUntil = getEndpointStates().filter(x -> x == AmqpEndpointState.ACTIVE) .next() .map(state -> { if (receiver.getRemoteProperties() != null && receiver.getRemoteProperties().containsKey(LOCKED_UNTIL_UTC)) { final long ticks = (long) receiver.getRemoteProperties().get(LOCKED_UNTIL_UTC); return MessageUtils.convertDotNetTicksToOffsetDateTime(ticks); } else { logger.info("entityPath[{}], linkName[{}]. Locked until not set.", entityPath, getLinkName()); return Instant.EPOCH.atOffset(ZoneOffset.UTC); } }) .cache(value -> Duration.ofMillis(Long.MAX_VALUE), error -> Duration.ZERO, () -> Duration.ZERO); } @Override public Mono<Void> updateDisposition(String lockToken, DeliveryState deliveryState) { if (isDisposed.get()) { return monoError(logger, new IllegalStateException("Cannot perform operations on a disposed receiver.")); } return updateDispositionInternal(lockToken, deliveryState); } @Override public Flux<Message> receive() { return super.receive() .filter(message -> message != EMPTY_MESSAGE) .publishOn(Schedulers.boundedElastic()); } @Override public Mono<String> getSessionId() { return sessionIdMono; } @Override public Mono<OffsetDateTime> getSessionLockedUntil() { return sessionLockedUntil; } @Override public Mono<Void> closeAsync() { if (isDisposed.getAndSet(true)) { return super.closeAsync(); } cleanupWorkItems(); final Mono<Void> disposeMono; if (!pendingUpdates.isEmpty()) { final List<Mono<Void>> pending = new ArrayList<>(); final StringJoiner builder = new StringJoiner(", "); for (UpdateDispositionWorkItem workItem : pendingUpdates.values()) { if (workItem.hasTimedout()) { continue; } if (workItem.getDeliveryState() instanceof TransactionalState) { 
pending.add(updateDispositionInternal(workItem.getLockToken(), Released.getInstance())); } else { pending.add(workItem.getMono()); } builder.add(workItem.getLockToken()); } logger.info("Waiting for pending updates to complete. Locks: {}", builder.toString()); disposeMono = Mono.when(pending); } else { disposeMono = Mono.empty(); } return disposeMono.onErrorResume(error -> { logger.info("There was an exception while disposing of all links.", error); return Mono.empty(); }).doFinally(signal -> subscription.dispose()).then(super.closeAsync()); } @Override protected Message decodeDelivery(Delivery delivery) { final byte[] deliveryTag = delivery.getTag(); final UUID lockToken; if (deliveryTag != null && deliveryTag.length == LOCK_TOKEN_SIZE) { lockToken = MessageUtils.convertDotNetBytesToUUID(deliveryTag); } else { lockToken = MessageUtils.ZERO_LOCK_TOKEN; } final String lockTokenString = lockToken.toString(); if (lockToken == MessageUtils.ZERO_LOCK_TOKEN || !unsettledDeliveries.containsKey(lockTokenString)) { final int messageSize = delivery.pending(); final byte[] buffer = new byte[messageSize]; final int read = receiver.recv(buffer, 0, messageSize); final Message message = Proton.message(); message.decode(buffer, 0, read); if (isSettled) { delivery.disposition(Accepted.getInstance()); delivery.settle(); } else { unsettledDeliveries.putIfAbsent(lockToken.toString(), delivery); receiver.advance(); } return new MessageWithLockToken(message, lockToken); } else { updateOutcome(lockTokenString, delivery); return EMPTY_MESSAGE; } } /** * Updates the outcome of a delivery. This occurs when a message is being settled from the receiver side. * @param delivery Delivery to update. 
*/ private void updateOutcome(String lockToken, Delivery delivery) { final DeliveryState remoteState = delivery.getRemoteState(); logger.verbose("entityPath[{}], linkName[{}], deliveryTag[{}], state[{}] Received update disposition delivery.", getEntityPath(), getLinkName(), lockToken, remoteState); final Outcome remoteOutcome; if (remoteState instanceof Outcome) { remoteOutcome = (Outcome) remoteState; } else if (remoteState instanceof TransactionalState) { remoteOutcome = ((TransactionalState) remoteState).getOutcome(); } else { remoteOutcome = null; } if (remoteOutcome == null) { logger.warning("linkName[{}], deliveryTag[{}]. No outcome associated with delivery. Delivery: {}", getLinkName(), lockToken, delivery); return; } final UpdateDispositionWorkItem workItem = pendingUpdates.get(lockToken); if (workItem == null) { logger.warning("linkName[{}], deliveryTag[{}]. No pending update for delivery. Delivery: {}", getLinkName(), lockToken, delivery); return; } if (remoteState.getType() == workItem.getDeliveryState().getType()) { completeWorkItem(lockToken, delivery, workItem.getSink(), null); return; } logger.info("Received delivery '{}' state '{}' doesn't match expected state '{}'", lockToken, remoteState, workItem.getDeliveryState()); switch (remoteState.getType()) { case Rejected: final Rejected rejected = (Rejected) remoteOutcome; final ErrorCondition errorCondition = rejected.getError(); final Throwable exception = ExceptionUtil.toException(errorCondition.getCondition().toString(), errorCondition.getDescription(), handler.getErrorContext(receiver)); final Duration retry = retryPolicy.calculateRetryDelay(exception, workItem.incrementRetry()); if (retry == null) { logger.info("deliveryTag[{}], state[{}]. 
Retry attempts exhausted.", lockToken, exception); completeWorkItem(lockToken, delivery, workItem.getSink(), exception); } else { workItem.setLastException(exception); workItem.resetStartTime(); try { provider.getReactorDispatcher().invoke(() -> delivery.disposition(workItem.getDeliveryState())); } catch (IOException error) { final Throwable amqpException = logger.logExceptionAsError(new AmqpException(false, "linkName[%s], deliveryTag[%s]. Retrying updateDisposition failed to dispatch to Reactor.", error, handler.getErrorContext(receiver))); completeWorkItem(lockToken, delivery, workItem.getSink(), amqpException); } } break; case Released: final Throwable cancelled = new AmqpException(false, AmqpErrorCondition.OPERATION_CANCELLED, "AMQP layer unexpectedly aborted or disconnected.", handler.getErrorContext(receiver)); logger.info("deliveryTag[{}], state[{}]. Completing pending updateState operation with exception.", lockToken, remoteState.getType(), cancelled); completeWorkItem(lockToken, delivery, workItem.getSink(), cancelled); break; default: final AmqpException error = new AmqpException(false, remoteOutcome.toString(), handler.getErrorContext(receiver)); logger.info("deliveryTag[{}], state[{}] Completing pending updateState operation with exception.", lockToken, remoteState.getType(), error); completeWorkItem(lockToken, delivery, workItem.getSink(), error); break; } } private void cleanupWorkItems() { logger.verbose("linkName[{}]: Cleaning timed out update work tasks.", getLinkName()); pendingUpdates.forEach((key, value) -> { if (value == null || !value.hasTimedout()) { return; } pendingUpdates.remove(key); final Throwable error = value.getLastException() != null ? 
value.getLastException() : new AmqpException(true, AmqpErrorCondition.TIMEOUT_ERROR, "Update disposition request timed out.", handler.getErrorContext(receiver)); completeWorkItem(key, null, value.getSink(), error); }); } private void completeWorkItem(String lockToken, Delivery delivery, MonoSink<Void> sink, Throwable error) { final boolean isSettled = delivery != null && delivery.remotelySettled(); if (isSettled) { delivery.settle(); } if (error != null) { final Throwable loggedError = error instanceof RuntimeException ? logger.logExceptionAsError((RuntimeException) error) : error; sink.error(loggedError); } else { sink.success(); } if (isSettled) { pendingUpdates.remove(lockToken); unsettledDeliveries.remove(lockToken); } } private static final class UpdateDispositionWorkItem { private final String lockToken; private final DeliveryState state; private final Duration timeout; private final AtomicInteger retryAttempts = new AtomicInteger(); private final AtomicBoolean isDisposed = new AtomicBoolean(); private Mono<Void> mono; private Instant expirationTime; private MonoSink<Void> sink; private Throwable throwable; private UpdateDispositionWorkItem(String lockToken, DeliveryState state, Duration timeout) { this.lockToken = lockToken; this.state = state; this.timeout = timeout; } private boolean hasTimedout() { return expirationTime.isBefore(Instant.now()); } private void resetStartTime() { this.expirationTime = Instant.now().plus(timeout); } private int incrementRetry() { return retryAttempts.incrementAndGet(); } private Throwable getLastException() { return throwable; } private void setLastException(Throwable throwable) { this.throwable = throwable; } private void setMono(Mono<Void> mono) { this.mono = mono; } private Mono<Void> getMono() { return mono; } private MonoSink<Void> getSink() { return sink; } private void start(MonoSink<Void> sink) { Objects.requireNonNull(sink, "'sink' cannot be null."); this.sink = sink; this.sink.onDispose(() -> isDisposed.set(true)); 
this.sink.onCancel(() -> isDisposed.set(true)); resetStartTime(); } private DeliveryState getDeliveryState() { return state; } public String getLockToken() { return lockToken; } } }
Why we need this api , Context.NONE will not have the span info ?
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, Context.NONE); }
addEvent(eventName, traceEventAttributes, timestamp, Context.NONE);
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current())); }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.warning("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.info("Failed to find a starting span to associate the event %s with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.verbose("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.verbose("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.verbose("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.verbose("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.verbose("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override @SuppressWarnings("deprecation") /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.verbose("Failed to find a starting span to associate the {} with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
Since this is an existing API and we do not want to break any existing users using this API by removing it.
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, Context.NONE); }
addEvent(eventName, traceEventAttributes, timestamp, Context.NONE);
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current())); }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.warning("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.info("Failed to find a starting span to associate the event %s with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.verbose("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.verbose("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.verbose("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.verbose("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.verbose("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override @SuppressWarnings("deprecation") /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.verbose("Failed to find a starting span to associate the {} with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
Can we do deprecation,document update because this method will return without doing anything, so could be misleading.
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, Context.NONE); }
addEvent(eventName, traceEventAttributes, timestamp, Context.NONE);
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current())); }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.warning("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.info("Failed to find a starting span to associate the event %s with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.verbose("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.verbose("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.verbose("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.verbose("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.verbose("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override @SuppressWarnings("deprecation") /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.verbose("Failed to find a starting span to associate the {} with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
BTW, it doesn't look like there are any users https://github.com/Azure/azure-sdk-for-java/search?q=addevent. are we expecting anyone outside of this repo to use it? I understand it's a public API and can't be simply removed. Agree that deprecation would be great. Just curious if we expect anyone else to ever use it :)
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, Context.NONE); }
addEvent(eventName, traceEventAttributes, timestamp, Context.NONE);
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current())); }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.warning("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.info("Failed to find a starting span to associate the event %s with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.verbose("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.verbose("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.verbose("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.verbose("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.verbose("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override @SuppressWarnings("deprecation") /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.verbose("Failed to find a starting span to associate the {} with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
>are we expecting anyone outside of this repo to use it? Anyone using their own implementation of the azure-core, Tracer class would/could be using this API. Also, we sort to deprecation as the last resort and if use any workarounds instead according to our guidance here https://github.com/Azure/azure-sdk-for-java/wiki/Deprecation
/**
 * Adds an event to the span that is current on the calling thread.
 *
 * @param eventName The name of the event; must not be {@code null}.
 * @param traceEventAttributes Attributes to attach to the event; may be {@code null}.
 * @param timestamp The explicit event timestamp; may be {@code null} to use the span's clock.
 */
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) {
    // Context.NONE carries no PARENT_SPAN_KEY, so the 4-arg overload could never find a span and the
    // event was silently dropped. Attach the event to the span current on this thread instead.
    addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current()));
}
// Delegate with the span current on this thread; Context.NONE would never resolve PARENT_SPAN_KEY,
// so the event would be silently dropped.
addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current()));
/**
 * Adds an event to the span that is current on the calling thread, since no explicit
 * {@code Context} is supplied by the caller.
 *
 * @param eventName The name of the event.
 * @param traceEventAttributes Attributes to attach to the event; may be {@code null}.
 * @param timestamp The explicit event timestamp; may be {@code null}.
 */
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) {
    // Wrap Span.current() so the 4-arg overload can resolve PARENT_SPAN_KEY from the context.
    addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current()));
}
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.warning("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.info("Failed to find a starting span to associate the event %s with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.verbose("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.verbose("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.verbose("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.verbose("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.verbose("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override @SuppressWarnings("deprecation") /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.verbose("Failed to find a starting span to associate the {} with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
Yeah, I'm checking my understanding: if something calls into addEvent (which would be one of the Azure client libraries), then a custom Tracer implementation would override it. But nothing calls it yet, correct?
/**
 * Adds an event to the span that is current on the calling thread.
 *
 * @param eventName The name of the event; must not be {@code null}.
 * @param traceEventAttributes Attributes to attach to the event; may be {@code null}.
 * @param timestamp The explicit event timestamp; may be {@code null} to use the span's clock.
 */
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) {
    // Context.NONE carries no PARENT_SPAN_KEY, so the 4-arg overload could never find a span and the
    // event was silently dropped. Attach the event to the span current on this thread instead.
    addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current()));
}
// Delegate with the span current on this thread; Context.NONE would never resolve PARENT_SPAN_KEY,
// so the event would be silently dropped.
addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current()));
/**
 * Adds an event to the span that is current on the calling thread, since no explicit
 * {@code Context} is supplied by the caller.
 *
 * @param eventName The name of the event.
 * @param traceEventAttributes Attributes to attach to the event; may be {@code null}.
 * @param timestamp The explicit event timestamp; may be {@code null}.
 */
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) {
    // Wrap Span.current() so the 4-arg overload can resolve PARENT_SPAN_KEY from the context.
    addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current()));
}
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.warning("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.info("Failed to find a starting span to associate the event %s with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.verbose("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.verbose("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.verbose("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.verbose("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.verbose("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override @SuppressWarnings("deprecation") /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.verbose("Failed to find a starting span to associate the {} with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
nit: rename suggestion setHookIdsToAlert ?
public static void main(String[] args) { final MetricsAdvisorAdministrationAsyncClient advisorAdministrationAsyncClient = new MetricsAdvisorAdministrationClientBuilder() .endpoint("https: .credential(new MetricsAdvisorKeyCredential("subscription_key", "api_key")) .buildAsyncClient(); System.out.printf("Creating DataPointAnomaly alert config%n"); String detectionConfigurationId1 = "ff3014a0-bbbb-41ec-a637-677e77b81299"; String detectionConfigurationId2 = "e87d899d-a5a0-4259-b752-11aea34d5e34"; String hookId1 = "5f48er30-6e6e-4391-b78f-b00dfee1e6f5"; String hookId2 = "8i48er30-6e6e-4391-b78f-b00dfee1e6f5"; final Mono<AnomalyAlertConfiguration> createdAnomalyAlertConfigMono = advisorAdministrationAsyncClient.createAlertConfig( new AnomalyAlertConfiguration("My Anomaly Alert config name") .setDescription("alert config description") .setMetricAlertConfigurations(Arrays.asList( new MetricAlertConfiguration(detectionConfigurationId1, MetricAnomalyAlertScope.forWholeSeries()), new MetricAlertConfiguration(detectionConfigurationId2, MetricAnomalyAlertScope.forWholeSeries()) .setAlertConditions(new MetricAnomalyAlertConditions() .setSeverityRangeCondition(new SeverityCondition().setMinAlertSeverity(AnomalySeverity.LOW))))) .setCrossMetricsOperator(MetricAnomalyAlertConfigurationsOperator.AND) .setIdOfHooksToAlert(Arrays.asList(hookId1, hookId2))); createdAnomalyAlertConfigMono .doOnSubscribe(__ -> System.out.printf("Creating DataPoint Anomaly alert config%n")) .doOnSuccess(anomalyAlertConfig -> System.out.printf("Created DataPoint Anomaly alert config: %s%n", anomalyAlertConfig.getId())); Mono<AnomalyAlertConfiguration> fetchAnomalyAlertConfig = createdAnomalyAlertConfigMono.flatMap(createdConfig -> { return advisorAdministrationAsyncClient.getAlertConfig(createdConfig.getId()) .doOnSubscribe(__ -> System.out.printf("Fetching DataPoint Anomaly alert config: %s%n", createdConfig.getId())) .doOnSuccess(config -> System.out.printf("Fetched DataPoint Anomaly alert config%n")) 
.doOnNext(anomalyAlertConfig -> { System.out.printf("DataPoint Anomaly alert config Id : %s%n", anomalyAlertConfig.getId()); System.out.printf("DataPoint Anomaly alert config name : %s%n", anomalyAlertConfig.getName()); System.out.printf("DataPoint Anomaly alert config description : %s%n", anomalyAlertConfig.getDescription()); System.out.println("DataPoint Anomaly alert configuration hook ids:"); anomalyAlertConfig.getIdOfHooksToAlert().forEach(System.out::println); System.out.printf("DataPoint Anomaly alert configuration cross metrics operator: %s%n", anomalyAlertConfig.getCrossMetricsOperator().toString()); System.out.println("DataFeedMetric level alert configurations for this anomaly alert config:"); anomalyAlertConfig.getMetricAlertConfigurations(). forEach(metricAnomalyAlertConfiguration -> { System.out.printf("Anomaly Alert detection configuration Id: %s%n", metricAnomalyAlertConfiguration.getDetectionConfigurationId()); System.out.printf("Anomaly Alert configuration negation value", metricAnomalyAlertConfiguration.isNegationOperationEnabled()); System.out.printf("Anomaly Alert configuration scope type", metricAnomalyAlertConfiguration.getAlertScope().getScopeType().toString()); }); }); }); Mono<AnomalyAlertConfiguration> updatedAlertConfigMono = fetchAnomalyAlertConfig .flatMap(anomalyAlertConfig -> { List<String> hookIds = new ArrayList<>(anomalyAlertConfig.getIdOfHooksToAlert()); hookIds.remove(hookId2); return advisorAdministrationAsyncClient.updateAlertConfig( anomalyAlertConfig .setIdOfHooksToAlert(hookIds) .setDescription("updated to remove hookId2")) .doOnSubscribe(__ -> System.out.printf("Updating anomaly alert config: %s%n", anomalyAlertConfig.getId())) .doOnSuccess(config -> { System.out.printf("Updated anomaly alert config%n"); System.out.println("Updated anomaly alert config hook Id list:"); anomalyAlertConfig.getIdOfHooksToAlert().forEach(System.out::println); }); }); Mono<Void> deletedAnomalyAlertConfigMono = 
updatedAlertConfigMono.flatMap(anomalyAlertConfig -> { return advisorAdministrationAsyncClient.deleteAlertConfig(anomalyAlertConfig.getId()) .doOnSubscribe(__ -> System.out.printf("Deleting anomaly alert config: %s%n", anomalyAlertConfig.getId())) .doOnSuccess(config -> System.out.printf("Deleted anomaly alert config%n")); }); /* This will block until all the above CRUD on operation on email hook is completed. This is strongly discouraged for use in production as it eliminates the benefits of asynchronous IO. It is used here to ensure the sample runs to completion. */ deletedAnomalyAlertConfigMono.block(); System.out.printf("Listing DataPoint Anomaly alert configs for a detection configurations%n"); advisorAdministrationAsyncClient.listAlertConfigs(detectionConfigurationId1, new ListAnomalyAlertConfigsOptions()) .doOnNext(anomalyAlertConfigurationItem -> { System.out.printf("DataPoint Anomaly alert config Id : %s%n", anomalyAlertConfigurationItem.getId()); System.out.printf("DataPoint Anomaly alert config name : %s%n", anomalyAlertConfigurationItem.getName()); System.out.printf("DataPoint Anomaly alert config description : %s%n", anomalyAlertConfigurationItem.getDescription()); System.out.println("DataPoint Anomaly alert configuration hook ids:"); anomalyAlertConfigurationItem.getIdOfHooksToAlert().forEach(System.out::println); System.out.printf("DataPoint Anomaly alert configuration cross metrics operator: %s%n", anomalyAlertConfigurationItem.getCrossMetricsOperator().toString()); }); }
.setIdOfHooksToAlert(hookIds)
public static void main(String[] args) { final MetricsAdvisorAdministrationAsyncClient advisorAdministrationAsyncClient = new MetricsAdvisorAdministrationClientBuilder() .endpoint("https: .credential(new MetricsAdvisorKeyCredential("subscription_key", "api_key")) .buildAsyncClient(); System.out.printf("Creating DataPointAnomaly alert config%n"); String detectionConfigurationId1 = "ff3014a0-bbbb-41ec-a637-677e77b81299"; String detectionConfigurationId2 = "e87d899d-a5a0-4259-b752-11aea34d5e34"; String hookId1 = "5f48er30-6e6e-4391-b78f-b00dfee1e6f5"; String hookId2 = "8i48er30-6e6e-4391-b78f-b00dfee1e6f5"; final Mono<AnomalyAlertConfiguration> createdAnomalyAlertConfigMono = advisorAdministrationAsyncClient.createAlertConfig( new AnomalyAlertConfiguration("My Anomaly Alert config name") .setDescription("alert config description") .setMetricAlertConfigurations(Arrays.asList( new MetricAlertConfiguration(detectionConfigurationId1, MetricAnomalyAlertScope.forWholeSeries()), new MetricAlertConfiguration(detectionConfigurationId2, MetricAnomalyAlertScope.forWholeSeries()) .setAlertConditions(new MetricAnomalyAlertConditions() .setSeverityRangeCondition(new SeverityCondition().setMinAlertSeverity(AnomalySeverity.LOW))))) .setCrossMetricsOperator(MetricAlertConfigurationsOperator.AND) .setHookIdsToAlert(Arrays.asList(hookId1, hookId2))); createdAnomalyAlertConfigMono .doOnSubscribe(__ -> System.out.printf("Creating DataPoint Anomaly alert config%n")) .doOnSuccess(anomalyAlertConfig -> System.out.printf("Created DataPoint Anomaly alert config: %s%n", anomalyAlertConfig.getId())); Mono<AnomalyAlertConfiguration> fetchAnomalyAlertConfig = createdAnomalyAlertConfigMono.flatMap(createdConfig -> { return advisorAdministrationAsyncClient.getAlertConfig(createdConfig.getId()) .doOnSubscribe(__ -> System.out.printf("Fetching DataPoint Anomaly alert config: %s%n", createdConfig.getId())) .doOnSuccess(config -> System.out.printf("Fetched DataPoint Anomaly alert config%n")) 
.doOnNext(anomalyAlertConfig -> { System.out.printf("DataPoint Anomaly alert config Id : %s%n", anomalyAlertConfig.getId()); System.out.printf("DataPoint Anomaly alert config name : %s%n", anomalyAlertConfig.getName()); System.out.printf("DataPoint Anomaly alert config description : %s%n", anomalyAlertConfig.getDescription()); System.out.println("DataPoint Anomaly alert configuration hook ids:"); anomalyAlertConfig.getHookIdsToAlert().forEach(System.out::println); System.out.printf("DataPoint Anomaly alert configuration cross metrics operator: %s%n", anomalyAlertConfig.getCrossMetricsOperator().toString()); System.out.println("DataFeedMetric level alert configurations for this anomaly alert config:"); anomalyAlertConfig.getMetricAlertConfigurations(). forEach(metricAnomalyAlertConfiguration -> { System.out.printf("Anomaly Alert detection configuration Id: %s%n", metricAnomalyAlertConfiguration.getDetectionConfigurationId()); System.out.printf("Anomaly Alert configuration negation value", metricAnomalyAlertConfiguration.isNegationOperationEnabled()); System.out.printf("Anomaly Alert configuration scope type", metricAnomalyAlertConfiguration.getAlertScope().getScopeType().toString()); }); }); }); Mono<AnomalyAlertConfiguration> updatedAlertConfigMono = fetchAnomalyAlertConfig .flatMap(anomalyAlertConfig -> { List<String> hookIds = new ArrayList<>(anomalyAlertConfig.getHookIdsToAlert()); hookIds.remove(hookId2); return advisorAdministrationAsyncClient.updateAlertConfig( anomalyAlertConfig .setHookIdsToAlert(hookIds) .setDescription("updated to remove hookId2")) .doOnSubscribe(__ -> System.out.printf("Updating anomaly alert config: %s%n", anomalyAlertConfig.getId())) .doOnSuccess(config -> { System.out.printf("Updated anomaly alert config%n"); System.out.println("Updated anomaly alert config hook Id list:"); anomalyAlertConfig.getHookIdsToAlert().forEach(System.out::println); }); }); Mono<Void> deletedAnomalyAlertConfigMono = 
updatedAlertConfigMono.flatMap(anomalyAlertConfig -> { return advisorAdministrationAsyncClient.deleteAlertConfig(anomalyAlertConfig.getId()) .doOnSubscribe(__ -> System.out.printf("Deleting anomaly alert config: %s%n", anomalyAlertConfig.getId())) .doOnSuccess(config -> System.out.printf("Deleted anomaly alert config%n")); }); /* This will block until all the above CRUD on operation on email hook is completed. This is strongly discouraged for use in production as it eliminates the benefits of asynchronous IO. It is used here to ensure the sample runs to completion. */ deletedAnomalyAlertConfigMono.block(); System.out.printf("Listing DataPoint Anomaly alert configs for a detection configurations%n"); advisorAdministrationAsyncClient.listAlertConfigs(detectionConfigurationId1, new ListAnomalyAlertConfigsOptions()) .doOnNext(anomalyAlertConfigurationItem -> { System.out.printf("DataPoint Anomaly alert config Id : %s%n", anomalyAlertConfigurationItem.getId()); System.out.printf("DataPoint Anomaly alert config name : %s%n", anomalyAlertConfigurationItem.getName()); System.out.printf("DataPoint Anomaly alert config description : %s%n", anomalyAlertConfigurationItem.getDescription()); System.out.println("DataPoint Anomaly alert configuration hook ids:"); anomalyAlertConfigurationItem.getHookIdsToAlert().forEach(System.out::println); System.out.printf("DataPoint Anomaly alert configuration cross metrics operator: %s%n", anomalyAlertConfigurationItem.getCrossMetricsOperator().toString()); }); }
class MetricsAnomalyAlertConfigOperationsAsyncSample { }
class MetricsAnomalyAlertConfigOperationsAsyncSample { }
is this needed to maintain existing behavior? (agree hopefully no one is using it anyways) ```suggestion addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current())); ```
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, Context.NONE); }
addEvent(eventName, traceEventAttributes, timestamp, Context.NONE);
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current())); }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.warning("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.info("Failed to find a starting span to associate the event %s with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.verbose("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.verbose("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.verbose("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.verbose("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.verbose("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override @SuppressWarnings("deprecation") /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.verbose("Failed to find a starting span to associate the {} with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
Agree with @trask, I think it would make sense to keep this overload with a defaulting behavior to look for the current span in scope. For the users, who do not want any specific span for the event to be attached to. @lmolkova thoughts?
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, Context.NONE); }
addEvent(eventName, traceEventAttributes, timestamp, Context.NONE);
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current())); }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.warning("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.info("Failed to find a starting span to associate the event %s with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.verbose("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.verbose("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.verbose("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.verbose("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.verbose("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override @SuppressWarnings("deprecation") /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.verbose("Failed to find a starting span to associate the {} with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
(I would also support deprecation, this method does seem a bit out of place since the azure tracing API seems generally designed around passing explicit context)
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, Context.NONE); }
addEvent(eventName, traceEventAttributes, timestamp, Context.NONE);
public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp) { addEvent(eventName, traceEventAttributes, timestamp, new Context(PARENT_SPAN_KEY, Span.current())); }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.warning("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.warning("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.warning("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.warning("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.warning("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.info("Failed to find a starting span to associate the event %s with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
class OpenTelemetryTracer implements com.azure.core.util.tracing.Tracer { private final Tracer tracer = GlobalOpenTelemetry.getTracer("Azure-OpenTelemetry"); static final String AZ_NAMESPACE_KEY = "az.namespace"; static final String MESSAGE_BUS_DESTINATION = "message_bus.destination"; static final String PEER_ENDPOINT = "peer.address"; private final ClientLogger logger = new ClientLogger(OpenTelemetryTracer.class); /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); SpanBuilder spanBuilder = getSpanBuilder(spanName, context); Span span = spanBuilder.startSpan(); if (span.isRecording()) { String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } return context.addData(PARENT_SPAN_KEY, span); } /** * {@inheritDoc} */ @Override public Context start(String spanName, Context context, ProcessKind processKind) { Objects.requireNonNull(spanName, "'spanName' cannot be null."); Objects.requireNonNull(context, "'context' cannot be null."); Objects.requireNonNull(processKind, "'processKind' cannot be null."); Span span; SpanBuilder spanBuilder; switch (processKind) { case SEND: spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { return Context.NONE; } span = spanBuilder.setSpanKind(SpanKind.CLIENT).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span); case MESSAGE: spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.PRODUCER).startSpan(); if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } context = setContextData(span); return context.addData(PARENT_SPAN_KEY, span); case PROCESS: return 
startScopedSpan(spanName, context); default: return Context.NONE; } } /** * {@inheritDoc} */ @Override public void end(int responseCode, Throwable throwable, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { return; } if (span.isRecording()) { span = HttpTraceUtil.setSpanStatus(span, responseCode, throwable); } span.end(); } /** * {@inheritDoc} */ @Override public void setAttribute(String key, String value, Context context) { Objects.requireNonNull(context, "'context' cannot be null"); if (CoreUtils.isNullOrEmpty(value)) { logger.verbose("Failed to set span attribute since value is null or empty."); return; } final Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span != null) { span.setAttribute(key, value); } else { logger.verbose("Failed to find span to add attribute."); } } /** * {@inheritDoc} */ @Override public Context setSpanName(String spanName, Context context) { return context.addData(USER_SPAN_NAME_KEY, spanName); } /** * {@inheritDoc} */ @Override public void end(String statusMessage, Throwable throwable, Context context) { Span span = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (span == null) { logger.verbose("Failed to find span to end it."); return; } if (span.isRecording()) { span = AmqpTraceUtil.parseStatusMessage(span, statusMessage, throwable); } span.end(); } @Override public void addLink(Context context) { final SpanBuilder spanBuilder = getOrDefault(context, SPAN_BUILDER_KEY, null, SpanBuilder.class); if (spanBuilder == null) { logger.verbose("Failed to find spanBuilder to link it."); return; } final SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext == null) { logger.verbose("Failed to find span context to link it."); return; } spanBuilder.addLink(spanContext); } /** * {@inheritDoc} */ @Override public Context 
extractContext(String diagnosticId, Context context) { return AmqpPropagationFormatUtil.extractContext(diagnosticId, context); } @Override public Context getSharedSpanBuilder(String spanName, Context context) { return context.addData(SPAN_BUILDER_KEY, getSpanBuilder(spanName, context)); } /** * {@inheritDoc} */ @Override @SuppressWarnings("deprecation") /** * {@inheritDoc} */ @Override public void addEvent(String eventName, Map<String, Object> traceEventAttributes, OffsetDateTime timestamp, Context context) { Objects.requireNonNull(eventName, "'eventName' cannot be null."); Span currentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); if (currentSpan == null) { logger.verbose("Failed to find a starting span to associate the {} with.", eventName); return; } if (timestamp == null) { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes)); } else { currentSpan.addEvent( eventName, traceEventAttributes == null ? Attributes.empty() : convertToOtelAttributes(traceEventAttributes), timestamp.toInstant() ); } } /** * Maps span/event properties to OpenTelemetry attributes. * * @param attributes the attributes provided by the client SDK's. * @return the OpenTelemetry typed {@Link Attributes}. 
*/ private Attributes convertToOtelAttributes(Map<String, Object> attributes) { AttributesBuilder attributesBuilder = Attributes.builder(); attributes.forEach((key, value) -> { if (value instanceof Boolean) { attributesBuilder.put(key, (boolean) value); } else if (value instanceof String) { attributesBuilder.put(key, String.valueOf(value)); } else if (value instanceof Double) { attributesBuilder.put(key, (Double) value); } else if (value instanceof Long) { attributesBuilder.put(key, (Long) value); } else if (value instanceof String[]) { attributesBuilder.put(key, (String[]) value); } else if (value instanceof long[]) { attributesBuilder.put(key, (long[]) value); } else if (value instanceof double[]) { attributesBuilder.put(key, (double[]) value); } else if (value instanceof boolean[]) { attributesBuilder.put(key, (boolean[]) value); } }); return attributesBuilder.build(); } /** * Starts a new child {@link Span} with parent being the remote and uses the {@link Span} is in the current Context, * to return an object that represents that scope. * <p>The scope is exited when the returned object is closed.</p> * * @param spanName The name of the returned Span. * @param context The {@link Context} containing the {@link SpanContext}. * @return The returned {@link Span} and the scope in a {@link Context} object. 
*/ private Context startScopedSpan(String spanName, Context context) { Objects.requireNonNull(context, "'context' cannot be null."); Span span; SpanContext spanContext = getOrDefault(context, SPAN_CONTEXT_KEY, null, SpanContext.class); if (spanContext != null) { span = startSpanWithRemoteParent(spanName, spanContext); } else { SpanBuilder spanBuilder = getSpanBuilder(spanName, context); span = spanBuilder.setSpanKind(SpanKind.CONSUMER).startSpan(); } if (span.isRecording()) { addSpanRequestAttributes(span, context, spanName); } return context.addData(PARENT_SPAN_KEY, span).addData("scope", span.makeCurrent()); } /** * Creates a {@link SpanBuilder} to create and start a new child {@link Span} with parent being the remote and * designated by the {@link SpanContext}. * * @param spanName The name of the returned Span. * @param spanContext The remote parent context of the returned Span. * @return A {@link Span} with parent being the remote {@link Span} designated by the {@link SpanContext}. */ private Span startSpanWithRemoteParent(String spanName, SpanContext spanContext) { SpanBuilder spanBuilder = tracer.spanBuilder(spanName) .setParent(io.opentelemetry.context.Context.root().with(Span.wrap(spanContext))); spanBuilder.setSpanKind(SpanKind.CONSUMER); return spanBuilder.startSpan(); } /** * Extracts the {@link SpanContext trace identifiers} and the {@link SpanContext} of the current tracing span as * text and returns in a {@link Context} object. * * @param span The current tracing span. * @return The {@link Context} containing the {@link SpanContext} and trace-parent of the current span. 
*/ private static Context setContextData(Span span) { SpanContext spanContext = span.getSpanContext(); final String traceparent = AmqpPropagationFormatUtil.getDiagnosticId(spanContext); if (traceparent == null) { return Context.NONE; } return new Context(DIAGNOSTIC_ID_KEY, traceparent).addData(SPAN_CONTEXT_KEY, spanContext); } /** * Extracts request attributes from the given {@code context} and adds it to the started span. * * @param span The span to which request attributes are to be added. * @param context The context containing the request attributes. * @param spanName The name of the returned Span containing the component value. */ private void addSpanRequestAttributes(Span span, Context context, String spanName) { Objects.requireNonNull(span, "'span' cannot be null."); String entityPath = getOrDefault(context, ENTITY_PATH_KEY, null, String.class); if (entityPath != null) { span.setAttribute(MESSAGE_BUS_DESTINATION, entityPath); } String hostName = getOrDefault(context, HOST_NAME_KEY, null, String.class); if (hostName != null) { span.setAttribute(PEER_ENDPOINT, hostName); } Long messageEnqueuedTime = getOrDefault(context, MESSAGE_ENQUEUED_TIME, null, Long.class); if (messageEnqueuedTime != null) { span.setAttribute(MESSAGE_ENQUEUED_TIME, messageEnqueuedTime); } String tracingNamespace = getOrDefault(context, AZ_TRACING_NAMESPACE_KEY, null, String.class); if (tracingNamespace != null) { span.setAttribute(AZ_NAMESPACE_KEY, tracingNamespace); } } /** * Returns a {@link SpanBuilder} to create and start a new child {@link Span} with parent * being the designated {@code Span}. * * @param spanName The name of the returned Span. * @param context The context containing the span and the span name. * @return A {@code Span.SpanBuilder} to create and start a new {@code Span}. 
*/ private SpanBuilder getSpanBuilder(String spanName, Context context) { Span parentSpan = getOrDefault(context, PARENT_SPAN_KEY, null, Span.class); String spanNameKey = getOrDefault(context, USER_SPAN_NAME_KEY, null, String.class); if (spanNameKey == null) { spanNameKey = spanName; } if (parentSpan == null) { parentSpan = Span.current(); } return tracer.spanBuilder(spanNameKey) .setParent(io.opentelemetry.context.Context.current().with(parentSpan)); } /** * Returns the value of the specified key from the context. * * @param key The name of the attribute that needs to be extracted from the {@code Context}. * @param defaultValue the value to return in data not found. * @param clazz clazz the type of raw class to find data for. * @param context The context containing the specified key. * @return The T type of raw class object */ @SuppressWarnings("unchecked") private <T> T getOrDefault(Context context, String key, T defaultValue, Class<T> clazz) { final Optional<Object> optional = context.getData(key); final Object result = optional.filter(value -> clazz.isAssignableFrom(value.getClass())).orElseGet(() -> { logger.verbose("Could not extract key '{}' of type '{}' from context.", key, clazz); return defaultValue; }); return (T) result; } }
thanks, yes this sounds better, will update.
public static void main(String[] args) { final MetricsAdvisorAdministrationAsyncClient advisorAdministrationAsyncClient = new MetricsAdvisorAdministrationClientBuilder() .endpoint("https: .credential(new MetricsAdvisorKeyCredential("subscription_key", "api_key")) .buildAsyncClient(); System.out.printf("Creating DataPointAnomaly alert config%n"); String detectionConfigurationId1 = "ff3014a0-bbbb-41ec-a637-677e77b81299"; String detectionConfigurationId2 = "e87d899d-a5a0-4259-b752-11aea34d5e34"; String hookId1 = "5f48er30-6e6e-4391-b78f-b00dfee1e6f5"; String hookId2 = "8i48er30-6e6e-4391-b78f-b00dfee1e6f5"; final Mono<AnomalyAlertConfiguration> createdAnomalyAlertConfigMono = advisorAdministrationAsyncClient.createAlertConfig( new AnomalyAlertConfiguration("My Anomaly Alert config name") .setDescription("alert config description") .setMetricAlertConfigurations(Arrays.asList( new MetricAlertConfiguration(detectionConfigurationId1, MetricAnomalyAlertScope.forWholeSeries()), new MetricAlertConfiguration(detectionConfigurationId2, MetricAnomalyAlertScope.forWholeSeries()) .setAlertConditions(new MetricAnomalyAlertConditions() .setSeverityRangeCondition(new SeverityCondition().setMinAlertSeverity(AnomalySeverity.LOW))))) .setCrossMetricsOperator(MetricAnomalyAlertConfigurationsOperator.AND) .setIdOfHooksToAlert(Arrays.asList(hookId1, hookId2))); createdAnomalyAlertConfigMono .doOnSubscribe(__ -> System.out.printf("Creating DataPoint Anomaly alert config%n")) .doOnSuccess(anomalyAlertConfig -> System.out.printf("Created DataPoint Anomaly alert config: %s%n", anomalyAlertConfig.getId())); Mono<AnomalyAlertConfiguration> fetchAnomalyAlertConfig = createdAnomalyAlertConfigMono.flatMap(createdConfig -> { return advisorAdministrationAsyncClient.getAlertConfig(createdConfig.getId()) .doOnSubscribe(__ -> System.out.printf("Fetching DataPoint Anomaly alert config: %s%n", createdConfig.getId())) .doOnSuccess(config -> System.out.printf("Fetched DataPoint Anomaly alert config%n")) 
.doOnNext(anomalyAlertConfig -> { System.out.printf("DataPoint Anomaly alert config Id : %s%n", anomalyAlertConfig.getId()); System.out.printf("DataPoint Anomaly alert config name : %s%n", anomalyAlertConfig.getName()); System.out.printf("DataPoint Anomaly alert config description : %s%n", anomalyAlertConfig.getDescription()); System.out.println("DataPoint Anomaly alert configuration hook ids:"); anomalyAlertConfig.getIdOfHooksToAlert().forEach(System.out::println); System.out.printf("DataPoint Anomaly alert configuration cross metrics operator: %s%n", anomalyAlertConfig.getCrossMetricsOperator().toString()); System.out.println("DataFeedMetric level alert configurations for this anomaly alert config:"); anomalyAlertConfig.getMetricAlertConfigurations(). forEach(metricAnomalyAlertConfiguration -> { System.out.printf("Anomaly Alert detection configuration Id: %s%n", metricAnomalyAlertConfiguration.getDetectionConfigurationId()); System.out.printf("Anomaly Alert configuration negation value", metricAnomalyAlertConfiguration.isNegationOperationEnabled()); System.out.printf("Anomaly Alert configuration scope type", metricAnomalyAlertConfiguration.getAlertScope().getScopeType().toString()); }); }); }); Mono<AnomalyAlertConfiguration> updatedAlertConfigMono = fetchAnomalyAlertConfig .flatMap(anomalyAlertConfig -> { List<String> hookIds = new ArrayList<>(anomalyAlertConfig.getIdOfHooksToAlert()); hookIds.remove(hookId2); return advisorAdministrationAsyncClient.updateAlertConfig( anomalyAlertConfig .setIdOfHooksToAlert(hookIds) .setDescription("updated to remove hookId2")) .doOnSubscribe(__ -> System.out.printf("Updating anomaly alert config: %s%n", anomalyAlertConfig.getId())) .doOnSuccess(config -> { System.out.printf("Updated anomaly alert config%n"); System.out.println("Updated anomaly alert config hook Id list:"); anomalyAlertConfig.getIdOfHooksToAlert().forEach(System.out::println); }); }); Mono<Void> deletedAnomalyAlertConfigMono = 
updatedAlertConfigMono.flatMap(anomalyAlertConfig -> { return advisorAdministrationAsyncClient.deleteAlertConfig(anomalyAlertConfig.getId()) .doOnSubscribe(__ -> System.out.printf("Deleting anomaly alert config: %s%n", anomalyAlertConfig.getId())) .doOnSuccess(config -> System.out.printf("Deleted anomaly alert config%n")); }); /* This will block until all the above CRUD on operation on email hook is completed. This is strongly discouraged for use in production as it eliminates the benefits of asynchronous IO. It is used here to ensure the sample runs to completion. */ deletedAnomalyAlertConfigMono.block(); System.out.printf("Listing DataPoint Anomaly alert configs for a detection configurations%n"); advisorAdministrationAsyncClient.listAlertConfigs(detectionConfigurationId1, new ListAnomalyAlertConfigsOptions()) .doOnNext(anomalyAlertConfigurationItem -> { System.out.printf("DataPoint Anomaly alert config Id : %s%n", anomalyAlertConfigurationItem.getId()); System.out.printf("DataPoint Anomaly alert config name : %s%n", anomalyAlertConfigurationItem.getName()); System.out.printf("DataPoint Anomaly alert config description : %s%n", anomalyAlertConfigurationItem.getDescription()); System.out.println("DataPoint Anomaly alert configuration hook ids:"); anomalyAlertConfigurationItem.getIdOfHooksToAlert().forEach(System.out::println); System.out.printf("DataPoint Anomaly alert configuration cross metrics operator: %s%n", anomalyAlertConfigurationItem.getCrossMetricsOperator().toString()); }); }
.setIdOfHooksToAlert(hookIds)
public static void main(String[] args) { final MetricsAdvisorAdministrationAsyncClient advisorAdministrationAsyncClient = new MetricsAdvisorAdministrationClientBuilder() .endpoint("https: .credential(new MetricsAdvisorKeyCredential("subscription_key", "api_key")) .buildAsyncClient(); System.out.printf("Creating DataPointAnomaly alert config%n"); String detectionConfigurationId1 = "ff3014a0-bbbb-41ec-a637-677e77b81299"; String detectionConfigurationId2 = "e87d899d-a5a0-4259-b752-11aea34d5e34"; String hookId1 = "5f48er30-6e6e-4391-b78f-b00dfee1e6f5"; String hookId2 = "8i48er30-6e6e-4391-b78f-b00dfee1e6f5"; final Mono<AnomalyAlertConfiguration> createdAnomalyAlertConfigMono = advisorAdministrationAsyncClient.createAlertConfig( new AnomalyAlertConfiguration("My Anomaly Alert config name") .setDescription("alert config description") .setMetricAlertConfigurations(Arrays.asList( new MetricAlertConfiguration(detectionConfigurationId1, MetricAnomalyAlertScope.forWholeSeries()), new MetricAlertConfiguration(detectionConfigurationId2, MetricAnomalyAlertScope.forWholeSeries()) .setAlertConditions(new MetricAnomalyAlertConditions() .setSeverityRangeCondition(new SeverityCondition().setMinAlertSeverity(AnomalySeverity.LOW))))) .setCrossMetricsOperator(MetricAlertConfigurationsOperator.AND) .setHookIdsToAlert(Arrays.asList(hookId1, hookId2))); createdAnomalyAlertConfigMono .doOnSubscribe(__ -> System.out.printf("Creating DataPoint Anomaly alert config%n")) .doOnSuccess(anomalyAlertConfig -> System.out.printf("Created DataPoint Anomaly alert config: %s%n", anomalyAlertConfig.getId())); Mono<AnomalyAlertConfiguration> fetchAnomalyAlertConfig = createdAnomalyAlertConfigMono.flatMap(createdConfig -> { return advisorAdministrationAsyncClient.getAlertConfig(createdConfig.getId()) .doOnSubscribe(__ -> System.out.printf("Fetching DataPoint Anomaly alert config: %s%n", createdConfig.getId())) .doOnSuccess(config -> System.out.printf("Fetched DataPoint Anomaly alert config%n")) 
.doOnNext(anomalyAlertConfig -> { System.out.printf("DataPoint Anomaly alert config Id : %s%n", anomalyAlertConfig.getId()); System.out.printf("DataPoint Anomaly alert config name : %s%n", anomalyAlertConfig.getName()); System.out.printf("DataPoint Anomaly alert config description : %s%n", anomalyAlertConfig.getDescription()); System.out.println("DataPoint Anomaly alert configuration hook ids:"); anomalyAlertConfig.getHookIdsToAlert().forEach(System.out::println); System.out.printf("DataPoint Anomaly alert configuration cross metrics operator: %s%n", anomalyAlertConfig.getCrossMetricsOperator().toString()); System.out.println("DataFeedMetric level alert configurations for this anomaly alert config:"); anomalyAlertConfig.getMetricAlertConfigurations(). forEach(metricAnomalyAlertConfiguration -> { System.out.printf("Anomaly Alert detection configuration Id: %s%n", metricAnomalyAlertConfiguration.getDetectionConfigurationId()); System.out.printf("Anomaly Alert configuration negation value", metricAnomalyAlertConfiguration.isNegationOperationEnabled()); System.out.printf("Anomaly Alert configuration scope type", metricAnomalyAlertConfiguration.getAlertScope().getScopeType().toString()); }); }); }); Mono<AnomalyAlertConfiguration> updatedAlertConfigMono = fetchAnomalyAlertConfig .flatMap(anomalyAlertConfig -> { List<String> hookIds = new ArrayList<>(anomalyAlertConfig.getHookIdsToAlert()); hookIds.remove(hookId2); return advisorAdministrationAsyncClient.updateAlertConfig( anomalyAlertConfig .setHookIdsToAlert(hookIds) .setDescription("updated to remove hookId2")) .doOnSubscribe(__ -> System.out.printf("Updating anomaly alert config: %s%n", anomalyAlertConfig.getId())) .doOnSuccess(config -> { System.out.printf("Updated anomaly alert config%n"); System.out.println("Updated anomaly alert config hook Id list:"); anomalyAlertConfig.getHookIdsToAlert().forEach(System.out::println); }); }); Mono<Void> deletedAnomalyAlertConfigMono = 
updatedAlertConfigMono.flatMap(anomalyAlertConfig -> { return advisorAdministrationAsyncClient.deleteAlertConfig(anomalyAlertConfig.getId()) .doOnSubscribe(__ -> System.out.printf("Deleting anomaly alert config: %s%n", anomalyAlertConfig.getId())) .doOnSuccess(config -> System.out.printf("Deleted anomaly alert config%n")); }); /* This will block until all the above CRUD on operation on email hook is completed. This is strongly discouraged for use in production as it eliminates the benefits of asynchronous IO. It is used here to ensure the sample runs to completion. */ deletedAnomalyAlertConfigMono.block(); System.out.printf("Listing DataPoint Anomaly alert configs for a detection configurations%n"); advisorAdministrationAsyncClient.listAlertConfigs(detectionConfigurationId1, new ListAnomalyAlertConfigsOptions()) .doOnNext(anomalyAlertConfigurationItem -> { System.out.printf("DataPoint Anomaly alert config Id : %s%n", anomalyAlertConfigurationItem.getId()); System.out.printf("DataPoint Anomaly alert config name : %s%n", anomalyAlertConfigurationItem.getName()); System.out.printf("DataPoint Anomaly alert config description : %s%n", anomalyAlertConfigurationItem.getDescription()); System.out.println("DataPoint Anomaly alert configuration hook ids:"); anomalyAlertConfigurationItem.getHookIdsToAlert().forEach(System.out::println); System.out.printf("DataPoint Anomaly alert configuration cross metrics operator: %s%n", anomalyAlertConfigurationItem.getCrossMetricsOperator().toString()); }); }
class MetricsAnomalyAlertConfigOperationsAsyncSample { }
class MetricsAnomalyAlertConfigOperationsAsyncSample { }
This is an ancillary performance question, does the Java proxy interface pass the Object[] in a consistent order? I'm wondering if there could be an optimization for this.
public RequestOptions setRequestOptions(Object[] swaggerMethodArguments) { return CoreUtils.findFirstOfType(swaggerMethodArguments, RequestOptions.class); }
return CoreUtils.findFirstOfType(swaggerMethodArguments, RequestOptions.class);
public RequestOptions setRequestOptions(Object[] swaggerMethodArguments) { return CoreUtils.findFirstOfType(swaggerMethodArguments, RequestOptions.class); }
class SwaggerMethodParser implements HttpResponseDecodeData { private static final Pattern PATTERN_COLON_SLASH_SLASH = Pattern.compile(": private final SerializerAdapter serializer; private final String rawHost; private final String fullyQualifiedMethodName; private final HttpMethod httpMethod; private final String relativePath; private final List<Substitution> hostSubstitutions = new ArrayList<>(); private final List<Substitution> pathSubstitutions = new ArrayList<>(); private final List<Substitution> querySubstitutions = new ArrayList<>(); private final List<Substitution> formSubstitutions = new ArrayList<>(); private final List<Substitution> headerSubstitutions = new ArrayList<>(); private final HttpHeaders headers = new HttpHeaders(); private final Integer bodyContentMethodParameterIndex; private final String bodyContentType; private final Type bodyJavaType; private final BitSet expectedStatusCodes; private final Type returnType; private final Type returnValueWireType; private final UnexpectedResponseExceptionType[] unexpectedResponseExceptionTypes; private Map<Integer, UnexpectedExceptionInformation> exceptionMapping; private UnexpectedExceptionInformation defaultException; /** * Create a SwaggerMethodParser object using the provided fully qualified method name. * * @param swaggerMethod the Swagger method to parse. * @param rawHost the raw host value from the @Host annotation. Before this can be used as the host value in an HTTP * request, it must be processed through the possible host substitutions. */ SwaggerMethodParser(Method swaggerMethod, String rawHost) { this(swaggerMethod, rawHost, JacksonAdapter.createDefaultSerializerAdapter()); } SwaggerMethodParser(Method swaggerMethod, String rawHost, SerializerAdapter serializer) { this.serializer = serializer; this.rawHost = rawHost; final Class<?> swaggerInterface = swaggerMethod.getDeclaringClass(); fullyQualifiedMethodName = swaggerInterface.getName() + "." 
+ swaggerMethod.getName(); if (swaggerMethod.isAnnotationPresent(Get.class)) { this.httpMethod = HttpMethod.GET; this.relativePath = swaggerMethod.getAnnotation(Get.class).value(); } else if (swaggerMethod.isAnnotationPresent(Put.class)) { this.httpMethod = HttpMethod.PUT; this.relativePath = swaggerMethod.getAnnotation(Put.class).value(); } else if (swaggerMethod.isAnnotationPresent(Head.class)) { this.httpMethod = HttpMethod.HEAD; this.relativePath = swaggerMethod.getAnnotation(Head.class).value(); } else if (swaggerMethod.isAnnotationPresent(Delete.class)) { this.httpMethod = HttpMethod.DELETE; this.relativePath = swaggerMethod.getAnnotation(Delete.class).value(); } else if (swaggerMethod.isAnnotationPresent(Post.class)) { this.httpMethod = HttpMethod.POST; this.relativePath = swaggerMethod.getAnnotation(Post.class).value(); } else if (swaggerMethod.isAnnotationPresent(Patch.class)) { this.httpMethod = HttpMethod.PATCH; this.relativePath = swaggerMethod.getAnnotation(Patch.class).value(); } else { throw new MissingRequiredAnnotationException(Arrays.asList(Get.class, Put.class, Head.class, Delete.class, Post.class, Patch.class), swaggerMethod); } returnType = swaggerMethod.getGenericReturnType(); final ReturnValueWireType returnValueWireTypeAnnotation = swaggerMethod.getAnnotation(ReturnValueWireType.class); if (returnValueWireTypeAnnotation != null) { Class<?> returnValueWireType = returnValueWireTypeAnnotation.value(); if (returnValueWireType == Base64Url.class || returnValueWireType == UnixTime.class || returnValueWireType == DateTimeRfc1123.class) { this.returnValueWireType = returnValueWireType; } else if (TypeUtil.isTypeOrSubTypeOf(returnValueWireType, List.class)) { this.returnValueWireType = returnValueWireType.getGenericInterfaces()[0]; } else if (TypeUtil.isTypeOrSubTypeOf(returnValueWireType, Page.class)) { this.returnValueWireType = returnValueWireType; } else { this.returnValueWireType = null; } } else { this.returnValueWireType = null; } if 
(swaggerMethod.isAnnotationPresent(Headers.class)) { final Headers headersAnnotation = swaggerMethod.getAnnotation(Headers.class); final String[] headers = headersAnnotation.value(); for (final String header : headers) { final int colonIndex = header.indexOf(":"); if (colonIndex >= 0) { final String headerName = header.substring(0, colonIndex).trim(); if (!headerName.isEmpty()) { final String headerValue = header.substring(colonIndex + 1).trim(); if (!headerValue.isEmpty()) { if (headerValue.contains(",")) { this.headers.set(headerName, Arrays.asList(headerValue.split(","))); } else { this.headers.set(headerName, headerValue); } } } } } } final ExpectedResponses expectedResponses = swaggerMethod.getAnnotation(ExpectedResponses.class); if (expectedResponses != null && expectedResponses.value().length > 0) { expectedStatusCodes = new BitSet(); for (int code : expectedResponses.value()) { expectedStatusCodes.set(code); } } else { expectedStatusCodes = null; } unexpectedResponseExceptionTypes = swaggerMethod.getAnnotationsByType(UnexpectedResponseExceptionType.class); Integer bodyContentMethodParameterIndex = null; String bodyContentType = null; Type bodyJavaType = null; final Annotation[][] allParametersAnnotations = swaggerMethod.getParameterAnnotations(); for (int parameterIndex = 0; parameterIndex < allParametersAnnotations.length; ++parameterIndex) { final Annotation[] parameterAnnotations = swaggerMethod.getParameterAnnotations()[parameterIndex]; for (final Annotation annotation : parameterAnnotations) { final Class<? 
extends Annotation> annotationType = annotation.annotationType(); if (annotationType.equals(HostParam.class)) { final HostParam hostParamAnnotation = (HostParam) annotation; hostSubstitutions.add(new Substitution(hostParamAnnotation.value(), parameterIndex, !hostParamAnnotation.encoded())); } else if (annotationType.equals(PathParam.class)) { final PathParam pathParamAnnotation = (PathParam) annotation; pathSubstitutions.add(new Substitution(pathParamAnnotation.value(), parameterIndex, !pathParamAnnotation.encoded())); } else if (annotationType.equals(QueryParam.class)) { final QueryParam queryParamAnnotation = (QueryParam) annotation; querySubstitutions.add(new Substitution(queryParamAnnotation.value(), parameterIndex, !queryParamAnnotation.encoded())); } else if (annotationType.equals(HeaderParam.class)) { final HeaderParam headerParamAnnotation = (HeaderParam) annotation; headerSubstitutions.add(new Substitution(headerParamAnnotation.value(), parameterIndex, false)); } else if (annotationType.equals(BodyParam.class)) { final BodyParam bodyParamAnnotation = (BodyParam) annotation; bodyContentMethodParameterIndex = parameterIndex; bodyContentType = bodyParamAnnotation.value(); bodyJavaType = swaggerMethod.getGenericParameterTypes()[parameterIndex]; } else if (annotationType.equals(FormParam.class)) { final FormParam formParamAnnotation = (FormParam) annotation; formSubstitutions.add(new Substitution(formParamAnnotation.value(), parameterIndex, !formParamAnnotation.encoded())); bodyContentType = ContentType.APPLICATION_X_WWW_FORM_URLENCODED; bodyJavaType = String.class; } } } this.bodyContentMethodParameterIndex = bodyContentMethodParameterIndex; this.bodyContentType = bodyContentType; this.bodyJavaType = bodyJavaType; } /** * Get the fully qualified method that was called to invoke this HTTP request. 
* * @return the fully qualified method that was called to invoke this HTTP request */ public String getFullyQualifiedMethodName() { return fullyQualifiedMethodName; } /** * Get the HTTP method that will be used to complete the Swagger method's request. * * @return the HTTP method that will be used to complete the Swagger method's request */ public HttpMethod getHttpMethod() { return httpMethod; } /** * Sets the scheme and host to use for HTTP requests for this Swagger method. * * @param swaggerMethodArguments The arguments to use for scheme and host substitutions. * @param urlBuilder The {@link UrlBuilder} that will have its scheme and host set. */ public void setSchemeAndHost(Object[] swaggerMethodArguments, UrlBuilder urlBuilder) { final String substitutedHost = applySubstitutions(rawHost, hostSubstitutions, swaggerMethodArguments); final String[] substitutedHostParts = PATTERN_COLON_SLASH_SLASH.split(substitutedHost); if (substitutedHostParts.length >= 2) { urlBuilder.setScheme(substitutedHostParts[0]); urlBuilder.setHost(substitutedHostParts[1]); } else if (substitutedHostParts.length == 1) { urlBuilder.setScheme(substitutedHostParts[0]); urlBuilder.setHost(substitutedHost); } else { urlBuilder.setHost(substitutedHost); } } /** * Get the path that will be used to complete the Swagger method's request. * * @param methodArguments the method arguments to use with the path substitutions * @return the path value with its placeholders replaced by the matching substitutions */ public String setPath(Object[] methodArguments) { return applySubstitutions(relativePath, pathSubstitutions, methodArguments); } /** * Sets the encoded query parameters that have been added to this value based on the provided method arguments into * the passed {@link UrlBuilder}. * * @param swaggerMethodArguments the arguments that will be used to create the query parameters' values * @param urlBuilder The {@link UrlBuilder} where the encoded query parameters will be set. 
*/ public void setEncodedQueryParameters(Object[] swaggerMethodArguments, UrlBuilder urlBuilder) { if (swaggerMethodArguments == null) { return; } for (Substitution substitution : querySubstitutions) { final int parameterIndex = substitution.getMethodParameterIndex(); if (0 <= parameterIndex && parameterIndex < swaggerMethodArguments.length) { final Object methodArgument = swaggerMethodArguments[substitution.getMethodParameterIndex()]; String parameterValue = serialize(serializer, methodArgument); if (parameterValue != null) { if (substitution.shouldEncode()) { parameterValue = UrlEscapers.QUERY_ESCAPER.escape(parameterValue); } urlBuilder.setQueryParameter(substitution.getUrlParameterName(), parameterValue); } } } } /** * Sets the headers that have been added to this value based on the provided method arguments into the passed * {@link HttpHeaders}. * * @param swaggerMethodArguments The arguments that will be used to create the headers' values. * @param httpHeaders The {@link HttpHeaders} where the header values will be set. 
*/ public void setHeaders(Object[] swaggerMethodArguments, HttpHeaders httpHeaders) { for (HttpHeader header : headers) { httpHeaders.set(header.getName(), header.getValuesList()); } if (swaggerMethodArguments == null) { return; } for (Substitution headerSubstitution : headerSubstitutions) { final int parameterIndex = headerSubstitution.getMethodParameterIndex(); if (0 <= parameterIndex && parameterIndex < swaggerMethodArguments.length) { final Object methodArgument = swaggerMethodArguments[headerSubstitution.getMethodParameterIndex()]; if (methodArgument instanceof Map) { @SuppressWarnings("unchecked") final Map<String, ?> headerCollection = (Map<String, ?>) methodArgument; final String headerCollectionPrefix = headerSubstitution.getUrlParameterName(); for (final Map.Entry<String, ?> headerCollectionEntry : headerCollection.entrySet()) { final String headerName = headerCollectionPrefix + headerCollectionEntry.getKey(); final String headerValue = serialize(serializer, headerCollectionEntry.getValue()); if (headerValue != null) { httpHeaders.set(headerName, headerValue); } } } else { final String headerName = headerSubstitution.getUrlParameterName(); final String headerValue = serialize(serializer, methodArgument); if (headerValue != null) { httpHeaders.set(headerName, headerValue); } } } } } /** * Get the {@link Context} passed into the proxy method. * * @param swaggerMethodArguments the arguments passed to the proxy method * @return the context, or {@link Context */ public Context setContext(Object[] swaggerMethodArguments) { Context context = CoreUtils.findFirstOfType(swaggerMethodArguments, Context.class); return (context != null) ? context : Context.NONE; } /** * Get the {@link RequestOptions} passed into the proxy method. * * @param swaggerMethodArguments the arguments passed to the proxy method * @return the request options */ /** * Get whether or not the provided response status code is one of the expected status codes for this Swagger * method. * * 1. 
If the returned int[] is null, then all 2XX status codes are considered as success code. * 2. If the returned int[] is not-null, only the codes in the array are considered as success code. * * @param statusCode the status code that was returned in the HTTP response * @return whether or not the provided response status code is one of the expected status codes for this Swagger * method */ @Override public boolean isExpectedResponseStatusCode(final int statusCode) { return expectedStatusCodes == null ? statusCode < 400 : expectedStatusCodes.get(statusCode); } /** * Get the {@link UnexpectedExceptionInformation} that will be used to generate a RestException if the HTTP response * status code is not one of the expected status codes. * * If an UnexpectedExceptionInformation is not found for the status code the default UnexpectedExceptionInformation * will be returned. * * @param code Exception HTTP status code return from a REST API. * @return the UnexpectedExceptionInformation to generate an exception to throw or return. */ @Override public UnexpectedExceptionInformation getUnexpectedException(int code) { if (exceptionMapping == null) { exceptionMapping = processUnexpectedResponseExceptionTypes(); } return exceptionMapping.getOrDefault(code, defaultException); } /** * Get the object to be used as the value of the HTTP request. 
* * @param swaggerMethodArguments the method arguments to get the value object from * @return the object that will be used as the body of the HTTP request */ public Object setBody(Object[] swaggerMethodArguments) { Object result = null; if (bodyContentMethodParameterIndex != null && swaggerMethodArguments != null && 0 <= bodyContentMethodParameterIndex && bodyContentMethodParameterIndex < swaggerMethodArguments.length) { result = swaggerMethodArguments[bodyContentMethodParameterIndex]; } if (!CoreUtils.isNullOrEmpty(formSubstitutions) && swaggerMethodArguments != null) { result = formSubstitutions.stream() .map(substitution -> serializeFormData(serializer, substitution.getUrlParameterName(), swaggerMethodArguments[substitution.getMethodParameterIndex()], substitution.shouldEncode())) .filter(Objects::nonNull) .collect(Collectors.joining("&")); } return result; } /** * Get the Content-Type of the body of this Swagger method. * * @return the Content-Type of the body of this Swagger method */ public String getBodyContentType() { return bodyContentType; } /** * Get the return type for the method that this object describes. * * @return the return type for the method that this object describes. */ @Override public Type getReturnType() { return returnType; } /** * Get the type of the body parameter to this method, if present. * * @return the return type of the body parameter to this method */ public Type getBodyJavaType() { return bodyJavaType; } /** * Get the type that the return value will be send across the network as. If returnValueWireType is not null, then * the raw HTTP response body will need to parsed to this type and then converted to the actual returnType. 
* * @return the type that the raw HTTP response body will be sent as */ @Override public Type getReturnValueWireType() { return returnValueWireType; } private static String serialize(SerializerAdapter serializer, Object value) { if (value == null) { return null; } if (value instanceof String) { return (String) value; } else { return serializer.serializeRaw(value); } } private static String serializeFormData(SerializerAdapter serializer, String key, Object value, boolean shouldEncode) { if (value == null) { return null; } String encodedKey = UrlEscapers.FORM_ESCAPER.escape(key); if (value instanceof List<?>) { return ((List<?>) value).stream() .map(element -> serializeAndEncodeFormValue(serializer, element, shouldEncode)) .filter(Objects::nonNull) .map(formValue -> encodedKey + "=" + formValue) .collect(Collectors.joining("&")); } else { return encodedKey + "=" + serializeAndEncodeFormValue(serializer, value, shouldEncode); } } private static String serializeAndEncodeFormValue(SerializerAdapter serializer, Object value, boolean shouldEncode) { if (value == null) { return null; } String serializedValue = serializer.serializeRaw(value); return shouldEncode ? 
UrlEscapers.FORM_ESCAPER.escape(serializedValue) : serializedValue; } private String applySubstitutions(String originalValue, Iterable<Substitution> substitutions, Object[] methodArguments) { String result = originalValue; if (methodArguments != null) { for (Substitution substitution : substitutions) { final int substitutionParameterIndex = substitution.getMethodParameterIndex(); if (0 <= substitutionParameterIndex && substitutionParameterIndex < methodArguments.length) { final Object methodArgument = methodArguments[substitutionParameterIndex]; String substitutionValue = serialize(serializer, methodArgument); if (substitutionValue != null && !substitutionValue.isEmpty() && substitution.shouldEncode()) { substitutionValue = UrlEscapers.PATH_ESCAPER.escape(substitutionValue); } if (substitutionValue == null) { substitutionValue = ""; } result = result.replace("{" + substitution.getUrlParameterName() + "}", substitutionValue); } } } return result; } private Map<Integer, UnexpectedExceptionInformation> processUnexpectedResponseExceptionTypes() { HashMap<Integer, UnexpectedExceptionInformation> exceptionHashMap = new HashMap<>(); for (UnexpectedResponseExceptionType exceptionAnnotation : unexpectedResponseExceptionTypes) { UnexpectedExceptionInformation exception = new UnexpectedExceptionInformation(exceptionAnnotation.value()); if (exceptionAnnotation.code().length == 0) { defaultException = exception; } else { for (int statusCode : exceptionAnnotation.code()) { exceptionHashMap.put(statusCode, exception); } } } if (defaultException == null) { defaultException = new UnexpectedExceptionInformation(HttpResponseException.class); } return exceptionHashMap; } }
class SwaggerMethodParser implements HttpResponseDecodeData { private static final Pattern PATTERN_COLON_SLASH_SLASH = Pattern.compile(": private final SerializerAdapter serializer; private final String rawHost; private final String fullyQualifiedMethodName; private final HttpMethod httpMethod; private final String relativePath; private final List<Substitution> hostSubstitutions = new ArrayList<>(); private final List<Substitution> pathSubstitutions = new ArrayList<>(); private final List<Substitution> querySubstitutions = new ArrayList<>(); private final List<Substitution> formSubstitutions = new ArrayList<>(); private final List<Substitution> headerSubstitutions = new ArrayList<>(); private final HttpHeaders headers = new HttpHeaders(); private final Integer bodyContentMethodParameterIndex; private final String bodyContentType; private final Type bodyJavaType; private final BitSet expectedStatusCodes; private final Type returnType; private final Type returnValueWireType; private final UnexpectedResponseExceptionType[] unexpectedResponseExceptionTypes; private Map<Integer, UnexpectedExceptionInformation> exceptionMapping; private UnexpectedExceptionInformation defaultException; /** * Create a SwaggerMethodParser object using the provided fully qualified method name. * * @param swaggerMethod the Swagger method to parse. * @param rawHost the raw host value from the @Host annotation. Before this can be used as the host value in an HTTP * request, it must be processed through the possible host substitutions. */ SwaggerMethodParser(Method swaggerMethod, String rawHost) { this(swaggerMethod, rawHost, JacksonAdapter.createDefaultSerializerAdapter()); } SwaggerMethodParser(Method swaggerMethod, String rawHost, SerializerAdapter serializer) { this.serializer = serializer; this.rawHost = rawHost; final Class<?> swaggerInterface = swaggerMethod.getDeclaringClass(); fullyQualifiedMethodName = swaggerInterface.getName() + "." 
+ swaggerMethod.getName(); if (swaggerMethod.isAnnotationPresent(Get.class)) { this.httpMethod = HttpMethod.GET; this.relativePath = swaggerMethod.getAnnotation(Get.class).value(); } else if (swaggerMethod.isAnnotationPresent(Put.class)) { this.httpMethod = HttpMethod.PUT; this.relativePath = swaggerMethod.getAnnotation(Put.class).value(); } else if (swaggerMethod.isAnnotationPresent(Head.class)) { this.httpMethod = HttpMethod.HEAD; this.relativePath = swaggerMethod.getAnnotation(Head.class).value(); } else if (swaggerMethod.isAnnotationPresent(Delete.class)) { this.httpMethod = HttpMethod.DELETE; this.relativePath = swaggerMethod.getAnnotation(Delete.class).value(); } else if (swaggerMethod.isAnnotationPresent(Post.class)) { this.httpMethod = HttpMethod.POST; this.relativePath = swaggerMethod.getAnnotation(Post.class).value(); } else if (swaggerMethod.isAnnotationPresent(Patch.class)) { this.httpMethod = HttpMethod.PATCH; this.relativePath = swaggerMethod.getAnnotation(Patch.class).value(); } else { throw new MissingRequiredAnnotationException(Arrays.asList(Get.class, Put.class, Head.class, Delete.class, Post.class, Patch.class), swaggerMethod); } returnType = swaggerMethod.getGenericReturnType(); final ReturnValueWireType returnValueWireTypeAnnotation = swaggerMethod.getAnnotation(ReturnValueWireType.class); if (returnValueWireTypeAnnotation != null) { Class<?> returnValueWireType = returnValueWireTypeAnnotation.value(); if (returnValueWireType == Base64Url.class || returnValueWireType == UnixTime.class || returnValueWireType == DateTimeRfc1123.class) { this.returnValueWireType = returnValueWireType; } else if (TypeUtil.isTypeOrSubTypeOf(returnValueWireType, List.class)) { this.returnValueWireType = returnValueWireType.getGenericInterfaces()[0]; } else if (TypeUtil.isTypeOrSubTypeOf(returnValueWireType, Page.class)) { this.returnValueWireType = returnValueWireType; } else { this.returnValueWireType = null; } } else { this.returnValueWireType = null; } if 
(swaggerMethod.isAnnotationPresent(Headers.class)) { final Headers headersAnnotation = swaggerMethod.getAnnotation(Headers.class); final String[] headers = headersAnnotation.value(); for (final String header : headers) { final int colonIndex = header.indexOf(":"); if (colonIndex >= 0) { final String headerName = header.substring(0, colonIndex).trim(); if (!headerName.isEmpty()) { final String headerValue = header.substring(colonIndex + 1).trim(); if (!headerValue.isEmpty()) { if (headerValue.contains(",")) { this.headers.set(headerName, Arrays.asList(headerValue.split(","))); } else { this.headers.set(headerName, headerValue); } } } } } } final ExpectedResponses expectedResponses = swaggerMethod.getAnnotation(ExpectedResponses.class); if (expectedResponses != null && expectedResponses.value().length > 0) { expectedStatusCodes = new BitSet(); for (int code : expectedResponses.value()) { expectedStatusCodes.set(code); } } else { expectedStatusCodes = null; } unexpectedResponseExceptionTypes = swaggerMethod.getAnnotationsByType(UnexpectedResponseExceptionType.class); Integer bodyContentMethodParameterIndex = null; String bodyContentType = null; Type bodyJavaType = null; final Annotation[][] allParametersAnnotations = swaggerMethod.getParameterAnnotations(); for (int parameterIndex = 0; parameterIndex < allParametersAnnotations.length; ++parameterIndex) { final Annotation[] parameterAnnotations = swaggerMethod.getParameterAnnotations()[parameterIndex]; for (final Annotation annotation : parameterAnnotations) { final Class<? 
extends Annotation> annotationType = annotation.annotationType(); if (annotationType.equals(HostParam.class)) { final HostParam hostParamAnnotation = (HostParam) annotation; hostSubstitutions.add(new Substitution(hostParamAnnotation.value(), parameterIndex, !hostParamAnnotation.encoded())); } else if (annotationType.equals(PathParam.class)) { final PathParam pathParamAnnotation = (PathParam) annotation; pathSubstitutions.add(new Substitution(pathParamAnnotation.value(), parameterIndex, !pathParamAnnotation.encoded())); } else if (annotationType.equals(QueryParam.class)) { final QueryParam queryParamAnnotation = (QueryParam) annotation; querySubstitutions.add(new Substitution(queryParamAnnotation.value(), parameterIndex, !queryParamAnnotation.encoded())); } else if (annotationType.equals(HeaderParam.class)) { final HeaderParam headerParamAnnotation = (HeaderParam) annotation; headerSubstitutions.add(new Substitution(headerParamAnnotation.value(), parameterIndex, false)); } else if (annotationType.equals(BodyParam.class)) { final BodyParam bodyParamAnnotation = (BodyParam) annotation; bodyContentMethodParameterIndex = parameterIndex; bodyContentType = bodyParamAnnotation.value(); bodyJavaType = swaggerMethod.getGenericParameterTypes()[parameterIndex]; } else if (annotationType.equals(FormParam.class)) { final FormParam formParamAnnotation = (FormParam) annotation; formSubstitutions.add(new Substitution(formParamAnnotation.value(), parameterIndex, !formParamAnnotation.encoded())); bodyContentType = ContentType.APPLICATION_X_WWW_FORM_URLENCODED; bodyJavaType = String.class; } } } this.bodyContentMethodParameterIndex = bodyContentMethodParameterIndex; this.bodyContentType = bodyContentType; this.bodyJavaType = bodyJavaType; } /** * Get the fully qualified method that was called to invoke this HTTP request. 
* * @return the fully qualified method that was called to invoke this HTTP request */ public String getFullyQualifiedMethodName() { return fullyQualifiedMethodName; } /** * Get the HTTP method that will be used to complete the Swagger method's request. * * @return the HTTP method that will be used to complete the Swagger method's request */ public HttpMethod getHttpMethod() { return httpMethod; } /** * Sets the scheme and host to use for HTTP requests for this Swagger method. * * @param swaggerMethodArguments The arguments to use for scheme and host substitutions. * @param urlBuilder The {@link UrlBuilder} that will have its scheme and host set. */ public void setSchemeAndHost(Object[] swaggerMethodArguments, UrlBuilder urlBuilder) { final String substitutedHost = applySubstitutions(rawHost, hostSubstitutions, swaggerMethodArguments); final String[] substitutedHostParts = PATTERN_COLON_SLASH_SLASH.split(substitutedHost); if (substitutedHostParts.length >= 2) { urlBuilder.setScheme(substitutedHostParts[0]); urlBuilder.setHost(substitutedHostParts[1]); } else if (substitutedHostParts.length == 1) { urlBuilder.setScheme(substitutedHostParts[0]); urlBuilder.setHost(substitutedHost); } else { urlBuilder.setHost(substitutedHost); } } /** * Get the path that will be used to complete the Swagger method's request. * * @param methodArguments the method arguments to use with the path substitutions * @return the path value with its placeholders replaced by the matching substitutions */ public String setPath(Object[] methodArguments) { return applySubstitutions(relativePath, pathSubstitutions, methodArguments); } /** * Sets the encoded query parameters that have been added to this value based on the provided method arguments into * the passed {@link UrlBuilder}. * * @param swaggerMethodArguments the arguments that will be used to create the query parameters' values * @param urlBuilder The {@link UrlBuilder} where the encoded query parameters will be set. 
*/ public void setEncodedQueryParameters(Object[] swaggerMethodArguments, UrlBuilder urlBuilder) { if (swaggerMethodArguments == null) { return; } for (Substitution substitution : querySubstitutions) { final int parameterIndex = substitution.getMethodParameterIndex(); if (0 <= parameterIndex && parameterIndex < swaggerMethodArguments.length) { final Object methodArgument = swaggerMethodArguments[substitution.getMethodParameterIndex()]; String parameterValue = serialize(serializer, methodArgument); if (parameterValue != null) { if (substitution.shouldEncode()) { parameterValue = UrlEscapers.QUERY_ESCAPER.escape(parameterValue); } urlBuilder.setQueryParameter(substitution.getUrlParameterName(), parameterValue); } } } } /** * Sets the headers that have been added to this value based on the provided method arguments into the passed * {@link HttpHeaders}. * * @param swaggerMethodArguments The arguments that will be used to create the headers' values. * @param httpHeaders The {@link HttpHeaders} where the header values will be set. 
*/
public void setHeaders(Object[] swaggerMethodArguments, HttpHeaders httpHeaders) {
    // Static headers declared via the @Headers annotation are applied first; argument-driven
    // headers below may overwrite them since HttpHeaders.set replaces existing values.
    for (HttpHeader header : headers) {
        httpHeaders.set(header.getName(), header.getValuesList());
    }

    if (swaggerMethodArguments == null) {
        return;
    }

    for (Substitution headerSubstitution : headerSubstitutions) {
        final int parameterIndex = headerSubstitution.getMethodParameterIndex();
        // Guard against substitutions referring to parameter indices outside the argument array.
        if (0 <= parameterIndex && parameterIndex < swaggerMethodArguments.length) {
            final Object methodArgument = swaggerMethodArguments[headerSubstitution.getMethodParameterIndex()];
            if (methodArgument instanceof Map) {
                // A Map argument is treated as a header collection: each entry becomes one header
                // whose name is the substitution's URL parameter name used as a prefix for the key.
                @SuppressWarnings("unchecked") final Map<String, ?> headerCollection = (Map<String, ?>) methodArgument;
                final String headerCollectionPrefix = headerSubstitution.getUrlParameterName();
                for (final Map.Entry<String, ?> headerCollectionEntry : headerCollection.entrySet()) {
                    final String headerName = headerCollectionPrefix + headerCollectionEntry.getKey();
                    final String headerValue = serialize(serializer, headerCollectionEntry.getValue());
                    // Null serialized values are skipped rather than set as empty headers.
                    if (headerValue != null) {
                        httpHeaders.set(headerName, headerValue);
                    }
                }
            } else {
                final String headerName = headerSubstitution.getUrlParameterName();
                final String headerValue = serialize(serializer, methodArgument);
                if (headerValue != null) {
                    httpHeaders.set(headerName, headerValue);
                }
            }
        }
    }
}

/**
 * Get the {@link Context} passed into the proxy method.
 *
 * @param swaggerMethodArguments the arguments passed to the proxy method
 * @return the first {@code Context} argument, or {@link Context#NONE} when none was passed
 */
public Context setContext(Object[] swaggerMethodArguments) {
    Context context = CoreUtils.findFirstOfType(swaggerMethodArguments, Context.class);
    return (context != null) ? context : Context.NONE;
}

/**
 * Get the {@link RequestOptions} passed into the proxy method.
 *
 * @param swaggerMethodArguments the arguments passed to the proxy method
 * @return the request options
 */
// NOTE(review): only the Javadoc for setRequestOptions remains here — the method body
// appears to have been lost in this copy of the file. TODO confirm against the original source.

/**
 * Get whether or not the provided response status code is one of the expected status codes for this
 * Swagger method.
 *
 * 1. If no expected status codes were declared ({@code expectedStatusCodes == null}), every status
 *    code below 400 is considered a success.
 * 2. Otherwise only the declared codes are considered successes.
 *
 * @param statusCode the status code that was returned in the HTTP response
 * @return whether or not the provided response status code is one of the expected status codes for
 * this Swagger method
 */
@Override
public boolean isExpectedResponseStatusCode(final int statusCode) {
    return expectedStatusCodes == null ? statusCode < 400 : expectedStatusCodes.get(statusCode);
}

/**
 * Get the {@link UnexpectedExceptionInformation} that will be used to generate a RestException if
 * the HTTP response status code is not one of the expected status codes.
 *
 * If an UnexpectedExceptionInformation is not found for the status code the default
 * UnexpectedExceptionInformation will be returned.
 *
 * @param code Exception HTTP status code return from a REST API.
 * @return the UnexpectedExceptionInformation to generate an exception to throw or return.
 */
@Override
public UnexpectedExceptionInformation getUnexpectedException(int code) {
    // The status-code-to-exception mapping is built lazily on first use.
    if (exceptionMapping == null) {
        exceptionMapping = processUnexpectedResponseExceptionTypes();
    }
    return exceptionMapping.getOrDefault(code, defaultException);
}

/**
 * Get the object to be used as the value of the HTTP request.
 *
 * @param swaggerMethodArguments the method arguments to get the value object from
 * @return the object that will be used as the body of the HTTP request
 */
public Object setBody(Object[] swaggerMethodArguments) {
    Object result = null;

    if (bodyContentMethodParameterIndex != null
        && swaggerMethodArguments != null
        && 0 <= bodyContentMethodParameterIndex
        && bodyContentMethodParameterIndex < swaggerMethodArguments.length) {
        result = swaggerMethodArguments[bodyContentMethodParameterIndex];
    }

    // Form parameters, when present, take precedence over a @BodyParam argument: the body becomes
    // an application/x-www-form-urlencoded string joined with '&'.
    if (!CoreUtils.isNullOrEmpty(formSubstitutions) && swaggerMethodArguments != null) {
        result = formSubstitutions.stream()
            .map(substitution -> serializeFormData(serializer, substitution.getUrlParameterName(),
                swaggerMethodArguments[substitution.getMethodParameterIndex()], substitution.shouldEncode()))
            .filter(Objects::nonNull)
            .collect(Collectors.joining("&"));
    }

    return result;
}

/**
 * Get the Content-Type of the body of this Swagger method.
 *
 * @return the Content-Type of the body of this Swagger method
 */
public String getBodyContentType() {
    return bodyContentType;
}

/**
 * Get the return type for the method that this object describes.
 *
 * @return the return type for the method that this object describes.
 */
@Override
public Type getReturnType() {
    return returnType;
}

/**
 * Get the type of the body parameter to this method, if present.
 *
 * @return the return type of the body parameter to this method
 */
public Type getBodyJavaType() {
    return bodyJavaType;
}

/**
 * Get the type that the return value will be sent across the network as. If returnValueWireType is
 * not null, then the raw HTTP response body will need to be parsed to this type and then converted
 * to the actual returnType.
 *
 * @return the type that the raw HTTP response body will be sent as
 */
@Override
public Type getReturnValueWireType() {
    return returnValueWireType;
}

// Serializes a method argument to its String form. Strings pass through unchanged; everything
// else goes through the SerializerAdapter's raw serialization.
private static String serialize(SerializerAdapter serializer, Object value) {
    if (value == null) {
        return null;
    }
    if (value instanceof String) {
        return (String) value;
    } else {
        return serializer.serializeRaw(value);
    }
}

// Serializes one form field as "key=value". A List argument is expanded to one "key=value" pair
// per element, joined with '&'. Returns null when the value is null so callers can skip the field.
private static String serializeFormData(SerializerAdapter serializer, String key, Object value,
    boolean shouldEncode) {
    if (value == null) {
        return null;
    }

    String encodedKey = UrlEscapers.FORM_ESCAPER.escape(key);
    if (value instanceof List<?>) {
        return ((List<?>) value).stream()
            .map(element -> serializeAndEncodeFormValue(serializer, element, shouldEncode))
            .filter(Objects::nonNull)
            .map(formValue -> encodedKey + "=" + formValue)
            .collect(Collectors.joining("&"));
    } else {
        return encodedKey + "=" + serializeAndEncodeFormValue(serializer, value, shouldEncode);
    }
}

// Serializes a single form value and, when requested, form-escapes it.
private static String serializeAndEncodeFormValue(SerializerAdapter serializer, Object value,
    boolean shouldEncode) {
    if (value == null) {
        return null;
    }

    String serializedValue = serializer.serializeRaw(value);
    return shouldEncode ? UrlEscapers.FORM_ESCAPER.escape(serializedValue) : serializedValue;
}

// Replaces each "{parameterName}" placeholder in originalValue with the serialized (and, when the
// substitution requests it, path-escaped) matching method argument. Null serialized values become
// the empty string.
private String applySubstitutions(String originalValue, Iterable<Substitution> substitutions,
    Object[] methodArguments) {
    String result = originalValue;

    if (methodArguments != null) {
        for (Substitution substitution : substitutions) {
            final int substitutionParameterIndex = substitution.getMethodParameterIndex();
            if (0 <= substitutionParameterIndex && substitutionParameterIndex < methodArguments.length) {
                final Object methodArgument = methodArguments[substitutionParameterIndex];

                String substitutionValue = serialize(serializer, methodArgument);
                if (substitutionValue != null && !substitutionValue.isEmpty() && substitution.shouldEncode()) {
                    substitutionValue = UrlEscapers.PATH_ESCAPER.escape(substitutionValue);
                }
                // The replacement must never be null; use "" so the placeholder is still removed.
                if (substitutionValue == null) {
                    substitutionValue = "";
                }
                result = result.replace("{" + substitution.getUrlParameterName() + "}", substitutionValue);
            }
        }
    }

    return result;
}

// Builds the status-code-to-exception mapping from the @UnexpectedResponseExceptionType
// annotations. An annotation with no codes becomes the default exception; when no default is
// declared, HttpResponseException is used.
private Map<Integer, UnexpectedExceptionInformation> processUnexpectedResponseExceptionTypes() {
    HashMap<Integer, UnexpectedExceptionInformation> exceptionHashMap = new HashMap<>();

    for (UnexpectedResponseExceptionType exceptionAnnotation : unexpectedResponseExceptionTypes) {
        UnexpectedExceptionInformation exception = new UnexpectedExceptionInformation(exceptionAnnotation.value());
        if (exceptionAnnotation.code().length == 0) {
            defaultException = exception;
        } else {
            for (int statusCode : exceptionAnnotation.code()) {
                exceptionHashMap.put(statusCode, exception);
            }
        }
    }

    if (defaultException == null) {
        defaultException = new UnexpectedExceptionInformation(HttpResponseException.class);
    }

    return exceptionHashMap;
}
}
AutoRest-generated code does, but I'm not sure azure-core can assume that all generated code passes its arguments in the same order. However, a `CoreUtils.findLastOfType()` might slightly improve performance.
// Extracts the RequestOptions argument, if one was passed to the proxy method; returns null otherwise.
public RequestOptions setRequestOptions(Object[] swaggerMethodArguments) {
    return CoreUtils.findFirstOfType(swaggerMethodArguments, RequestOptions.class);
}
// NOTE(review): the bare statement and the duplicate method below appear to be extraction
// artifacts (the same method repeated) — TODO confirm against the original source.
return CoreUtils.findFirstOfType(swaggerMethodArguments, RequestOptions.class);
public RequestOptions setRequestOptions(Object[] swaggerMethodArguments) {
    return CoreUtils.findFirstOfType(swaggerMethodArguments, RequestOptions.class);
}
class SwaggerMethodParser implements HttpResponseDecodeData { private static final Pattern PATTERN_COLON_SLASH_SLASH = Pattern.compile(": private final SerializerAdapter serializer; private final String rawHost; private final String fullyQualifiedMethodName; private final HttpMethod httpMethod; private final String relativePath; private final List<Substitution> hostSubstitutions = new ArrayList<>(); private final List<Substitution> pathSubstitutions = new ArrayList<>(); private final List<Substitution> querySubstitutions = new ArrayList<>(); private final List<Substitution> formSubstitutions = new ArrayList<>(); private final List<Substitution> headerSubstitutions = new ArrayList<>(); private final HttpHeaders headers = new HttpHeaders(); private final Integer bodyContentMethodParameterIndex; private final String bodyContentType; private final Type bodyJavaType; private final BitSet expectedStatusCodes; private final Type returnType; private final Type returnValueWireType; private final UnexpectedResponseExceptionType[] unexpectedResponseExceptionTypes; private Map<Integer, UnexpectedExceptionInformation> exceptionMapping; private UnexpectedExceptionInformation defaultException; /** * Create a SwaggerMethodParser object using the provided fully qualified method name. * * @param swaggerMethod the Swagger method to parse. * @param rawHost the raw host value from the @Host annotation. Before this can be used as the host value in an HTTP * request, it must be processed through the possible host substitutions. */ SwaggerMethodParser(Method swaggerMethod, String rawHost) { this(swaggerMethod, rawHost, JacksonAdapter.createDefaultSerializerAdapter()); } SwaggerMethodParser(Method swaggerMethod, String rawHost, SerializerAdapter serializer) { this.serializer = serializer; this.rawHost = rawHost; final Class<?> swaggerInterface = swaggerMethod.getDeclaringClass(); fullyQualifiedMethodName = swaggerInterface.getName() + "." 
+ swaggerMethod.getName(); if (swaggerMethod.isAnnotationPresent(Get.class)) { this.httpMethod = HttpMethod.GET; this.relativePath = swaggerMethod.getAnnotation(Get.class).value(); } else if (swaggerMethod.isAnnotationPresent(Put.class)) { this.httpMethod = HttpMethod.PUT; this.relativePath = swaggerMethod.getAnnotation(Put.class).value(); } else if (swaggerMethod.isAnnotationPresent(Head.class)) { this.httpMethod = HttpMethod.HEAD; this.relativePath = swaggerMethod.getAnnotation(Head.class).value(); } else if (swaggerMethod.isAnnotationPresent(Delete.class)) { this.httpMethod = HttpMethod.DELETE; this.relativePath = swaggerMethod.getAnnotation(Delete.class).value(); } else if (swaggerMethod.isAnnotationPresent(Post.class)) { this.httpMethod = HttpMethod.POST; this.relativePath = swaggerMethod.getAnnotation(Post.class).value(); } else if (swaggerMethod.isAnnotationPresent(Patch.class)) { this.httpMethod = HttpMethod.PATCH; this.relativePath = swaggerMethod.getAnnotation(Patch.class).value(); } else { throw new MissingRequiredAnnotationException(Arrays.asList(Get.class, Put.class, Head.class, Delete.class, Post.class, Patch.class), swaggerMethod); } returnType = swaggerMethod.getGenericReturnType(); final ReturnValueWireType returnValueWireTypeAnnotation = swaggerMethod.getAnnotation(ReturnValueWireType.class); if (returnValueWireTypeAnnotation != null) { Class<?> returnValueWireType = returnValueWireTypeAnnotation.value(); if (returnValueWireType == Base64Url.class || returnValueWireType == UnixTime.class || returnValueWireType == DateTimeRfc1123.class) { this.returnValueWireType = returnValueWireType; } else if (TypeUtil.isTypeOrSubTypeOf(returnValueWireType, List.class)) { this.returnValueWireType = returnValueWireType.getGenericInterfaces()[0]; } else if (TypeUtil.isTypeOrSubTypeOf(returnValueWireType, Page.class)) { this.returnValueWireType = returnValueWireType; } else { this.returnValueWireType = null; } } else { this.returnValueWireType = null; } if 
(swaggerMethod.isAnnotationPresent(Headers.class)) { final Headers headersAnnotation = swaggerMethod.getAnnotation(Headers.class); final String[] headers = headersAnnotation.value(); for (final String header : headers) { final int colonIndex = header.indexOf(":"); if (colonIndex >= 0) { final String headerName = header.substring(0, colonIndex).trim(); if (!headerName.isEmpty()) { final String headerValue = header.substring(colonIndex + 1).trim(); if (!headerValue.isEmpty()) { if (headerValue.contains(",")) { this.headers.set(headerName, Arrays.asList(headerValue.split(","))); } else { this.headers.set(headerName, headerValue); } } } } } } final ExpectedResponses expectedResponses = swaggerMethod.getAnnotation(ExpectedResponses.class); if (expectedResponses != null && expectedResponses.value().length > 0) { expectedStatusCodes = new BitSet(); for (int code : expectedResponses.value()) { expectedStatusCodes.set(code); } } else { expectedStatusCodes = null; } unexpectedResponseExceptionTypes = swaggerMethod.getAnnotationsByType(UnexpectedResponseExceptionType.class); Integer bodyContentMethodParameterIndex = null; String bodyContentType = null; Type bodyJavaType = null; final Annotation[][] allParametersAnnotations = swaggerMethod.getParameterAnnotations(); for (int parameterIndex = 0; parameterIndex < allParametersAnnotations.length; ++parameterIndex) { final Annotation[] parameterAnnotations = swaggerMethod.getParameterAnnotations()[parameterIndex]; for (final Annotation annotation : parameterAnnotations) { final Class<? 
extends Annotation> annotationType = annotation.annotationType(); if (annotationType.equals(HostParam.class)) { final HostParam hostParamAnnotation = (HostParam) annotation; hostSubstitutions.add(new Substitution(hostParamAnnotation.value(), parameterIndex, !hostParamAnnotation.encoded())); } else if (annotationType.equals(PathParam.class)) { final PathParam pathParamAnnotation = (PathParam) annotation; pathSubstitutions.add(new Substitution(pathParamAnnotation.value(), parameterIndex, !pathParamAnnotation.encoded())); } else if (annotationType.equals(QueryParam.class)) { final QueryParam queryParamAnnotation = (QueryParam) annotation; querySubstitutions.add(new Substitution(queryParamAnnotation.value(), parameterIndex, !queryParamAnnotation.encoded())); } else if (annotationType.equals(HeaderParam.class)) { final HeaderParam headerParamAnnotation = (HeaderParam) annotation; headerSubstitutions.add(new Substitution(headerParamAnnotation.value(), parameterIndex, false)); } else if (annotationType.equals(BodyParam.class)) { final BodyParam bodyParamAnnotation = (BodyParam) annotation; bodyContentMethodParameterIndex = parameterIndex; bodyContentType = bodyParamAnnotation.value(); bodyJavaType = swaggerMethod.getGenericParameterTypes()[parameterIndex]; } else if (annotationType.equals(FormParam.class)) { final FormParam formParamAnnotation = (FormParam) annotation; formSubstitutions.add(new Substitution(formParamAnnotation.value(), parameterIndex, !formParamAnnotation.encoded())); bodyContentType = ContentType.APPLICATION_X_WWW_FORM_URLENCODED; bodyJavaType = String.class; } } } this.bodyContentMethodParameterIndex = bodyContentMethodParameterIndex; this.bodyContentType = bodyContentType; this.bodyJavaType = bodyJavaType; } /** * Get the fully qualified method that was called to invoke this HTTP request. 
* * @return the fully qualified method that was called to invoke this HTTP request */ public String getFullyQualifiedMethodName() { return fullyQualifiedMethodName; } /** * Get the HTTP method that will be used to complete the Swagger method's request. * * @return the HTTP method that will be used to complete the Swagger method's request */ public HttpMethod getHttpMethod() { return httpMethod; } /** * Sets the scheme and host to use for HTTP requests for this Swagger method. * * @param swaggerMethodArguments The arguments to use for scheme and host substitutions. * @param urlBuilder The {@link UrlBuilder} that will have its scheme and host set. */ public void setSchemeAndHost(Object[] swaggerMethodArguments, UrlBuilder urlBuilder) { final String substitutedHost = applySubstitutions(rawHost, hostSubstitutions, swaggerMethodArguments); final String[] substitutedHostParts = PATTERN_COLON_SLASH_SLASH.split(substitutedHost); if (substitutedHostParts.length >= 2) { urlBuilder.setScheme(substitutedHostParts[0]); urlBuilder.setHost(substitutedHostParts[1]); } else if (substitutedHostParts.length == 1) { urlBuilder.setScheme(substitutedHostParts[0]); urlBuilder.setHost(substitutedHost); } else { urlBuilder.setHost(substitutedHost); } } /** * Get the path that will be used to complete the Swagger method's request. * * @param methodArguments the method arguments to use with the path substitutions * @return the path value with its placeholders replaced by the matching substitutions */ public String setPath(Object[] methodArguments) { return applySubstitutions(relativePath, pathSubstitutions, methodArguments); } /** * Sets the encoded query parameters that have been added to this value based on the provided method arguments into * the passed {@link UrlBuilder}. * * @param swaggerMethodArguments the arguments that will be used to create the query parameters' values * @param urlBuilder The {@link UrlBuilder} where the encoded query parameters will be set. 
*/ public void setEncodedQueryParameters(Object[] swaggerMethodArguments, UrlBuilder urlBuilder) { if (swaggerMethodArguments == null) { return; } for (Substitution substitution : querySubstitutions) { final int parameterIndex = substitution.getMethodParameterIndex(); if (0 <= parameterIndex && parameterIndex < swaggerMethodArguments.length) { final Object methodArgument = swaggerMethodArguments[substitution.getMethodParameterIndex()]; String parameterValue = serialize(serializer, methodArgument); if (parameterValue != null) { if (substitution.shouldEncode()) { parameterValue = UrlEscapers.QUERY_ESCAPER.escape(parameterValue); } urlBuilder.setQueryParameter(substitution.getUrlParameterName(), parameterValue); } } } } /** * Sets the headers that have been added to this value based on the provided method arguments into the passed * {@link HttpHeaders}. * * @param swaggerMethodArguments The arguments that will be used to create the headers' values. * @param httpHeaders The {@link HttpHeaders} where the header values will be set. 
*/ public void setHeaders(Object[] swaggerMethodArguments, HttpHeaders httpHeaders) { for (HttpHeader header : headers) { httpHeaders.set(header.getName(), header.getValuesList()); } if (swaggerMethodArguments == null) { return; } for (Substitution headerSubstitution : headerSubstitutions) { final int parameterIndex = headerSubstitution.getMethodParameterIndex(); if (0 <= parameterIndex && parameterIndex < swaggerMethodArguments.length) { final Object methodArgument = swaggerMethodArguments[headerSubstitution.getMethodParameterIndex()]; if (methodArgument instanceof Map) { @SuppressWarnings("unchecked") final Map<String, ?> headerCollection = (Map<String, ?>) methodArgument; final String headerCollectionPrefix = headerSubstitution.getUrlParameterName(); for (final Map.Entry<String, ?> headerCollectionEntry : headerCollection.entrySet()) { final String headerName = headerCollectionPrefix + headerCollectionEntry.getKey(); final String headerValue = serialize(serializer, headerCollectionEntry.getValue()); if (headerValue != null) { httpHeaders.set(headerName, headerValue); } } } else { final String headerName = headerSubstitution.getUrlParameterName(); final String headerValue = serialize(serializer, methodArgument); if (headerValue != null) { httpHeaders.set(headerName, headerValue); } } } } } /** * Get the {@link Context} passed into the proxy method. * * @param swaggerMethodArguments the arguments passed to the proxy method * @return the context, or {@link Context */ public Context setContext(Object[] swaggerMethodArguments) { Context context = CoreUtils.findFirstOfType(swaggerMethodArguments, Context.class); return (context != null) ? context : Context.NONE; } /** * Get the {@link RequestOptions} passed into the proxy method. * * @param swaggerMethodArguments the arguments passed to the proxy method * @return the request options */ /** * Get whether or not the provided response status code is one of the expected status codes for this Swagger * method. * * 1. 
If the returned int[] is null, then all 2XX status codes are considered as success code. * 2. If the returned int[] is not-null, only the codes in the array are considered as success code. * * @param statusCode the status code that was returned in the HTTP response * @return whether or not the provided response status code is one of the expected status codes for this Swagger * method */ @Override public boolean isExpectedResponseStatusCode(final int statusCode) { return expectedStatusCodes == null ? statusCode < 400 : expectedStatusCodes.get(statusCode); } /** * Get the {@link UnexpectedExceptionInformation} that will be used to generate a RestException if the HTTP response * status code is not one of the expected status codes. * * If an UnexpectedExceptionInformation is not found for the status code the default UnexpectedExceptionInformation * will be returned. * * @param code Exception HTTP status code return from a REST API. * @return the UnexpectedExceptionInformation to generate an exception to throw or return. */ @Override public UnexpectedExceptionInformation getUnexpectedException(int code) { if (exceptionMapping == null) { exceptionMapping = processUnexpectedResponseExceptionTypes(); } return exceptionMapping.getOrDefault(code, defaultException); } /** * Get the object to be used as the value of the HTTP request. 
* * @param swaggerMethodArguments the method arguments to get the value object from * @return the object that will be used as the body of the HTTP request */ public Object setBody(Object[] swaggerMethodArguments) { Object result = null; if (bodyContentMethodParameterIndex != null && swaggerMethodArguments != null && 0 <= bodyContentMethodParameterIndex && bodyContentMethodParameterIndex < swaggerMethodArguments.length) { result = swaggerMethodArguments[bodyContentMethodParameterIndex]; } if (!CoreUtils.isNullOrEmpty(formSubstitutions) && swaggerMethodArguments != null) { result = formSubstitutions.stream() .map(substitution -> serializeFormData(serializer, substitution.getUrlParameterName(), swaggerMethodArguments[substitution.getMethodParameterIndex()], substitution.shouldEncode())) .filter(Objects::nonNull) .collect(Collectors.joining("&")); } return result; } /** * Get the Content-Type of the body of this Swagger method. * * @return the Content-Type of the body of this Swagger method */ public String getBodyContentType() { return bodyContentType; } /** * Get the return type for the method that this object describes. * * @return the return type for the method that this object describes. */ @Override public Type getReturnType() { return returnType; } /** * Get the type of the body parameter to this method, if present. * * @return the return type of the body parameter to this method */ public Type getBodyJavaType() { return bodyJavaType; } /** * Get the type that the return value will be send across the network as. If returnValueWireType is not null, then * the raw HTTP response body will need to parsed to this type and then converted to the actual returnType. 
* * @return the type that the raw HTTP response body will be sent as */ @Override public Type getReturnValueWireType() { return returnValueWireType; } private static String serialize(SerializerAdapter serializer, Object value) { if (value == null) { return null; } if (value instanceof String) { return (String) value; } else { return serializer.serializeRaw(value); } } private static String serializeFormData(SerializerAdapter serializer, String key, Object value, boolean shouldEncode) { if (value == null) { return null; } String encodedKey = UrlEscapers.FORM_ESCAPER.escape(key); if (value instanceof List<?>) { return ((List<?>) value).stream() .map(element -> serializeAndEncodeFormValue(serializer, element, shouldEncode)) .filter(Objects::nonNull) .map(formValue -> encodedKey + "=" + formValue) .collect(Collectors.joining("&")); } else { return encodedKey + "=" + serializeAndEncodeFormValue(serializer, value, shouldEncode); } } private static String serializeAndEncodeFormValue(SerializerAdapter serializer, Object value, boolean shouldEncode) { if (value == null) { return null; } String serializedValue = serializer.serializeRaw(value); return shouldEncode ? 
UrlEscapers.FORM_ESCAPER.escape(serializedValue) : serializedValue; } private String applySubstitutions(String originalValue, Iterable<Substitution> substitutions, Object[] methodArguments) { String result = originalValue; if (methodArguments != null) { for (Substitution substitution : substitutions) { final int substitutionParameterIndex = substitution.getMethodParameterIndex(); if (0 <= substitutionParameterIndex && substitutionParameterIndex < methodArguments.length) { final Object methodArgument = methodArguments[substitutionParameterIndex]; String substitutionValue = serialize(serializer, methodArgument); if (substitutionValue != null && !substitutionValue.isEmpty() && substitution.shouldEncode()) { substitutionValue = UrlEscapers.PATH_ESCAPER.escape(substitutionValue); } if (substitutionValue == null) { substitutionValue = ""; } result = result.replace("{" + substitution.getUrlParameterName() + "}", substitutionValue); } } } return result; } private Map<Integer, UnexpectedExceptionInformation> processUnexpectedResponseExceptionTypes() { HashMap<Integer, UnexpectedExceptionInformation> exceptionHashMap = new HashMap<>(); for (UnexpectedResponseExceptionType exceptionAnnotation : unexpectedResponseExceptionTypes) { UnexpectedExceptionInformation exception = new UnexpectedExceptionInformation(exceptionAnnotation.value()); if (exceptionAnnotation.code().length == 0) { defaultException = exception; } else { for (int statusCode : exceptionAnnotation.code()) { exceptionHashMap.put(statusCode, exception); } } } if (defaultException == null) { defaultException = new UnexpectedExceptionInformation(HttpResponseException.class); } return exceptionHashMap; } }
class SwaggerMethodParser implements HttpResponseDecodeData { private static final Pattern PATTERN_COLON_SLASH_SLASH = Pattern.compile(": private final SerializerAdapter serializer; private final String rawHost; private final String fullyQualifiedMethodName; private final HttpMethod httpMethod; private final String relativePath; private final List<Substitution> hostSubstitutions = new ArrayList<>(); private final List<Substitution> pathSubstitutions = new ArrayList<>(); private final List<Substitution> querySubstitutions = new ArrayList<>(); private final List<Substitution> formSubstitutions = new ArrayList<>(); private final List<Substitution> headerSubstitutions = new ArrayList<>(); private final HttpHeaders headers = new HttpHeaders(); private final Integer bodyContentMethodParameterIndex; private final String bodyContentType; private final Type bodyJavaType; private final BitSet expectedStatusCodes; private final Type returnType; private final Type returnValueWireType; private final UnexpectedResponseExceptionType[] unexpectedResponseExceptionTypes; private Map<Integer, UnexpectedExceptionInformation> exceptionMapping; private UnexpectedExceptionInformation defaultException; /** * Create a SwaggerMethodParser object using the provided fully qualified method name. * * @param swaggerMethod the Swagger method to parse. * @param rawHost the raw host value from the @Host annotation. Before this can be used as the host value in an HTTP * request, it must be processed through the possible host substitutions. */ SwaggerMethodParser(Method swaggerMethod, String rawHost) { this(swaggerMethod, rawHost, JacksonAdapter.createDefaultSerializerAdapter()); } SwaggerMethodParser(Method swaggerMethod, String rawHost, SerializerAdapter serializer) { this.serializer = serializer; this.rawHost = rawHost; final Class<?> swaggerInterface = swaggerMethod.getDeclaringClass(); fullyQualifiedMethodName = swaggerInterface.getName() + "." 
+ swaggerMethod.getName(); if (swaggerMethod.isAnnotationPresent(Get.class)) { this.httpMethod = HttpMethod.GET; this.relativePath = swaggerMethod.getAnnotation(Get.class).value(); } else if (swaggerMethod.isAnnotationPresent(Put.class)) { this.httpMethod = HttpMethod.PUT; this.relativePath = swaggerMethod.getAnnotation(Put.class).value(); } else if (swaggerMethod.isAnnotationPresent(Head.class)) { this.httpMethod = HttpMethod.HEAD; this.relativePath = swaggerMethod.getAnnotation(Head.class).value(); } else if (swaggerMethod.isAnnotationPresent(Delete.class)) { this.httpMethod = HttpMethod.DELETE; this.relativePath = swaggerMethod.getAnnotation(Delete.class).value(); } else if (swaggerMethod.isAnnotationPresent(Post.class)) { this.httpMethod = HttpMethod.POST; this.relativePath = swaggerMethod.getAnnotation(Post.class).value(); } else if (swaggerMethod.isAnnotationPresent(Patch.class)) { this.httpMethod = HttpMethod.PATCH; this.relativePath = swaggerMethod.getAnnotation(Patch.class).value(); } else { throw new MissingRequiredAnnotationException(Arrays.asList(Get.class, Put.class, Head.class, Delete.class, Post.class, Patch.class), swaggerMethod); } returnType = swaggerMethod.getGenericReturnType(); final ReturnValueWireType returnValueWireTypeAnnotation = swaggerMethod.getAnnotation(ReturnValueWireType.class); if (returnValueWireTypeAnnotation != null) { Class<?> returnValueWireType = returnValueWireTypeAnnotation.value(); if (returnValueWireType == Base64Url.class || returnValueWireType == UnixTime.class || returnValueWireType == DateTimeRfc1123.class) { this.returnValueWireType = returnValueWireType; } else if (TypeUtil.isTypeOrSubTypeOf(returnValueWireType, List.class)) { this.returnValueWireType = returnValueWireType.getGenericInterfaces()[0]; } else if (TypeUtil.isTypeOrSubTypeOf(returnValueWireType, Page.class)) { this.returnValueWireType = returnValueWireType; } else { this.returnValueWireType = null; } } else { this.returnValueWireType = null; } if 
(swaggerMethod.isAnnotationPresent(Headers.class)) { final Headers headersAnnotation = swaggerMethod.getAnnotation(Headers.class); final String[] headers = headersAnnotation.value(); for (final String header : headers) { final int colonIndex = header.indexOf(":"); if (colonIndex >= 0) { final String headerName = header.substring(0, colonIndex).trim(); if (!headerName.isEmpty()) { final String headerValue = header.substring(colonIndex + 1).trim(); if (!headerValue.isEmpty()) { if (headerValue.contains(",")) { this.headers.set(headerName, Arrays.asList(headerValue.split(","))); } else { this.headers.set(headerName, headerValue); } } } } } } final ExpectedResponses expectedResponses = swaggerMethod.getAnnotation(ExpectedResponses.class); if (expectedResponses != null && expectedResponses.value().length > 0) { expectedStatusCodes = new BitSet(); for (int code : expectedResponses.value()) { expectedStatusCodes.set(code); } } else { expectedStatusCodes = null; } unexpectedResponseExceptionTypes = swaggerMethod.getAnnotationsByType(UnexpectedResponseExceptionType.class); Integer bodyContentMethodParameterIndex = null; String bodyContentType = null; Type bodyJavaType = null; final Annotation[][] allParametersAnnotations = swaggerMethod.getParameterAnnotations(); for (int parameterIndex = 0; parameterIndex < allParametersAnnotations.length; ++parameterIndex) { final Annotation[] parameterAnnotations = swaggerMethod.getParameterAnnotations()[parameterIndex]; for (final Annotation annotation : parameterAnnotations) { final Class<? 
extends Annotation> annotationType = annotation.annotationType(); if (annotationType.equals(HostParam.class)) { final HostParam hostParamAnnotation = (HostParam) annotation; hostSubstitutions.add(new Substitution(hostParamAnnotation.value(), parameterIndex, !hostParamAnnotation.encoded())); } else if (annotationType.equals(PathParam.class)) { final PathParam pathParamAnnotation = (PathParam) annotation; pathSubstitutions.add(new Substitution(pathParamAnnotation.value(), parameterIndex, !pathParamAnnotation.encoded())); } else if (annotationType.equals(QueryParam.class)) { final QueryParam queryParamAnnotation = (QueryParam) annotation; querySubstitutions.add(new Substitution(queryParamAnnotation.value(), parameterIndex, !queryParamAnnotation.encoded())); } else if (annotationType.equals(HeaderParam.class)) { final HeaderParam headerParamAnnotation = (HeaderParam) annotation; headerSubstitutions.add(new Substitution(headerParamAnnotation.value(), parameterIndex, false)); } else if (annotationType.equals(BodyParam.class)) { final BodyParam bodyParamAnnotation = (BodyParam) annotation; bodyContentMethodParameterIndex = parameterIndex; bodyContentType = bodyParamAnnotation.value(); bodyJavaType = swaggerMethod.getGenericParameterTypes()[parameterIndex]; } else if (annotationType.equals(FormParam.class)) { final FormParam formParamAnnotation = (FormParam) annotation; formSubstitutions.add(new Substitution(formParamAnnotation.value(), parameterIndex, !formParamAnnotation.encoded())); bodyContentType = ContentType.APPLICATION_X_WWW_FORM_URLENCODED; bodyJavaType = String.class; } } } this.bodyContentMethodParameterIndex = bodyContentMethodParameterIndex; this.bodyContentType = bodyContentType; this.bodyJavaType = bodyJavaType; } /** * Get the fully qualified method that was called to invoke this HTTP request. 
* * @return the fully qualified method that was called to invoke this HTTP request */ public String getFullyQualifiedMethodName() { return fullyQualifiedMethodName; } /** * Get the HTTP method that will be used to complete the Swagger method's request. * * @return the HTTP method that will be used to complete the Swagger method's request */ public HttpMethod getHttpMethod() { return httpMethod; } /** * Sets the scheme and host to use for HTTP requests for this Swagger method. * * @param swaggerMethodArguments The arguments to use for scheme and host substitutions. * @param urlBuilder The {@link UrlBuilder} that will have its scheme and host set. */ public void setSchemeAndHost(Object[] swaggerMethodArguments, UrlBuilder urlBuilder) { final String substitutedHost = applySubstitutions(rawHost, hostSubstitutions, swaggerMethodArguments); final String[] substitutedHostParts = PATTERN_COLON_SLASH_SLASH.split(substitutedHost); if (substitutedHostParts.length >= 2) { urlBuilder.setScheme(substitutedHostParts[0]); urlBuilder.setHost(substitutedHostParts[1]); } else if (substitutedHostParts.length == 1) { urlBuilder.setScheme(substitutedHostParts[0]); urlBuilder.setHost(substitutedHost); } else { urlBuilder.setHost(substitutedHost); } } /** * Get the path that will be used to complete the Swagger method's request. * * @param methodArguments the method arguments to use with the path substitutions * @return the path value with its placeholders replaced by the matching substitutions */ public String setPath(Object[] methodArguments) { return applySubstitutions(relativePath, pathSubstitutions, methodArguments); } /** * Sets the encoded query parameters that have been added to this value based on the provided method arguments into * the passed {@link UrlBuilder}. * * @param swaggerMethodArguments the arguments that will be used to create the query parameters' values * @param urlBuilder The {@link UrlBuilder} where the encoded query parameters will be set. 
*/ public void setEncodedQueryParameters(Object[] swaggerMethodArguments, UrlBuilder urlBuilder) { if (swaggerMethodArguments == null) { return; } for (Substitution substitution : querySubstitutions) { final int parameterIndex = substitution.getMethodParameterIndex(); if (0 <= parameterIndex && parameterIndex < swaggerMethodArguments.length) { final Object methodArgument = swaggerMethodArguments[substitution.getMethodParameterIndex()]; String parameterValue = serialize(serializer, methodArgument); if (parameterValue != null) { if (substitution.shouldEncode()) { parameterValue = UrlEscapers.QUERY_ESCAPER.escape(parameterValue); } urlBuilder.setQueryParameter(substitution.getUrlParameterName(), parameterValue); } } } } /** * Sets the headers that have been added to this value based on the provided method arguments into the passed * {@link HttpHeaders}. * * @param swaggerMethodArguments The arguments that will be used to create the headers' values. * @param httpHeaders The {@link HttpHeaders} where the header values will be set. 
*/ public void setHeaders(Object[] swaggerMethodArguments, HttpHeaders httpHeaders) { for (HttpHeader header : headers) { httpHeaders.set(header.getName(), header.getValuesList()); } if (swaggerMethodArguments == null) { return; } for (Substitution headerSubstitution : headerSubstitutions) { final int parameterIndex = headerSubstitution.getMethodParameterIndex(); if (0 <= parameterIndex && parameterIndex < swaggerMethodArguments.length) { final Object methodArgument = swaggerMethodArguments[headerSubstitution.getMethodParameterIndex()]; if (methodArgument instanceof Map) { @SuppressWarnings("unchecked") final Map<String, ?> headerCollection = (Map<String, ?>) methodArgument; final String headerCollectionPrefix = headerSubstitution.getUrlParameterName(); for (final Map.Entry<String, ?> headerCollectionEntry : headerCollection.entrySet()) { final String headerName = headerCollectionPrefix + headerCollectionEntry.getKey(); final String headerValue = serialize(serializer, headerCollectionEntry.getValue()); if (headerValue != null) { httpHeaders.set(headerName, headerValue); } } } else { final String headerName = headerSubstitution.getUrlParameterName(); final String headerValue = serialize(serializer, methodArgument); if (headerValue != null) { httpHeaders.set(headerName, headerValue); } } } } } /** * Get the {@link Context} passed into the proxy method. * * @param swaggerMethodArguments the arguments passed to the proxy method * @return the context, or {@link Context */ public Context setContext(Object[] swaggerMethodArguments) { Context context = CoreUtils.findFirstOfType(swaggerMethodArguments, Context.class); return (context != null) ? context : Context.NONE; } /** * Get the {@link RequestOptions} passed into the proxy method. * * @param swaggerMethodArguments the arguments passed to the proxy method * @return the request options */ /** * Get whether or not the provided response status code is one of the expected status codes for this Swagger * method. * * 1. 
If the returned int[] is null, then all 2XX status codes are considered as success code. * 2. If the returned int[] is not-null, only the codes in the array are considered as success code. * * @param statusCode the status code that was returned in the HTTP response * @return whether or not the provided response status code is one of the expected status codes for this Swagger * method */ @Override public boolean isExpectedResponseStatusCode(final int statusCode) { return expectedStatusCodes == null ? statusCode < 400 : expectedStatusCodes.get(statusCode); } /** * Get the {@link UnexpectedExceptionInformation} that will be used to generate a RestException if the HTTP response * status code is not one of the expected status codes. * * If an UnexpectedExceptionInformation is not found for the status code the default UnexpectedExceptionInformation * will be returned. * * @param code Exception HTTP status code return from a REST API. * @return the UnexpectedExceptionInformation to generate an exception to throw or return. */ @Override public UnexpectedExceptionInformation getUnexpectedException(int code) { if (exceptionMapping == null) { exceptionMapping = processUnexpectedResponseExceptionTypes(); } return exceptionMapping.getOrDefault(code, defaultException); } /** * Get the object to be used as the value of the HTTP request. 
* * @param swaggerMethodArguments the method arguments to get the value object from * @return the object that will be used as the body of the HTTP request */ public Object setBody(Object[] swaggerMethodArguments) { Object result = null; if (bodyContentMethodParameterIndex != null && swaggerMethodArguments != null && 0 <= bodyContentMethodParameterIndex && bodyContentMethodParameterIndex < swaggerMethodArguments.length) { result = swaggerMethodArguments[bodyContentMethodParameterIndex]; } if (!CoreUtils.isNullOrEmpty(formSubstitutions) && swaggerMethodArguments != null) { result = formSubstitutions.stream() .map(substitution -> serializeFormData(serializer, substitution.getUrlParameterName(), swaggerMethodArguments[substitution.getMethodParameterIndex()], substitution.shouldEncode())) .filter(Objects::nonNull) .collect(Collectors.joining("&")); } return result; } /** * Get the Content-Type of the body of this Swagger method. * * @return the Content-Type of the body of this Swagger method */ public String getBodyContentType() { return bodyContentType; } /** * Get the return type for the method that this object describes. * * @return the return type for the method that this object describes. */ @Override public Type getReturnType() { return returnType; } /** * Get the type of the body parameter to this method, if present. * * @return the return type of the body parameter to this method */ public Type getBodyJavaType() { return bodyJavaType; } /** * Get the type that the return value will be send across the network as. If returnValueWireType is not null, then * the raw HTTP response body will need to parsed to this type and then converted to the actual returnType. 
* * @return the type that the raw HTTP response body will be sent as */ @Override public Type getReturnValueWireType() { return returnValueWireType; } private static String serialize(SerializerAdapter serializer, Object value) { if (value == null) { return null; } if (value instanceof String) { return (String) value; } else { return serializer.serializeRaw(value); } } private static String serializeFormData(SerializerAdapter serializer, String key, Object value, boolean shouldEncode) { if (value == null) { return null; } String encodedKey = UrlEscapers.FORM_ESCAPER.escape(key); if (value instanceof List<?>) { return ((List<?>) value).stream() .map(element -> serializeAndEncodeFormValue(serializer, element, shouldEncode)) .filter(Objects::nonNull) .map(formValue -> encodedKey + "=" + formValue) .collect(Collectors.joining("&")); } else { return encodedKey + "=" + serializeAndEncodeFormValue(serializer, value, shouldEncode); } } private static String serializeAndEncodeFormValue(SerializerAdapter serializer, Object value, boolean shouldEncode) { if (value == null) { return null; } String serializedValue = serializer.serializeRaw(value); return shouldEncode ? 
UrlEscapers.FORM_ESCAPER.escape(serializedValue) : serializedValue; } private String applySubstitutions(String originalValue, Iterable<Substitution> substitutions, Object[] methodArguments) { String result = originalValue; if (methodArguments != null) { for (Substitution substitution : substitutions) { final int substitutionParameterIndex = substitution.getMethodParameterIndex(); if (0 <= substitutionParameterIndex && substitutionParameterIndex < methodArguments.length) { final Object methodArgument = methodArguments[substitutionParameterIndex]; String substitutionValue = serialize(serializer, methodArgument); if (substitutionValue != null && !substitutionValue.isEmpty() && substitution.shouldEncode()) { substitutionValue = UrlEscapers.PATH_ESCAPER.escape(substitutionValue); } if (substitutionValue == null) { substitutionValue = ""; } result = result.replace("{" + substitution.getUrlParameterName() + "}", substitutionValue); } } } return result; } private Map<Integer, UnexpectedExceptionInformation> processUnexpectedResponseExceptionTypes() { HashMap<Integer, UnexpectedExceptionInformation> exceptionHashMap = new HashMap<>(); for (UnexpectedResponseExceptionType exceptionAnnotation : unexpectedResponseExceptionTypes) { UnexpectedExceptionInformation exception = new UnexpectedExceptionInformation(exceptionAnnotation.value()); if (exceptionAnnotation.code().length == 0) { defaultException = exception; } else { for (int statusCode : exceptionAnnotation.code()) { exceptionHashMap.put(statusCode, exception); } } } if (defaultException == null) { defaultException = new UnexpectedExceptionInformation(HttpResponseException.class); } return exceptionHashMap; } }
The `header` may not exist and `getHeaders().get(header)` may return a `null` causing NPE.
public RequestOptions addHeader(String header, String value) { this.requestCallback = this.requestCallback.andThen(request -> request.getHeaders().get(header).addValue(value)); return this; }
request.getHeaders().get(header).addValue(value));
public RequestOptions addHeader(String header, String value) { this.requestCallback = this.requestCallback.andThen(request -> { HttpHeader httpHeader = request.getHeaders().get(header); if (httpHeader == null) { request.getHeaders().set(header, value); } else { httpHeader.addValue(value); } }); return this; }
class RequestOptions { private Consumer<HttpRequest> requestCallback = request -> { }; private boolean throwOnError = true; private BinaryData requestBody; /** * Gets the request callback, applying all the configurations set on this RequestOptions. * @return the request callback */ Consumer<HttpRequest> getRequestCallback() { return this.requestCallback; } /** * Gets whether or not to throw an exception when an HTTP response with a status code indicating an error * (400 or above) is received. * * @return true if to throw on status codes of 400 or above, false if not. Default is true. */ boolean isThrowOnError() { return this.throwOnError; } /** * Adds a header to the HTTP request. * @param header the header key * @param value the header value * * @return the modified RequestOptions object */ /** * Adds a query parameter to the request URL. * * @param parameterName the name of the query parameter * @param value the value of the query parameter * @return the modified RequestOptions object */ public RequestOptions addQueryParam(String parameterName, String value) { return addQueryParam(parameterName, value, false); } /** * Adds a query parameter to the request URL, specifying whether the parameter is already encoded. * A value true for this argument indicates that value of {@link QueryParam * hence engine should not encode it, by default value will be encoded. * * @param parameterName the name of the query parameter * @param value the value of the query parameter * @param encoded whether or not this query parameter is already encoded * @return the modified RequestOptions object */ public RequestOptions addQueryParam(String parameterName, String value, boolean encoded) { this.requestCallback = this.requestCallback.andThen(request -> { String url = request.getUrl().toString(); String encodedParameterName = encoded ? parameterName : UrlEscapers.QUERY_ESCAPER.escape(parameterName); String encodedParameterValue = encoded ? 
value : UrlEscapers.QUERY_ESCAPER.escape(value); request.setUrl(url + (url.contains("?") ? "&" : "?") + encodedParameterName + "=" + encodedParameterValue); }); return this; } /** * Adds a custom request callback to modify the HTTP request before it's sent by the HttpClient. * The modifications made on a RequestOptions object is applied in order on the request. * * @param requestCallback the request callback * @return the modified RequestOptions object */ public RequestOptions addRequestCallback(Consumer<HttpRequest> requestCallback) { this.requestCallback = this.requestCallback.andThen(requestCallback); return this; } /** * Sets the body to send as part of the HTTP request. * @param requestBody the request body data * @return the modified RequestOptions object */ public RequestOptions setBody(BinaryData requestBody) { this.requestCallback = this.requestCallback.andThen(request -> { request.setBody(requestBody.toBytes()); }); return this; } /** * Sets whether or not to throw an exception when an HTTP response with a status code indicating an error * (400 or above) is received. By default an exception will be thrown when an error response is received. * * @param throwOnError true if to throw on status codes of 400 or above, false if not. Default is true. * @return the modified RequestOptions object */ public RequestOptions setThrowOnError(boolean throwOnError) { this.throwOnError = throwOnError; return this; } }
class RequestOptions { private Consumer<HttpRequest> requestCallback = request -> { }; private boolean throwOnError = true; private BinaryData requestBody; /** * Gets the request callback, applying all the configurations set on this RequestOptions. * @return the request callback */ Consumer<HttpRequest> getRequestCallback() { return this.requestCallback; } /** * Gets whether or not to throw an exception when an HTTP response with a status code indicating an error * (400 or above) is received. * * @return true if to throw on status codes of 400 or above, false if not. Default is true. */ boolean isThrowOnError() { return this.throwOnError; } /** * Adds a header to the HTTP request. * @param header the header key * @param value the header value * * @return the modified RequestOptions object */ /** * Adds a query parameter to the request URL. The parameter name and value will be URL encoded. * To use an already encoded parameter name and value, call {@code addQueryParam("name", "value", true)}. * * @param parameterName the name of the query parameter * @param value the value of the query parameter * @return the modified RequestOptions object */ public RequestOptions addQueryParam(String parameterName, String value) { return addQueryParam(parameterName, value, false); } /** * Adds a query parameter to the request URL, specifying whether the parameter is already encoded. * A value true for this argument indicates that value of {@link QueryParam * hence engine should not encode it, by default value will be encoded. * * @param parameterName the name of the query parameter * @param value the value of the query parameter * @param encoded whether or not this query parameter is already encoded * @return the modified RequestOptions object */ public RequestOptions addQueryParam(String parameterName, String value, boolean encoded) { this.requestCallback = this.requestCallback.andThen(request -> { String url = request.getUrl().toString(); String encodedParameterName = encoded ? 
parameterName : UrlEscapers.QUERY_ESCAPER.escape(parameterName); String encodedParameterValue = encoded ? value : UrlEscapers.QUERY_ESCAPER.escape(value); request.setUrl(url + (url.contains("?") ? "&" : "?") + encodedParameterName + "=" + encodedParameterValue); }); return this; } /** * Adds a custom request callback to modify the HTTP request before it's sent by the HttpClient. * The modifications made on a RequestOptions object is applied in order on the request. * * @param requestCallback the request callback * @return the modified RequestOptions object */ public RequestOptions addRequestCallback(Consumer<HttpRequest> requestCallback) { this.requestCallback = this.requestCallback.andThen(requestCallback); return this; } /** * Sets the body to send as part of the HTTP request. * @param requestBody the request body data * @return the modified RequestOptions object */ public RequestOptions setBody(BinaryData requestBody) { this.requestCallback = this.requestCallback.andThen(request -> { request.setBody(requestBody.toBytes()); }); return this; } /** * Sets whether or not to throw an exception when an HTTP response with a status code indicating an error * (400 or above) is received. By default an exception will be thrown when an error response is received. * * @param throwOnError true if to throw on status codes of 400 or above, false if not. Default is true. * @return the modified RequestOptions object */ public RequestOptions setThrowOnError(boolean throwOnError) { this.throwOnError = throwOnError; return this; } }
fixed.
public RequestOptions addHeader(String header, String value) { this.requestCallback = this.requestCallback.andThen(request -> request.getHeaders().get(header).addValue(value)); return this; }
request.getHeaders().get(header).addValue(value));
public RequestOptions addHeader(String header, String value) { this.requestCallback = this.requestCallback.andThen(request -> { HttpHeader httpHeader = request.getHeaders().get(header); if (httpHeader == null) { request.getHeaders().set(header, value); } else { httpHeader.addValue(value); } }); return this; }
class RequestOptions { private Consumer<HttpRequest> requestCallback = request -> { }; private boolean throwOnError = true; private BinaryData requestBody; /** * Gets the request callback, applying all the configurations set on this RequestOptions. * @return the request callback */ Consumer<HttpRequest> getRequestCallback() { return this.requestCallback; } /** * Gets whether or not to throw an exception when an HTTP response with a status code indicating an error * (400 or above) is received. * * @return true if to throw on status codes of 400 or above, false if not. Default is true. */ boolean isThrowOnError() { return this.throwOnError; } /** * Adds a header to the HTTP request. * @param header the header key * @param value the header value * * @return the modified RequestOptions object */ /** * Adds a query parameter to the request URL. * * @param parameterName the name of the query parameter * @param value the value of the query parameter * @return the modified RequestOptions object */ public RequestOptions addQueryParam(String parameterName, String value) { return addQueryParam(parameterName, value, false); } /** * Adds a query parameter to the request URL, specifying whether the parameter is already encoded. * A value true for this argument indicates that value of {@link QueryParam * hence engine should not encode it, by default value will be encoded. * * @param parameterName the name of the query parameter * @param value the value of the query parameter * @param encoded whether or not this query parameter is already encoded * @return the modified RequestOptions object */ public RequestOptions addQueryParam(String parameterName, String value, boolean encoded) { this.requestCallback = this.requestCallback.andThen(request -> { String url = request.getUrl().toString(); String encodedParameterName = encoded ? parameterName : UrlEscapers.QUERY_ESCAPER.escape(parameterName); String encodedParameterValue = encoded ? 
value : UrlEscapers.QUERY_ESCAPER.escape(value); request.setUrl(url + (url.contains("?") ? "&" : "?") + encodedParameterName + "=" + encodedParameterValue); }); return this; } /** * Adds a custom request callback to modify the HTTP request before it's sent by the HttpClient. * The modifications made on a RequestOptions object is applied in order on the request. * * @param requestCallback the request callback * @return the modified RequestOptions object */ public RequestOptions addRequestCallback(Consumer<HttpRequest> requestCallback) { this.requestCallback = this.requestCallback.andThen(requestCallback); return this; } /** * Sets the body to send as part of the HTTP request. * @param requestBody the request body data * @return the modified RequestOptions object */ public RequestOptions setBody(BinaryData requestBody) { this.requestCallback = this.requestCallback.andThen(request -> { request.setBody(requestBody.toBytes()); }); return this; } /** * Sets whether or not to throw an exception when an HTTP response with a status code indicating an error * (400 or above) is received. By default an exception will be thrown when an error response is received. * * @param throwOnError true if to throw on status codes of 400 or above, false if not. Default is true. * @return the modified RequestOptions object */ public RequestOptions setThrowOnError(boolean throwOnError) { this.throwOnError = throwOnError; return this; } }
class RequestOptions { private Consumer<HttpRequest> requestCallback = request -> { }; private boolean throwOnError = true; private BinaryData requestBody; /** * Gets the request callback, applying all the configurations set on this RequestOptions. * @return the request callback */ Consumer<HttpRequest> getRequestCallback() { return this.requestCallback; } /** * Gets whether or not to throw an exception when an HTTP response with a status code indicating an error * (400 or above) is received. * * @return true if to throw on status codes of 400 or above, false if not. Default is true. */ boolean isThrowOnError() { return this.throwOnError; } /** * Adds a header to the HTTP request. * @param header the header key * @param value the header value * * @return the modified RequestOptions object */ /** * Adds a query parameter to the request URL. The parameter name and value will be URL encoded. * To use an already encoded parameter name and value, call {@code addQueryParam("name", "value", true)}. * * @param parameterName the name of the query parameter * @param value the value of the query parameter * @return the modified RequestOptions object */ public RequestOptions addQueryParam(String parameterName, String value) { return addQueryParam(parameterName, value, false); } /** * Adds a query parameter to the request URL, specifying whether the parameter is already encoded. * A value true for this argument indicates that value of {@link QueryParam * hence engine should not encode it, by default value will be encoded. * * @param parameterName the name of the query parameter * @param value the value of the query parameter * @param encoded whether or not this query parameter is already encoded * @return the modified RequestOptions object */ public RequestOptions addQueryParam(String parameterName, String value, boolean encoded) { this.requestCallback = this.requestCallback.andThen(request -> { String url = request.getUrl().toString(); String encodedParameterName = encoded ? 
parameterName : UrlEscapers.QUERY_ESCAPER.escape(parameterName); String encodedParameterValue = encoded ? value : UrlEscapers.QUERY_ESCAPER.escape(value); request.setUrl(url + (url.contains("?") ? "&" : "?") + encodedParameterName + "=" + encodedParameterValue); }); return this; } /** * Adds a custom request callback to modify the HTTP request before it's sent by the HttpClient. * The modifications made on a RequestOptions object is applied in order on the request. * * @param requestCallback the request callback * @return the modified RequestOptions object */ public RequestOptions addRequestCallback(Consumer<HttpRequest> requestCallback) { this.requestCallback = this.requestCallback.andThen(requestCallback); return this; } /** * Sets the body to send as part of the HTTP request. * @param requestBody the request body data * @return the modified RequestOptions object */ public RequestOptions setBody(BinaryData requestBody) { this.requestCallback = this.requestCallback.andThen(request -> { request.setBody(requestBody.toBytes()); }); return this; } /** * Sets whether or not to throw an exception when an HTTP response with a status code indicating an error * (400 or above) is received. By default an exception will be thrown when an error response is received. * * @param throwOnError true if to throw on status codes of 400 or above, false if not. Default is true. * @return the modified RequestOptions object */ public RequestOptions setThrowOnError(boolean throwOnError) { this.throwOnError = throwOnError; return this; } }
Hi, @lzc-1997-abel , could you please investigate this issue?
public void testRefreshAndGetAliasByCertificate() throws InterruptedException { Assertions.assertEquals(keyVaultCertificates.refreshAndGetAliasByCertificate(certificate), "myalias"); when(keyVaultClient.getAliases()).thenReturn(null); Thread.sleep(10); Assertions.assertNotEquals(keyVaultCertificates.refreshAndGetAliasByCertificate(certificate), "myalias"); }
Thread.sleep(10);
public void testRefreshAndGetAliasByCertificate() throws InterruptedException { Assertions.assertEquals(keyVaultCertificates.refreshAndGetAliasByCertificate(certificate), "myalias"); when(keyVaultClient.getAliases()).thenReturn(null); Thread.sleep(10); Assertions.assertNotEquals(keyVaultCertificates.refreshAndGetAliasByCertificate(certificate), "myalias"); }
class KeyVaultCertificatesTest { private final KeyVaultClient keyVaultClient = mock(KeyVaultClient.class); private final Key key = mock(Key.class); private final Certificate certificate = mock(Certificate.class); private KeyVaultCertificates keyVaultCertificates; @BeforeEach public void beforeEach() { List<String> aliases = new ArrayList<>(); aliases.add("myalias"); when(keyVaultClient.getAliases()).thenReturn(aliases); when(keyVaultClient.getKey("myalias", null)).thenReturn(key); when(keyVaultClient.getCertificate("myalias")).thenReturn(certificate); keyVaultCertificates = new KeyVaultCertificates(0, keyVaultClient); } @Test public void testGetAliases() { Assertions.assertTrue(keyVaultCertificates.getAliases().contains("myalias")); } @Test public void testGetKey() { Assertions.assertTrue(keyVaultCertificates.getCertificateKeys().containsValue(key)); } @Test public void testGetCertificate() { Assertions.assertTrue(keyVaultCertificates.getCertificates().containsValue(certificate)); } @Test @Test public void testDeleteAlias() { Assertions.assertTrue(keyVaultCertificates.getAliases().contains("myalias")); keyVaultCertificates.deleteEntry("myalias"); Assertions.assertFalse(keyVaultCertificates.getAliases().contains("myalias")); } @Test public void testCertificatesNeedRefresh() throws InterruptedException { keyVaultCertificates = new KeyVaultCertificates(1000, keyVaultClient); Assertions.assertTrue(keyVaultCertificates.certificatesNeedRefresh()); keyVaultCertificates.getAliases(); Assertions.assertFalse(keyVaultCertificates.certificatesNeedRefresh()); Thread.sleep(10); KeyVaultCertificates.updateLastForceRefreshTime(); Assertions.assertTrue(keyVaultCertificates.certificatesNeedRefresh()); keyVaultCertificates.getAliases(); Assertions.assertFalse(keyVaultCertificates.certificatesNeedRefresh()); Thread.sleep(2000); Assertions.assertTrue(keyVaultCertificates.certificatesNeedRefresh()); } }
class KeyVaultCertificatesTest { private final KeyVaultClient keyVaultClient = mock(KeyVaultClient.class); private final Key key = mock(Key.class); private final Certificate certificate = mock(Certificate.class); private KeyVaultCertificates keyVaultCertificates; @BeforeEach public void beforeEach() { List<String> aliases = new ArrayList<>(); aliases.add("myalias"); when(keyVaultClient.getAliases()).thenReturn(aliases); when(keyVaultClient.getKey("myalias", null)).thenReturn(key); when(keyVaultClient.getCertificate("myalias")).thenReturn(certificate); keyVaultCertificates = new KeyVaultCertificates(0, keyVaultClient); } @Test public void testGetAliases() { Assertions.assertTrue(keyVaultCertificates.getAliases().contains("myalias")); } @Test public void testGetKey() { Assertions.assertTrue(keyVaultCertificates.getCertificateKeys().containsValue(key)); } @Test public void testGetCertificate() { Assertions.assertTrue(keyVaultCertificates.getCertificates().containsValue(certificate)); } @Test @Test public void testDeleteAlias() { Assertions.assertTrue(keyVaultCertificates.getAliases().contains("myalias")); keyVaultCertificates.deleteEntry("myalias"); Assertions.assertFalse(keyVaultCertificates.getAliases().contains("myalias")); } @Test public void testCertificatesNeedRefresh() throws InterruptedException { keyVaultCertificates = new KeyVaultCertificates(1000, keyVaultClient); Assertions.assertTrue(keyVaultCertificates.certificatesNeedRefresh()); keyVaultCertificates.getAliases(); Assertions.assertFalse(keyVaultCertificates.certificatesNeedRefresh()); Thread.sleep(10); KeyVaultCertificates.updateLastForceRefreshTime(); Assertions.assertTrue(keyVaultCertificates.certificatesNeedRefresh()); keyVaultCertificates.getAliases(); Assertions.assertFalse(keyVaultCertificates.certificatesNeedRefresh()); Thread.sleep(2000); Assertions.assertTrue(keyVaultCertificates.certificatesNeedRefresh()); } }
Could we use try-with-resources?
public static void runTests(PerfStressTest<?>[] tests, boolean sync, int parallel, int durationSeconds, String title) { completedOperations = new int[parallel]; lastCompletionNanoTimes = new long[parallel]; long endNanoTime = System.nanoTime() + ((long) durationSeconds * 1000000000); int[] lastCompleted = new int[] { 0 }; Disposable progressStatus = printStatus( "=== " + title + " ===" + System.lineSeparator() + "Current\t\tTotal\t\tAverage", () -> { int totalCompleted = getCompletedOperations(); int currentCompleted = totalCompleted - lastCompleted[0]; double averageCompleted = getOperationsPerSecond(); lastCompleted[0] = totalCompleted; return String.format("%d\t\t%d\t\t%.2f", currentCompleted, totalCompleted, averageCompleted); }, true, true); try { if (sync) { ForkJoinPool forkJoinPool = new ForkJoinPool(parallel); try { forkJoinPool.submit(() -> { IntStream.range(0, parallel).parallel().forEach(i -> runLoop(tests[i], i, endNanoTime)); }).get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e); } } else { Schedulers.onHandleError((t, e) -> { System.err.print(t + " threw exception: "); e.printStackTrace(); System.exit(1); }); Flux.range(0, parallel) .parallel() .runOn(Schedulers.boundedElastic()) .flatMap(i -> runLoopAsync(tests[i], i, endNanoTime)) .then() .block(); } } catch (Exception e) { System.err.println("Error occurred running tests: " + System.lineSeparator() + e); } finally { progressStatus.dispose(); } System.out.println("=== Results ==="); int totalOperations = getCompletedOperations(); if (totalOperations == 0) { throw new IllegalStateException("Zero operations has been completed"); } double operationsPerSecond = getOperationsPerSecond(); double secondsPerOperation = 1 / operationsPerSecond; double weightedAverageSeconds = totalOperations / operationsPerSecond; System.out.printf("Completed %,d operations in a weighted-average of %,.2fs (%,.2f ops/s, %,.3f s/op)%n", totalOperations, weightedAverageSeconds, 
operationsPerSecond, secondsPerOperation); System.out.println(); }
progressStatus.dispose();
public static void runTests(PerfStressTest<?>[] tests, boolean sync, int parallel, int durationSeconds, String title) { completedOperations = new int[parallel]; lastCompletionNanoTimes = new long[parallel]; long endNanoTime = System.nanoTime() + ((long) durationSeconds * 1000000000); int[] lastCompleted = new int[]{0}; Disposable progressStatus = printStatus( "=== " + title + " ===" + System.lineSeparator() + "Current\t\tTotal\t\tAverage", () -> { int totalCompleted = getCompletedOperations(); int currentCompleted = totalCompleted - lastCompleted[0]; double averageCompleted = getOperationsPerSecond(); lastCompleted[0] = totalCompleted; return String.format("%d\t\t%d\t\t%.2f", currentCompleted, totalCompleted, averageCompleted); }, true, true); try { if (sync) { ForkJoinPool forkJoinPool = new ForkJoinPool(parallel); forkJoinPool.submit(() -> { IntStream.range(0, parallel).parallel().forEach(i -> runLoop(tests[i], i, endNanoTime)); }).get(); } else { Schedulers.onHandleError((t, e) -> { System.err.print(t + " threw exception: "); e.printStackTrace(); System.exit(1); }); Flux.range(0, parallel) .parallel() .runOn(Schedulers.boundedElastic()) .flatMap(i -> runLoopAsync(tests[i], i, endNanoTime)) .then() .block(); } } catch (InterruptedException | ExecutionException e) { System.err.println("Error occurred when submitting jobs to ForkJoinPool. 
" + System.lineSeparator() + e); throw new RuntimeException(e); } catch (Exception e) { System.err.println("Error occurred running tests: " + System.lineSeparator() + e); } finally { progressStatus.dispose(); } System.out.println("=== Results ==="); int totalOperations = getCompletedOperations(); if (totalOperations == 0) { throw new IllegalStateException("Zero operations has been completed"); } double operationsPerSecond = getOperationsPerSecond(); double secondsPerOperation = 1 / operationsPerSecond; double weightedAverageSeconds = totalOperations / operationsPerSecond; System.out.printf("Completed %,d operations in a weighted-average of %,.2fs (%,.2f ops/s, %,.3f s/op)%n", totalOperations, weightedAverageSeconds, operationsPerSecond, secondsPerOperation); System.out.println(); }
class to execute. * @param options the configuration ro run performance test with. */ public static void run(Class<?> testClass, PerfStressOptions options) { System.out.println("=== Options ==="); try { ObjectMapper mapper = new ObjectMapper(); mapper.configure(SerializationFeature.INDENT_OUTPUT, true); mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false); mapper.writeValue(System.out, options); } catch (IOException e) { throw new RuntimeException(e); } System.out.println(); System.out.println(); Disposable setupStatus = printStatus("=== Setup ===", () -> ".", false, false); Disposable cleanupStatus = null; PerfStressTest<?>[] tests = new PerfStressTest<?>[options.getParallel()]; for (int i = 0; i < options.getParallel(); i++) { try { tests[i] = (PerfStressTest<?>) testClass.getConstructor(options.getClass()).newInstance(options); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | SecurityException | NoSuchMethodException e) { throw new RuntimeException(e); } } try { tests[0].globalSetupAsync().block(); try { Flux.just(tests).flatMap(PerfStressTest::setupAsync).blockLast(); setupStatus.dispose(); if (options.getWarmup() > 0) { runTests(tests, options.isSync(), options.getParallel(), options.getWarmup(), "Warmup"); } for (int i = 0; i < options.getIterations(); i++) { String title = "Test"; if (options.getIterations() > 1) { title += " " + (i + 1); } runTests(tests, options.isSync(), options.getParallel(), options.getDuration(), title); } } finally { if (!options.isNoCleanup()) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); Flux.just(tests).flatMap(t -> t.cleanupAsync()).blockLast(); } } } finally { if (!options.isNoCleanup()) { if (cleanupStatus == null) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); } tests[0].globalCleanupAsync().block(); } } if (cleanupStatus != null) { cleanupStatus.dispose(); } }
class to execute. * @param options the configuration ro run performance test with. * * @throws RuntimeException if the execution fails. */ public static void run(Class<?> testClass, PerfStressOptions options) { System.out.println("=== Options ==="); try { ObjectMapper mapper = new ObjectMapper(); mapper.configure(SerializationFeature.INDENT_OUTPUT, true); mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false); mapper.writeValue(System.out, options); } catch (IOException e) { throw new RuntimeException(e); } System.out.println(); System.out.println(); Disposable setupStatus = printStatus("=== Setup ===", () -> ".", false, false); Disposable cleanupStatus = null; PerfStressTest<?>[] tests = new PerfStressTest<?>[options.getParallel()]; for (int i = 0; i < options.getParallel(); i++) { try { tests[i] = (PerfStressTest<?>) testClass.getConstructor(options.getClass()).newInstance(options); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | SecurityException | NoSuchMethodException e) { throw new RuntimeException(e); } } try { tests[0].globalSetupAsync().block(); try { Flux.just(tests).flatMap(PerfStressTest::setupAsync).blockLast(); setupStatus.dispose(); if (options.getWarmup() > 0) { runTests(tests, options.isSync(), options.getParallel(), options.getWarmup(), "Warmup"); } for (int i = 0; i < options.getIterations(); i++) { String title = "Test"; if (options.getIterations() > 1) { title += " " + (i + 1); } runTests(tests, options.isSync(), options.getParallel(), options.getDuration(), title); } } finally { if (!options.isNoCleanup()) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); Flux.just(tests).flatMap(t -> t.cleanupAsync()).blockLast(); } } } finally { if (!options.isNoCleanup()) { if (cleanupStatus == null) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); } tests[0].globalCleanupAsync().block(); } } if (cleanupStatus != null) { 
cleanupStatus.dispose(); } }
This can be removed now since we have a catch-all block below.
public static void runTests(PerfStressTest<?>[] tests, boolean sync, int parallel, int durationSeconds, String title) { completedOperations = new int[parallel]; lastCompletionNanoTimes = new long[parallel]; long endNanoTime = System.nanoTime() + ((long) durationSeconds * 1000000000); int[] lastCompleted = new int[] { 0 }; Disposable progressStatus = printStatus( "=== " + title + " ===" + System.lineSeparator() + "Current\t\tTotal\t\tAverage", () -> { int totalCompleted = getCompletedOperations(); int currentCompleted = totalCompleted - lastCompleted[0]; double averageCompleted = getOperationsPerSecond(); lastCompleted[0] = totalCompleted; return String.format("%d\t\t%d\t\t%.2f", currentCompleted, totalCompleted, averageCompleted); }, true, true); try { if (sync) { ForkJoinPool forkJoinPool = new ForkJoinPool(parallel); try { forkJoinPool.submit(() -> { IntStream.range(0, parallel).parallel().forEach(i -> runLoop(tests[i], i, endNanoTime)); }).get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e); } } else { Schedulers.onHandleError((t, e) -> { System.err.print(t + " threw exception: "); e.printStackTrace(); System.exit(1); }); Flux.range(0, parallel) .parallel() .runOn(Schedulers.boundedElastic()) .flatMap(i -> runLoopAsync(tests[i], i, endNanoTime)) .then() .block(); } } catch (Exception e) { System.err.println("Error occurred running tests: " + System.lineSeparator() + e); } finally { progressStatus.dispose(); } System.out.println("=== Results ==="); int totalOperations = getCompletedOperations(); if (totalOperations == 0) { throw new IllegalStateException("Zero operations has been completed"); } double operationsPerSecond = getOperationsPerSecond(); double secondsPerOperation = 1 / operationsPerSecond; double weightedAverageSeconds = totalOperations / operationsPerSecond; System.out.printf("Completed %,d operations in a weighted-average of %,.2fs (%,.2f ops/s, %,.3f s/op)%n", totalOperations, weightedAverageSeconds, 
operationsPerSecond, secondsPerOperation); System.out.println(); }
}
public static void runTests(PerfStressTest<?>[] tests, boolean sync, int parallel, int durationSeconds, String title) { completedOperations = new int[parallel]; lastCompletionNanoTimes = new long[parallel]; long endNanoTime = System.nanoTime() + ((long) durationSeconds * 1000000000); int[] lastCompleted = new int[]{0}; Disposable progressStatus = printStatus( "=== " + title + " ===" + System.lineSeparator() + "Current\t\tTotal\t\tAverage", () -> { int totalCompleted = getCompletedOperations(); int currentCompleted = totalCompleted - lastCompleted[0]; double averageCompleted = getOperationsPerSecond(); lastCompleted[0] = totalCompleted; return String.format("%d\t\t%d\t\t%.2f", currentCompleted, totalCompleted, averageCompleted); }, true, true); try { if (sync) { ForkJoinPool forkJoinPool = new ForkJoinPool(parallel); forkJoinPool.submit(() -> { IntStream.range(0, parallel).parallel().forEach(i -> runLoop(tests[i], i, endNanoTime)); }).get(); } else { Schedulers.onHandleError((t, e) -> { System.err.print(t + " threw exception: "); e.printStackTrace(); System.exit(1); }); Flux.range(0, parallel) .parallel() .runOn(Schedulers.boundedElastic()) .flatMap(i -> runLoopAsync(tests[i], i, endNanoTime)) .then() .block(); } } catch (InterruptedException | ExecutionException e) { System.err.println("Error occurred when submitting jobs to ForkJoinPool. 
" + System.lineSeparator() + e); throw new RuntimeException(e); } catch (Exception e) { System.err.println("Error occurred running tests: " + System.lineSeparator() + e); } finally { progressStatus.dispose(); } System.out.println("=== Results ==="); int totalOperations = getCompletedOperations(); if (totalOperations == 0) { throw new IllegalStateException("Zero operations has been completed"); } double operationsPerSecond = getOperationsPerSecond(); double secondsPerOperation = 1 / operationsPerSecond; double weightedAverageSeconds = totalOperations / operationsPerSecond; System.out.printf("Completed %,d operations in a weighted-average of %,.2fs (%,.2f ops/s, %,.3f s/op)%n", totalOperations, weightedAverageSeconds, operationsPerSecond, secondsPerOperation); System.out.println(); }
class to execute. * @param options the configuration ro run performance test with. */ public static void run(Class<?> testClass, PerfStressOptions options) { System.out.println("=== Options ==="); try { ObjectMapper mapper = new ObjectMapper(); mapper.configure(SerializationFeature.INDENT_OUTPUT, true); mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false); mapper.writeValue(System.out, options); } catch (IOException e) { throw new RuntimeException(e); } System.out.println(); System.out.println(); Disposable setupStatus = printStatus("=== Setup ===", () -> ".", false, false); Disposable cleanupStatus = null; PerfStressTest<?>[] tests = new PerfStressTest<?>[options.getParallel()]; for (int i = 0; i < options.getParallel(); i++) { try { tests[i] = (PerfStressTest<?>) testClass.getConstructor(options.getClass()).newInstance(options); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | SecurityException | NoSuchMethodException e) { throw new RuntimeException(e); } } try { tests[0].globalSetupAsync().block(); try { Flux.just(tests).flatMap(PerfStressTest::setupAsync).blockLast(); setupStatus.dispose(); if (options.getWarmup() > 0) { runTests(tests, options.isSync(), options.getParallel(), options.getWarmup(), "Warmup"); } for (int i = 0; i < options.getIterations(); i++) { String title = "Test"; if (options.getIterations() > 1) { title += " " + (i + 1); } runTests(tests, options.isSync(), options.getParallel(), options.getDuration(), title); } } finally { if (!options.isNoCleanup()) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); Flux.just(tests).flatMap(t -> t.cleanupAsync()).blockLast(); } } } finally { if (!options.isNoCleanup()) { if (cleanupStatus == null) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); } tests[0].globalCleanupAsync().block(); } } if (cleanupStatus != null) { cleanupStatus.dispose(); } }
class to execute. * @param options the configuration ro run performance test with. * * @throws RuntimeException if the execution fails. */ public static void run(Class<?> testClass, PerfStressOptions options) { System.out.println("=== Options ==="); try { ObjectMapper mapper = new ObjectMapper(); mapper.configure(SerializationFeature.INDENT_OUTPUT, true); mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false); mapper.writeValue(System.out, options); } catch (IOException e) { throw new RuntimeException(e); } System.out.println(); System.out.println(); Disposable setupStatus = printStatus("=== Setup ===", () -> ".", false, false); Disposable cleanupStatus = null; PerfStressTest<?>[] tests = new PerfStressTest<?>[options.getParallel()]; for (int i = 0; i < options.getParallel(); i++) { try { tests[i] = (PerfStressTest<?>) testClass.getConstructor(options.getClass()).newInstance(options); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | SecurityException | NoSuchMethodException e) { throw new RuntimeException(e); } } try { tests[0].globalSetupAsync().block(); try { Flux.just(tests).flatMap(PerfStressTest::setupAsync).blockLast(); setupStatus.dispose(); if (options.getWarmup() > 0) { runTests(tests, options.isSync(), options.getParallel(), options.getWarmup(), "Warmup"); } for (int i = 0; i < options.getIterations(); i++) { String title = "Test"; if (options.getIterations() > 1) { title += " " + (i + 1); } runTests(tests, options.isSync(), options.getParallel(), options.getDuration(), title); } } finally { if (!options.isNoCleanup()) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); Flux.just(tests).flatMap(t -> t.cleanupAsync()).blockLast(); } } } finally { if (!options.isNoCleanup()) { if (cleanupStatus == null) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); } tests[0].globalCleanupAsync().block(); } } if (cleanupStatus != null) { 
cleanupStatus.dispose(); } }
I think try-with-resources only works with AutoCloseable and not with Disposable.
public static void runTests(PerfStressTest<?>[] tests, boolean sync, int parallel, int durationSeconds, String title) { completedOperations = new int[parallel]; lastCompletionNanoTimes = new long[parallel]; long endNanoTime = System.nanoTime() + ((long) durationSeconds * 1000000000); int[] lastCompleted = new int[] { 0 }; Disposable progressStatus = printStatus( "=== " + title + " ===" + System.lineSeparator() + "Current\t\tTotal\t\tAverage", () -> { int totalCompleted = getCompletedOperations(); int currentCompleted = totalCompleted - lastCompleted[0]; double averageCompleted = getOperationsPerSecond(); lastCompleted[0] = totalCompleted; return String.format("%d\t\t%d\t\t%.2f", currentCompleted, totalCompleted, averageCompleted); }, true, true); try { if (sync) { ForkJoinPool forkJoinPool = new ForkJoinPool(parallel); try { forkJoinPool.submit(() -> { IntStream.range(0, parallel).parallel().forEach(i -> runLoop(tests[i], i, endNanoTime)); }).get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e); } } else { Schedulers.onHandleError((t, e) -> { System.err.print(t + " threw exception: "); e.printStackTrace(); System.exit(1); }); Flux.range(0, parallel) .parallel() .runOn(Schedulers.boundedElastic()) .flatMap(i -> runLoopAsync(tests[i], i, endNanoTime)) .then() .block(); } } catch (Exception e) { System.err.println("Error occurred running tests: " + System.lineSeparator() + e); } finally { progressStatus.dispose(); } System.out.println("=== Results ==="); int totalOperations = getCompletedOperations(); if (totalOperations == 0) { throw new IllegalStateException("Zero operations has been completed"); } double operationsPerSecond = getOperationsPerSecond(); double secondsPerOperation = 1 / operationsPerSecond; double weightedAverageSeconds = totalOperations / operationsPerSecond; System.out.printf("Completed %,d operations in a weighted-average of %,.2fs (%,.2f ops/s, %,.3f s/op)%n", totalOperations, weightedAverageSeconds, 
operationsPerSecond, secondsPerOperation); System.out.println(); }
progressStatus.dispose();
public static void runTests(PerfStressTest<?>[] tests, boolean sync, int parallel, int durationSeconds, String title) { completedOperations = new int[parallel]; lastCompletionNanoTimes = new long[parallel]; long endNanoTime = System.nanoTime() + ((long) durationSeconds * 1000000000); int[] lastCompleted = new int[]{0}; Disposable progressStatus = printStatus( "=== " + title + " ===" + System.lineSeparator() + "Current\t\tTotal\t\tAverage", () -> { int totalCompleted = getCompletedOperations(); int currentCompleted = totalCompleted - lastCompleted[0]; double averageCompleted = getOperationsPerSecond(); lastCompleted[0] = totalCompleted; return String.format("%d\t\t%d\t\t%.2f", currentCompleted, totalCompleted, averageCompleted); }, true, true); try { if (sync) { ForkJoinPool forkJoinPool = new ForkJoinPool(parallel); forkJoinPool.submit(() -> { IntStream.range(0, parallel).parallel().forEach(i -> runLoop(tests[i], i, endNanoTime)); }).get(); } else { Schedulers.onHandleError((t, e) -> { System.err.print(t + " threw exception: "); e.printStackTrace(); System.exit(1); }); Flux.range(0, parallel) .parallel() .runOn(Schedulers.boundedElastic()) .flatMap(i -> runLoopAsync(tests[i], i, endNanoTime)) .then() .block(); } } catch (InterruptedException | ExecutionException e) { System.err.println("Error occurred when submitting jobs to ForkJoinPool. 
" + System.lineSeparator() + e); throw new RuntimeException(e); } catch (Exception e) { System.err.println("Error occurred running tests: " + System.lineSeparator() + e); } finally { progressStatus.dispose(); } System.out.println("=== Results ==="); int totalOperations = getCompletedOperations(); if (totalOperations == 0) { throw new IllegalStateException("Zero operations has been completed"); } double operationsPerSecond = getOperationsPerSecond(); double secondsPerOperation = 1 / operationsPerSecond; double weightedAverageSeconds = totalOperations / operationsPerSecond; System.out.printf("Completed %,d operations in a weighted-average of %,.2fs (%,.2f ops/s, %,.3f s/op)%n", totalOperations, weightedAverageSeconds, operationsPerSecond, secondsPerOperation); System.out.println(); }
class to execute. * @param options the configuration ro run performance test with. */ public static void run(Class<?> testClass, PerfStressOptions options) { System.out.println("=== Options ==="); try { ObjectMapper mapper = new ObjectMapper(); mapper.configure(SerializationFeature.INDENT_OUTPUT, true); mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false); mapper.writeValue(System.out, options); } catch (IOException e) { throw new RuntimeException(e); } System.out.println(); System.out.println(); Disposable setupStatus = printStatus("=== Setup ===", () -> ".", false, false); Disposable cleanupStatus = null; PerfStressTest<?>[] tests = new PerfStressTest<?>[options.getParallel()]; for (int i = 0; i < options.getParallel(); i++) { try { tests[i] = (PerfStressTest<?>) testClass.getConstructor(options.getClass()).newInstance(options); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | SecurityException | NoSuchMethodException e) { throw new RuntimeException(e); } } try { tests[0].globalSetupAsync().block(); try { Flux.just(tests).flatMap(PerfStressTest::setupAsync).blockLast(); setupStatus.dispose(); if (options.getWarmup() > 0) { runTests(tests, options.isSync(), options.getParallel(), options.getWarmup(), "Warmup"); } for (int i = 0; i < options.getIterations(); i++) { String title = "Test"; if (options.getIterations() > 1) { title += " " + (i + 1); } runTests(tests, options.isSync(), options.getParallel(), options.getDuration(), title); } } finally { if (!options.isNoCleanup()) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); Flux.just(tests).flatMap(t -> t.cleanupAsync()).blockLast(); } } } finally { if (!options.isNoCleanup()) { if (cleanupStatus == null) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); } tests[0].globalCleanupAsync().block(); } } if (cleanupStatus != null) { cleanupStatus.dispose(); } }
class to execute. * @param options the configuration ro run performance test with. * * @throws RuntimeException if the execution fails. */ public static void run(Class<?> testClass, PerfStressOptions options) { System.out.println("=== Options ==="); try { ObjectMapper mapper = new ObjectMapper(); mapper.configure(SerializationFeature.INDENT_OUTPUT, true); mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false); mapper.writeValue(System.out, options); } catch (IOException e) { throw new RuntimeException(e); } System.out.println(); System.out.println(); Disposable setupStatus = printStatus("=== Setup ===", () -> ".", false, false); Disposable cleanupStatus = null; PerfStressTest<?>[] tests = new PerfStressTest<?>[options.getParallel()]; for (int i = 0; i < options.getParallel(); i++) { try { tests[i] = (PerfStressTest<?>) testClass.getConstructor(options.getClass()).newInstance(options); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | SecurityException | NoSuchMethodException e) { throw new RuntimeException(e); } } try { tests[0].globalSetupAsync().block(); try { Flux.just(tests).flatMap(PerfStressTest::setupAsync).blockLast(); setupStatus.dispose(); if (options.getWarmup() > 0) { runTests(tests, options.isSync(), options.getParallel(), options.getWarmup(), "Warmup"); } for (int i = 0; i < options.getIterations(); i++) { String title = "Test"; if (options.getIterations() > 1) { title += " " + (i + 1); } runTests(tests, options.isSync(), options.getParallel(), options.getDuration(), title); } } finally { if (!options.isNoCleanup()) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); Flux.just(tests).flatMap(t -> t.cleanupAsync()).blockLast(); } } } finally { if (!options.isNoCleanup()) { if (cleanupStatus == null) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); } tests[0].globalCleanupAsync().block(); } } if (cleanupStatus != null) { 
cleanupStatus.dispose(); } }
This is disposable. IIRC try-with-resources was for AutoCloseable?
public static void runTests(PerfStressTest<?>[] tests, boolean sync, int parallel, int durationSeconds, String title) { completedOperations = new int[parallel]; lastCompletionNanoTimes = new long[parallel]; long endNanoTime = System.nanoTime() + ((long) durationSeconds * 1000000000); int[] lastCompleted = new int[] { 0 }; Disposable progressStatus = printStatus( "=== " + title + " ===" + System.lineSeparator() + "Current\t\tTotal\t\tAverage", () -> { int totalCompleted = getCompletedOperations(); int currentCompleted = totalCompleted - lastCompleted[0]; double averageCompleted = getOperationsPerSecond(); lastCompleted[0] = totalCompleted; return String.format("%d\t\t%d\t\t%.2f", currentCompleted, totalCompleted, averageCompleted); }, true, true); try { if (sync) { ForkJoinPool forkJoinPool = new ForkJoinPool(parallel); try { forkJoinPool.submit(() -> { IntStream.range(0, parallel).parallel().forEach(i -> runLoop(tests[i], i, endNanoTime)); }).get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e); } } else { Schedulers.onHandleError((t, e) -> { System.err.print(t + " threw exception: "); e.printStackTrace(); System.exit(1); }); Flux.range(0, parallel) .parallel() .runOn(Schedulers.boundedElastic()) .flatMap(i -> runLoopAsync(tests[i], i, endNanoTime)) .then() .block(); } } catch (Exception e) { System.err.println("Error occurred running tests: " + System.lineSeparator() + e); } finally { progressStatus.dispose(); } System.out.println("=== Results ==="); int totalOperations = getCompletedOperations(); if (totalOperations == 0) { throw new IllegalStateException("Zero operations has been completed"); } double operationsPerSecond = getOperationsPerSecond(); double secondsPerOperation = 1 / operationsPerSecond; double weightedAverageSeconds = totalOperations / operationsPerSecond; System.out.printf("Completed %,d operations in a weighted-average of %,.2fs (%,.2f ops/s, %,.3f s/op)%n", totalOperations, weightedAverageSeconds, 
operationsPerSecond, secondsPerOperation); System.out.println(); }
progressStatus.dispose();
public static void runTests(PerfStressTest<?>[] tests, boolean sync, int parallel, int durationSeconds, String title) { completedOperations = new int[parallel]; lastCompletionNanoTimes = new long[parallel]; long endNanoTime = System.nanoTime() + ((long) durationSeconds * 1000000000); int[] lastCompleted = new int[]{0}; Disposable progressStatus = printStatus( "=== " + title + " ===" + System.lineSeparator() + "Current\t\tTotal\t\tAverage", () -> { int totalCompleted = getCompletedOperations(); int currentCompleted = totalCompleted - lastCompleted[0]; double averageCompleted = getOperationsPerSecond(); lastCompleted[0] = totalCompleted; return String.format("%d\t\t%d\t\t%.2f", currentCompleted, totalCompleted, averageCompleted); }, true, true); try { if (sync) { ForkJoinPool forkJoinPool = new ForkJoinPool(parallel); forkJoinPool.submit(() -> { IntStream.range(0, parallel).parallel().forEach(i -> runLoop(tests[i], i, endNanoTime)); }).get(); } else { Schedulers.onHandleError((t, e) -> { System.err.print(t + " threw exception: "); e.printStackTrace(); System.exit(1); }); Flux.range(0, parallel) .parallel() .runOn(Schedulers.boundedElastic()) .flatMap(i -> runLoopAsync(tests[i], i, endNanoTime)) .then() .block(); } } catch (InterruptedException | ExecutionException e) { System.err.println("Error occurred when submitting jobs to ForkJoinPool. 
" + System.lineSeparator() + e); throw new RuntimeException(e); } catch (Exception e) { System.err.println("Error occurred running tests: " + System.lineSeparator() + e); } finally { progressStatus.dispose(); } System.out.println("=== Results ==="); int totalOperations = getCompletedOperations(); if (totalOperations == 0) { throw new IllegalStateException("Zero operations has been completed"); } double operationsPerSecond = getOperationsPerSecond(); double secondsPerOperation = 1 / operationsPerSecond; double weightedAverageSeconds = totalOperations / operationsPerSecond; System.out.printf("Completed %,d operations in a weighted-average of %,.2fs (%,.2f ops/s, %,.3f s/op)%n", totalOperations, weightedAverageSeconds, operationsPerSecond, secondsPerOperation); System.out.println(); }
class to execute. * @param options the configuration ro run performance test with. */ public static void run(Class<?> testClass, PerfStressOptions options) { System.out.println("=== Options ==="); try { ObjectMapper mapper = new ObjectMapper(); mapper.configure(SerializationFeature.INDENT_OUTPUT, true); mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false); mapper.writeValue(System.out, options); } catch (IOException e) { throw new RuntimeException(e); } System.out.println(); System.out.println(); Disposable setupStatus = printStatus("=== Setup ===", () -> ".", false, false); Disposable cleanupStatus = null; PerfStressTest<?>[] tests = new PerfStressTest<?>[options.getParallel()]; for (int i = 0; i < options.getParallel(); i++) { try { tests[i] = (PerfStressTest<?>) testClass.getConstructor(options.getClass()).newInstance(options); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | SecurityException | NoSuchMethodException e) { throw new RuntimeException(e); } } try { tests[0].globalSetupAsync().block(); try { Flux.just(tests).flatMap(PerfStressTest::setupAsync).blockLast(); setupStatus.dispose(); if (options.getWarmup() > 0) { runTests(tests, options.isSync(), options.getParallel(), options.getWarmup(), "Warmup"); } for (int i = 0; i < options.getIterations(); i++) { String title = "Test"; if (options.getIterations() > 1) { title += " " + (i + 1); } runTests(tests, options.isSync(), options.getParallel(), options.getDuration(), title); } } finally { if (!options.isNoCleanup()) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); Flux.just(tests).flatMap(t -> t.cleanupAsync()).blockLast(); } } } finally { if (!options.isNoCleanup()) { if (cleanupStatus == null) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); } tests[0].globalCleanupAsync().block(); } } if (cleanupStatus != null) { cleanupStatus.dispose(); } }
class to execute. * @param options the configuration ro run performance test with. * * @throws RuntimeException if the execution fails. */ public static void run(Class<?> testClass, PerfStressOptions options) { System.out.println("=== Options ==="); try { ObjectMapper mapper = new ObjectMapper(); mapper.configure(SerializationFeature.INDENT_OUTPUT, true); mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false); mapper.writeValue(System.out, options); } catch (IOException e) { throw new RuntimeException(e); } System.out.println(); System.out.println(); Disposable setupStatus = printStatus("=== Setup ===", () -> ".", false, false); Disposable cleanupStatus = null; PerfStressTest<?>[] tests = new PerfStressTest<?>[options.getParallel()]; for (int i = 0; i < options.getParallel(); i++) { try { tests[i] = (PerfStressTest<?>) testClass.getConstructor(options.getClass()).newInstance(options); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | SecurityException | NoSuchMethodException e) { throw new RuntimeException(e); } } try { tests[0].globalSetupAsync().block(); try { Flux.just(tests).flatMap(PerfStressTest::setupAsync).blockLast(); setupStatus.dispose(); if (options.getWarmup() > 0) { runTests(tests, options.isSync(), options.getParallel(), options.getWarmup(), "Warmup"); } for (int i = 0; i < options.getIterations(); i++) { String title = "Test"; if (options.getIterations() > 1) { title += " " + (i + 1); } runTests(tests, options.isSync(), options.getParallel(), options.getDuration(), title); } } finally { if (!options.isNoCleanup()) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); Flux.just(tests).flatMap(t -> t.cleanupAsync()).blockLast(); } } } finally { if (!options.isNoCleanup()) { if (cleanupStatus == null) { cleanupStatus = printStatus("=== Cleanup ===", () -> ".", false, false); } tests[0].globalCleanupAsync().block(); } } if (cleanupStatus != null) { 
cleanupStatus.dispose(); } }
Mind also updating the Javadocs to discuss the errors thrown
public ExponentialBackoff(int maxRetries, Duration baseDelay, Duration maxDelay) { if (maxRetries < 0) { throw logger.logExceptionAsError(new IllegalArgumentException("Max retries cannot be less than 0.")); } Objects.requireNonNull(baseDelay, "'baseDelay' cannot be null."); Objects.requireNonNull(maxDelay, "'maxDelay' cannot be null."); if (baseDelay.isZero() || baseDelay.isNegative()) { throw logger.logExceptionAsError(new IllegalArgumentException("'baseDelay' cannot be negative or 0.")); } if (baseDelay.compareTo(maxDelay) > 0) { throw logger .logExceptionAsError(new IllegalArgumentException("'baseDelay' cannot be greater than 'maxDelay'.")); } this.maxRetries = maxRetries; this.baseDelay = baseDelay; this.maxDelay = maxDelay; }
if (baseDelay.isZero() || baseDelay.isNegative()) {
public ExponentialBackoff(int maxRetries, Duration baseDelay, Duration maxDelay) { if (maxRetries < 0) { throw logger.logExceptionAsError(new IllegalArgumentException("Max retries cannot be less than 0.")); } Objects.requireNonNull(baseDelay, "'baseDelay' cannot be null."); Objects.requireNonNull(maxDelay, "'maxDelay' cannot be null."); if (baseDelay.isZero() || baseDelay.isNegative()) { throw logger.logExceptionAsError(new IllegalArgumentException("'baseDelay' cannot be negative or 0.")); } if (baseDelay.compareTo(maxDelay) > 0) { throw logger .logExceptionAsError(new IllegalArgumentException("'baseDelay' cannot be greater than 'maxDelay'.")); } this.maxRetries = maxRetries; this.baseDelay = baseDelay; this.maxDelay = maxDelay; }
class ExponentialBackoff implements RetryStrategy { private static final double JITTER_FACTOR = 0.05; private static final int DEFAULT_MAX_RETRIES = 3; private static final Duration DEFAULT_BASE_DELAY = Duration.ofMillis(800); private static final Duration DEFAULT_MAX_DELAY = Duration.ofSeconds(8); private final ClientLogger logger = new ClientLogger(ExponentialBackoff.class); private final int maxRetries; private final Duration baseDelay; private final Duration maxDelay; /** * Creates an instance of {@link ExponentialBackoff} with a maximum of three retry attempts. This strategy starts * with a delay of 800 milliseconds and exponentially increases with each additional retry attempt. */ public ExponentialBackoff() { this(DEFAULT_MAX_RETRIES, DEFAULT_BASE_DELAY, DEFAULT_MAX_DELAY); } /** * Creates an instance of {@link ExponentialBackoff}. * * @param maxRetries The max retry attempts that can be made. * @param baseDelay The base delay duration for retry. * @param maxDelay The max delay duration for retry. */ @Override public int getMaxRetries() { return maxRetries; } @Override public Duration calculateRetryDelay(int retryAttempts) { long delayWithJitterInNanos = ThreadLocalRandom.current() .nextLong((long) (baseDelay.toNanos() * (1 - JITTER_FACTOR)), (long) (baseDelay.toNanos() * (1 + JITTER_FACTOR))); Duration delay = Duration.ofNanos(Math.min((1 << retryAttempts) * delayWithJitterInNanos, maxDelay.toNanos())); return delay; } }
class ExponentialBackoff implements RetryStrategy { private static final double JITTER_FACTOR = 0.05; private static final int DEFAULT_MAX_RETRIES = 3; private static final Duration DEFAULT_BASE_DELAY = Duration.ofMillis(800); private static final Duration DEFAULT_MAX_DELAY = Duration.ofSeconds(8); private final ClientLogger logger = new ClientLogger(ExponentialBackoff.class); private final int maxRetries; private final Duration baseDelay; private final Duration maxDelay; /** * Creates an instance of {@link ExponentialBackoff} with a maximum of three retry attempts. This strategy starts * with a delay of 800 milliseconds and exponentially increases with each additional retry attempt. */ public ExponentialBackoff() { this(DEFAULT_MAX_RETRIES, DEFAULT_BASE_DELAY, DEFAULT_MAX_DELAY); } /** * Creates an instance of {@link ExponentialBackoff}. * * @param maxRetries The max retry attempts that can be made. * @param baseDelay The base delay duration for retry. * @param maxDelay The max delay duration for retry. * @throws IllegalArgumentException if {@code maxRetries} is less than 0 or {@code baseDelay} is less than or * equal to 0 or {@code maxDelay} is less than {@code baseDelay}. */ @Override public int getMaxRetries() { return maxRetries; } @Override public Duration calculateRetryDelay(int retryAttempts) { long delayWithJitterInNanos = ThreadLocalRandom.current() .nextLong((long) (baseDelay.toNanos() * (1 - JITTER_FACTOR)), (long) (baseDelay.toNanos() * (1 + JITTER_FACTOR))); Duration delay = Duration.ofNanos(Math.min((1 << retryAttempts) * delayWithJitterInNanos, maxDelay.toNanos())); return delay; } }
One is throwing NPE and the other is throwing IllegalArgumentException. If the input param is null throw NPE and if it's empty throw IllegalArgumentException. See [Java guidelines](https://azure.github.io/azure-sdk/java_introduction.html#exceptions)
public HttpAuthorization(String scheme, String parameter) { if (CoreUtils.isNullOrEmpty(scheme)) { throw logger.logExceptionAsError(new IllegalArgumentException("scheme must be a nonempty string.")); } if (CoreUtils.isNullOrEmpty(parameter)) { throw logger.logExceptionAsError(new NullPointerException("parameter must be a nonempty string.")); } this.scheme = scheme; this.parameter = parameter; }
}
public HttpAuthorization(String scheme, String parameter) { Objects.requireNonNull(scheme); Objects.requireNonNull(parameter); if (scheme.isEmpty()) { throw logger.logExceptionAsError(new IllegalArgumentException("scheme must be a nonempty string.")); } if (parameter.isEmpty()) { throw logger.logExceptionAsError(new IllegalArgumentException("parameter must be a nonempty string.")); } this.scheme = scheme; this.parameter = parameter; }
class HttpAuthorization { private final ClientLogger logger = new ClientLogger(DynamicRequest.class); private final String scheme; private final String parameter; /** * Constructs a new HttpAuthorization instance. * * @param scheme Scheme component of an authorization header value. * @param parameter The credentials used for the authorization header value. */ /** * @return Scheme of the authorization header. */ public String getScheme() { return scheme; } /** * @return Credential of the authorization header. */ public String getParameter() { return parameter; } @Override public String toString() { return String.format("%s %s", scheme, parameter); } }
class HttpAuthorization { private final ClientLogger logger = new ClientLogger(HttpAuthorization.class); private final String scheme; private final String parameter; /** * Constructs a new HttpAuthorization instance. * * @param scheme Scheme component of an authorization header value. * @param parameter The credentials used for the authorization header value. * @throws NullPointerException if any argument is null. * @throws IllegalArgumentException if any argument is an empty string. */ /** * @return Scheme of the authorization header. */ public String getScheme() { return scheme; } /** * @return Credential of the authorization header. */ public String getParameter() { return parameter; } @Override public String toString() { return String.format("%s %s", scheme, parameter); } }
Format this code.
public static void putEnvironmentPropertyToSystemPropertyForKeyVaultJca() { KEYVAULT_JCA_SYSTEM_PROPERTIES.forEach( environmentPropertyKey -> { String value = System.getenv(environmentPropertyKey); String systemPropertyKey = environmentPropertyKey.replaceFirst("CERTIFICATE_", "").toLowerCase().replaceFirst("azure_keyvault_", "azure.keyvault.").replaceAll("_", "-"); System.getProperties().put(systemPropertyKey, value); } ); }
String systemPropertyKey = environmentPropertyKey.replaceFirst("CERTIFICATE_", "").toLowerCase().replaceFirst("azure_keyvault_",
public static void putEnvironmentPropertyToSystemPropertyForKeyVaultJca() { KEYVAULT_JCA_SYSTEM_PROPERTIES.forEach( environmentPropertyKey -> { String value = System.getenv(environmentPropertyKey); String systemPropertyKey = environmentPropertyKey.toLowerCase().replaceFirst("azure_keyvault_", "azure.keyvault.").replaceAll("_", "-"); System.getProperties().put(systemPropertyKey, value); } ); }
class PropertyConvertorUtils { public static final List<String> KEYVAULT_JCA_SYSTEM_PROPERTIES = Arrays.asList("AZURE_KEYVAULT_ENDPOINT", "AZURE_KEYVAULT_TENANT_ID", "AZURE_KEYVAULT_CLIENT_ID", "AZURE_KEYVAULT_CLIENT_SECRET", "CERTIFICATE_AZURE_KEYVAULT_URI"); public static KeyStore getKeyVaultKeyStore() throws CertificateException, NoSuchAlgorithmException, IOException, KeyStoreException { KeyStore keyStore = KeyStore.getInstance("AzureKeyVault"); KeyVaultLoadStoreParameter parameter = new KeyVaultLoadStoreParameter( System.getenv("AZURE_KEYVAULT_ENDPOINT"), System.getenv("AZURE_KEYVAULT_TENANT_ID"), System.getenv("AZURE_KEYVAULT_CLIENT_ID"), System.getenv("AZURE_KEYVAULT_CLIENT_SECRET")); keyStore.load(parameter); return keyStore; } public static void addKeyVaultJcaProvider() { KeyVaultJcaProvider provider = new KeyVaultJcaProvider(); Security.addProvider(provider); } }
class PropertyConvertorUtils { public static final List<String> KEYVAULT_JCA_SYSTEM_PROPERTIES = Arrays.asList("AZURE_KEYVAULT_ENDPOINT", "AZURE_KEYVAULT_TENANT_ID", "AZURE_KEYVAULT_CLIENT_ID", "AZURE_KEYVAULT_CLIENT_SECRET", "AZURE_KEYVAULT_URI"); public static KeyStore getKeyVaultKeyStore() throws CertificateException, NoSuchAlgorithmException, IOException, KeyStoreException { KeyStore keyStore = KeyStore.getInstance("AzureKeyVault"); KeyVaultLoadStoreParameter parameter = new KeyVaultLoadStoreParameter( System.getenv("AZURE_KEYVAULT_ENDPOINT"), System.getenv("AZURE_KEYVAULT_TENANT_ID"), System.getenv("AZURE_KEYVAULT_CLIENT_ID"), System.getenv("AZURE_KEYVAULT_CLIENT_SECRET")); keyStore.load(parameter); return keyStore; } public static void addKeyVaultJcaProvider() { KeyVaultJcaProvider provider = new KeyVaultJcaProvider(); Security.addProvider(provider); } }
Should still document that an exception is thrown by the method even if it isn't checked. Also, should this be an `IllegalArgumentException` to be more targeted?
public static KeyVaultRoleScope fromUrl(String url) { try { return fromString(new URL(url).getPath(), KeyVaultRoleScope.class); } catch (MalformedURLException e) { throw new RuntimeException(e); } }
throw new RuntimeException(e);
public static KeyVaultRoleScope fromUrl(String url) { try { return fromString(new URL(url).getPath(), KeyVaultRoleScope.class); } catch (MalformedURLException e) { throw new IllegalArgumentException(e); } }
class KeyVaultRoleScope extends ExpandableStringEnum<KeyVaultRoleScope> { public static final KeyVaultRoleScope GLOBAL = fromString("/"); public static final KeyVaultRoleScope KEYS = fromString("/keys"); /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param name A name to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromString(String name) { return fromString(name, KeyVaultRoleScope.class); } /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A string representing a URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromUrl(URL url) { return fromString(url.getPath(), KeyVaultRoleScope.class); } }
class KeyVaultRoleScope extends ExpandableStringEnum<KeyVaultRoleScope> { public static final KeyVaultRoleScope GLOBAL = fromString("/"); public static final KeyVaultRoleScope KEYS = fromString("/keys"); /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param name A name to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromString(String name) { return fromString(name, KeyVaultRoleScope.class); } /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A string representing a URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. * @throws IllegalArgumentException If the given {@link String URL String} is malformed. */ /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromUrl(URL url) { return fromString(url.getPath(), KeyVaultRoleScope.class); } }
I think `IllegalArgumentException` is fine but since it a static method I'm not sure if( we should use a logger or not. I don't think we want to have static loggers. Any thoughts @srngar?
public static KeyVaultRoleScope fromUrl(String url) { try { return fromString(new URL(url).getPath(), KeyVaultRoleScope.class); } catch (MalformedURLException e) { throw new RuntimeException(e); } }
throw new RuntimeException(e);
public static KeyVaultRoleScope fromUrl(String url) { try { return fromString(new URL(url).getPath(), KeyVaultRoleScope.class); } catch (MalformedURLException e) { throw new IllegalArgumentException(e); } }
class KeyVaultRoleScope extends ExpandableStringEnum<KeyVaultRoleScope> { public static final KeyVaultRoleScope GLOBAL = fromString("/"); public static final KeyVaultRoleScope KEYS = fromString("/keys"); /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param name A name to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromString(String name) { return fromString(name, KeyVaultRoleScope.class); } /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A string representing a URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromUrl(URL url) { return fromString(url.getPath(), KeyVaultRoleScope.class); } }
class KeyVaultRoleScope extends ExpandableStringEnum<KeyVaultRoleScope> { public static final KeyVaultRoleScope GLOBAL = fromString("/"); public static final KeyVaultRoleScope KEYS = fromString("/keys"); /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param name A name to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromString(String name) { return fromString(name, KeyVaultRoleScope.class); } /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A string representing a URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. * @throws IllegalArgumentException If the given {@link String URL String} is malformed. */ /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromUrl(URL url) { return fromString(url.getPath(), KeyVaultRoleScope.class); } }
Not using a logger for now, but did change to the exception type you suggested
public static KeyVaultRoleScope fromUrl(String url) { try { return fromString(new URL(url).getPath(), KeyVaultRoleScope.class); } catch (MalformedURLException e) { throw new RuntimeException(e); } }
throw new RuntimeException(e);
public static KeyVaultRoleScope fromUrl(String url) { try { return fromString(new URL(url).getPath(), KeyVaultRoleScope.class); } catch (MalformedURLException e) { throw new IllegalArgumentException(e); } }
class KeyVaultRoleScope extends ExpandableStringEnum<KeyVaultRoleScope> { public static final KeyVaultRoleScope GLOBAL = fromString("/"); public static final KeyVaultRoleScope KEYS = fromString("/keys"); /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param name A name to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromString(String name) { return fromString(name, KeyVaultRoleScope.class); } /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A string representing a URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromUrl(URL url) { return fromString(url.getPath(), KeyVaultRoleScope.class); } }
class KeyVaultRoleScope extends ExpandableStringEnum<KeyVaultRoleScope> { public static final KeyVaultRoleScope GLOBAL = fromString("/"); public static final KeyVaultRoleScope KEYS = fromString("/keys"); /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param name A name to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromString(String name) { return fromString(name, KeyVaultRoleScope.class); } /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A string representing a URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. * @throws IllegalArgumentException If the given {@link String URL String} is malformed. */ /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromUrl(URL url) { return fromString(url.getPath(), KeyVaultRoleScope.class); } }
IllegalArgumentException should be logged and thrown here. You can use static final loggers in static methods.
public static KeyVaultRoleScope fromUrl(String url) { try { return fromString(new URL(url).getPath(), KeyVaultRoleScope.class); } catch (MalformedURLException e) { throw new RuntimeException(e); } }
throw new RuntimeException(e);
public static KeyVaultRoleScope fromUrl(String url) { try { return fromString(new URL(url).getPath(), KeyVaultRoleScope.class); } catch (MalformedURLException e) { throw new IllegalArgumentException(e); } }
class KeyVaultRoleScope extends ExpandableStringEnum<KeyVaultRoleScope> { public static final KeyVaultRoleScope GLOBAL = fromString("/"); public static final KeyVaultRoleScope KEYS = fromString("/keys"); /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param name A name to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromString(String name) { return fromString(name, KeyVaultRoleScope.class); } /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A string representing a URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromUrl(URL url) { return fromString(url.getPath(), KeyVaultRoleScope.class); } }
class KeyVaultRoleScope extends ExpandableStringEnum<KeyVaultRoleScope> { public static final KeyVaultRoleScope GLOBAL = fromString("/"); public static final KeyVaultRoleScope KEYS = fromString("/keys"); /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param name A name to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromString(String name) { return fromString(name, KeyVaultRoleScope.class); } /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A string representing a URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. * @throws IllegalArgumentException If the given {@link String URL String} is malformed. */ /** * Creates or finds a {@link KeyVaultRoleScope} from its string representation. * * @param url A URL containing the name of the scope to look for. * @return The corresponding {@link KeyVaultRoleScope}. */ public static KeyVaultRoleScope fromUrl(URL url) { return fromString(url.getPath(), KeyVaultRoleScope.class); } }
Validate that the command line args include input, output and pom file locations. If not, throw an exception with information on how to run this app. #Resolved
public static void main(String[] args) { BomGenerator generator = new BomGenerator(); parseCommandLine(args, generator); generator.generate(); }
parseCommandLine(args, generator);
public static void main(String[] args) { BomGenerator generator = parseCommandLine(args); generator.generate(); }
class Main { private static void parseCommandLine(String[] args, BomGenerator generator) { for (String arg : args) { Matcher matcher = Utils.COMMANDLINE_REGEX.matcher(arg); if (matcher.matches()) { if (matcher.groupCount() == 2) { String argName = matcher.group(1); String argValue = matcher.group(2); switch (argName.toLowerCase()) { case COMMANDLINE_INPUTFILE: generator.setInputFile(argValue); break; case COMMANDLINE_OUTPUTFILE: generator.setOutputFile(argValue); break; case COMMANDLINE_POMFILE: generator.setPomFile(argValue); break; case COMMANDLINE_EXTERNALDEPENDENCIES: generator.setExternalDependenciesFile((argValue)); break; } } } } } }
class Main { private static BomGenerator parseCommandLine(String[] args) { String inputFile = null, outputFile = null, pomFile = null; for (String arg : args) { Matcher matcher = Utils.COMMANDLINE_REGEX.matcher(arg); if (matcher.matches()) { if (matcher.groupCount() == 2) { String argName = matcher.group(1); String argValue = matcher.group(2); switch (argName.toLowerCase()) { case COMMANDLINE_INPUTFILE: inputFile = argValue; break; case COMMANDLINE_OUTPUTFILE: outputFile = argValue; break; case COMMANDLINE_POMFILE: pomFile = argValue; break; } } } } validateInputs(inputFile, outputFile, pomFile); return new BomGenerator(inputFile, outputFile, pomFile); } private static void validateInputs(String inputFile, String outputFile, String pomFile) { validateInput(inputFile, COMMANDLINE_INPUTFILE); validateInput(outputFile, COMMANDLINE_OUTPUTFILE); validateInput(pomFile, COMMANDLINE_POMFILE); } private static void validateInput(String argName, String argValue) { if(argValue == null || argValue.isEmpty()) { throw new NullPointerException(String.format("%s can't be null", argName)); } } }
These methods seem like they are both writing the bom file. #Resolved
public void generate() { TreeSet<BomDependency> inputDependencies = scan(); TreeSet<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); TreeSet<BomDependency> outputDependencies = analyzer.analyze(); outputDependencies.retainAll(inputDependencies); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); outputDependencies = analyzer.analyze(); boolean validationPassed = analyzer.validate(); if(validationPassed) { rewriteBomFile(); writeBom(outputDependencies); } else { logger.info("Validation for the BOM failed. Exiting..."); } }
TreeSet<BomDependency> inputDependencies = scan();
public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. Exiting..."); } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private String externalDependenciesFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator() { } public void setInputFile(String inputFileName) { this.inputFileName = inputFileName; } public void setOutputFile(String outputFileName) { this.outputFileName = outputFileName; } public void setPomFile(String pomFileName) { this.pomFileName = pomFileName; } public void setExternalDependenciesFile(String externalDependenciesFileName) { this.externalDependenciesFileName = externalDependenciesFileName; } private TreeSet<BomDependency> scan() { TreeSet<BomDependency> inputDependencies = new TreeSet<>(new BomDependencyComparator()); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { if (line.startsWith("com.azure")) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (matcher.matches()) { if (matcher.groupCount() == 4) { String groupId = matcher.group(1); String artifactId = matcher.group(2); String version = matcher.group(3); Matcher nonGAMatcher = SDK_NON_GA_PATTERN.matcher(version); if (!nonGAMatcher.matches()) { BomDependency dependency = new BomDependency(groupId, artifactId, version); if (AZURE_CORE_GROUPID.equalsIgnoreCase(groupId)) { switch (artifactId) { case "azure-sdk-all": case "azure-sdk-parent": case "azure-client-sdk-parent": break; default: if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER) && !artifactId.equalsIgnoreCase(AZURE_CORE_TEST_LIBRARY))) { logger.info("Skipping dependency {}:{}", groupId, artifactId); continue; } inputDependencies.add(dependency); break; } } } } } } } } catch (IOException exception) { exception.printStackTrace(); } return inputDependencies; } private TreeSet<BomDependency> resolveExternalDependencies() { TreeSet<BomDependency> externalDependencies = new 
TreeSet<>(new BomDependencyComparator()); MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); for (Dependency externalDependency : externalBomDependencies) { externalDependencies.addAll(Utils.getPomFileContent(externalDependency)); } } catch (XmlPullParserException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException exception) { exception.printStackTrace(); } return externalDependencies; } private void rewriteBomFile() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.pomFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } private void writeBom(TreeSet<BomDependency> bomDependencies) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; 
}).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.outputFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator(String inputFileName, String outputFileName, String pomFileName) { this.inputFileName = inputFileName; this.outputFileName = outputFileName; this.pomFileName = pomFileName; } private List<BomDependency> scan() { List<BomDependency> inputDependencies = new ArrayList<>(); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { BomDependency dependency = scanDependency(line); if(dependency != null) { inputDependencies.add(dependency); } } } catch (IOException exception) { logger.error("Input file parsing failed. Exception{}", exception.toString()); } return inputDependencies; } private BomDependency scanDependency(String line) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (!matcher.matches()) { return null; } if (matcher.groupCount() != 3) { return null; } String artifactId = matcher.group(1); String version = matcher.group(2); if(version.contains("-")) { return null; } if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) { logger.trace("Skipping dependency {}:{}", BASE_AZURE_GROUPID, artifactId); return null; } return new BomDependency(BASE_AZURE_GROUPID, artifactId, version); } private Model readModel() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); return model; } catch (XmlPullParserException | IOException e) { logger.error("BOM reading failed with: {}", e.toString()); } return null; } private void writeModel(Model model) { String pomFileName = this.pomFileName; writeModel(pomFileName, model); } private void writeModel(String fileName, Model model) { MavenXpp3Writer writer = new MavenXpp3Writer(); try { writer.write(new FileWriter(fileName), model); } catch (IOException 
exception) { logger.error("BOM writing failed with: {}", exception.toString()); } } private List<BomDependency> resolveExternalDependencies() { List<BomDependency> externalDependencies = new ArrayList<>(); List<Dependency> externalBomDependencies = getExternalDependencies(); externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies)); return externalDependencies; } private List<Dependency> getExternalDependencies() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); } private void rewriteExistingBomFile() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(model); } private void writeBom(Collection<BomDependency> bomDependencies) { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; }).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(this.outputFileName, model); } }
Since we need to make multiple calls to get the pom files we could use [HttpClient](https://docs.oracle.com/en/java/javase/11/docs/api/java.net.http/java/net/http/HttpClient.html) that was introduced in JDK 11. It provides async APIs. Also, get the list of all dependencies and use a single client to retrieve the pom files. #Resolved
public static List<BomDependency> getPomFileContent(Dependency dependency) { HttpURLConnection connection = null; try { String[] groups = dependency.getGroupId().split("[.]"); URL url = null; if(groups.length == 2) { url = new URL("https: } else if (groups.length == 3) { url = new URL("https: } connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("accept", "application/xml"); connection.setConnectTimeout(5000); connection.setReadTimeout(5000); int responseCode = connection.getResponseCode(); if (HttpURLConnection.HTTP_OK == responseCode) { InputStream responseStream = connection.getInputStream(); MavenXpp3Reader reader = new MavenXpp3Reader(); Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } } catch (Exception exception) { } finally { if (connection != null) { connection.disconnect(); } } return null; }
HttpURLConnection connection = null;
public static List<BomDependency> getPomFileContent(Dependency dependency) { String[] groups = STRING_SPLIT_BY_DOT.split(dependency.getGroupId()); String url = null; if(groups.length == 2) { url = "https: } else if (groups.length == 3) { url = "https: } else { throw new UnsupportedOperationException("Can't parse the external BOM file."); } HttpRequest request = HttpRequest.newBuilder() .uri(URI.create(url)) .GET() .header("accept", "application/xml") .timeout(Duration.ofMillis(5000)) .build(); return HTTP_CLIENT.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()) .thenApply(response -> { if(response.statusCode() == 200) { return Utils.parsePomFileContent(response.body()); } return null; }).join(); }
class Utils { public static final String COMMANDLINE_INPUTFILE = "inputfile"; public static final String COMMANDLINE_OUTPUTFILE = "outputfile"; public static final String COMMANDLINE_POMFILE = "pomfile"; public static final String COMMANDLINE_EXTERNALDEPENDENCIES = "externalDependencies"; public static final String COMMANDLINE_GROUPID = "groupid"; public static final String COMMANDLINE_EXCLUSIONLIST = "exclusionList"; public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)"); public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test"); public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("(.+):(.+);(.+);(.+)"); public static final Pattern EXTERNAL_DEPENDENCY_PATTERN = Pattern.compile("(.+):(.+);(.+)"); public static final Pattern SDK_NON_GA_PATTERN = Pattern.compile("(.+)-(.+)"); public static final String AZURE_CORE_GROUPID = "com.azure"; public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test"; public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf"; public static final String AZURE_CORE_TEST_LIBRARY = "azure-core-test"; public static final String CONFLICTING_DEPENDENCIES = "conflict"; public static final String BOM_ELIGIBLE = "bom"; public static final HashSet<String> EXTERNAL_BOM_DEPENDENCIES = new HashSet<String>(Arrays.asList( "io.projectreactor", "com.fasterxml.jackson", "io.netty", "io.projectreactor.netty" )); public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList( "junit-jupiter-api" )); public static final String POM_TYPE = "pom"; private static String getPropertyName(String propertyValue) { if(propertyValue.startsWith("${")) { return propertyValue.substring(2, propertyValue.length() - 1); } return propertyValue; } }
class Utils { public static final String COMMANDLINE_INPUTFILE = "inputfile"; public static final String COMMANDLINE_OUTPUTFILE = "outputfile"; public static final String COMMANDLINE_POMFILE = "pomfile"; public static final String COMMANDLINE_EXTERNALDEPENDENCIES = "externalDependencies"; public static final String COMMANDLINE_GROUPID = "groupid"; public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)"); public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test", "azure-core-test", "azure-sdk-all", "azure-sdk-parent", "azure-client-sdk-parent"); public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)"); public static final String BASE_AZURE_GROUPID = "com.azure"; public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test"; public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf"; public static final HttpClient HTTP_CLIENT = HttpClient.newHttpClient(); public static final Pattern STRING_SPLIT_BY_DOT = Pattern.compile("[.]"); public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList( "junit-jupiter-api" )); public static final String POM_TYPE = "pom"; private static Logger logger = LoggerFactory.getLogger(Utils.class); public static List<BomDependency> getExternalDependenciesContent(List<Dependency> dependencies) { List<BomDependency> allResolvedDependencies = new ArrayList<>(); for (Dependency dependency : dependencies) { List<BomDependency> resolvedDependencies = getPomFileContent(dependency); if (resolvedDependencies != null) { allResolvedDependencies.addAll(resolvedDependencies); } } return allResolvedDependencies; } public static BomDependencyNoVersion toBomDependencyNoVersion(BomDependency bomDependency) { return new BomDependencyNoVersion(bomDependency.getGroupId(), bomDependency.getArtifactId()); } private static List<BomDependency> parsePomFileContent(InputStream responseStream) { 
MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } catch (IOException exception) { exception.printStackTrace(); } catch (XmlPullParserException e) { e.printStackTrace(); } return null; } private static String getPropertyName(String propertyValue) { if(propertyValue.startsWith("${")) { return propertyValue.substring(2, propertyValue.length() - 1); } return propertyValue; } }
#### TreeSet<BomDependency> externalDependencies = resolveExternalDependencies(); --- Is `TreeSet` being used here to maintain the set in a certain sort order? #Resolved
public void generate() { TreeSet<BomDependency> inputDependencies = scan(); TreeSet<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); TreeSet<BomDependency> outputDependencies = analyzer.analyze(); outputDependencies.retainAll(inputDependencies); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); outputDependencies = analyzer.analyze(); boolean validationPassed = analyzer.validate(); if(validationPassed) { rewriteBomFile(); writeBom(outputDependencies); } else { logger.info("Validation for the BOM failed. Exiting..."); } }
TreeSet<BomDependency> externalDependencies = resolveExternalDependencies();
public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. Exiting..."); } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private String externalDependenciesFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator() { } public void setInputFile(String inputFileName) { this.inputFileName = inputFileName; } public void setOutputFile(String outputFileName) { this.outputFileName = outputFileName; } public void setPomFile(String pomFileName) { this.pomFileName = pomFileName; } public void setExternalDependenciesFile(String externalDependenciesFileName) { this.externalDependenciesFileName = externalDependenciesFileName; } private TreeSet<BomDependency> scan() { TreeSet<BomDependency> inputDependencies = new TreeSet<>(new BomDependencyComparator()); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { if (line.startsWith("com.azure")) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (matcher.matches()) { if (matcher.groupCount() == 4) { String groupId = matcher.group(1); String artifactId = matcher.group(2); String version = matcher.group(3); Matcher nonGAMatcher = SDK_NON_GA_PATTERN.matcher(version); if (!nonGAMatcher.matches()) { BomDependency dependency = new BomDependency(groupId, artifactId, version); if (AZURE_CORE_GROUPID.equalsIgnoreCase(groupId)) { switch (artifactId) { case "azure-sdk-all": case "azure-sdk-parent": case "azure-client-sdk-parent": break; default: if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER) && !artifactId.equalsIgnoreCase(AZURE_CORE_TEST_LIBRARY))) { logger.info("Skipping dependency {}:{}", groupId, artifactId); continue; } inputDependencies.add(dependency); break; } } } } } } } } catch (IOException exception) { exception.printStackTrace(); } return inputDependencies; } private TreeSet<BomDependency> resolveExternalDependencies() { TreeSet<BomDependency> externalDependencies = new 
TreeSet<>(new BomDependencyComparator()); MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); for (Dependency externalDependency : externalBomDependencies) { externalDependencies.addAll(Utils.getPomFileContent(externalDependency)); } } catch (XmlPullParserException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException exception) { exception.printStackTrace(); } return externalDependencies; } private void rewriteBomFile() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.pomFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } private void writeBom(TreeSet<BomDependency> bomDependencies) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; 
}).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.outputFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator(String inputFileName, String outputFileName, String pomFileName) { this.inputFileName = inputFileName; this.outputFileName = outputFileName; this.pomFileName = pomFileName; } private List<BomDependency> scan() { List<BomDependency> inputDependencies = new ArrayList<>(); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { BomDependency dependency = scanDependency(line); if(dependency != null) { inputDependencies.add(dependency); } } } catch (IOException exception) { logger.error("Input file parsing failed. Exception{}", exception.toString()); } return inputDependencies; } private BomDependency scanDependency(String line) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (!matcher.matches()) { return null; } if (matcher.groupCount() != 3) { return null; } String artifactId = matcher.group(1); String version = matcher.group(2); if(version.contains("-")) { return null; } if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) { logger.trace("Skipping dependency {}:{}", BASE_AZURE_GROUPID, artifactId); return null; } return new BomDependency(BASE_AZURE_GROUPID, artifactId, version); } private Model readModel() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); return model; } catch (XmlPullParserException | IOException e) { logger.error("BOM reading failed with: {}", e.toString()); } return null; } private void writeModel(Model model) { String pomFileName = this.pomFileName; writeModel(pomFileName, model); } private void writeModel(String fileName, Model model) { MavenXpp3Writer writer = new MavenXpp3Writer(); try { writer.write(new FileWriter(fileName), model); } catch (IOException 
exception) { logger.error("BOM writing failed with: {}", exception.toString()); } } private List<BomDependency> resolveExternalDependencies() { List<BomDependency> externalDependencies = new ArrayList<>(); List<Dependency> externalBomDependencies = getExternalDependencies(); externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies)); return externalDependencies; } private List<Dependency> getExternalDependencies() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); } private void rewriteExistingBomFile() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(model); } private void writeBom(Collection<BomDependency> bomDependencies) { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; }).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(this.outputFileName, model); } }
Is there a way that `analyze` could run without requiring a `retainAll` call given that `inputDependencies` is a constructor parameter to `DependencyAnalyzer`? #Resolved
public void generate() { TreeSet<BomDependency> inputDependencies = scan(); TreeSet<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); TreeSet<BomDependency> outputDependencies = analyzer.analyze(); outputDependencies.retainAll(inputDependencies); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); outputDependencies = analyzer.analyze(); boolean validationPassed = analyzer.validate(); if(validationPassed) { rewriteBomFile(); writeBom(outputDependencies); } else { logger.info("Validation for the BOM failed. Exiting..."); } }
TreeSet<BomDependency> outputDependencies = analyzer.analyze();
public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. Exiting..."); } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private String externalDependenciesFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator() { } public void setInputFile(String inputFileName) { this.inputFileName = inputFileName; } public void setOutputFile(String outputFileName) { this.outputFileName = outputFileName; } public void setPomFile(String pomFileName) { this.pomFileName = pomFileName; } public void setExternalDependenciesFile(String externalDependenciesFileName) { this.externalDependenciesFileName = externalDependenciesFileName; } private TreeSet<BomDependency> scan() { TreeSet<BomDependency> inputDependencies = new TreeSet<>(new BomDependencyComparator()); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { if (line.startsWith("com.azure")) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (matcher.matches()) { if (matcher.groupCount() == 4) { String groupId = matcher.group(1); String artifactId = matcher.group(2); String version = matcher.group(3); Matcher nonGAMatcher = SDK_NON_GA_PATTERN.matcher(version); if (!nonGAMatcher.matches()) { BomDependency dependency = new BomDependency(groupId, artifactId, version); if (AZURE_CORE_GROUPID.equalsIgnoreCase(groupId)) { switch (artifactId) { case "azure-sdk-all": case "azure-sdk-parent": case "azure-client-sdk-parent": break; default: if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER) && !artifactId.equalsIgnoreCase(AZURE_CORE_TEST_LIBRARY))) { logger.info("Skipping dependency {}:{}", groupId, artifactId); continue; } inputDependencies.add(dependency); break; } } } } } } } } catch (IOException exception) { exception.printStackTrace(); } return inputDependencies; } private TreeSet<BomDependency> resolveExternalDependencies() { TreeSet<BomDependency> externalDependencies = new 
TreeSet<>(new BomDependencyComparator()); MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); for (Dependency externalDependency : externalBomDependencies) { externalDependencies.addAll(Utils.getPomFileContent(externalDependency)); } } catch (XmlPullParserException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException exception) { exception.printStackTrace(); } return externalDependencies; } private void rewriteBomFile() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.pomFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } private void writeBom(TreeSet<BomDependency> bomDependencies) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; 
}).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.outputFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } }
/**
 * Generates the Azure SDK BOM: scans an input dependency list, resolves external BOM
 * contents, and writes the resulting dependency-management section.
 */
class BomGenerator {
    private String outputFileName;
    private String inputFileName;
    private String pomFileName;

    private static Logger logger = LoggerFactory.getLogger(BomGenerator.class);

    BomGenerator(String inputFileName, String outputFileName, String pomFileName) {
        this.inputFileName = inputFileName;
        this.outputFileName = outputFileName;
        this.pomFileName = pomFileName;
    }

    /** Reads the input file line by line and collects the BOM-eligible SDK dependencies. */
    private List<BomDependency> scan() {
        List<BomDependency> inputDependencies = new ArrayList<>();
        try {
            for (String line : Files.readAllLines(Paths.get(inputFileName))) {
                BomDependency dependency = scanDependency(line);
                if (dependency != null) {
                    inputDependencies.add(dependency);
                }
            }
        } catch (IOException exception) {
            logger.error("Input file parsing failed. Exception{}", exception.toString());
        }
        return inputDependencies;
    }

    /**
     * Parses a single input line into a BOM dependency.
     * Returns null for lines that do not match the expected format, for non-GA versions
     * (containing '-'), and for excluded/test/perf artifacts.
     */
    private BomDependency scanDependency(String line) {
        Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line);
        if (!matcher.matches()) {
            return null;
        }
        if (matcher.groupCount() != 3) {
            return null;
        }
        String artifactId = matcher.group(1);
        String version = matcher.group(2);
        // A '-' in the version marks a pre-release (beta/preview); those never ship in the BOM.
        if (version.contains("-")) {
            return null;
        }
        if (EXCLUSION_LIST.contains(artifactId)
            || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER)
            || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) {
            logger.trace("Skipping dependency {}:{}", BASE_AZURE_GROUPID, artifactId);
            return null;
        }
        return new BomDependency(BASE_AZURE_GROUPID, artifactId, version);
    }

    /** Reads the BOM POM into a Maven model; returns null when the file cannot be parsed. */
    private Model readModel() {
        MavenXpp3Reader reader = new MavenXpp3Reader();
        // FIX: try-with-resources so the file handle is released even on parse failure
        // (previously the FileReader was never closed).
        try (FileReader fileReader = new FileReader(this.pomFileName)) {
            Model model = reader.read(fileReader);
            return model;
        } catch (XmlPullParserException | IOException e) {
            logger.error("BOM reading failed with: {}", e.toString());
        }
        return null;
    }

    private void writeModel(Model model) {
        String pomFileName = this.pomFileName;
        writeModel(pomFileName, model);
    }

    /** Serializes the model to the given file; closing the writer flushes buffered output. */
    private void writeModel(String fileName, Model model) {
        MavenXpp3Writer writer = new MavenXpp3Writer();
        // FIX: try-with-resources; the FileWriter was previously never closed, which could
        // lose buffered output on JVM exit.
        try (FileWriter fileWriter = new FileWriter(fileName)) {
            writer.write(fileWriter, model);
        } catch (IOException exception) {
            logger.error("BOM writing failed with: {}", exception.toString());
        }
    }

    /** Resolves the dependencies managed by the external BOMs referenced from the POM. */
    private List<BomDependency> resolveExternalDependencies() {
        List<BomDependency> externalDependencies = new ArrayList<>();
        List<Dependency> externalBomDependencies = getExternalDependencies();
        externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies));
        return externalDependencies;
    }

    /** Returns the POM-type (external BOM import) entries from the dependency management section. */
    private List<Dependency> getExternalDependencies() {
        Model model = readModel();
        // NOTE(review): readModel() returns null on parse failure; this dereference would then
        // NPE — confirm whether a hard failure is the intended behavior here.
        DependencyManagement management = model.getDependencyManagement();
        return management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList());
    }

    /** Rewrites the source BOM in place with its dependencies sorted canonically. */
    private void rewriteExistingBomFile() {
        Model model = readModel();
        DependencyManagement management = model.getDependencyManagement();
        List<Dependency> dependencies = management.getDependencies();
        dependencies.sort(new DependencyComparator());
        management.setDependencies(dependencies);
        writeModel(model);
    }

    /** Writes the generated BOM (eligible + external entries, sorted) to the output file. */
    private void writeBom(Collection<BomDependency> bomDependencies) {
        Model model = readModel();
        DependencyManagement management = model.getDependencyManagement();
        List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList());
        List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> {
            Dependency dependency = new Dependency();
            dependency.setGroupId(bomDependency.getGroupId());
            dependency.setArtifactId(bomDependency.getArtifactId());
            dependency.setVersion(bomDependency.getVersion());
            return dependency;
        }).collect(Collectors.toList());
        dependencies.addAll(externalBomDependencies);
        dependencies.sort(new DependencyComparator());
        management.setDependencies(dependencies);
        writeModel(this.outputFileName, model);
    }
}
#### .asSingleResolvedArtifact(); --- Does the ordering of these calls matter at all? #Resolved
/**
 * Resolves each input library without transitivity and records the library and its direct
 * dependencies (minus test-scoped and excluded artifacts) in the dependency tree.
 */
private void resolveTree() {
    for (MavenDependency gaLibrary : inputDependencies) {
        try {
            MavenResolvedArtifact mavenResolvedArtifact = getMavenResolver()
                .addDependency(gaLibrary)
                .resolve()
                .withoutTransitivity()
                .asSingleResolvedArtifact();
            BomDependency parentDependency = new BomDependency(mavenResolvedArtifact.getCoordinate());
            MavenArtifactInfo[] dependencies = mavenResolvedArtifact.getDependencies();
            addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree);
            for (MavenArtifactInfo dependency : dependencies) {
                // Test-scoped dependencies never ship with the BOM.
                if (dependency.getScope() == ScopeType.TEST) {
                    continue;
                }
                if (RESOLVED_EXCLUSION_LIST.contains(dependency.getCoordinate().getArtifactId())) {
                    continue;
                }
                BomDependency childDependency = new BomDependency(dependency.getCoordinate());
                addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree);
            }
        } catch (Exception ex) {
            // FIX: route resolution failures through the class logger instead of stdout so they
            // appear in the build log alongside the other diagnostics.
            logger.error("Failed to resolve dependency {}. Exception: {}", gaLibrary, ex.toString());
        }
    }
}
.asSingleResolvedArtifact();
/**
 * Records each input library and its direct (non-test, non-excluded) dependencies in the
 * dependency tree, keyed by artifact and version with the parent that pulled it in.
 */
private void resolveTree() {
    for (MavenDependency gaLibrary : inputDependencies) {
        try {
            BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion());
            addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree);
            List<BomDependency> dependencies = getDependencies(gaLibrary);
            for (BomDependency dependency : dependencies) {
                // Test-scoped dependencies never ship with the BOM.
                if (dependency.getScope() == ScopeType.TEST) {
                    continue;
                }
                if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) {
                    continue;
                }
                BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion());
                addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree);
            }
        } catch (Exception ex) {
            // FIX: route resolution failures through the class logger instead of stdout so they
            // appear in the build log alongside the other diagnostics.
            logger.error("Failed to resolve dependency {}. Exception: {}", gaLibrary, ex.toString());
        }
    }
}
// Analyzes the dependency graph of the input SDK libraries: resolves each library's direct
// dependencies, records every (artifact, version) -> parents edge, and splits artifacts into
// BOM-eligible and BOM-ineligible sets when version conflicts occur. TreeSets with explicit
// comparators are used so membership is comparator-based rather than equals/hashCode-based.
class DependencyAnalyzer { private TreeSet<BomDependency> inputDependencies = new TreeSet<>(new BomDependencyComparator()); private TreeSet<BomDependency> externalDependencies = new TreeSet<>(new BomDependencyComparator()); private TreeSet<BomDependency> bomEligibleDependencies = new TreeSet<>(new BomDependencyComparator()); private TreeSet<BomDependency> bomIneligibleDependencies = new TreeSet<>(new BomDependencyComparator()); private TreeMap<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new BomDependencyNonVersionComparator()); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(TreeSet<BomDependency> inputDependencies, TreeSet<BomDependency> externalDependencies){ if(inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if(externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public TreeSet<BomDependency> analyze() { resolveTree(); filterConflicts(); return this.bomEligibleDependencies; } public Boolean validate() { resolveTree(); return filterConflicts(); } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, TreeMap<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { if (!dependencyTree.containsKey(dependency)) { dependencyTree.put(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), new HashMap<>()); } HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.get(dependency); if(!versionToParents.containsKey(dependency.getVersion())) { versionToParents.put(dependency.getVersion(), new ArrayList<>()); } if(parentDependency != null) {
// Each (artifact, version) entry accumulates the parents that pull it in; when a version
// loses a conflict, makeDependencyInEligible walks those recorded parents recursively so
// every ancestor of a losing version is also excluded from the BOM.
// NOTE(review): validate() re-runs resolveTree() on an analyzer that may already have been
// analyzed — presumably the tree insertion is idempotent; confirm.
versionToParents.get(dependency.getVersion()).add(parentDependency); } } private void makeDependencyInEligible(BomDependency dependency) { if(nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.add(dependency); versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent)); } } private boolean resolveConflicts() { AtomicBoolean hasConflict = new AtomicBoolean(false); nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() > 1) { hasConflict.set(true); List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String latestVersion = versionList.get(versionList.size() - 1); logger.info("Multiple version of the dependency {} included", key); logger.info("\tPicking the latest version for BOM: {}", latestVersion); BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), latestVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (int index = 0; index < versionList.size() - 1; index++) { String version = versionList.get(index); makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version)); } } }); bomEligibleDependencies.removeAll(bomIneligibleDependencies); return hasConflict.get(); } private boolean filterConflicts() { boolean hasconflict = resolveConflicts(); nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new
// filterConflicts() adds every unconflicted artifact version to the eligible set and returns
// whether any conflict was seen (true == at least one artifact had multiple versions).
BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); return hasconflict; } }
/**
 * Analyzes the dependency graph of the input SDK libraries and computes the set of
 * dependencies eligible for the BOM. Versions pinned by azure-core's own dependency closure
 * win conflicts; otherwise the latest version wins. Conflict details are collected in
 * {@code errorInfo} for reporting.
 */
class DependencyAnalyzer {
    private Set<BomDependency> inputDependencies = new HashSet<>();
    private Set<BomDependency> externalDependencies = new HashSet<>();
    private Set<BomDependency> bomEligibleDependencies = new HashSet<>();
    private Set<BomDependency> bomIneligibleDependencies = new HashSet<>();
    // Artifacts whose versions are dictated by azure-core's dependency closure.
    private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>();
    // FIX: was a raw HashMap; parameterized to avoid unchecked warnings.
    private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap<>();
    // groupId+artifactId -> version -> all parents that pull in that version.
    private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() {
        @Override
        public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) {
            // FIX: previously used o1.getGroupId() on both sides of the comparison, which
            // violates the Comparator contract and can corrupt the TreeMap ordering.
            return (o1.getGroupId() + o1.getArtifactId()).compareTo(o2.getGroupId() + o2.getArtifactId());
        }
    });
    private static Logger logger = LoggerFactory.getLogger(BomGenerator.class);

    DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) {
        if (inputDependencies != null) {
            this.inputDependencies.addAll(inputDependencies);
        }
        if (externalDependencies != null) {
            this.externalDependencies.addAll(externalDependencies);
        }
    }

    public Collection<BomDependency> getBomEligibleDependencies() {
        return this.bomEligibleDependencies;
    }

    /** Analyzes the graph, reports dropped inputs, and keeps only eligible input dependencies. */
    public void reduce() {
        analyze();
        generateReport();
        this.bomEligibleDependencies.retainAll(this.inputDependencies);
    }

    /** Returns true when at least one artifact is included with more than one version. */
    public boolean validate() {
        analyze();
        return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1);
    }

    private void analyze() {
        pickCoreDependencyRoots();
        resolveTree();
        resolveConflicts();
        filterConflicts();
    }

    /** Logs every dropped input dependency together with the conflicts that caused the drop. */
    private void generateReport() {
        Set<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.contains(dependency)).collect(Collectors.toSet());
        if (droppedDependencies.size() == 0) {
            return;
        }
        if (errorInfo.size() > 0) {
            errorInfo.keySet().stream().forEach(key -> {
                if (droppedDependencies.contains(key)) {
                    var conflictingDependencies = errorInfo.get(key).getConflictingDependencies();
                    var expectedDependency = errorInfo.get(key).getExpectedDependency();
                    if (expectedDependency != null) {
                        // FIX: the message previously had one placeholder for two arguments,
                        // silently dropping the expected dependency from the log line.
                        logger.info("Dropped dependency {}. Expected dependency {}", key.toString(), expectedDependency);
                    }
                    conflictingDependencies.stream().forEach(conflictingDependency -> logger.info("\t\tIncludes dependency {}. Expected dependency {}", conflictingDependency.getActualDependency(), conflictingDependency.getExpectedDependency()));
                }
            });
        }
    }

    // NOTE(review): throws NoSuchElementException if azure-core is absent from the input —
    // presumably guaranteed by the input file; confirm.
    private BomDependency getAzureCoreDependencyFromInput() {
        return inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get();
    }

    /** Seeds eligibility with azure-core and its direct dependencies; their versions win conflicts. */
    private void pickCoreDependencyRoots() {
        BomDependency coreDependency = getAzureCoreDependencyFromInput();
        var coreDependencies = getDependencies(coreDependency);
        coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency);
        coreDependencies.forEach(dependency -> coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency));
        for (var dependency : coreDependencyNameToDependency.values()) {
            if (!externalDependencies.contains(dependency)) {
                bomEligibleDependencies.add(dependency);
            }
        }
    }

    /* Create a tree map of all the input binaries into the following map.
     * {groupId_artifactId}: {v1} : {all ancestors that include this binary.}
     *                     : {v2} : {all ancestors that include this binary.}
     *                     : {v3} : {all ancestors that include this binary.}
     */
    private static List<BomDependency> getDependencies(MavenDependency dependency) {
        try {
            MavenResolvedArtifact mavenResolvedArtifact = getMavenResolver()
                .addDependency(dependency)
                .resolve()
                .withoutTransitivity()
                .asSingleResolvedArtifact();
            return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList());
        } catch (Exception ex) {
            logger.error(ex.toString());
        }
        return new ArrayList<>();
    }

    private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() {
        return Maven.configureResolver().withMavenCentralRepo(true);
    }

    private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) {
        dependencyTree.computeIfAbsent(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), key -> new HashMap<>());
        var value = dependencyTree.get(dependency).computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>());
        if (parentDependency != null) {
            value.add(parentDependency);
        }
    }

    /** Records the version that should have been used for a dependency that is being dropped. */
    private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) {
        if (!errorInfo.containsKey(droppedDependency)) {
            errorInfo.put(droppedDependency, new BomDependencyErrorInfo(new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion)));
        }
    }

    private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) {
        updateErrorInfo(droppedDependency, expectedVersion);
        errorInfo.get(droppedDependency).addConflictingDependency(actualDependency, new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion));
    }

    /**
     * Marks the given version of a dependency ineligible and recursively marks every
     * recorded ancestor that pulled it in, propagating the originating conflict reason.
     */
    private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) {
        if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) {
            HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency);
            bomIneligibleDependencies.add(dependency);
            if (dependencyReason == null) {
                dependencyReason = dependency;
                updateErrorInfo(dependency, expectedVersion);
            } else {
                updateErrorInfo(dependency, dependencyReason, expectedVersion);
            }
            BomDependency finalDependencyReason = dependencyReason;
            versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion));
        }
    }

    /**
     * Picks the winning version for an artifact included with multiple versions: the
     * azure-core-pinned version when present, otherwise the latest, and marks all other
     * versions (and their ancestors) ineligible.
     */
    private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) {
        Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion);
        if (versionToDependency.size() > 1) {
            List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList());
            String eligibleVersion;
            logger.trace("Multiple version of the dependency {} included", dependencyNoVersion);
            if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) {
                eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion();
                logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion));
            } else {
                eligibleVersion = versionList.get(versionList.size() - 1);
                logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion));
            }
            BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(),
                dependencyNoVersion.getArtifactId(), eligibleVersion);
            if (!externalDependencies.contains(dependency)) {
                bomEligibleDependencies.add(dependency);
            }
            for (String version : versionList) {
                if (!version.equals(eligibleVersion)) {
                    makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null, eligibleVersion);
                }
            }
        }
    }

    private void resolveConflicts() {
        nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict);
        bomEligibleDependencies.removeAll(bomIneligibleDependencies);
    }

    /** Adds every unconflicted artifact version to the eligible set. */
    private void filterConflicts() {
        nameToVersionToChildrenDependencyTree.keySet().stream().forEach(key -> {
            HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key);
            if (versionToDependency.size() == 1) {
                BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get());
                if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) {
                    bomEligibleDependencies.add(dependency);
                }
            }
        });
    }
}
Could we use an external dependency for command-line argument parsing? I'd recommend using the one the perf test framework uses, given that we already have expertise with it.
/**
 * Entry point: builds a BomGenerator, populates it from the "-name=value" command-line
 * arguments, and runs BOM generation.
 */
public static void main(String[] args) {
    BomGenerator generator = new BomGenerator();
    parseCommandLine(args, generator);
    generator.generate();
}
parseCommandLine(args, generator);
/**
 * Entry point: parses the "-name=value" command-line arguments into a validated
 * BomGenerator and runs BOM generation.
 */
public static void main(String[] args) {
    BomGenerator generator = parseCommandLine(args);
    generator.generate();
}
/** Command-line front end for the BOM generator. */
class Main {
    /**
     * Parses "-name=value" arguments and forwards each recognized option to the generator.
     * Unrecognized option names are silently ignored.
     */
    private static void parseCommandLine(String[] args, BomGenerator generator) {
        for (String arg : args) {
            Matcher matcher = Utils.COMMANDLINE_REGEX.matcher(arg);
            if (matcher.matches()) {
                if (matcher.groupCount() == 2) {
                    String argName = matcher.group(1);
                    String argValue = matcher.group(2);
                    switch (argName.toLowerCase()) {
                        case COMMANDLINE_INPUTFILE:
                            generator.setInputFile(argValue);
                            break;
                        case COMMANDLINE_OUTPUTFILE:
                            generator.setOutputFile(argValue);
                            break;
                        case COMMANDLINE_POMFILE:
                            generator.setPomFile(argValue);
                            break;
                        case COMMANDLINE_EXTERNALDEPENDENCIES:
                            generator.setExternalDependenciesFile((argValue));
                            break;
                    }
                }
            }
        }
    }
}
/** Command-line front end for the BOM generator. */
class Main {
    /**
     * Parses "-name=value" arguments, validates that the required options are present,
     * and returns a configured BomGenerator.
     */
    private static BomGenerator parseCommandLine(String[] args) {
        String inputFile = null, outputFile = null, pomFile = null;
        for (String arg : args) {
            Matcher matcher = Utils.COMMANDLINE_REGEX.matcher(arg);
            if (matcher.matches()) {
                if (matcher.groupCount() == 2) {
                    String argName = matcher.group(1);
                    String argValue = matcher.group(2);
                    switch (argName.toLowerCase()) {
                        case COMMANDLINE_INPUTFILE:
                            inputFile = argValue;
                            break;
                        case COMMANDLINE_OUTPUTFILE:
                            outputFile = argValue;
                            break;
                        case COMMANDLINE_POMFILE:
                            pomFile = argValue;
                            break;
                    }
                }
            }
        }
        validateInputs(inputFile, outputFile, pomFile);
        return new BomGenerator(inputFile, outputFile, pomFile);
    }

    private static void validateInputs(String inputFile, String outputFile, String pomFile) {
        validateInput(inputFile, COMMANDLINE_INPUTFILE);
        validateInput(outputFile, COMMANDLINE_OUTPUTFILE);
        validateInput(pomFile, COMMANDLINE_POMFILE);
    }

    /**
     * Ensures a required argument value is present.
     * FIX: the parameters were previously declared as (argName, argValue) while every call
     * site passes (value, name); the null/empty check therefore tested the constant option
     * name — which is never null — so validation could never fail. The parameters are now
     * (argValue, argName) to match the call sites.
     *
     * @param argValue the value supplied on the command line (may be null/empty)
     * @param argName  the option name used in the error message
     * @throws NullPointerException when the value is missing or empty
     */
    private static void validateInput(String argValue, String argName) {
        if (argValue == null || argValue.isEmpty()) {
            throw new NullPointerException(String.format("%s can't be null", argName));
        }
    }
}
Given that we are already using Maven dependencies in this project, is there a library which helps with downloading a Maven POM file's contents and storing them in the user's .m2, and, if the POM file is already in the .m2, uses the cached copy? Basically, using the same logic that Maven commands use. #Pending
// Downloads an external BOM POM over plain HttpURLConnection and returns the dependencies of
// its <dependencyManagement> section, resolving ${property} version indirections.
// NOTE(review): the URL string literals below appear truncated to "https: in this copy of the
// source (unterminated literals) — the full repository URLs must be restored/confirmed
// against the original file before this compiles.
// NOTE(review): the catch block swallows all exceptions silently and the method then returns
// null; failures should at least be logged so download problems are diagnosable.
public static List<BomDependency> getPomFileContent(Dependency dependency) { HttpURLConnection connection = null; try { String[] groups = dependency.getGroupId().split("[.]"); URL url = null; if(groups.length == 2) { url = new URL("https: } else if (groups.length == 3) { url = new URL("https: } connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("accept", "application/xml"); connection.setConnectTimeout(5000); connection.setReadTimeout(5000); int responseCode = connection.getResponseCode(); if (HttpURLConnection.HTTP_OK == responseCode) { InputStream responseStream = connection.getInputStream(); MavenXpp3Reader reader = new MavenXpp3Reader(); Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } } catch (Exception exception) { } finally { if (connection != null) { connection.disconnect(); } } return null; }
String[] groups = dependency.getGroupId().split("[.]");
// Downloads an external BOM POM with java.net.http.HttpClient and delegates parsing to
// Utils.parsePomFileContent; returns null for non-200 responses. Group ids with other than
// two or three segments are rejected with UnsupportedOperationException.
// NOTE(review): the URL string literals below appear truncated to "https: in this copy of the
// source (unterminated literals) — the full repository URLs must be restored/confirmed
// against the original file before this compiles.
public static List<BomDependency> getPomFileContent(Dependency dependency) { String[] groups = STRING_SPLIT_BY_DOT.split(dependency.getGroupId()); String url = null; if(groups.length == 2) { url = "https: } else if (groups.length == 3) { url = "https: } else { throw new UnsupportedOperationException("Can't parse the external BOM file."); } HttpRequest request = HttpRequest.newBuilder() .uri(URI.create(url)) .GET() .header("accept", "application/xml") .timeout(Duration.ofMillis(5000)) .build(); return HTTP_CLIENT.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()) .thenApply(response -> { if(response.statusCode() == 200) { return Utils.parsePomFileContent(response.body()); } return null; }).join(); }
/** Shared constants and helpers for the BOM generator tooling. */
class Utils {
    // Command-line option names ("-name=value").
    public static final String COMMANDLINE_INPUTFILE = "inputfile";
    public static final String COMMANDLINE_OUTPUTFILE = "outputfile";
    public static final String COMMANDLINE_POMFILE = "pomfile";
    public static final String COMMANDLINE_EXTERNALDEPENDENCIES = "externalDependencies";
    public static final String COMMANDLINE_GROUPID = "groupid";
    public static final String COMMANDLINE_EXCLUSIONLIST = "exclusionList";
    public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)");
    // Artifacts never considered for the BOM.
    public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test");
    // Input line patterns — presumably "groupId:artifactId;version;version" style rows from
    // the version_client data file; confirm against the input file format.
    public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("(.+):(.+);(.+);(.+)");
    public static final Pattern EXTERNAL_DEPENDENCY_PATTERN = Pattern.compile("(.+):(.+);(.+)");
    // Matches versions carrying a suffix (e.g. "-beta.1"), i.e. non-GA releases.
    public static final Pattern SDK_NON_GA_PATTERN = Pattern.compile("(.+)-(.+)");
    public static final String AZURE_CORE_GROUPID = "com.azure";
    public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test";
    public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf";
    public static final String AZURE_CORE_TEST_LIBRARY = "azure-core-test";
    public static final String CONFLICTING_DEPENDENCIES = "conflict";
    public static final String BOM_ELIGIBLE = "bom";
    // Group ids whose versions are managed through external BOM imports rather than directly.
    public static final HashSet<String> EXTERNAL_BOM_DEPENDENCIES = new HashSet<String>(Arrays.asList(
        "io.projectreactor",
        "com.fasterxml.jackson",
        "io.netty",
        "io.projectreactor.netty"
    ));
    // Resolved dependencies to ignore while building the dependency tree.
    public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList(
        "junit-jupiter-api"
    ));
    public static final String POM_TYPE = "pom";

    /** Strips the ${...} wrapper from a property reference; returns the input unchanged otherwise. */
    private static String getPropertyName(String propertyValue) {
        if (propertyValue.startsWith("${")) {
            return propertyValue.substring(2, propertyValue.length() - 1);
        }
        return propertyValue;
    }
}
/** Shared constants and helpers for the BOM generator tooling. */
class Utils {
    // Command-line option names ("-name=value").
    public static final String COMMANDLINE_INPUTFILE = "inputfile";
    public static final String COMMANDLINE_OUTPUTFILE = "outputfile";
    public static final String COMMANDLINE_POMFILE = "pomfile";
    public static final String COMMANDLINE_EXTERNALDEPENDENCIES = "externalDependencies";
    public static final String COMMANDLINE_GROUPID = "groupid";
    public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)");
    // Artifacts never considered for the BOM.
    public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test", "azure-core-test", "azure-sdk-all", "azure-sdk-parent", "azure-client-sdk-parent");
    public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)");
    public static final String BASE_AZURE_GROUPID = "com.azure";
    public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test";
    public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf";
    public static final HttpClient HTTP_CLIENT = HttpClient.newHttpClient();
    public static final Pattern STRING_SPLIT_BY_DOT = Pattern.compile("[.]");
    // Resolved dependencies to ignore while building the dependency tree.
    public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList(
        "junit-jupiter-api"
    ));
    public static final String POM_TYPE = "pom";
    private static Logger logger = LoggerFactory.getLogger(Utils.class);

    /** Resolves the content of every external BOM and flattens it into a single list. */
    public static List<BomDependency> getExternalDependenciesContent(List<Dependency> dependencies) {
        List<BomDependency> allResolvedDependencies = new ArrayList<>();
        for (Dependency dependency : dependencies) {
            List<BomDependency> resolvedDependencies = getPomFileContent(dependency);
            if (resolvedDependencies != null) {
                allResolvedDependencies.addAll(resolvedDependencies);
            }
        }
        return allResolvedDependencies;
    }

    public static BomDependencyNoVersion toBomDependencyNoVersion(BomDependency bomDependency) {
        return new BomDependencyNoVersion(bomDependency.getGroupId(), bomDependency.getArtifactId());
    }

    /**
     * Parses a BOM POM stream and returns its managed dependencies with ${property} version
     * references resolved (following chained property indirections).
     * Returns null when the stream cannot be parsed.
     */
    private static List<BomDependency> parsePomFileContent(InputStream responseStream) {
        MavenXpp3Reader reader = new MavenXpp3Reader();
        try {
            Model model = reader.read(responseStream);
            DependencyManagement management = model.getDependencyManagement();
            return management.getDependencies().stream().map(dep -> {
                String version = getPropertyName(dep.getVersion());
                // Follow property chains like <version>${a}</version> -> a=${b} -> b=1.2.3.
                while (model.getProperties().getProperty(version) != null) {
                    version = getPropertyName(model.getProperties().getProperty(version));
                }
                if (version == null) {
                    version = dep.getVersion();
                }
                BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version);
                return bomDependency;
            }).collect(Collectors.toList());
        } catch (IOException | XmlPullParserException exception) {
            // FIX: previously printed the stack trace to stderr in two separate catch blocks;
            // consolidated into a multi-catch that routes through the class logger.
            logger.error("Failed to parse the external BOM content. Exception: {}", exception.toString());
        }
        return null;
    }

    /** Strips the ${...} wrapper from a property reference; returns the input unchanged otherwise. */
    private static String getPropertyName(String propertyValue) {
        if (propertyValue.startsWith("${")) {
            return propertyValue.substring(2, propertyValue.length() - 1);
        }
        return propertyValue;
    }
}
Mostly because, as far as I know, TreeSet is the only standard Java collection that lets you supply a custom Comparator for a set.
/**
 * Generates the BOM: scans the input list, resolves external BOM contents, reduces to the
 * set of conflict-free input dependencies, and writes the BOM only when a final validation
 * pass finds no remaining version conflicts.
 */
public void generate() {
    TreeSet<BomDependency> inputDependencies = scan();
    TreeSet<BomDependency> externalDependencies = resolveExternalDependencies();
    DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies);
    TreeSet<BomDependency> outputDependencies = analyzer.analyze();
    // Only ship artifacts that were explicitly present in the input list.
    outputDependencies.retainAll(inputDependencies);
    // Re-run the analysis on the reduced set to confirm it is internally conflict-free.
    analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies);
    outputDependencies = analyzer.analyze();
    // FIX: validate() returns true when version conflicts remain, i.e. validation FAILED.
    // The previous code treated that as success and wrote the BOM exactly when conflicts
    // existed, skipping the write on a clean result.
    boolean validationFailed = analyzer.validate();
    if (!validationFailed) {
        rewriteBomFile();
        writeBom(outputDependencies);
    } else {
        logger.info("Validation for the BOM failed. Exiting...");
    }
}
TreeSet<BomDependency> externalDependencies = resolveExternalDependencies();
/**
 * Generates the BOM end to end: scans the input list, resolves external BOM contents,
 * reduces to the conflict-free eligible set, re-validates that set, and writes the BOM
 * files only when no version conflicts remain.
 */
public void generate() {
    List<BomDependency> inputDependencies = scan();
    List<BomDependency> externalDependencies = resolveExternalDependencies();
    DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies);
    analyzer.reduce();
    Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies();
    // Second pass: re-analyze the reduced set to confirm it is internally conflict-free.
    analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies);
    // validate() returns true when version conflicts remain, i.e. validation failed.
    boolean validationFailed = analyzer.validate();
    outputDependencies = analyzer.getBomEligibleDependencies();
    if (!validationFailed) {
        rewriteExistingBomFile();
        writeBom(outputDependencies);
    } else {
        logger.trace("Validation for the BOM failed. Exiting...");
    }
}
// Original (pre-refactor) BOM generator. Configured via setters; scan() filters the raw input
// list inline: only "com.azure" lines, GA versions (no '-' suffix), and artifacts outside the
// exclusion/test/perf lists (azure-core-test is deliberately kept).
// NOTE(review): the catch blocks in this class call printStackTrace() although a logger field
// exists, and the FileReader/FileWriter instances are never closed (resource leak) — the
// refactored version of this class addresses both.
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private String externalDependenciesFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator() { } public void setInputFile(String inputFileName) { this.inputFileName = inputFileName; } public void setOutputFile(String outputFileName) { this.outputFileName = outputFileName; } public void setPomFile(String pomFileName) { this.pomFileName = pomFileName; } public void setExternalDependenciesFile(String externalDependenciesFileName) { this.externalDependenciesFileName = externalDependenciesFileName; } private TreeSet<BomDependency> scan() { TreeSet<BomDependency> inputDependencies = new TreeSet<>(new BomDependencyComparator()); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { if (line.startsWith("com.azure")) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (matcher.matches()) { if (matcher.groupCount() == 4) { String groupId = matcher.group(1); String artifactId = matcher.group(2); String version = matcher.group(3); Matcher nonGAMatcher = SDK_NON_GA_PATTERN.matcher(version); if (!nonGAMatcher.matches()) { BomDependency dependency = new BomDependency(groupId, artifactId, version); if (AZURE_CORE_GROUPID.equalsIgnoreCase(groupId)) { switch (artifactId) { case "azure-sdk-all": case "azure-sdk-parent": case "azure-client-sdk-parent": break; default: if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER) && !artifactId.equalsIgnoreCase(AZURE_CORE_TEST_LIBRARY))) { logger.info("Skipping dependency {}:{}", groupId, artifactId); continue; } inputDependencies.add(dependency); break; } } } } } } } } catch (IOException exception) { exception.printStackTrace(); } return inputDependencies; } private TreeSet<BomDependency> resolveExternalDependencies() { TreeSet<BomDependency> externalDependencies = new
// resolveExternalDependencies() downloads each external BOM's managed dependencies;
// rewriteBomFile() re-sorts the source BOM in place; writeBom() merges eligible and external
// entries, sorts them, and writes the output BOM file.
TreeSet<>(new BomDependencyComparator()); MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); for (Dependency externalDependency : externalBomDependencies) { externalDependencies.addAll(Utils.getPomFileContent(externalDependency)); } } catch (XmlPullParserException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException exception) { exception.printStackTrace(); } return externalDependencies; } private void rewriteBomFile() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.pomFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } private void writeBom(TreeSet<BomDependency> bomDependencies) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency;
}).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.outputFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator(String inputFileName, String outputFileName, String pomFileName) { this.inputFileName = inputFileName; this.outputFileName = outputFileName; this.pomFileName = pomFileName; } private List<BomDependency> scan() { List<BomDependency> inputDependencies = new ArrayList<>(); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { BomDependency dependency = scanDependency(line); if(dependency != null) { inputDependencies.add(dependency); } } } catch (IOException exception) { logger.error("Input file parsing failed. Exception{}", exception.toString()); } return inputDependencies; } private BomDependency scanDependency(String line) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (!matcher.matches()) { return null; } if (matcher.groupCount() != 3) { return null; } String artifactId = matcher.group(1); String version = matcher.group(2); if(version.contains("-")) { return null; } if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) { logger.trace("Skipping dependency {}:{}", BASE_AZURE_GROUPID, artifactId); return null; } return new BomDependency(BASE_AZURE_GROUPID, artifactId, version); } private Model readModel() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); return model; } catch (XmlPullParserException | IOException e) { logger.error("BOM reading failed with: {}", e.toString()); } return null; } private void writeModel(Model model) { String pomFileName = this.pomFileName; writeModel(pomFileName, model); } private void writeModel(String fileName, Model model) { MavenXpp3Writer writer = new MavenXpp3Writer(); try { writer.write(new FileWriter(fileName), model); } catch (IOException 
exception) { logger.error("BOM writing failed with: {}", exception.toString()); } } private List<BomDependency> resolveExternalDependencies() { List<BomDependency> externalDependencies = new ArrayList<>(); List<Dependency> externalBomDependencies = getExternalDependencies(); externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies)); return externalDependencies; } private List<Dependency> getExternalDependencies() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); } private void rewriteExistingBomFile() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(model); } private void writeBom(Collection<BomDependency> bomDependencies) { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; }).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(this.outputFileName, model); } }
Yeah, I think I can clean this up a little. The reason it does that today is that I re-run the analysis in 2 phases and the 2nd phase has some context. I definitely can pass the context in a different way!
public void generate() { TreeSet<BomDependency> inputDependencies = scan(); TreeSet<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); TreeSet<BomDependency> outputDependencies = analyzer.analyze(); outputDependencies.retainAll(inputDependencies); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); outputDependencies = analyzer.analyze(); boolean validationPassed = analyzer.validate(); if(validationPassed) { rewriteBomFile(); writeBom(outputDependencies); } else { logger.info("Validation for the BOM failed. Exiting..."); } }
TreeSet<BomDependency> outputDependencies = analyzer.analyze();
public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. Exiting..."); } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private String externalDependenciesFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator() { } public void setInputFile(String inputFileName) { this.inputFileName = inputFileName; } public void setOutputFile(String outputFileName) { this.outputFileName = outputFileName; } public void setPomFile(String pomFileName) { this.pomFileName = pomFileName; } public void setExternalDependenciesFile(String externalDependenciesFileName) { this.externalDependenciesFileName = externalDependenciesFileName; } private TreeSet<BomDependency> scan() { TreeSet<BomDependency> inputDependencies = new TreeSet<>(new BomDependencyComparator()); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { if (line.startsWith("com.azure")) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (matcher.matches()) { if (matcher.groupCount() == 4) { String groupId = matcher.group(1); String artifactId = matcher.group(2); String version = matcher.group(3); Matcher nonGAMatcher = SDK_NON_GA_PATTERN.matcher(version); if (!nonGAMatcher.matches()) { BomDependency dependency = new BomDependency(groupId, artifactId, version); if (AZURE_CORE_GROUPID.equalsIgnoreCase(groupId)) { switch (artifactId) { case "azure-sdk-all": case "azure-sdk-parent": case "azure-client-sdk-parent": break; default: if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER) && !artifactId.equalsIgnoreCase(AZURE_CORE_TEST_LIBRARY))) { logger.info("Skipping dependency {}:{}", groupId, artifactId); continue; } inputDependencies.add(dependency); break; } } } } } } } } catch (IOException exception) { exception.printStackTrace(); } return inputDependencies; } private TreeSet<BomDependency> resolveExternalDependencies() { TreeSet<BomDependency> externalDependencies = new 
TreeSet<>(new BomDependencyComparator()); MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); for (Dependency externalDependency : externalBomDependencies) { externalDependencies.addAll(Utils.getPomFileContent(externalDependency)); } } catch (XmlPullParserException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException exception) { exception.printStackTrace(); } return externalDependencies; } private void rewriteBomFile() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.pomFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } private void writeBom(TreeSet<BomDependency> bomDependencies) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; 
}).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.outputFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator(String inputFileName, String outputFileName, String pomFileName) { this.inputFileName = inputFileName; this.outputFileName = outputFileName; this.pomFileName = pomFileName; } private List<BomDependency> scan() { List<BomDependency> inputDependencies = new ArrayList<>(); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { BomDependency dependency = scanDependency(line); if(dependency != null) { inputDependencies.add(dependency); } } } catch (IOException exception) { logger.error("Input file parsing failed. Exception{}", exception.toString()); } return inputDependencies; } private BomDependency scanDependency(String line) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (!matcher.matches()) { return null; } if (matcher.groupCount() != 3) { return null; } String artifactId = matcher.group(1); String version = matcher.group(2); if(version.contains("-")) { return null; } if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) { logger.trace("Skipping dependency {}:{}", BASE_AZURE_GROUPID, artifactId); return null; } return new BomDependency(BASE_AZURE_GROUPID, artifactId, version); } private Model readModel() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); return model; } catch (XmlPullParserException | IOException e) { logger.error("BOM reading failed with: {}", e.toString()); } return null; } private void writeModel(Model model) { String pomFileName = this.pomFileName; writeModel(pomFileName, model); } private void writeModel(String fileName, Model model) { MavenXpp3Writer writer = new MavenXpp3Writer(); try { writer.write(new FileWriter(fileName), model); } catch (IOException 
exception) { logger.error("BOM writing failed with: {}", exception.toString()); } } private List<BomDependency> resolveExternalDependencies() { List<BomDependency> externalDependencies = new ArrayList<>(); List<Dependency> externalBomDependencies = getExternalDependencies(); externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies)); return externalDependencies; } private List<Dependency> getExternalDependencies() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); } private void rewriteExistingBomFile() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(model); } private void writeBom(Collection<BomDependency> bomDependencies) { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; }).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(this.outputFileName, model); } }
Added validation. Given that as of now, it is only for a couple of arguments and that this tool may not live in this repo for long - I will take the second comment as a TODO for later.
public static void main(String[] args) { BomGenerator generator = new BomGenerator(); parseCommandLine(args, generator); generator.generate(); }
parseCommandLine(args, generator);
public static void main(String[] args) { BomGenerator generator = parseCommandLine(args); generator.generate(); }
class Main { private static void parseCommandLine(String[] args, BomGenerator generator) { for (String arg : args) { Matcher matcher = Utils.COMMANDLINE_REGEX.matcher(arg); if (matcher.matches()) { if (matcher.groupCount() == 2) { String argName = matcher.group(1); String argValue = matcher.group(2); switch (argName.toLowerCase()) { case COMMANDLINE_INPUTFILE: generator.setInputFile(argValue); break; case COMMANDLINE_OUTPUTFILE: generator.setOutputFile(argValue); break; case COMMANDLINE_POMFILE: generator.setPomFile(argValue); break; case COMMANDLINE_EXTERNALDEPENDENCIES: generator.setExternalDependenciesFile((argValue)); break; } } } } } }
class Main { private static BomGenerator parseCommandLine(String[] args) { String inputFile = null, outputFile = null, pomFile = null; for (String arg : args) { Matcher matcher = Utils.COMMANDLINE_REGEX.matcher(arg); if (matcher.matches()) { if (matcher.groupCount() == 2) { String argName = matcher.group(1); String argValue = matcher.group(2); switch (argName.toLowerCase()) { case COMMANDLINE_INPUTFILE: inputFile = argValue; break; case COMMANDLINE_OUTPUTFILE: outputFile = argValue; break; case COMMANDLINE_POMFILE: pomFile = argValue; break; } } } } validateInputs(inputFile, outputFile, pomFile); return new BomGenerator(inputFile, outputFile, pomFile); } private static void validateInputs(String inputFile, String outputFile, String pomFile) { validateInput(inputFile, COMMANDLINE_INPUTFILE); validateInput(outputFile, COMMANDLINE_OUTPUTFILE); validateInput(pomFile, COMMANDLINE_POMFILE); } private static void validateInput(String argName, String argValue) { if(argValue == null || argValue.isEmpty()) { throw new NullPointerException(String.format("%s can't be null", argName)); } } }
Yeah this is just a convenience added to make the life easier for the reviewer of the BOM for the first time the tool is run. The tool writes the BOM in a particular order, so the first time the tool is run I rewrite the existing POM to also have that order so the diff is easier - I will be removing this code in the later iteration (or it will automatically become no-op after the fact).
public void generate() { TreeSet<BomDependency> inputDependencies = scan(); TreeSet<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); TreeSet<BomDependency> outputDependencies = analyzer.analyze(); outputDependencies.retainAll(inputDependencies); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); outputDependencies = analyzer.analyze(); boolean validationPassed = analyzer.validate(); if(validationPassed) { rewriteBomFile(); writeBom(outputDependencies); } else { logger.info("Validation for the BOM failed. Exiting..."); } }
TreeSet<BomDependency> inputDependencies = scan();
public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. Exiting..."); } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private String externalDependenciesFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator() { } public void setInputFile(String inputFileName) { this.inputFileName = inputFileName; } public void setOutputFile(String outputFileName) { this.outputFileName = outputFileName; } public void setPomFile(String pomFileName) { this.pomFileName = pomFileName; } public void setExternalDependenciesFile(String externalDependenciesFileName) { this.externalDependenciesFileName = externalDependenciesFileName; } private TreeSet<BomDependency> scan() { TreeSet<BomDependency> inputDependencies = new TreeSet<>(new BomDependencyComparator()); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { if (line.startsWith("com.azure")) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (matcher.matches()) { if (matcher.groupCount() == 4) { String groupId = matcher.group(1); String artifactId = matcher.group(2); String version = matcher.group(3); Matcher nonGAMatcher = SDK_NON_GA_PATTERN.matcher(version); if (!nonGAMatcher.matches()) { BomDependency dependency = new BomDependency(groupId, artifactId, version); if (AZURE_CORE_GROUPID.equalsIgnoreCase(groupId)) { switch (artifactId) { case "azure-sdk-all": case "azure-sdk-parent": case "azure-client-sdk-parent": break; default: if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER) && !artifactId.equalsIgnoreCase(AZURE_CORE_TEST_LIBRARY))) { logger.info("Skipping dependency {}:{}", groupId, artifactId); continue; } inputDependencies.add(dependency); break; } } } } } } } } catch (IOException exception) { exception.printStackTrace(); } return inputDependencies; } private TreeSet<BomDependency> resolveExternalDependencies() { TreeSet<BomDependency> externalDependencies = new 
TreeSet<>(new BomDependencyComparator()); MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); for (Dependency externalDependency : externalBomDependencies) { externalDependencies.addAll(Utils.getPomFileContent(externalDependency)); } } catch (XmlPullParserException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException exception) { exception.printStackTrace(); } return externalDependencies; } private void rewriteBomFile() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.pomFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } private void writeBom(TreeSet<BomDependency> bomDependencies) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; 
}).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.outputFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator(String inputFileName, String outputFileName, String pomFileName) { this.inputFileName = inputFileName; this.outputFileName = outputFileName; this.pomFileName = pomFileName; } private List<BomDependency> scan() { List<BomDependency> inputDependencies = new ArrayList<>(); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { BomDependency dependency = scanDependency(line); if(dependency != null) { inputDependencies.add(dependency); } } } catch (IOException exception) { logger.error("Input file parsing failed. Exception{}", exception.toString()); } return inputDependencies; } private BomDependency scanDependency(String line) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (!matcher.matches()) { return null; } if (matcher.groupCount() != 3) { return null; } String artifactId = matcher.group(1); String version = matcher.group(2); if(version.contains("-")) { return null; } if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) { logger.trace("Skipping dependency {}:{}", BASE_AZURE_GROUPID, artifactId); return null; } return new BomDependency(BASE_AZURE_GROUPID, artifactId, version); } private Model readModel() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); return model; } catch (XmlPullParserException | IOException e) { logger.error("BOM reading failed with: {}", e.toString()); } return null; } private void writeModel(Model model) { String pomFileName = this.pomFileName; writeModel(pomFileName, model); } private void writeModel(String fileName, Model model) { MavenXpp3Writer writer = new MavenXpp3Writer(); try { writer.write(new FileWriter(fileName), model); } catch (IOException 
exception) { logger.error("BOM writing failed with: {}", exception.toString()); } } private List<BomDependency> resolveExternalDependencies() { List<BomDependency> externalDependencies = new ArrayList<>(); List<Dependency> externalBomDependencies = getExternalDependencies(); externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies)); return externalDependencies; } private List<Dependency> getExternalDependencies() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); } private void rewriteExistingBomFile() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(model); } private void writeBom(Collection<BomDependency> bomDependencies) { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; }).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(this.outputFileName, model); } }
Added a comment for it.
public void generate() { TreeSet<BomDependency> inputDependencies = scan(); TreeSet<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); TreeSet<BomDependency> outputDependencies = analyzer.analyze(); outputDependencies.retainAll(inputDependencies); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); outputDependencies = analyzer.analyze(); boolean validationPassed = analyzer.validate(); if(validationPassed) { rewriteBomFile(); writeBom(outputDependencies); } else { logger.info("Validation for the BOM failed. Exiting..."); } }
TreeSet<BomDependency> inputDependencies = scan();
public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. Exiting..."); } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private String externalDependenciesFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator() { } public void setInputFile(String inputFileName) { this.inputFileName = inputFileName; } public void setOutputFile(String outputFileName) { this.outputFileName = outputFileName; } public void setPomFile(String pomFileName) { this.pomFileName = pomFileName; } public void setExternalDependenciesFile(String externalDependenciesFileName) { this.externalDependenciesFileName = externalDependenciesFileName; } private TreeSet<BomDependency> scan() { TreeSet<BomDependency> inputDependencies = new TreeSet<>(new BomDependencyComparator()); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { if (line.startsWith("com.azure")) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (matcher.matches()) { if (matcher.groupCount() == 4) { String groupId = matcher.group(1); String artifactId = matcher.group(2); String version = matcher.group(3); Matcher nonGAMatcher = SDK_NON_GA_PATTERN.matcher(version); if (!nonGAMatcher.matches()) { BomDependency dependency = new BomDependency(groupId, artifactId, version); if (AZURE_CORE_GROUPID.equalsIgnoreCase(groupId)) { switch (artifactId) { case "azure-sdk-all": case "azure-sdk-parent": case "azure-client-sdk-parent": break; default: if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER) && !artifactId.equalsIgnoreCase(AZURE_CORE_TEST_LIBRARY))) { logger.info("Skipping dependency {}:{}", groupId, artifactId); continue; } inputDependencies.add(dependency); break; } } } } } } } } catch (IOException exception) { exception.printStackTrace(); } return inputDependencies; } private TreeSet<BomDependency> resolveExternalDependencies() { TreeSet<BomDependency> externalDependencies = new 
TreeSet<>(new BomDependencyComparator()); MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); for (Dependency externalDependency : externalBomDependencies) { externalDependencies.addAll(Utils.getPomFileContent(externalDependency)); } } catch (XmlPullParserException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException exception) { exception.printStackTrace(); } return externalDependencies; } private void rewriteBomFile() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.pomFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } private void writeBom(TreeSet<BomDependency> bomDependencies) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; 
}).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.outputFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } }
/**
 * Generates the Azure SDK BOM: scans released-library coordinates from an input
 * version file, merges them with the externally managed (pom-type) BOM imports
 * declared in the base POM, and writes the combined dependencyManagement out.
 *
 * <p>Relies on shared constants (SDK_DEPENDENCY_PATTERN, EXCLUSION_LIST,
 * BASE_AZURE_GROUPID, POM_TYPE, ...) — presumably statically imported from the
 * Utils class elsewhere in this file.
 */
class BomGenerator {
    private final String outputFileName;
    private final String inputFileName;
    private final String pomFileName;

    private static Logger logger = LoggerFactory.getLogger(BomGenerator.class);

    /**
     * @param inputFileName  path of the version-list file to scan.
     * @param outputFileName path the generated BOM is written to.
     * @param pomFileName    path of the base BOM POM that seeds the model.
     */
    BomGenerator(String inputFileName, String outputFileName, String pomFileName) {
        this.inputFileName = inputFileName;
        this.outputFileName = outputFileName;
        this.pomFileName = pomFileName;
    }

    /** Reads every line of the input file and collects the dependencies that qualify. */
    private List<BomDependency> scan() {
        List<BomDependency> inputDependencies = new ArrayList<>();
        try {
            for (String line : Files.readAllLines(Paths.get(inputFileName))) {
                BomDependency dependency = scanDependency(line);
                if (dependency != null) {
                    inputDependencies.add(dependency);
                }
            }
        } catch (IOException exception) {
            logger.error("Input file parsing failed. Exception{}", exception.toString());
        }
        return inputDependencies;
    }

    /**
     * Parses one "com.azure:artifact;version;version" line (see SDK_DEPENDENCY_PATTERN).
     *
     * @return the parsed dependency, or {@code null} when the line does not match,
     *     the version is a pre-release (contains '-'), or the artifact is excluded.
     */
    private BomDependency scanDependency(String line) {
        Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line);
        if (!matcher.matches() || matcher.groupCount() != 3) {
            return null;
        }

        String artifactId = matcher.group(1);
        String version = matcher.group(2);

        // Versions with a '-' (e.g. 1.0.0-beta.1) are pre-release; never promoted to the BOM.
        if (version.contains("-")) {
            return null;
        }

        if (EXCLUSION_LIST.contains(artifactId)
            || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER)
            || artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER)) {
            logger.trace("Skipping dependency {}:{}", BASE_AZURE_GROUPID, artifactId);
            return null;
        }

        return new BomDependency(BASE_AZURE_GROUPID, artifactId, version);
    }

    /**
     * Reads the base POM into a Maven model.
     *
     * @return the parsed model, or {@code null} when the POM cannot be read/parsed
     *     (the failure is logged).
     */
    private Model readModel() {
        MavenXpp3Reader reader = new MavenXpp3Reader();
        // try-with-resources: the previous version leaked the FileReader on every call.
        try (FileReader fileReader = new FileReader(this.pomFileName)) {
            return reader.read(fileReader);
        } catch (XmlPullParserException | IOException e) {
            logger.error("BOM reading failed with: {}", e.toString());
        }
        return null;
    }

    /** Writes the model back over the base POM file. */
    private void writeModel(Model model) {
        writeModel(this.pomFileName, model);
    }

    /** Serializes the model to {@code fileName}; failures are logged, not thrown. */
    private void writeModel(String fileName, Model model) {
        MavenXpp3Writer writer = new MavenXpp3Writer();
        // try-with-resources: the previous version leaked the FileWriter and could
        // lose buffered output on error.
        try (FileWriter fileWriter = new FileWriter(fileName)) {
            writer.write(fileWriter, model);
        } catch (IOException exception) {
            logger.error("BOM writing failed with: {}", exception.toString());
        }
    }

    /** Resolves the full dependency content of all external (imported) BOMs. */
    private List<BomDependency> resolveExternalDependencies() {
        List<BomDependency> externalDependencies = new ArrayList<>();
        List<Dependency> externalBomDependencies = getExternalDependencies();
        externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies));
        return externalDependencies;
    }

    /**
     * @return the pom-type (imported BOM) entries of the base POM's
     *     dependencyManagement; empty when the POM could not be read.
     */
    private List<Dependency> getExternalDependencies() {
        Model model = readModel();
        if (model == null) {
            // readModel already logged the failure; degrade to "no external BOMs"
            // instead of throwing an NPE here.
            return new ArrayList<>();
        }
        DependencyManagement management = model.getDependencyManagement();
        return management.getDependencies().stream()
            .filter(dependency -> dependency.getType().equals(POM_TYPE))
            .collect(Collectors.toList());
    }

    /** Rewrites the existing BOM file in place with its dependencies sorted. */
    private void rewriteExistingBomFile() {
        Model model = readModel();
        if (model == null) {
            return; // failure already logged by readModel.
        }
        DependencyManagement management = model.getDependencyManagement();
        List<Dependency> dependencies = management.getDependencies();
        dependencies.sort(new DependencyComparator());
        management.setDependencies(dependencies);
        writeModel(model);
    }

    /**
     * Writes the generated BOM: the analyzed dependencies plus the external BOM
     * imports from the base POM, sorted, to the output file.
     */
    private void writeBom(Collection<BomDependency> bomDependencies) {
        Model model = readModel();
        if (model == null) {
            return; // failure already logged by readModel.
        }
        DependencyManagement management = model.getDependencyManagement();
        List<Dependency> externalBomDependencies = management.getDependencies().stream()
            .filter(dependency -> dependency.getType().equals(POM_TYPE))
            .collect(Collectors.toList());

        List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> {
            Dependency dependency = new Dependency();
            dependency.setGroupId(bomDependency.getGroupId());
            dependency.setArtifactId(bomDependency.getArtifactId());
            dependency.setVersion(bomDependency.getVersion());
            return dependency;
        }).collect(Collectors.toList());

        dependencies.addAll(externalBomDependencies);
        dependencies.sort(new DependencyComparator());
        management.setDependencies(dependencies);
        writeModel(this.outputFileName, model);
    }
}
Could this be made into a singleton? #Resolved
/**
 * Resolves the contents of every external BOM dependency into one flat list.
 *
 * @param dependencies the pom-type dependency entries to resolve.
 * @return all dependencies declared by the external BOMs; entries whose
 *     resolution failed (null result) are silently skipped.
 */
public static List<BomDependency> getExternalDependenciesContent(List<Dependency> dependencies) {
    // One client instance is shared across all lookups made by this call.
    HttpClient httpClient = HttpClient.newHttpClient();
    List<BomDependency> resolved = new ArrayList<>();
    dependencies.forEach(dependency -> {
        List<BomDependency> contents = getPomFileContent(httpClient, dependency);
        if (contents != null) {
            resolved.addAll(contents);
        }
    });
    return resolved;
}
// NOTE(review): isolated snippet — this creates a fresh HttpClient on every
// call; elsewhere in this file the client is hoisted into a shared static
// HTTP_CLIENT constant.
HttpClient httpClient = HttpClient.newHttpClient();
/**
 * Flattens the resolved contents of every external BOM dependency.
 *
 * @param dependencies the pom-type dependency entries to resolve.
 * @return every dependency declared by the external BOMs; failed resolutions
 *     (null results) are skipped.
 */
public static List<BomDependency> getExternalDependenciesContent(List<Dependency> dependencies) {
    List<BomDependency> flattened = new ArrayList<>();
    for (Dependency external : dependencies) {
        List<BomDependency> contents = getPomFileContent(external);
        if (contents == null) {
            continue;
        }
        flattened.addAll(contents);
    }
    return flattened;
}
/*
 * Shared constants plus helpers that download and flatten an external BOM's
 * dependencyManagement section.
 *
 * getPomFileContent(client, dependency): builds a repository URL from the
 * dependency's groupId segments (only 2- and 3-segment groupIds are handled;
 * any other shape leaves url null), issues an async GET with a 5s timeout and
 * blocks on join() for the parsed result; non-200 responses yield null.
 *
 * parsePomFileContent: maps each managed dependency, chasing ${property}
 * version indirections through the model's properties until a literal value
 * is found; returns null (after printing the stack trace) on read/parse error.
 *
 * NOTE(review): the "https: string literals below are truncated in this
 * excerpt (URL text and closing quote missing) — this block cannot compile
 * as-is; restore the full repository URLs before use.
 * NOTE(review): getPropertyName will NPE if a managed dependency has no
 * <version> (dep.getVersion() null) — the null check happens only afterwards.
 */
class Utils { public static final String COMMANDLINE_INPUTFILE = "inputfile"; public static final String COMMANDLINE_OUTPUTFILE = "outputfile"; public static final String COMMANDLINE_POMFILE = "pomfile"; public static final String COMMANDLINE_EXTERNALDEPENDENCIES = "externalDependencies"; public static final String COMMANDLINE_GROUPID = "groupid"; public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)"); public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test", "azure-core-test", "azure-sdk-all", "azure-sdk-parent", "azure-client-sdk-parent"); public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)"); public static final String AZURE_CORE_GROUPID = "com.azure"; public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test"; public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf"; public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList( "junit-jupiter-api" )); public static final String POM_TYPE = "pom"; public static List<BomDependency> getPomFileContent(HttpClient client, Dependency dependency) { String[] groups = dependency.getGroupId().split("[.]"); String url = null; if(groups.length == 2) { url = "https: } else if (groups.length == 3) { url = "https: } HttpRequest request = HttpRequest.newBuilder() .uri(URI.create(url)) .GET() .header("accept", "application/xml") .timeout(Duration.ofMillis(5000)) .build(); return client.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()) .thenApply(response -> { if(response.statusCode() == 200) { return Utils.parsePomFileContent(response.body()); } return null; }).join(); } private static List<BomDependency> parsePomFileContent(InputStream responseStream) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return
management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } catch (IOException exception) { exception.printStackTrace(); } catch (XmlPullParserException e) { e.printStackTrace(); } return null; } private static String getPropertyName(String propertyValue) { if(propertyValue.startsWith("${")) { return propertyValue.substring(2, propertyValue.length() - 1); } return propertyValue; } }
/*
 * Revised Utils: shares a single static HTTP_CLIENT, precompiles the
 * groupId-splitting pattern (STRING_SPLIT_BY_DOT), and throws
 * UnsupportedOperationException for groupIds that are not 2 or 3 segments
 * (instead of leaving url null). getPomFileContent still blocks on join();
 * non-200 responses yield null.
 *
 * NOTE(review): the "https: string literals are truncated in this excerpt
 * (URL text and closing quote missing), so this block does not compile as-is.
 * NOTE(review): parsePomFileContent prints stack traces instead of using the
 * class logger declared just above it, and getPropertyName still NPEs on a
 * null version — confirm intended behavior.
 */
class Utils { public static final String COMMANDLINE_INPUTFILE = "inputfile"; public static final String COMMANDLINE_OUTPUTFILE = "outputfile"; public static final String COMMANDLINE_POMFILE = "pomfile"; public static final String COMMANDLINE_EXTERNALDEPENDENCIES = "externalDependencies"; public static final String COMMANDLINE_GROUPID = "groupid"; public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)"); public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test", "azure-core-test", "azure-sdk-all", "azure-sdk-parent", "azure-client-sdk-parent"); public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)"); public static final String BASE_AZURE_GROUPID = "com.azure"; public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test"; public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf"; public static final HttpClient HTTP_CLIENT = HttpClient.newHttpClient(); public static final Pattern STRING_SPLIT_BY_DOT = Pattern.compile("[.]"); public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList( "junit-jupiter-api" )); public static final String POM_TYPE = "pom"; private static Logger logger = LoggerFactory.getLogger(Utils.class); public static List<BomDependency> getPomFileContent(Dependency dependency) { String[] groups = STRING_SPLIT_BY_DOT.split(dependency.getGroupId()); String url = null; if(groups.length == 2) { url = "https: } else if (groups.length == 3) { url = "https: } else { throw new UnsupportedOperationException("Can't parse the external BOM file."); } HttpRequest request = HttpRequest.newBuilder() .uri(URI.create(url)) .GET() .header("accept", "application/xml") .timeout(Duration.ofMillis(5000)) .build(); return HTTP_CLIENT.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()) .thenApply(response -> { if(response.statusCode() == 200) { return Utils.parsePomFileContent(response.body());
} return null; }).join(); } public static BomDependencyNoVersion toBomDependencyNoVersion(BomDependency bomDependency) { return new BomDependencyNoVersion(bomDependency.getGroupId(), bomDependency.getArtifactId()); } private static List<BomDependency> parsePomFileContent(InputStream responseStream) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } catch (IOException exception) { exception.printStackTrace(); } catch (XmlPullParserException e) { e.printStackTrace(); } return null; } private static String getPropertyName(String propertyValue) { if(propertyValue.startsWith("${")) { return propertyValue.substring(2, propertyValue.length() - 1); } return propertyValue; } }
I actually searched for that and spent a good amount of time checking whether ShrinkWrap or any other library can do it, but couldn't find anything. In fact, this has been requested of ShrinkWrap since 2014 but hasn't seen any traction.
/*
 * HttpURLConnection-based variant: fetches the external BOM POM (GET,
 * application/xml, 5s connect/read timeouts), parses its
 * dependencyManagement, and resolves ${property} version indirections.
 * Returns null on any non-200 response or failure.
 *
 * NOTE(review): the "https: string literals are truncated in this excerpt
 * (URL text and closing quote missing), so this does not compile as-is.
 * NOTE(review): `catch (Exception exception) { }` swallows every failure
 * silently — callers cannot distinguish "no dependencies" from "lookup
 * broke"; at minimum log the exception. The connection is disconnected in
 * the finally block, but the InputStream is never closed explicitly.
 */
public static List<BomDependency> getPomFileContent(Dependency dependency) { HttpURLConnection connection = null; try { String[] groups = dependency.getGroupId().split("[.]"); URL url = null; if(groups.length == 2) { url = new URL("https: } else if (groups.length == 3) { url = new URL("https: } connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("accept", "application/xml"); connection.setConnectTimeout(5000); connection.setReadTimeout(5000); int responseCode = connection.getResponseCode(); if (HttpURLConnection.HTTP_OK == responseCode) { InputStream responseStream = connection.getInputStream(); MavenXpp3Reader reader = new MavenXpp3Reader(); Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } } catch (Exception exception) { } finally { if (connection != null) { connection.disconnect(); } } return null; }
// NOTE(review): quoted snippet — String.split recompiles the "[.]" regex on
// every invocation; a precompiled static Pattern avoids the repeated work.
String[] groups = dependency.getGroupId().split("[.]");
/*
 * java.net.http variant using the shared static HTTP_CLIENT: builds the
 * repository URL from the groupId's dot-separated segments (2 or 3 segments
 * supported; anything else throws UnsupportedOperationException), issues an
 * async GET with a 5s timeout, then blocks on join(). Non-200 responses
 * resolve to null.
 *
 * NOTE(review): the "https: string literals are truncated in this excerpt
 * (URL text and closing quote missing), so this does not compile as-is.
 */
public static List<BomDependency> getPomFileContent(Dependency dependency) { String[] groups = STRING_SPLIT_BY_DOT.split(dependency.getGroupId()); String url = null; if(groups.length == 2) { url = "https: } else if (groups.length == 3) { url = "https: } else { throw new UnsupportedOperationException("Can't parse the external BOM file."); } HttpRequest request = HttpRequest.newBuilder() .uri(URI.create(url)) .GET() .header("accept", "application/xml") .timeout(Duration.ofMillis(5000)) .build(); return HTTP_CLIENT.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()) .thenApply(response -> { if(response.statusCode() == 200) { return Utils.parsePomFileContent(response.body()); } return null; }).join(); }
/**
 * Shared constants for the BOM generator command line tooling: command-line
 * option names, the artifact exclusion lists, and the regex patterns used to
 * parse version-list lines.
 */
class Utils {
    public static final String COMMANDLINE_INPUTFILE = "inputfile";
    public static final String COMMANDLINE_OUTPUTFILE = "outputfile";
    public static final String COMMANDLINE_POMFILE = "pomfile";
    public static final String COMMANDLINE_EXTERNALDEPENDENCIES = "externalDependencies";
    public static final String COMMANDLINE_GROUPID = "groupid";
    public static final String COMMANDLINE_EXCLUSIONLIST = "exclusionList";

    /** Command-line arguments are expected in "-name=value" form. */
    public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)");

    /** Artifacts never promoted to the BOM. */
    public static final List<String> EXCLUSION_LIST =
        Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test");

    /** "group:artifact;version;version" lines in the SDK version file. */
    public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("(.+):(.+);(.+);(.+)");
    /** "group:artifact;version" lines for external dependencies. */
    public static final Pattern EXTERNAL_DEPENDENCY_PATTERN = Pattern.compile("(.+):(.+);(.+)");
    /** Versions with a suffix (e.g. 1.0.0-beta.1) are not GA. */
    public static final Pattern SDK_NON_GA_PATTERN = Pattern.compile("(.+)-(.+)");

    public static final String AZURE_CORE_GROUPID = "com.azure";
    public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test";
    public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf";
    public static final String AZURE_CORE_TEST_LIBRARY = "azure-core-test";
    public static final String CONFLICTING_DEPENDENCIES = "conflict";
    public static final String BOM_ELIGIBLE = "bom";

    /** Group prefixes whose versions are managed by external BOMs. */
    public static final HashSet<String> EXTERNAL_BOM_DEPENDENCIES = new HashSet<String>(Arrays.asList(
        "io.projectreactor",
        "com.fasterxml.jackson",
        "io.netty",
        "io.projectreactor.netty"
    ));

    /** Artifacts skipped when walking resolved dependency trees. */
    public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList(
        "junit-jupiter-api"
    ));

    public static final String POM_TYPE = "pom";

    /**
     * Strips a Maven "${name}" property wrapper, returning the property name;
     * any other value (including {@code null}) is returned unchanged.
     *
     * <p>The null guard was added because callers feed in
     * {@code dep.getVersion()}, which may be null for managed dependencies
     * without an explicit version; previously that caused an NPE here.
     */
    private static String getPropertyName(String propertyValue) {
        if (propertyValue == null) {
            return null;
        }
        if (propertyValue.startsWith("${")) {
            return propertyValue.substring(2, propertyValue.length() - 1);
        }
        return propertyValue;
    }
}
/*
 * Utils variant exposing getExternalDependenciesContent, which flattens the
 * resolved contents of each external BOM (skipping null results) by calling
 * getPomFileContent(dependency).
 *
 * NOTE(review): getPomFileContent is referenced but not defined in this
 * excerpt — presumably it lives in the full version of this class; confirm.
 * NOTE(review): parsePomFileContent prints stack traces instead of using the
 * declared logger, and getPropertyName NPEs if dep.getVersion() is null (the
 * null check occurs only after the call).
 */
class Utils { public static final String COMMANDLINE_INPUTFILE = "inputfile"; public static final String COMMANDLINE_OUTPUTFILE = "outputfile"; public static final String COMMANDLINE_POMFILE = "pomfile"; public static final String COMMANDLINE_EXTERNALDEPENDENCIES = "externalDependencies"; public static final String COMMANDLINE_GROUPID = "groupid"; public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)"); public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test", "azure-core-test", "azure-sdk-all", "azure-sdk-parent", "azure-client-sdk-parent"); public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)"); public static final String BASE_AZURE_GROUPID = "com.azure"; public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test"; public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf"; public static final HttpClient HTTP_CLIENT = HttpClient.newHttpClient(); public static final Pattern STRING_SPLIT_BY_DOT = Pattern.compile("[.]"); public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList( "junit-jupiter-api" )); public static final String POM_TYPE = "pom"; private static Logger logger = LoggerFactory.getLogger(Utils.class); public static List<BomDependency> getExternalDependenciesContent(List<Dependency> dependencies) { List<BomDependency> allResolvedDependencies = new ArrayList<>(); for (Dependency dependency : dependencies) { List<BomDependency> resolvedDependencies = getPomFileContent(dependency); if (resolvedDependencies != null) { allResolvedDependencies.addAll(resolvedDependencies); } } return allResolvedDependencies; } public static BomDependencyNoVersion toBomDependencyNoVersion(BomDependency bomDependency) { return new BomDependencyNoVersion(bomDependency.getGroupId(), bomDependency.getArtifactId()); } private static List<BomDependency> parsePomFileContent(InputStream responseStream) {
MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } catch (IOException exception) { exception.printStackTrace(); } catch (XmlPullParserException e) { e.printStackTrace(); } return null; } private static String getPropertyName(String propertyValue) { if(propertyValue.startsWith("${")) { return propertyValue.substring(2, propertyValue.length() - 1); } return propertyValue; } }
If you know of something, I'm happy to use it!
/*
 * HttpURLConnection-based variant: fetches the external BOM POM (GET,
 * application/xml, 5s connect/read timeouts), parses its
 * dependencyManagement, and resolves ${property} version indirections.
 * Returns null on any non-200 response or failure.
 *
 * NOTE(review): the "https: string literals are truncated in this excerpt
 * (URL text and closing quote missing), so this does not compile as-is.
 * NOTE(review): `catch (Exception exception) { }` swallows every failure
 * silently; at minimum log the exception. The connection is disconnected in
 * the finally block, but the InputStream is never closed explicitly.
 */
public static List<BomDependency> getPomFileContent(Dependency dependency) { HttpURLConnection connection = null; try { String[] groups = dependency.getGroupId().split("[.]"); URL url = null; if(groups.length == 2) { url = new URL("https: } else if (groups.length == 3) { url = new URL("https: } connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("accept", "application/xml"); connection.setConnectTimeout(5000); connection.setReadTimeout(5000); int responseCode = connection.getResponseCode(); if (HttpURLConnection.HTTP_OK == responseCode) { InputStream responseStream = connection.getInputStream(); MavenXpp3Reader reader = new MavenXpp3Reader(); Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } } catch (Exception exception) { } finally { if (connection != null) { connection.disconnect(); } } return null; }
// NOTE(review): quoted snippet — String.split recompiles the "[.]" regex on
// every invocation; a precompiled static Pattern avoids the repeated work.
String[] groups = dependency.getGroupId().split("[.]");
/*
 * java.net.http variant using the shared static HTTP_CLIENT: builds the
 * repository URL from the groupId's dot-separated segments (2 or 3 segments
 * supported; anything else throws UnsupportedOperationException), issues an
 * async GET with a 5s timeout, then blocks on join(). Non-200 responses
 * resolve to null.
 *
 * NOTE(review): the "https: string literals are truncated in this excerpt
 * (URL text and closing quote missing), so this does not compile as-is.
 */
public static List<BomDependency> getPomFileContent(Dependency dependency) { String[] groups = STRING_SPLIT_BY_DOT.split(dependency.getGroupId()); String url = null; if(groups.length == 2) { url = "https: } else if (groups.length == 3) { url = "https: } else { throw new UnsupportedOperationException("Can't parse the external BOM file."); } HttpRequest request = HttpRequest.newBuilder() .uri(URI.create(url)) .GET() .header("accept", "application/xml") .timeout(Duration.ofMillis(5000)) .build(); return HTTP_CLIENT.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream()) .thenApply(response -> { if(response.statusCode() == 200) { return Utils.parsePomFileContent(response.body()); } return null; }).join(); }
/**
 * Command-line option names, exclusion lists, and parsing patterns shared by
 * the BOM generator tooling.
 */
class Utils {
    // Command-line option names ("-name=value" form, see COMMANDLINE_REGEX).
    public static final String COMMANDLINE_INPUTFILE = "inputfile";
    public static final String COMMANDLINE_OUTPUTFILE = "outputfile";
    public static final String COMMANDLINE_POMFILE = "pomfile";
    public static final String COMMANDLINE_EXTERNALDEPENDENCIES = "externalDependencies";
    public static final String COMMANDLINE_GROUPID = "groupid";
    public static final String COMMANDLINE_EXCLUSIONLIST = "exclusionList";
    public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)");

    // Artifacts never promoted to the BOM.
    public static final List<String> EXCLUSION_LIST =
        Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test");

    // Version-file line formats.
    public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("(.+):(.+);(.+);(.+)");
    public static final Pattern EXTERNAL_DEPENDENCY_PATTERN = Pattern.compile("(.+):(.+);(.+)");
    public static final Pattern SDK_NON_GA_PATTERN = Pattern.compile("(.+)-(.+)");

    // Well-known identifiers.
    public static final String AZURE_CORE_GROUPID = "com.azure";
    public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test";
    public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf";
    public static final String AZURE_CORE_TEST_LIBRARY = "azure-core-test";
    public static final String CONFLICTING_DEPENDENCIES = "conflict";
    public static final String BOM_ELIGIBLE = "bom";

    // Group prefixes whose versions come from external BOMs.
    public static final HashSet<String> EXTERNAL_BOM_DEPENDENCIES = new HashSet<String>(Arrays.asList(
        "io.projectreactor",
        "com.fasterxml.jackson",
        "io.netty",
        "io.projectreactor.netty"
    ));

    // Artifacts skipped when walking resolved dependency trees.
    public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList(
        "junit-jupiter-api"
    ));

    public static final String POM_TYPE = "pom";

    /**
     * Strips a Maven "${name}" wrapper from a version string, returning the
     * bare property name; values without the wrapper come back unchanged.
     */
    private static String getPropertyName(String propertyValue) {
        return propertyValue.startsWith("${")
            ? propertyValue.substring(2, propertyValue.length() - 1)
            : propertyValue;
    }
}
/*
 * Utils variant exposing getExternalDependenciesContent, which flattens the
 * resolved contents of each external BOM (skipping null results) by calling
 * getPomFileContent(dependency).
 *
 * NOTE(review): getPomFileContent is referenced but not defined in this
 * excerpt — presumably it lives in the full version of this class; confirm.
 * NOTE(review): parsePomFileContent prints stack traces instead of using the
 * declared logger, and getPropertyName NPEs if dep.getVersion() is null (the
 * null check occurs only after the call).
 */
class Utils { public static final String COMMANDLINE_INPUTFILE = "inputfile"; public static final String COMMANDLINE_OUTPUTFILE = "outputfile"; public static final String COMMANDLINE_POMFILE = "pomfile"; public static final String COMMANDLINE_EXTERNALDEPENDENCIES = "externalDependencies"; public static final String COMMANDLINE_GROUPID = "groupid"; public static final Pattern COMMANDLINE_REGEX = Pattern.compile("-(.*)=(.*)"); public static final List<String> EXCLUSION_LIST = Arrays.asList("azure-spring-data-cosmos", "azure-spring-data-cosmos-test", "azure-core-test", "azure-sdk-all", "azure-sdk-parent", "azure-client-sdk-parent"); public static final Pattern SDK_DEPENDENCY_PATTERN = Pattern.compile("com.azure:(.+);(.+);(.+)"); public static final String BASE_AZURE_GROUPID = "com.azure"; public static final String AZURE_TEST_LIBRARY_IDENTIFIER = "-test"; public static final String AZURE_PERF_LIBRARY_IDENTIFIER = "-perf"; public static final HttpClient HTTP_CLIENT = HttpClient.newHttpClient(); public static final Pattern STRING_SPLIT_BY_DOT = Pattern.compile("[.]"); public static final HashSet<String> RESOLVED_EXCLUSION_LIST = new HashSet<>(Arrays.asList( "junit-jupiter-api" )); public static final String POM_TYPE = "pom"; private static Logger logger = LoggerFactory.getLogger(Utils.class); public static List<BomDependency> getExternalDependenciesContent(List<Dependency> dependencies) { List<BomDependency> allResolvedDependencies = new ArrayList<>(); for (Dependency dependency : dependencies) { List<BomDependency> resolvedDependencies = getPomFileContent(dependency); if (resolvedDependencies != null) { allResolvedDependencies.addAll(resolvedDependencies); } } return allResolvedDependencies; } public static BomDependencyNoVersion toBomDependencyNoVersion(BomDependency bomDependency) { return new BomDependencyNoVersion(bomDependency.getGroupId(), bomDependency.getArtifactId()); } private static List<BomDependency> parsePomFileContent(InputStream responseStream) {
MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(responseStream); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().map(dep -> { String version = getPropertyName(dep.getVersion()); while(model.getProperties().getProperty(version) != null) { version = getPropertyName(model.getProperties().getProperty(version)); } if(version == null) { version = dep.getVersion(); } BomDependency bomDependency = new BomDependency(dep.getGroupId(), dep.getArtifactId(), version); return bomDependency; }).collect(Collectors.toList()); } catch (IOException exception) { exception.printStackTrace(); } catch (XmlPullParserException e) { e.printStackTrace(); } return null; } private static String getPropertyName(String propertyValue) { if(propertyValue.startsWith("${")) { return propertyValue.substring(2, propertyValue.length() - 1); } return propertyValue; } }
Yes. Each call in the chain has a different return type.
/**
 * Resolves each input dependency through the Maven resolver and records the
 * parent -&gt; child edges in nameToVersionToChildrenDependencyTree.
 *
 * <p>Test-scoped dependencies and artifacts in RESOLVED_EXCLUSION_LIST are not
 * added. Failures are caught per-artifact so one bad library does not abort
 * the whole walk.
 */
private void resolveTree() {
    for (MavenDependency gaLibrary : inputDependencies) {
        try {
            // Resolve only the artifact itself (withoutTransitivity); its direct
            // dependency list is read from the resolved artifact's metadata.
            MavenResolvedArtifact mavenResolvedArtifact = getMavenResolver()
                .addDependency(gaLibrary)
                .resolve()
                .withoutTransitivity()
                .asSingleResolvedArtifact();

            BomDependency parentDependency = new BomDependency(mavenResolvedArtifact.getCoordinate());
            addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree);

            for (MavenArtifactInfo dependency : mavenResolvedArtifact.getDependencies()) {
                // Test-only dependencies never ship, so they don't constrain the BOM.
                if (dependency.getScope() == ScopeType.TEST) {
                    continue;
                }
                if (RESOLVED_EXCLUSION_LIST.contains(dependency.getCoordinate().getArtifactId())) {
                    continue;
                }
                BomDependency childDependency = new BomDependency(dependency.getCoordinate());
                addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree);
            }
        } catch (Exception ex) {
            // NOTE(review): consider routing this through the class logger instead of stdout.
            System.out.println(ex);
        }
    }
}
// NOTE(review): quoted snippet from the resolver chain — presumably throws
// unless exactly one artifact resolves (confirm against the ShrinkWrap
// Resolver docs); the surrounding code wraps the chain in try/catch.
.asSingleResolvedArtifact();
/**
 * Builds the dependency tree for every input library: the library itself is
 * recorded as a root, then each of its non-test, non-excluded dependencies is
 * recorded as a child edge. Per-library failures are caught so the walk
 * continues past a bad artifact.
 */
private void resolveTree() {
    for (MavenDependency gaLibrary : inputDependencies) {
        try {
            BomDependency parent = new BomDependency(
                gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion());
            addDependencyToDependencyTree(parent, null, nameToVersionToChildrenDependencyTree);

            for (BomDependency child : getDependencies(gaLibrary)) {
                // Skip test-scoped dependencies and explicitly excluded artifacts.
                boolean excluded = child.getScope() == ScopeType.TEST
                    || RESOLVED_EXCLUSION_LIST.contains(child.getArtifactId());
                if (excluded) {
                    continue;
                }
                BomDependency edge = new BomDependency(
                    child.getGroupId(), child.getArtifactId(), child.getVersion());
                addDependencyToDependencyTree(edge, parent, nameToVersionToChildrenDependencyTree);
            }
        } catch (Exception ex) {
            System.out.println(ex);
        }
    }
}
/*
 * Analyzes the resolved dependency tree to decide which dependencies are
 * BOM-eligible.
 *
 * analyze(): resolves the tree then filters conflicts, returning the eligible
 * set; validate(): returns whether any conflict was found.
 * NOTE(review): resolveTree() is called but not defined in this excerpt —
 * presumably it populates nameToVersionToChildrenDependencyTree; confirm.
 *
 * resolveConflicts(): for any artifact seen at multiple versions, keeps the
 * latest (per DependencyVersionComparator) and marks all older versions —
 * and, recursively via makeDependencyInEligible, the dependencies recorded
 * under those versions (the lambda names them "parent" despite the tree's
 * "Children" name; confirm edge direction) — as ineligible.
 * filterConflicts(): additionally admits every single-version dependency that
 * is neither ineligible nor externally managed.
 *
 * NOTE(review): addDependencyToDependencyTree probes a
 * TreeMap<BomDependencyNoVersion, ...> with a BomDependency key — this relies
 * on BomDependency being usable where BomDependencyNoVersion is expected and
 * on BomDependencyNonVersionComparator ignoring the version; confirm.
 */
class DependencyAnalyzer { private TreeSet<BomDependency> inputDependencies = new TreeSet<>(new BomDependencyComparator()); private TreeSet<BomDependency> externalDependencies = new TreeSet<>(new BomDependencyComparator()); private TreeSet<BomDependency> bomEligibleDependencies = new TreeSet<>(new BomDependencyComparator()); private TreeSet<BomDependency> bomIneligibleDependencies = new TreeSet<>(new BomDependencyComparator()); private TreeMap<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new BomDependencyNonVersionComparator()); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(TreeSet<BomDependency> inputDependencies, TreeSet<BomDependency> externalDependencies){ if(inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if(externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public TreeSet<BomDependency> analyze() { resolveTree(); filterConflicts(); return this.bomEligibleDependencies; } public Boolean validate() { resolveTree(); return filterConflicts(); } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, TreeMap<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { if (!dependencyTree.containsKey(dependency)) { dependencyTree.put(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), new HashMap<>()); } HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.get(dependency); if(!versionToParents.containsKey(dependency.getVersion())) { versionToParents.put(dependency.getVersion(), new ArrayList<>()); } if(parentDependency != null) {
versionToParents.get(dependency.getVersion()).add(parentDependency); } } private void makeDependencyInEligible(BomDependency dependency) { if(nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.add(dependency); versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent)); } } private boolean resolveConflicts() { AtomicBoolean hasConflict = new AtomicBoolean(false); nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() > 1) { hasConflict.set(true); List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String latestVersion = versionList.get(versionList.size() - 1); logger.info("Multiple version of the dependency {} included", key); logger.info("\tPicking the latest version for BOM: {}", latestVersion); BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), latestVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (int index = 0; index < versionList.size() - 1; index++) { String version = versionList.get(index); makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version)); } } }); bomEligibleDependencies.removeAll(bomIneligibleDependencies); return hasConflict.get(); } private boolean filterConflicts() { boolean hasconflict = resolveConflicts(); nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new
BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); return hasconflict; } }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Set<BomDependency> bomIneligibleDependencies = new HashSet<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>(); private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) { if (inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if (externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private void generateReport() { Set<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.contains(dependency)).collect(Collectors.toSet()); if (droppedDependencies.size() == 0) { return; } if (errorInfo.size() > 0) { errorInfo.keySet().stream().forEach(key 
-> { if (droppedDependencies.contains(key)) { var conflictingDependencies = errorInfo.get(key).getConflictingDependencies(); var expectedDependency = errorInfo.get(key).getExpectedDependency(); if (expectedDependency != null) { logger.info("Dropped dependency {}.", key.toString(), expectedDependency); } conflictingDependencies.stream().forEach(conflictingDependency -> logger.info("\t\tIncludes dependency {}. Expected dependency {}", conflictingDependency.getActualDependency(), conflictingDependency.getExpectedDependency())); } }); } } private BomDependency getAzureCoreDependencyFromInput() { return inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); } private void pickCoreDependencyRoots() { BomDependency coreDependency = getAzureCoreDependencyFromInput(); var coreDependencies = getDependencies(coreDependency); coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency); coreDependencies.forEach(dependency -> coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency)); for(var dependency : coreDependencyNameToDependency.values()) { if(!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } } /* Create a tree map of all the input binaries into the following map. 
* {groupId_artifactId}: {v1} : {all ancestors that include this binary.} * : {v2} : {all ancestors that include this binary.} * : {v3} : {all ancestors that include this binary.} */ private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { dependencyTree.computeIfAbsent(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), key -> new HashMap<>()); var value = dependencyTree.get(dependency).computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>()); if(parentDependency != null) { value.add(parentDependency); } } private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) { if (!errorInfo.containsKey(droppedDependency)) { errorInfo.put(droppedDependency, new BomDependencyErrorInfo(new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion))); } } private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) { 
updateErrorInfo(droppedDependency, expectedVersion); errorInfo.get(droppedDependency).addConflictingDependency(actualDependency, new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion)); } private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) { if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.add(dependency); if (dependencyReason == null) { dependencyReason = dependency; updateErrorInfo(dependency, expectedVersion); } else { updateErrorInfo(dependency, dependencyReason, expectedVersion); } BomDependency finalDependencyReason = dependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion)); } } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), 
dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null, eligibleVersion); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } }
This should fail more loudly, as this error could happen mid-processing of the file. #Resolved
private List<BomDependency> scan() { List<BomDependency> inputDependencies = new ArrayList<>(); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { BomDependency dependency = scanDependency(line); if(dependency != null) { inputDependencies.add(dependency); } } } catch (IOException exception) { exception.printStackTrace(); } return inputDependencies; }
exception.printStackTrace();
private List<BomDependency> scan() { List<BomDependency> inputDependencies = new ArrayList<>(); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { BomDependency dependency = scanDependency(line); if(dependency != null) { inputDependencies.add(dependency); } } } catch (IOException exception) { logger.error("Input file parsing failed. Exception{}", exception.toString()); } return inputDependencies; }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator(String inputFileName, String outputFileName, String pomFileName) { this.inputFileName = inputFileName; this.outputFileName = outputFileName; this.pomFileName = pomFileName; } public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. 
Exiting..."); } } private BomDependency scanDependency(String line) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (!matcher.matches()) { return null; } if (matcher.groupCount() != 3) { return null; } String artifactId = matcher.group(1); String version = matcher.group(2); if(version.contains("-")) { return null; } if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) { logger.trace("Skipping dependency {}:{}", AZURE_CORE_GROUPID, artifactId); return null; } return new BomDependency(AZURE_CORE_GROUPID, artifactId, version); } private List<BomDependency> resolveExternalDependencies() { List<BomDependency> externalDependencies = new ArrayList<>(); MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies)); } catch (XmlPullParserException | IOException e) { e.printStackTrace(); } return externalDependencies; } private void rewriteExistingBomFile() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.pomFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } private void writeBom(Collection<BomDependency> bomDependencies) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = 
reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; }).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.outputFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator(String inputFileName, String outputFileName, String pomFileName) { this.inputFileName = inputFileName; this.outputFileName = outputFileName; this.pomFileName = pomFileName; } public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. 
Exiting..."); } } private BomDependency scanDependency(String line) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (!matcher.matches()) { return null; } if (matcher.groupCount() != 3) { return null; } String artifactId = matcher.group(1); String version = matcher.group(2); if(version.contains("-")) { return null; } if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) { logger.trace("Skipping dependency {}:{}", BASE_AZURE_GROUPID, artifactId); return null; } return new BomDependency(BASE_AZURE_GROUPID, artifactId, version); } private Model readModel() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); return model; } catch (XmlPullParserException | IOException e) { logger.error("BOM reading failed with: {}", e.toString()); } return null; } private void writeModel(Model model) { String pomFileName = this.pomFileName; writeModel(pomFileName, model); } private void writeModel(String fileName, Model model) { MavenXpp3Writer writer = new MavenXpp3Writer(); try { writer.write(new FileWriter(fileName), model); } catch (IOException exception) { logger.error("BOM writing failed with: {}", exception.toString()); } } private List<BomDependency> resolveExternalDependencies() { List<BomDependency> externalDependencies = new ArrayList<>(); List<Dependency> externalBomDependencies = getExternalDependencies(); externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies)); return externalDependencies; } private List<Dependency> getExternalDependencies() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); } private void rewriteExistingBomFile() { Model model = readModel(); DependencyManagement management 
= model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(model); } private void writeBom(Collection<BomDependency> bomDependencies) { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; }).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(this.outputFileName, model); } }
Could we make model reading into a utility method? There are a few locations that have file -> model logic that could be consolidated. #Resolved
private List<BomDependency> resolveExternalDependencies() { List<BomDependency> externalDependencies = new ArrayList<>(); MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies)); } catch (XmlPullParserException | IOException e) { e.printStackTrace(); } return externalDependencies; }
Model model = reader.read(new FileReader(this.pomFileName));
private List<BomDependency> resolveExternalDependencies() { List<BomDependency> externalDependencies = new ArrayList<>(); List<Dependency> externalBomDependencies = getExternalDependencies(); externalDependencies.addAll(Utils.getExternalDependenciesContent(externalBomDependencies)); return externalDependencies; }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator(String inputFileName, String outputFileName, String pomFileName) { this.inputFileName = inputFileName; this.outputFileName = outputFileName; this.pomFileName = pomFileName; } public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. 
Exiting..."); } } private List<BomDependency> scan() { List<BomDependency> inputDependencies = new ArrayList<>(); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { BomDependency dependency = scanDependency(line); if(dependency != null) { inputDependencies.add(dependency); } } } catch (IOException exception) { exception.printStackTrace(); } return inputDependencies; } private BomDependency scanDependency(String line) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (!matcher.matches()) { return null; } if (matcher.groupCount() != 3) { return null; } String artifactId = matcher.group(1); String version = matcher.group(2); if(version.contains("-")) { return null; } if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) { logger.trace("Skipping dependency {}:{}", AZURE_CORE_GROUPID, artifactId); return null; } return new BomDependency(AZURE_CORE_GROUPID, artifactId, version); } private void rewriteExistingBomFile() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.pomFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } private void writeBom(Collection<BomDependency> bomDependencies) { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> 
dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; }).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); MavenXpp3Writer writer = new MavenXpp3Writer(); writer.write(new FileWriter(this.outputFileName), model); } catch (Exception exception) { exception.printStackTrace(); } } }
class BomGenerator { private String outputFileName; private String inputFileName; private String pomFileName; private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); BomGenerator(String inputFileName, String outputFileName, String pomFileName) { this.inputFileName = inputFileName; this.outputFileName = outputFileName; this.pomFileName = pomFileName; } public void generate() { List<BomDependency> inputDependencies = scan(); List<BomDependency> externalDependencies = resolveExternalDependencies(); DependencyAnalyzer analyzer = new DependencyAnalyzer(inputDependencies, externalDependencies); analyzer.reduce(); Collection<BomDependency> outputDependencies = analyzer.getBomEligibleDependencies(); analyzer = new DependencyAnalyzer(outputDependencies, externalDependencies); boolean validationFailed = analyzer.validate(); outputDependencies = analyzer.getBomEligibleDependencies(); if(!validationFailed) { rewriteExistingBomFile(); writeBom(outputDependencies); } else { logger.trace("Validation for the BOM failed. Exiting..."); } } private List<BomDependency> scan() { List<BomDependency> inputDependencies = new ArrayList<>(); try { for (String line : Files.readAllLines(Paths.get(inputFileName))) { BomDependency dependency = scanDependency(line); if(dependency != null) { inputDependencies.add(dependency); } } } catch (IOException exception) { logger.error("Input file parsing failed. 
Exception{}", exception.toString()); } return inputDependencies; } private BomDependency scanDependency(String line) { Matcher matcher = SDK_DEPENDENCY_PATTERN.matcher(line); if (!matcher.matches()) { return null; } if (matcher.groupCount() != 3) { return null; } String artifactId = matcher.group(1); String version = matcher.group(2); if(version.contains("-")) { return null; } if (EXCLUSION_LIST.contains(artifactId) || artifactId.contains(AZURE_PERF_LIBRARY_IDENTIFIER) || (artifactId.contains(AZURE_TEST_LIBRARY_IDENTIFIER))) { logger.trace("Skipping dependency {}:{}", BASE_AZURE_GROUPID, artifactId); return null; } return new BomDependency(BASE_AZURE_GROUPID, artifactId, version); } private Model readModel() { MavenXpp3Reader reader = new MavenXpp3Reader(); try { Model model = reader.read(new FileReader(this.pomFileName)); return model; } catch (XmlPullParserException | IOException e) { logger.error("BOM reading failed with: {}", e.toString()); } return null; } private void writeModel(Model model) { String pomFileName = this.pomFileName; writeModel(pomFileName, model); } private void writeModel(String fileName, Model model) { MavenXpp3Writer writer = new MavenXpp3Writer(); try { writer.write(new FileWriter(fileName), model); } catch (IOException exception) { logger.error("BOM writing failed with: {}", exception.toString()); } } private List<Dependency> getExternalDependencies() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); return management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); } private void rewriteExistingBomFile() { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> dependencies = management.getDependencies(); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(model); } private void writeBom(Collection<BomDependency> 
bomDependencies) { Model model = readModel(); DependencyManagement management = model.getDependencyManagement(); List<Dependency> externalBomDependencies = management.getDependencies().stream().filter(dependency -> dependency.getType().equals(POM_TYPE)).collect(Collectors.toList()); List<Dependency> dependencies = bomDependencies.stream().map(bomDependency -> { Dependency dependency = new Dependency(); dependency.setGroupId(bomDependency.getGroupId()); dependency.setArtifactId(bomDependency.getArtifactId()); dependency.setVersion(bomDependency.getVersion()); return dependency; }).collect(Collectors.toList()); dependencies.addAll(externalBomDependencies); dependencies.sort(new DependencyComparator()); management.setDependencies(dependencies); writeModel(this.outputFileName, model); } }
This could be simplified to just use `values()` which is a Collection of the values in the map: ```java return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1); ``` #Resolved
public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.keySet().stream().anyMatch(key -> nameToVersionToChildrenDependencyTree.get(key).size() > 1); }
return nameToVersionToChildrenDependencyTree.keySet().stream().anyMatch(key -> nameToVersionToChildrenDependencyTree.get(key).size() > 1);
public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1); }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Map<BomDependency, String> bomIneligibleDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies){ if(inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if(externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), 
mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if(RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch(Exception ex) { System.out.println(ex); } } } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { if (!dependencyTree.containsKey(dependency)) { dependencyTree.put(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), new HashMap<>()); } HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.get(dependency); if(!versionToParents.containsKey(dependency.getVersion())) { versionToParents.put(dependency.getVersion(), new ArrayList<>()); } if(parentDependency != null) { versionToParents.get(dependency.getVersion()).add(parentDependency); } } private void makeDependencyInEligible(BomDependency dependency, String dropDependencyReason) { 
if(nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.put(dependency, dropDependencyReason); if(dropDependencyReason == null) { dropDependencyReason = dependency.toString(); logger.trace("\t\tDropping dependency {}", dependency.toString()); } else { logger.trace("\t\tDropping dependency {} due to dependency {}", dependency.toString(), dropDependencyReason); } String finalDropDependencyReason = dropDependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDropDependencyReason)); } } private void pickCoreDependencyRoots() { BomDependency coreDependency = inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); coreDependencies.put(BomDependency.convertTo(coreDependency), coreDependency); coreDependencies.putAll(getDependencies(coreDependency).stream().collect(Collectors.toMap(dependency -> BomDependency.convertTo(dependency), dependency -> dependency))); this.bomEligibleDependencies.addAll(coreDependencies.values().stream().filter(dependency -> !externalDependencies.contains(dependency)).collect(Collectors.toList())); } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencies.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencies.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by 
azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies.keySet()); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.containsKey(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } public void generateReport() { List<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.containsKey(dependency)).collect(Collectors.toList()); if (droppedDependencies.size() == 0) { return; } logger.info("We dropped the following dependencies from the input list."); for (BomDependency dependency : droppedDependencies) { logger.info("Dependency {}, Reason {}", dependency.toString(), bomIneligibleDependencies.get(dependency)); } } }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Set<BomDependency> bomIneligibleDependencies = new HashSet<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>(); private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) { if (inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if (externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private void generateReport() { Set<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.contains(dependency)).collect(Collectors.toSet()); if (droppedDependencies.size() == 0) { return; } if (errorInfo.size() > 0) { errorInfo.keySet().stream().forEach(key -> { if (droppedDependencies.contains(key)) { var conflictingDependencies = errorInfo.get(key).getConflictingDependencies(); var 
expectedDependency = errorInfo.get(key).getExpectedDependency(); if (expectedDependency != null) { logger.info("Dropped dependency {}.", key.toString(), expectedDependency); } conflictingDependencies.stream().forEach(conflictingDependency -> logger.info("\t\tIncludes dependency {}. Expected dependency {}", conflictingDependency.getActualDependency(), conflictingDependency.getExpectedDependency())); } }); } } private BomDependency getAzureCoreDependencyFromInput() { return inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); } private void pickCoreDependencyRoots() { BomDependency coreDependency = getAzureCoreDependencyFromInput(); var coreDependencies = getDependencies(coreDependency); coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency); coreDependencies.forEach(dependency -> coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency)); for(var dependency : coreDependencyNameToDependency.values()) { if(!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } } /* Create a tree map of all the input binaries into the following map. 
* {groupId_artifactId}: {v1} : {all ancestors that include this binary.} * : {v2} : {all ancestors that include this binary.} * : {v3} : {all ancestors that include this binary.} */ private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch (Exception ex) { System.out.println(ex); } } } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency 
parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { dependencyTree.computeIfAbsent(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), key -> new HashMap<>()); var value = dependencyTree.get(dependency).computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>()); if(parentDependency != null) { value.add(parentDependency); } } private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) { if (!errorInfo.containsKey(droppedDependency)) { errorInfo.put(droppedDependency, new BomDependencyErrorInfo(new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion))); } } private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) { updateErrorInfo(droppedDependency, expectedVersion); errorInfo.get(droppedDependency).addConflictingDependency(actualDependency, new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion)); } private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) { if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.add(dependency); if (dependencyReason == null) { dependencyReason = dependency; updateErrorInfo(dependency, expectedVersion); } else { updateErrorInfo(dependency, dependencyReason, expectedVersion); } BomDependency finalDependencyReason = dependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion)); } } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = 
nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null, eligibleVersion); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } }
While attempting to understand the logic here I had to jump around in the file a lot, could these methods be declared in the class in the order they are used? #Resolved
private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); }
filterConflicts();
private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Map<BomDependency, String> bomIneligibleDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies){ if(inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if(externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.keySet().stream().anyMatch(key -> nameToVersionToChildrenDependencyTree.get(key).size() > 1); } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), 
mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if(RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch(Exception ex) { System.out.println(ex); } } } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { if (!dependencyTree.containsKey(dependency)) { dependencyTree.put(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), new HashMap<>()); } HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.get(dependency); if(!versionToParents.containsKey(dependency.getVersion())) { versionToParents.put(dependency.getVersion(), new ArrayList<>()); } if(parentDependency != null) { versionToParents.get(dependency.getVersion()).add(parentDependency); } } private void makeDependencyInEligible(BomDependency 
dependency, String dropDependencyReason) { if(nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.put(dependency, dropDependencyReason); if(dropDependencyReason == null) { dropDependencyReason = dependency.toString(); logger.trace("\t\tDropping dependency {}", dependency.toString()); } else { logger.trace("\t\tDropping dependency {} due to dependency {}", dependency.toString(), dropDependencyReason); } String finalDropDependencyReason = dropDependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDropDependencyReason)); } } private void pickCoreDependencyRoots() { BomDependency coreDependency = inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); coreDependencies.put(BomDependency.convertTo(coreDependency), coreDependency); coreDependencies.putAll(getDependencies(coreDependency).stream().collect(Collectors.toMap(dependency -> BomDependency.convertTo(dependency), dependency -> dependency))); this.bomEligibleDependencies.addAll(coreDependencies.values().stream().filter(dependency -> !externalDependencies.contains(dependency)).collect(Collectors.toList())); } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencies.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencies.get(dependencyNoVersion).getVersion(); 
logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies.keySet()); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.containsKey(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } public void generateReport() { List<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.containsKey(dependency)).collect(Collectors.toList()); if (droppedDependencies.size() == 0) { return; } logger.info("We dropped the following dependencies from the input list."); for (BomDependency dependency : droppedDependencies) { logger.info("Dependency {}, Reason {}", dependency.toString(), bomIneligibleDependencies.get(dependency)); } } }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Set<BomDependency> bomIneligibleDependencies = new HashSet<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>(); private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) { if (inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if (externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1); } private void generateReport() { Set<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.contains(dependency)).collect(Collectors.toSet()); if (droppedDependencies.size() == 0) { return; } if (errorInfo.size() > 0) { errorInfo.keySet().stream().forEach(key -> { if (droppedDependencies.contains(key)) { var conflictingDependencies = 
errorInfo.get(key).getConflictingDependencies(); var expectedDependency = errorInfo.get(key).getExpectedDependency(); if (expectedDependency != null) { logger.info("Dropped dependency {}.", key.toString(), expectedDependency); } conflictingDependencies.stream().forEach(conflictingDependency -> logger.info("\t\tIncludes dependency {}. Expected dependency {}", conflictingDependency.getActualDependency(), conflictingDependency.getExpectedDependency())); } }); } } private BomDependency getAzureCoreDependencyFromInput() { return inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); } private void pickCoreDependencyRoots() { BomDependency coreDependency = getAzureCoreDependencyFromInput(); var coreDependencies = getDependencies(coreDependency); coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency); coreDependencies.forEach(dependency -> coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency)); for(var dependency : coreDependencyNameToDependency.values()) { if(!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } } /* Create a tree map of all the input binaries into the following map. 
* {groupId_artifactId}: {v1} : {all ancestors that include this binary.} * : {v2} : {all ancestors that include this binary.} * : {v3} : {all ancestors that include this binary.} */ private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch (Exception ex) { System.out.println(ex); } } } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency 
parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { dependencyTree.computeIfAbsent(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), key -> new HashMap<>()); var value = dependencyTree.get(dependency).computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>()); if(parentDependency != null) { value.add(parentDependency); } } private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) { if (!errorInfo.containsKey(droppedDependency)) { errorInfo.put(droppedDependency, new BomDependencyErrorInfo(new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion))); } } private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) { updateErrorInfo(droppedDependency, expectedVersion); errorInfo.get(droppedDependency).addConflictingDependency(actualDependency, new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion)); } private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) { if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.add(dependency); if (dependencyReason == null) { dependencyReason = dependency; updateErrorInfo(dependency, expectedVersion); } else { updateErrorInfo(dependency, dependencyReason, expectedVersion); } BomDependency finalDependencyReason = dependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion)); } } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = 
nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null, eligibleVersion); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } }
This may be a spot to use loops instead of streams to make the logic more clear and easier to step through #Resolved
private void pickCoreDependencyRoots() { BomDependency coreDependency = inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); coreDependencies.put(BomDependency.convertTo(coreDependency), coreDependency); coreDependencies.putAll(getDependencies(coreDependency).stream().collect(Collectors.toMap(dependency -> BomDependency.convertTo(dependency), dependency -> dependency))); this.bomEligibleDependencies.addAll(coreDependencies.values().stream().filter(dependency -> !externalDependencies.contains(dependency)).collect(Collectors.toList())); }
this.bomEligibleDependencies.addAll(coreDependencies.values().stream().filter(dependency -> !externalDependencies.contains(dependency)).collect(Collectors.toList()));
private void pickCoreDependencyRoots() { BomDependency coreDependency = getAzureCoreDependencyFromInput(); var coreDependencies = getDependencies(coreDependency); coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency); coreDependencies.forEach(dependency -> coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency)); for(var dependency : coreDependencyNameToDependency.values()) { if(!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Map<BomDependency, String> bomIneligibleDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies){ if(inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if(externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.keySet().stream().anyMatch(key -> nameToVersionToChildrenDependencyTree.get(key).size() > 1); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new 
BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if(RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch(Exception ex) { System.out.println(ex); } } } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { if (!dependencyTree.containsKey(dependency)) { dependencyTree.put(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), new HashMap<>()); } HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.get(dependency); if(!versionToParents.containsKey(dependency.getVersion())) { versionToParents.put(dependency.getVersion(), new ArrayList<>()); } if(parentDependency != null) { 
versionToParents.get(dependency.getVersion()).add(parentDependency); } } private void makeDependencyInEligible(BomDependency dependency, String dropDependencyReason) { if(nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.put(dependency, dropDependencyReason); if(dropDependencyReason == null) { dropDependencyReason = dependency.toString(); logger.trace("\t\tDropping dependency {}", dependency.toString()); } else { logger.trace("\t\tDropping dependency {} due to dependency {}", dependency.toString(), dropDependencyReason); } String finalDropDependencyReason = dropDependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDropDependencyReason)); } } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencies.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencies.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { 
bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies.keySet()); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.containsKey(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } public void generateReport() { List<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.containsKey(dependency)).collect(Collectors.toList()); if (droppedDependencies.size() == 0) { return; } logger.info("We dropped the following dependencies from the input list."); for (BomDependency dependency : droppedDependencies) { logger.info("Dependency {}, Reason {}", dependency.toString(), bomIneligibleDependencies.get(dependency)); } } }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Set<BomDependency> bomIneligibleDependencies = new HashSet<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>(); private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) { if (inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if (externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private void generateReport() { Set<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.contains(dependency)).collect(Collectors.toSet()); if (droppedDependencies.size() == 0) { return; } if (errorInfo.size() > 0) { errorInfo.keySet().stream().forEach(key 
-> { if (droppedDependencies.contains(key)) { var conflictingDependencies = errorInfo.get(key).getConflictingDependencies(); var expectedDependency = errorInfo.get(key).getExpectedDependency(); if (expectedDependency != null) { logger.info("Dropped dependency {}.", key.toString(), expectedDependency); } conflictingDependencies.stream().forEach(conflictingDependency -> logger.info("\t\tIncludes dependency {}. Expected dependency {}", conflictingDependency.getActualDependency(), conflictingDependency.getExpectedDependency())); } }); } } private BomDependency getAzureCoreDependencyFromInput() { return inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); } /* Create a tree map of all the input binaries into the following map. * {groupId_artifactId}: {v1} : {all ancestors that include this binary.} * : {v2} : {all ancestors that include this binary.} * : {v3} : {all ancestors that include this binary.} */ private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch (Exception ex) { System.out.println(ex); } } } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = 
getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { dependencyTree.computeIfAbsent(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), key -> new HashMap<>()); var value = dependencyTree.get(dependency).computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>()); if(parentDependency != null) { value.add(parentDependency); } } private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) { if (!errorInfo.containsKey(droppedDependency)) { errorInfo.put(droppedDependency, new BomDependencyErrorInfo(new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion))); } } private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) { updateErrorInfo(droppedDependency, expectedVersion); errorInfo.get(droppedDependency).addConflictingDependency(actualDependency, new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion)); } private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) { if 
(nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.add(dependency); if (dependencyReason == null) { dependencyReason = dependency; updateErrorInfo(dependency, expectedVersion); } else { updateErrorInfo(dependency, dependencyReason, expectedVersion); } BomDependency finalDependencyReason = dependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion)); } } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null, eligibleVersion); } } } } 
private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } }
This can use `computeIfAbsent` to be easier to read. Additionally it can be chained to get the Map from this Map, removing the get below. #Resolved
private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { if (!dependencyTree.containsKey(dependency)) { dependencyTree.put(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), new HashMap<>()); } HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.get(dependency); if(!versionToParents.containsKey(dependency.getVersion())) { versionToParents.put(dependency.getVersion(), new ArrayList<>()); } if(parentDependency != null) { versionToParents.get(dependency.getVersion()).add(parentDependency); } }
if (!dependencyTree.containsKey(dependency)) {
private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { dependencyTree.computeIfAbsent(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), key -> new HashMap<>()); var value = dependencyTree.get(dependency).computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>()); if(parentDependency != null) { value.add(parentDependency); } }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Map<BomDependency, String> bomIneligibleDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies){ if(inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if(externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.keySet().stream().anyMatch(key -> nameToVersionToChildrenDependencyTree.get(key).size() > 1); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new 
BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if(RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch(Exception ex) { System.out.println(ex); } } } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private void makeDependencyInEligible(BomDependency dependency, String dropDependencyReason) { if(nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.put(dependency, dropDependencyReason); if(dropDependencyReason == null) { dropDependencyReason = dependency.toString(); logger.trace("\t\tDropping dependency {}", dependency.toString()); } else { logger.trace("\t\tDropping dependency {} due to dependency {}", dependency.toString(), dropDependencyReason); } String 
finalDropDependencyReason = dropDependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDropDependencyReason)); } } private void pickCoreDependencyRoots() { BomDependency coreDependency = inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); coreDependencies.put(BomDependency.convertTo(coreDependency), coreDependency); coreDependencies.putAll(getDependencies(coreDependency).stream().collect(Collectors.toMap(dependency -> BomDependency.convertTo(dependency), dependency -> dependency))); this.bomEligibleDependencies.addAll(coreDependencies.values().stream().filter(dependency -> !externalDependencies.contains(dependency)).collect(Collectors.toList())); } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencies.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencies.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) 
{ makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies.keySet()); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.containsKey(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } public void generateReport() { List<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.containsKey(dependency)).collect(Collectors.toList()); if (droppedDependencies.size() == 0) { return; } logger.info("We dropped the following dependencies from the input list."); for (BomDependency dependency : droppedDependencies) { logger.info("Dependency {}, Reason {}", dependency.toString(), bomIneligibleDependencies.get(dependency)); } } }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Set<BomDependency> bomIneligibleDependencies = new HashSet<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>(); private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) { if (inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if (externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private void generateReport() { Set<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.contains(dependency)).collect(Collectors.toSet()); if (droppedDependencies.size() == 0) { return; } if (errorInfo.size() > 0) { errorInfo.keySet().stream().forEach(key 
-> { if (droppedDependencies.contains(key)) { var conflictingDependencies = errorInfo.get(key).getConflictingDependencies(); var expectedDependency = errorInfo.get(key).getExpectedDependency(); if (expectedDependency != null) { logger.info("Dropped dependency {}.", key.toString(), expectedDependency); } conflictingDependencies.stream().forEach(conflictingDependency -> logger.info("\t\tIncludes dependency {}. Expected dependency {}", conflictingDependency.getActualDependency(), conflictingDependency.getExpectedDependency())); } }); } } private BomDependency getAzureCoreDependencyFromInput() { return inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); } private void pickCoreDependencyRoots() { BomDependency coreDependency = getAzureCoreDependencyFromInput(); var coreDependencies = getDependencies(coreDependency); coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency); coreDependencies.forEach(dependency -> coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency)); for(var dependency : coreDependencyNameToDependency.values()) { if(!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } } /* Create a tree map of all the input binaries into the following map. 
* {groupId_artifactId}: {v1} : {all ancestors that include this binary.} * : {v2} : {all ancestors that include this binary.} * : {v3} : {all ancestors that include this binary.} */ private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch (Exception ex) { System.out.println(ex); } } } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) { if 
(!errorInfo.containsKey(droppedDependency)) { errorInfo.put(droppedDependency, new BomDependencyErrorInfo(new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion))); } } private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) { updateErrorInfo(droppedDependency, expectedVersion); errorInfo.get(droppedDependency).addConflictingDependency(actualDependency, new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion)); } private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) { if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.add(dependency); if (dependencyReason == null) { dependencyReason = dependency; updateErrorInfo(dependency, expectedVersion); } else { updateErrorInfo(dependency, dependencyReason, expectedVersion); } BomDependency finalDependencyReason = dependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion)); } } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the 
version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null, eligibleVersion); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } }
Another opportunity for simplification with `computeIfAbsent` #Resolved
private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { if (!dependencyTree.containsKey(dependency)) { dependencyTree.put(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), new HashMap<>()); } HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.get(dependency); if(!versionToParents.containsKey(dependency.getVersion())) { versionToParents.put(dependency.getVersion(), new ArrayList<>()); } if(parentDependency != null) { versionToParents.get(dependency.getVersion()).add(parentDependency); } }
if(!versionToParents.containsKey(dependency.getVersion())) {
/**
 * Records {@code dependency} in the dependency tree, optionally attaching
 * {@code parentDependency} as one of the libraries that pulls it in.
 *
 * @param dependency the dependency being registered.
 * @param parentDependency the library that includes {@code dependency}; may be
 *     {@code null} for a root (directly supplied) dependency.
 * @param dependencyTree map of (groupId, artifactId) to version to all parents
 *     that include that version.
 */
private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) {
    // Chain on the value returned by the outer computeIfAbsent instead of
    // discarding it and re-fetching via dependencyTree.get(dependency): the map is
    // keyed by BomDependencyNoVersion, so the original get() with a BomDependency
    // key only worked if the two types happened to compare equal.
    Collection<BomDependency> parents = dependencyTree
        .computeIfAbsent(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), key -> new HashMap<>())
        .computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>());

    if (parentDependency != null) {
        parents.add(parentDependency);
    }
}
/**
 * Analyzes the full dependency graph of the input libraries and determines
 * which dependencies are eligible for the generated BOM. A dependency becomes
 * ineligible when it (directly or transitively) includes a version of a
 * library that conflicts with the version chosen for the BOM.
 */
class DependencyAnalyzer {
    private Set<BomDependency> inputDependencies = new HashSet<>();
    private Set<BomDependency> externalDependencies = new HashSet<>();
    private Set<BomDependency> bomEligibleDependencies = new HashSet<>();
    // Ineligible dependency -> human-readable reason it was dropped.
    private Map<BomDependency, String> bomIneligibleDependencies = new HashMap<>();
    // azure-core plus its direct dependencies, keyed by (groupId, artifactId).
    private Map<BomDependencyNoVersion, BomDependency> coreDependencies = new HashMap<>();
    // (groupId, artifactId) -> version -> all parents that include that version.
    private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree =
        new TreeMap<>(new Comparator<BomDependencyNoVersion>() {
            @Override
            public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) {
                // Bug fix: the original used o1.getGroupId() on BOTH sides, which
                // collapsed the ordering to artifactId alone and conflated
                // equally-named artifacts from different groups. The ':' separator
                // also prevents collisions from raw string concatenation.
                return (o1.getGroupId() + ":" + o1.getArtifactId())
                    .compareTo(o2.getGroupId() + ":" + o2.getArtifactId());
            }
        });

    private static final Logger logger = LoggerFactory.getLogger(BomGenerator.class);

    DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) {
        if (inputDependencies != null) {
            this.inputDependencies.addAll(inputDependencies);
        }
        if (externalDependencies != null) {
            this.externalDependencies.addAll(externalDependencies);
        }
    }

    public Collection<BomDependency> getBomEligibleDependencies() {
        return this.bomEligibleDependencies;
    }

    /** Analyzes the graph and trims the eligible set down to the input list. */
    public void reduce() {
        analyze();
        generateReport();
        this.bomEligibleDependencies.retainAll(this.inputDependencies);
    }

    /** Returns true when at least one dependency appears in more than one version. */
    public boolean validate() {
        analyze();
        // Iterate values() directly instead of keySet() + get().
        return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(versions -> versions.size() > 1);
    }

    private void analyze() {
        pickCoreDependencyRoots();
        resolveTree();
        resolveConflicts();
        filterConflicts();
    }

    /** Resolves the direct dependencies of the given artifact from Maven Central. */
    private static List<BomDependency> getDependencies(MavenDependency dependency) {
        try {
            MavenResolvedArtifact mavenResolvedArtifact = getMavenResolver()
                .addDependency(dependency)
                .resolve()
                .withoutTransitivity()
                .asSingleResolvedArtifact();
            return Arrays.stream(mavenResolvedArtifact.getDependencies())
                .map(mavenDependency -> new BomDependency(
                    mavenDependency.getCoordinate().getGroupId(),
                    mavenDependency.getCoordinate().getArtifactId(),
                    mavenDependency.getCoordinate().getVersion(),
                    mavenDependency.getScope()))
                .collect(Collectors.toList());
        } catch (Exception ex) {
            logger.error(ex.toString());
        }
        return new ArrayList<>();
    }

    /** Builds the (name -> version -> parents) tree for all input libraries. */
    private void resolveTree() {
        for (MavenDependency gaLibrary : inputDependencies) {
            try {
                BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion());
                addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree);

                List<BomDependency> dependencies = getDependencies(gaLibrary);
                for (BomDependency dependency : dependencies) {
                    // Test-scoped and explicitly excluded artifacts never affect the BOM.
                    if (dependency.getScope() == ScopeType.TEST) {
                        continue;
                    }
                    if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) {
                        continue;
                    }
                    BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion());
                    addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree);
                }
            } catch (Exception ex) {
                // Consistency fix: route errors through the logger, not System.out.
                logger.error(ex.toString());
            }
        }
    }

    private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() {
        return Maven.configureResolver().withMavenCentralRepo(true);
    }

    /**
     * Marks the dependency and, recursively, every parent that includes it as
     * ineligible for the BOM, recording the reason for the final report.
     */
    private void makeDependencyInEligible(BomDependency dependency, String dropDependencyReason) {
        if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) {
            HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency);
            bomIneligibleDependencies.put(dependency, dropDependencyReason);
            if (dropDependencyReason == null) {
                // A null reason marks the root of the drop; parents are then blamed on it.
                dropDependencyReason = dependency.toString();
                logger.trace("\t\tDropping dependency {}", dependency.toString());
            } else {
                logger.trace("\t\tDropping dependency {} due to dependency {}", dependency.toString(), dropDependencyReason);
            }
            String finalDropDependencyReason = dropDependencyReason;
            versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDropDependencyReason));
        }
    }

    /** Seeds the eligible set with azure-core and its direct dependencies. */
    private void pickCoreDependencyRoots() {
        BomDependency coreDependency = inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get();
        coreDependencies.put(BomDependency.convertTo(coreDependency), coreDependency);
        // Robustness: keep the first mapping if azure-core ever lists the same
        // (groupId, artifactId) twice; the bare toMap collector would throw.
        coreDependencies.putAll(getDependencies(coreDependency).stream()
            .collect(Collectors.toMap(BomDependency::convertTo, dependency -> dependency, (first, second) -> first)));
        this.bomEligibleDependencies.addAll(coreDependencies.values().stream()
            .filter(dependency -> !externalDependencies.contains(dependency))
            .collect(Collectors.toList()));
    }

    /** Picks the winning version for a multiply-included dependency and drops the rest. */
    private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) {
        Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion);
        if (versionToDependency.size() > 1) {
            List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList());
            String eligibleVersion;
            logger.trace("Multiple version of the dependency {} included", dependencyNoVersion);
            if (coreDependencies.containsKey(dependencyNoVersion)) {
                // azure-core's choice always wins, keeping the BOM aligned with core.
                eligibleVersion = coreDependencies.get(dependencyNoVersion).getVersion();
                // Parameterized logging avoids eager String.format when TRACE is off.
                logger.trace("\tPicking the version used by azure-core - {}:{}", dependencyNoVersion, eligibleVersion);
            } else {
                eligibleVersion = versionList.get(versionList.size() - 1);
                logger.trace("\tPicking the latest version {}:{}", dependencyNoVersion, eligibleVersion);
            }

            BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion);
            if (!externalDependencies.contains(dependency)) {
                bomEligibleDependencies.add(dependency);
            }
            // Every other version, and everything that pulls it in, becomes ineligible.
            for (String version : versionList) {
                if (!version.equals(eligibleVersion)) {
                    makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null);
                }
            }
        }
    }

    private void resolveConflicts() {
        nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict);
        bomEligibleDependencies.removeAll(bomIneligibleDependencies.keySet());
    }

    /** Adds every single-version dependency that survived conflict resolution. */
    private void filterConflicts() {
        nameToVersionToChildrenDependencyTree.keySet().stream().forEach(
            key -> {
                HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key);
                if (versionToDependency.size() == 1) {
                    BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get());
                    if (!bomIneligibleDependencies.containsKey(dependency) && !externalDependencies.contains(dependency)) {
                        bomEligibleDependencies.add(dependency);
                    }
                }
            });
    }

    /** Logs every input dependency that was dropped, together with the reason. */
    public void generateReport() {
        List<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.containsKey(dependency)).collect(Collectors.toList());
        if (droppedDependencies.size() == 0) {
            return;
        }
        logger.info("We dropped the following dependencies from the input list.");
        for (BomDependency dependency : droppedDependencies) {
            logger.info("Dependency {}, Reason {}", dependency.toString(), bomIneligibleDependencies.get(dependency));
        }
    }
}
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Set<BomDependency> bomIneligibleDependencies = new HashSet<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>(); private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) { if (inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if (externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private void generateReport() { Set<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.contains(dependency)).collect(Collectors.toSet()); if (droppedDependencies.size() == 0) { return; } if (errorInfo.size() > 0) { errorInfo.keySet().stream().forEach(key 
-> { if (droppedDependencies.contains(key)) { var conflictingDependencies = errorInfo.get(key).getConflictingDependencies(); var expectedDependency = errorInfo.get(key).getExpectedDependency(); if (expectedDependency != null) { logger.info("Dropped dependency {}.", key.toString(), expectedDependency); } conflictingDependencies.stream().forEach(conflictingDependency -> logger.info("\t\tIncludes dependency {}. Expected dependency {}", conflictingDependency.getActualDependency(), conflictingDependency.getExpectedDependency())); } }); } } private BomDependency getAzureCoreDependencyFromInput() { return inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); } private void pickCoreDependencyRoots() { BomDependency coreDependency = getAzureCoreDependencyFromInput(); var coreDependencies = getDependencies(coreDependency); coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency); coreDependencies.forEach(dependency -> coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency)); for(var dependency : coreDependencyNameToDependency.values()) { if(!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } } /* Create a tree map of all the input binaries into the following map. 
* {groupId_artifactId}: {v1} : {all ancestors that include this binary.} * : {v2} : {all ancestors that include this binary.} * : {v3} : {all ancestors that include this binary.} */ private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch (Exception ex) { System.out.println(ex); } } } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) { if 
(!errorInfo.containsKey(droppedDependency)) { errorInfo.put(droppedDependency, new BomDependencyErrorInfo(new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion))); } } private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) { updateErrorInfo(droppedDependency, expectedVersion); errorInfo.get(droppedDependency).addConflictingDependency(actualDependency, new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion)); } private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) { if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.add(dependency); if (dependencyReason == null) { dependencyReason = dependency; updateErrorInfo(dependency, expectedVersion); } else { updateErrorInfo(dependency, dependencyReason, expectedVersion); } BomDependency finalDependencyReason = dependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion)); } } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the 
version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null, eligibleVersion); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } }
I'm a bit confused here — is the above map an inverted index that, for a given dependency, lists every version seen and the libraries that pull in each version? Basically this structure: ``` <dependency name> <version> <libraries that use version> <version 2> <libraries that use version 2> ... ``` #Resolved
/**
 * Resolves which version of a multiply-included dependency goes into the BOM:
 * azure-core's version if core uses the library, otherwise the highest version
 * seen. All other versions — and everything that pulls them in — are marked
 * ineligible.
 *
 * @param dependencyNoVersion the (groupId, artifactId) pair to resolve.
 */
private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) {
    Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion);
    if (versionToDependency.size() <= 1) {
        return; // No conflict: at most one version in the tree.
    }

    List<String> versionList = versionToDependency.keySet().stream()
        .sorted(new DependencyVersionComparator())
        .collect(Collectors.toList());
    logger.trace("Multiple version of the dependency {} included", dependencyNoVersion);

    String eligibleVersion;
    if (coreDependencies.containsKey(dependencyNoVersion)) {
        // azure-core's choice always wins, keeping the BOM aligned with core.
        eligibleVersion = coreDependencies.get(dependencyNoVersion).getVersion();
        // Parameterized logging: the original String.format ran eagerly even
        // when TRACE was disabled.
        logger.trace("\tPicking the version used by azure-core - {}:{}", dependencyNoVersion, eligibleVersion);
    } else {
        eligibleVersion = versionList.get(versionList.size() - 1); // highest per comparator
        logger.trace("\tPicking the latest version {}:{}", dependencyNoVersion, eligibleVersion);
    }

    BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion);
    if (!externalDependencies.contains(dependency)) {
        bomEligibleDependencies.add(dependency);
    }
    // Every other version, and everything that pulls it in, becomes ineligible.
    for (String version : versionList) {
        if (!version.equals(eligibleVersion)) {
            makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null);
        }
    }
}
logger.trace("Multiple version of the dependency {} included", dependencyNoVersion);
/**
 * Resolves which version of a multiply-included dependency goes into the BOM:
 * azure-core's version if core uses the library, otherwise the highest version
 * seen. All other versions — and everything that pulls them in — are marked
 * ineligible, with the eligible version recorded for the error report.
 *
 * @param dependencyNoVersion the (groupId, artifactId) pair to resolve.
 */
private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) {
    Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion);
    if (versionToDependency.size() <= 1) {
        return; // No conflict: at most one version in the tree.
    }

    List<String> versionList = versionToDependency.keySet().stream()
        .sorted(new DependencyVersionComparator())
        .collect(Collectors.toList());
    logger.trace("Multiple version of the dependency {} included", dependencyNoVersion);

    String eligibleVersion;
    if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) {
        // azure-core's choice always wins, keeping the BOM aligned with core.
        eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion();
        // Parameterized logging: the original String.format ran eagerly even
        // when TRACE was disabled.
        logger.trace("\tPicking the version used by azure-core - {}:{}", dependencyNoVersion, eligibleVersion);
    } else {
        eligibleVersion = versionList.get(versionList.size() - 1); // highest per comparator
        logger.trace("\tPicking the latest version {}:{}", dependencyNoVersion, eligibleVersion);
    }

    BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion);
    if (!externalDependencies.contains(dependency)) {
        bomEligibleDependencies.add(dependency);
    }
    // Every other version, and everything that pulls it in, becomes ineligible.
    for (String version : versionList) {
        if (!version.equals(eligibleVersion)) {
            makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null, eligibleVersion);
        }
    }
}
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Map<BomDependency, String> bomIneligibleDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies){ if(inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if(externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.keySet().stream().anyMatch(key -> nameToVersionToChildrenDependencyTree.get(key).size() > 1); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new 
BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if(RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch(Exception ex) { System.out.println(ex); } } } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { if (!dependencyTree.containsKey(dependency)) { dependencyTree.put(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), new HashMap<>()); } HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.get(dependency); if(!versionToParents.containsKey(dependency.getVersion())) { versionToParents.put(dependency.getVersion(), new ArrayList<>()); } if(parentDependency != null) { 
versionToParents.get(dependency.getVersion()).add(parentDependency); } } private void makeDependencyInEligible(BomDependency dependency, String dropDependencyReason) { if(nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.put(dependency, dropDependencyReason); if(dropDependencyReason == null) { dropDependencyReason = dependency.toString(); logger.trace("\t\tDropping dependency {}", dependency.toString()); } else { logger.trace("\t\tDropping dependency {} due to dependency {}", dependency.toString(), dropDependencyReason); } String finalDropDependencyReason = dropDependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDropDependencyReason)); } } private void pickCoreDependencyRoots() { BomDependency coreDependency = inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); coreDependencies.put(BomDependency.convertTo(coreDependency), coreDependency); coreDependencies.putAll(getDependencies(coreDependency).stream().collect(Collectors.toMap(dependency -> BomDependency.convertTo(dependency), dependency -> dependency))); this.bomEligibleDependencies.addAll(coreDependencies.values().stream().filter(dependency -> !externalDependencies.contains(dependency)).collect(Collectors.toList())); } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies.keySet()); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new 
BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.containsKey(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } public void generateReport() { List<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.containsKey(dependency)).collect(Collectors.toList()); if (droppedDependencies.size() == 0) { return; } logger.info("We dropped the following dependencies from the input list."); for (BomDependency dependency : droppedDependencies) { logger.info("Dependency {}, Reason {}", dependency.toString(), bomIneligibleDependencies.get(dependency)); } } }
/**
 * Analyzes the input dependency set against azure-core's dependency closure,
 * builds a {groupId:artifactId -> version -> parents} conflict tree, and reduces
 * the input to the set of dependencies eligible for inclusion in the BOM.
 */
class DependencyAnalyzer {
    private Set<BomDependency> inputDependencies = new HashSet<>();
    private Set<BomDependency> externalDependencies = new HashSet<>();
    private Set<BomDependency> bomEligibleDependencies = new HashSet<>();
    private Set<BomDependency> bomIneligibleDependencies = new HashSet<>();
    private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>();
    // FIX: was a raw "new HashMap()" - use the diamond operator so the map stays type-checked.
    private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap<>();
    // Conflict tree: unversioned coordinate -> version -> every parent that pulled that version in.
    // FIX: the original comparator read o1.getGroupId() on BOTH sides of the comparison, so two
    // artifacts with the same artifactId but different groupIds compared as equal and one of them
    // was silently dropped from the TreeMap.
    private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree =
        new TreeMap<>(Comparator.comparing((BomDependencyNoVersion dependency) ->
            dependency.getGroupId() + dependency.getArtifactId()));
    private static Logger logger = LoggerFactory.getLogger(BomGenerator.class);

    /**
     * @param inputDependencies dependencies to analyze; null is treated as empty.
     * @param externalDependencies dependencies managed outside the BOM; null is treated as empty.
     */
    DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) {
        if (inputDependencies != null) {
            this.inputDependencies.addAll(inputDependencies);
        }
        if (externalDependencies != null) {
            this.externalDependencies.addAll(externalDependencies);
        }
    }

    /** Returns the dependencies currently considered eligible for the BOM. */
    public Collection<BomDependency> getBomEligibleDependencies() {
        return this.bomEligibleDependencies;
    }

    /** Runs the analysis and narrows the eligible set down to the original input dependencies. */
    public void reduce() {
        analyze();
        generateReport();
        this.bomEligibleDependencies.retainAll(this.inputDependencies);
    }

    /** Runs the analysis and reports whether any artifact was observed with more than one version. */
    public boolean validate() {
        analyze();
        return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1);
    }

    // Order matters: the conflict passes read the tree built by resolveTree(), which in turn
    // consults the core roots seeded by pickCoreDependencyRoots().
    private void analyze() {
        pickCoreDependencyRoots();
        resolveTree();
        resolveConflicts();
        filterConflicts();
    }

    /** Logs every input dependency that was dropped, together with the conflicts that caused it. */
    private void generateReport() {
        Set<BomDependency> droppedDependencies = inputDependencies.stream()
            .filter(dependency -> bomIneligibleDependencies.contains(dependency))
            .collect(Collectors.toSet());
        if (droppedDependencies.size() == 0) {
            return;
        }
        if (errorInfo.size() > 0) {
            errorInfo.keySet().stream().forEach(key -> {
                if (droppedDependencies.contains(key)) {
                    var conflictingDependencies = errorInfo.get(key).getConflictingDependencies();
                    var expectedDependency = errorInfo.get(key).getExpectedDependency();
                    if (expectedDependency != null) {
                        // FIX: the message had one placeholder but two arguments, so the
                        // expected dependency never appeared in the log output.
                        logger.info("Dropped dependency {}. Expected dependency {}.", key.toString(), expectedDependency);
                    }
                    conflictingDependencies.stream().forEach(conflictingDependency ->
                        logger.info("\t\tIncludes dependency {}. Expected dependency {}",
                            conflictingDependency.getActualDependency(),
                            conflictingDependency.getExpectedDependency()));
                }
            });
        }
    }

    /**
     * Finds azure-core among the inputs. orElseThrow() raises the same NoSuchElementException
     * the former unchecked get() did, but makes the "azure-core must be present" precondition explicit.
     */
    private BomDependency getAzureCoreDependencyFromInput() {
        return inputDependencies.stream()
            .filter(dependency -> dependency.getArtifactId().equals("azure-core"))
            .findFirst()
            .orElseThrow();
    }

    /** Seeds the eligible set with azure-core and its direct, non-external dependencies. */
    private void pickCoreDependencyRoots() {
        BomDependency coreDependency = getAzureCoreDependencyFromInput();
        var coreDependencies = getDependencies(coreDependency);
        coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency);
        coreDependencies.forEach(dependency ->
            coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency));
        for (var dependency : coreDependencyNameToDependency.values()) {
            if (!externalDependencies.contains(dependency)) {
                bomEligibleDependencies.add(dependency);
            }
        }
    }

    /* Create a tree map of all the input binaries into the following map.
     * {groupId_artifactId}: {v1} : {all ancestors that include this binary.}
     *                     : {v2} : {all ancestors that include this binary.}
     *                     : {v3} : {all ancestors that include this binary.}
     */
    private void resolveTree() {
        for (MavenDependency gaLibrary : inputDependencies) {
            try {
                BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(),
                    gaLibrary.getArtifactId(), gaLibrary.getVersion());
                addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree);
                List<BomDependency> dependencies = getDependencies(gaLibrary);
                for (BomDependency dependency : dependencies) {
                    // Test-scoped and explicitly excluded artifacts never participate in conflict resolution.
                    if (dependency.getScope() == ScopeType.TEST) {
                        continue;
                    }
                    if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) {
                        continue;
                    }
                    BomDependency childDependency = new BomDependency(dependency.getGroupId(),
                        dependency.getArtifactId(), dependency.getVersion());
                    addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree);
                }
            } catch (Exception ex) {
                // FIX: was System.out.println(ex) - route through the class logger like getDependencies().
                logger.error(ex.toString());
            }
        }
    }

    /** Resolves the direct dependencies of {@code dependency} from Maven Central; empty list on failure. */
    private static List<BomDependency> getDependencies(MavenDependency dependency) {
        try {
            MavenResolvedArtifact mavenResolvedArtifact = getMavenResolver()
                .addDependency(dependency)
                .resolve()
                .withoutTransitivity()
                .asSingleResolvedArtifact();
            return Arrays.stream(mavenResolvedArtifact.getDependencies())
                .map(mavenDependency -> new BomDependency(
                    mavenDependency.getCoordinate().getGroupId(),
                    mavenDependency.getCoordinate().getArtifactId(),
                    mavenDependency.getCoordinate().getVersion(),
                    mavenDependency.getScope()))
                .collect(Collectors.toList());
        } catch (Exception ex) {
            logger.error(ex.toString());
        }
        return new ArrayList<>();
    }

    private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() {
        return Maven.configureResolver().withMavenCentralRepo(true);
    }

    /** Records {@code parentDependency} as an ancestor of {@code dependency}'s exact version. */
    private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency,
        Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) {
        // FIX: the original inserted under a BomDependencyNoVersion key and then re-fetched the entry
        // with the versioned BomDependency, relying on cross-type comparison. Keeping the value
        // returned by computeIfAbsent avoids the second, fragile lookup entirely.
        var versionToParents = dependencyTree.computeIfAbsent(
            new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()),
            key -> new HashMap<>());
        var parents = versionToParents.computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>());
        if (parentDependency != null) {
            parents.add(parentDependency);
        }
    }

    /** Ensures an error record exists for {@code droppedDependency} with the expected version. */
    private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) {
        errorInfo.computeIfAbsent(droppedDependency, key -> new BomDependencyErrorInfo(
            new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion)));
    }

    /** Records that {@code actualDependency} conflicts with the expected version of {@code droppedDependency}. */
    private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) {
        updateErrorInfo(droppedDependency, expectedVersion);
        errorInfo.get(droppedDependency).addConflictingDependency(actualDependency,
            new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion));
    }

    /**
     * Marks {@code dependency} ineligible and recursively drops every ancestor that pulled it in.
     * {@code dependencyReason} is null on the first (root) call.
     */
    private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) {
        // NOTE(review): this lookup passes a versioned BomDependency into a map keyed by
        // BomDependencyNoVersion; it only works if BomDependency is a comparable subtype - confirm.
        if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) {
            HashMap<String, Collection<BomDependency>> versionToDependency =
                nameToVersionToChildrenDependencyTree.get(dependency);
            bomIneligibleDependencies.add(dependency);
            if (dependencyReason == null) {
                dependencyReason = dependency;
                updateErrorInfo(dependency, expectedVersion);
            } else {
                updateErrorInfo(dependency, dependencyReason, expectedVersion);
            }
            BomDependency finalDependencyReason = dependencyReason;
            versionToDependency.get(dependency.getVersion())
                .forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion));
        }
    }

    private void resolveConflicts() {
        // NOTE(review): resolveConflict(BomDependencyNoVersion) is defined with the rest of this
        // class; it does not appear in this excerpt.
        nameToVersionToChildrenDependencyTree.keySet().forEach(this::resolveConflict);
        bomEligibleDependencies.removeAll(bomIneligibleDependencies);
    }

    /** Promotes every artifact that only ever appeared with a single version. */
    private void filterConflicts() {
        nameToVersionToChildrenDependencyTree.forEach((key, versionToDependency) -> {
            if (versionToDependency.size() == 1) {
                BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(),
                    versionToDependency.keySet().iterator().next());
                if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) {
                    bomEligibleDependencies.add(dependency);
                }
            }
        });
    }
}
Thanks!
/**
 * Runs the analysis and reports whether any artifact was observed with more than one version.
 *
 * @return true if at least one groupId:artifactId has multiple versions in the dependency tree.
 */
public boolean validate() {
    analyze();
    // FIX: iterate the version maps directly instead of streaming keySet() and re-fetching each
    // entry with get() - same result, one lookup fewer per key, and no risk of a null entry.
    return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(versions -> versions.size() > 1);
}
return nameToVersionToChildrenDependencyTree.keySet().stream().anyMatch(key -> nameToVersionToChildrenDependencyTree.get(key).size() > 1);
/**
 * Runs the analysis and reports whether any artifact was observed with more than one version.
 *
 * @return true if at least one groupId:artifactId has multiple versions in the dependency tree.
 */
public boolean validate() {
    analyze();
    for (HashMap<String, Collection<BomDependency>> versionsOfOneArtifact : nameToVersionToChildrenDependencyTree.values()) {
        if (versionsOfOneArtifact.size() > 1) {
            return true;
        }
    }
    return false;
}
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Map<BomDependency, String> bomIneligibleDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies){ if(inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if(externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), 
mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if(RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch(Exception ex) { System.out.println(ex); } } } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { if (!dependencyTree.containsKey(dependency)) { dependencyTree.put(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), new HashMap<>()); } HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.get(dependency); if(!versionToParents.containsKey(dependency.getVersion())) { versionToParents.put(dependency.getVersion(), new ArrayList<>()); } if(parentDependency != null) { versionToParents.get(dependency.getVersion()).add(parentDependency); } } private void makeDependencyInEligible(BomDependency dependency, String dropDependencyReason) { 
if(nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.put(dependency, dropDependencyReason); if(dropDependencyReason == null) { dropDependencyReason = dependency.toString(); logger.trace("\t\tDropping dependency {}", dependency.toString()); } else { logger.trace("\t\tDropping dependency {} due to dependency {}", dependency.toString(), dropDependencyReason); } String finalDropDependencyReason = dropDependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDropDependencyReason)); } } private void pickCoreDependencyRoots() { BomDependency coreDependency = inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); coreDependencies.put(BomDependency.convertTo(coreDependency), coreDependency); coreDependencies.putAll(getDependencies(coreDependency).stream().collect(Collectors.toMap(dependency -> BomDependency.convertTo(dependency), dependency -> dependency))); this.bomEligibleDependencies.addAll(coreDependencies.values().stream().filter(dependency -> !externalDependencies.contains(dependency)).collect(Collectors.toList())); } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencies.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencies.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by 
azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies.keySet()); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.containsKey(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } public void generateReport() { List<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.containsKey(dependency)).collect(Collectors.toList()); if (droppedDependencies.size() == 0) { return; } logger.info("We dropped the following dependencies from the input list."); for (BomDependency dependency : droppedDependencies) { logger.info("Dependency {}, Reason {}", dependency.toString(), bomIneligibleDependencies.get(dependency)); } } }
/**
 * Analyzes the input dependency set against azure-core's dependency closure,
 * builds a {groupId:artifactId -> version -> parents} conflict tree, and reduces
 * the input to the set of dependencies eligible for inclusion in the BOM.
 */
class DependencyAnalyzer {
    private Set<BomDependency> inputDependencies = new HashSet<>();
    private Set<BomDependency> externalDependencies = new HashSet<>();
    private Set<BomDependency> bomEligibleDependencies = new HashSet<>();
    private Set<BomDependency> bomIneligibleDependencies = new HashSet<>();
    private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>();
    // FIX: was a raw "new HashMap()" - use the diamond operator so the map stays type-checked.
    private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap<>();
    // Conflict tree: unversioned coordinate -> version -> every parent that pulled that version in.
    // FIX: the original comparator read o1.getGroupId() on BOTH sides of the comparison, so two
    // artifacts with the same artifactId but different groupIds compared as equal and one of them
    // was silently dropped from the TreeMap.
    private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree =
        new TreeMap<>(Comparator.comparing((BomDependencyNoVersion dependency) ->
            dependency.getGroupId() + dependency.getArtifactId()));
    private static Logger logger = LoggerFactory.getLogger(BomGenerator.class);

    /**
     * @param inputDependencies dependencies to analyze; null is treated as empty.
     * @param externalDependencies dependencies managed outside the BOM; null is treated as empty.
     */
    DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) {
        if (inputDependencies != null) {
            this.inputDependencies.addAll(inputDependencies);
        }
        if (externalDependencies != null) {
            this.externalDependencies.addAll(externalDependencies);
        }
    }

    /** Returns the dependencies currently considered eligible for the BOM. */
    public Collection<BomDependency> getBomEligibleDependencies() {
        return this.bomEligibleDependencies;
    }

    /** Runs the analysis and narrows the eligible set down to the original input dependencies. */
    public void reduce() {
        analyze();
        generateReport();
        this.bomEligibleDependencies.retainAll(this.inputDependencies);
    }

    // Order matters: the conflict passes read the tree built by resolveTree(), which in turn
    // consults the core roots seeded by pickCoreDependencyRoots().
    private void analyze() {
        pickCoreDependencyRoots();
        resolveTree();
        resolveConflicts();
        filterConflicts();
    }

    /** Logs every input dependency that was dropped, together with the conflicts that caused it. */
    private void generateReport() {
        Set<BomDependency> droppedDependencies = inputDependencies.stream()
            .filter(dependency -> bomIneligibleDependencies.contains(dependency))
            .collect(Collectors.toSet());
        if (droppedDependencies.size() == 0) {
            return;
        }
        if (errorInfo.size() > 0) {
            errorInfo.keySet().stream().forEach(key -> {
                if (droppedDependencies.contains(key)) {
                    var conflictingDependencies = errorInfo.get(key).getConflictingDependencies();
                    var expectedDependency = errorInfo.get(key).getExpectedDependency();
                    if (expectedDependency != null) {
                        // FIX: the message had one placeholder but two arguments, so the
                        // expected dependency never appeared in the log output.
                        logger.info("Dropped dependency {}. Expected dependency {}.", key.toString(), expectedDependency);
                    }
                    conflictingDependencies.stream().forEach(conflictingDependency ->
                        logger.info("\t\tIncludes dependency {}. Expected dependency {}",
                            conflictingDependency.getActualDependency(),
                            conflictingDependency.getExpectedDependency()));
                }
            });
        }
    }

    /**
     * Finds azure-core among the inputs. orElseThrow() raises the same NoSuchElementException
     * the former unchecked get() did, but makes the "azure-core must be present" precondition explicit.
     */
    private BomDependency getAzureCoreDependencyFromInput() {
        return inputDependencies.stream()
            .filter(dependency -> dependency.getArtifactId().equals("azure-core"))
            .findFirst()
            .orElseThrow();
    }

    /** Seeds the eligible set with azure-core and its direct, non-external dependencies. */
    private void pickCoreDependencyRoots() {
        BomDependency coreDependency = getAzureCoreDependencyFromInput();
        var coreDependencies = getDependencies(coreDependency);
        coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency);
        coreDependencies.forEach(dependency ->
            coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency));
        for (var dependency : coreDependencyNameToDependency.values()) {
            if (!externalDependencies.contains(dependency)) {
                bomEligibleDependencies.add(dependency);
            }
        }
    }

    /* Create a tree map of all the input binaries into the following map.
     * {groupId_artifactId}: {v1} : {all ancestors that include this binary.}
     *                     : {v2} : {all ancestors that include this binary.}
     *                     : {v3} : {all ancestors that include this binary.}
     */
    private void resolveTree() {
        for (MavenDependency gaLibrary : inputDependencies) {
            try {
                BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(),
                    gaLibrary.getArtifactId(), gaLibrary.getVersion());
                addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree);
                List<BomDependency> dependencies = getDependencies(gaLibrary);
                for (BomDependency dependency : dependencies) {
                    // Test-scoped and explicitly excluded artifacts never participate in conflict resolution.
                    if (dependency.getScope() == ScopeType.TEST) {
                        continue;
                    }
                    if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) {
                        continue;
                    }
                    BomDependency childDependency = new BomDependency(dependency.getGroupId(),
                        dependency.getArtifactId(), dependency.getVersion());
                    addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree);
                }
            } catch (Exception ex) {
                // FIX: was System.out.println(ex) - route through the class logger like getDependencies().
                logger.error(ex.toString());
            }
        }
    }

    /** Resolves the direct dependencies of {@code dependency} from Maven Central; empty list on failure. */
    private static List<BomDependency> getDependencies(MavenDependency dependency) {
        try {
            MavenResolvedArtifact mavenResolvedArtifact = getMavenResolver()
                .addDependency(dependency)
                .resolve()
                .withoutTransitivity()
                .asSingleResolvedArtifact();
            return Arrays.stream(mavenResolvedArtifact.getDependencies())
                .map(mavenDependency -> new BomDependency(
                    mavenDependency.getCoordinate().getGroupId(),
                    mavenDependency.getCoordinate().getArtifactId(),
                    mavenDependency.getCoordinate().getVersion(),
                    mavenDependency.getScope()))
                .collect(Collectors.toList());
        } catch (Exception ex) {
            logger.error(ex.toString());
        }
        return new ArrayList<>();
    }

    private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() {
        return Maven.configureResolver().withMavenCentralRepo(true);
    }

    /** Records {@code parentDependency} as an ancestor of {@code dependency}'s exact version. */
    private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency,
        Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) {
        // FIX: the original inserted under a BomDependencyNoVersion key and then re-fetched the entry
        // with the versioned BomDependency, relying on cross-type comparison. Keeping the value
        // returned by computeIfAbsent avoids the second, fragile lookup entirely.
        var versionToParents = dependencyTree.computeIfAbsent(
            new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()),
            key -> new HashMap<>());
        var parents = versionToParents.computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>());
        if (parentDependency != null) {
            parents.add(parentDependency);
        }
    }

    /** Ensures an error record exists for {@code droppedDependency} with the expected version. */
    private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) {
        errorInfo.computeIfAbsent(droppedDependency, key -> new BomDependencyErrorInfo(
            new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion)));
    }

    /** Records that {@code actualDependency} conflicts with the expected version of {@code droppedDependency}. */
    private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) {
        updateErrorInfo(droppedDependency, expectedVersion);
        errorInfo.get(droppedDependency).addConflictingDependency(actualDependency,
            new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion));
    }

    /**
     * Marks {@code dependency} ineligible and recursively drops every ancestor that pulled it in.
     * {@code dependencyReason} is null on the first (root) call.
     */
    private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) {
        // NOTE(review): this lookup passes a versioned BomDependency into a map keyed by
        // BomDependencyNoVersion; it only works if BomDependency is a comparable subtype - confirm.
        if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) {
            HashMap<String, Collection<BomDependency>> versionToDependency =
                nameToVersionToChildrenDependencyTree.get(dependency);
            bomIneligibleDependencies.add(dependency);
            if (dependencyReason == null) {
                dependencyReason = dependency;
                updateErrorInfo(dependency, expectedVersion);
            } else {
                updateErrorInfo(dependency, dependencyReason, expectedVersion);
            }
            BomDependency finalDependencyReason = dependencyReason;
            versionToDependency.get(dependency.getVersion())
                .forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion));
        }
    }

    /**
     * Resolves a multi-version conflict for one coordinate: prefer azure-core's version when it has
     * one, otherwise the latest; every other version is made ineligible.
     */
    private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) {
        Map<String, Collection<BomDependency>> versionToDependency =
            nameToVersionToChildrenDependencyTree.get(dependencyNoVersion);
        if (versionToDependency.size() > 1) {
            List<String> versionList = versionToDependency.keySet().stream()
                .sorted(new DependencyVersionComparator()).collect(Collectors.toList());
            String eligibleVersion;
            logger.trace("Multiple version of the dependency {} included", dependencyNoVersion);
            if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) {
                eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion();
                logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion));
            } else {
                eligibleVersion = versionList.get(versionList.size() - 1);
                logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion));
            }
            BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(),
                dependencyNoVersion.getArtifactId(), eligibleVersion);
            if (!externalDependencies.contains(dependency)) {
                bomEligibleDependencies.add(dependency);
            }
            for (String version : versionList) {
                if (!version.equals(eligibleVersion)) {
                    makeDependencyInEligible(new BomDependency(dependency.getGroupId(),
                        dependency.getArtifactId(), version), null, eligibleVersion);
                }
            }
        }
    }

    private void resolveConflicts() {
        nameToVersionToChildrenDependencyTree.keySet().forEach(this::resolveConflict);
        bomEligibleDependencies.removeAll(bomIneligibleDependencies);
    }

    /** Promotes every artifact that only ever appeared with a single version. */
    private void filterConflicts() {
        nameToVersionToChildrenDependencyTree.forEach((key, versionToDependency) -> {
            if (versionToDependency.size() == 1) {
                BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(),
                    versionToDependency.keySet().iterator().next());
                if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) {
                    bomEligibleDependencies.add(dependency);
                }
            }
        });
    }
}
Sure.
// Runs the full analysis pipeline. Order matters: the conflict passes read the tree
// built by resolveTree(), which consults the core roots seeded by pickCoreDependencyRoots().
private void analyze() {
    // Seed the eligible set with azure-core and its dependency closure.
    pickCoreDependencyRoots();
    // Build the {coordinate -> version -> parents} conflict tree from the inputs.
    resolveTree();
    // For coordinates with multiple versions, pick one and drop the rest.
    resolveConflicts();
    // Promote coordinates that only ever appeared with a single version.
    filterConflicts();
}
filterConflicts();
// Runs the full analysis pipeline. Order matters: the conflict passes read the tree
// built by resolveTree(), which consults the core roots seeded by pickCoreDependencyRoots().
private void analyze() {
    // Seed the eligible set with azure-core and its dependency closure.
    pickCoreDependencyRoots();
    // Build the {coordinate -> version -> parents} conflict tree from the inputs.
    resolveTree();
    // For coordinates with multiple versions, pick one and drop the rest.
    resolveConflicts();
    // Promote coordinates that only ever appeared with a single version.
    filterConflicts();
}
/**
 * Analyzes the input dependency set against azure-core's dependency closure,
 * builds a {groupId:artifactId -> version -> parents} conflict tree, and reduces
 * the input to the set of dependencies eligible for inclusion in the BOM.
 */
class DependencyAnalyzer {
    private Set<BomDependency> inputDependencies = new HashSet<>();
    private Set<BomDependency> externalDependencies = new HashSet<>();
    private Set<BomDependency> bomEligibleDependencies = new HashSet<>();
    // Value is the human-readable reason the dependency was dropped (used by generateReport()).
    private Map<BomDependency, String> bomIneligibleDependencies = new HashMap<>();
    private Map<BomDependencyNoVersion, BomDependency> coreDependencies = new HashMap<>();
    // Conflict tree: unversioned coordinate -> version -> every parent that pulled that version in.
    // FIX: the original comparator read o1.getGroupId() on BOTH sides of the comparison, so two
    // artifacts with the same artifactId but different groupIds compared as equal and one of them
    // was silently dropped from the TreeMap.
    private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree =
        new TreeMap<>(Comparator.comparing((BomDependencyNoVersion dependency) ->
            dependency.getGroupId() + dependency.getArtifactId()));
    private static Logger logger = LoggerFactory.getLogger(BomGenerator.class);

    /**
     * @param inputDependencies dependencies to analyze; null is treated as empty.
     * @param externalDependencies dependencies managed outside the BOM; null is treated as empty.
     */
    DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) {
        if (inputDependencies != null) {
            this.inputDependencies.addAll(inputDependencies);
        }
        if (externalDependencies != null) {
            this.externalDependencies.addAll(externalDependencies);
        }
    }

    /** Returns the dependencies currently considered eligible for the BOM. */
    public Collection<BomDependency> getBomEligibleDependencies() {
        return this.bomEligibleDependencies;
    }

    // NOTE(review): analyze() is defined with the rest of this class; it does not appear in this excerpt.
    /** Runs the analysis and narrows the eligible set down to the original input dependencies. */
    public void reduce() {
        analyze();
        generateReport();
        this.bomEligibleDependencies.retainAll(this.inputDependencies);
    }

    /**
     * Runs the analysis and reports whether any artifact was observed with more than one version.
     * FIX: iterate values() directly instead of streaming keySet() and re-fetching each entry
     * with get() - same result, one map lookup fewer per key.
     */
    public boolean validate() {
        analyze();
        return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(versions -> versions.size() > 1);
    }

    /** Resolves the direct dependencies of {@code dependency} from Maven Central; empty list on failure. */
    private static List<BomDependency> getDependencies(MavenDependency dependency) {
        try {
            MavenResolvedArtifact mavenResolvedArtifact = getMavenResolver()
                .addDependency(dependency)
                .resolve()
                .withoutTransitivity()
                .asSingleResolvedArtifact();
            return Arrays.stream(mavenResolvedArtifact.getDependencies())
                .map(mavenDependency -> new BomDependency(
                    mavenDependency.getCoordinate().getGroupId(),
                    mavenDependency.getCoordinate().getArtifactId(),
                    mavenDependency.getCoordinate().getVersion(),
                    mavenDependency.getScope()))
                .collect(Collectors.toList());
        } catch (Exception ex) {
            logger.error(ex.toString());
        }
        return new ArrayList<>();
    }

    /* Create a tree map of all the input binaries: for every input library, record the library
     * itself and each of its non-test, non-excluded dependencies in the conflict tree. */
    private void resolveTree() {
        for (MavenDependency gaLibrary : inputDependencies) {
            try {
                BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(),
                    gaLibrary.getArtifactId(), gaLibrary.getVersion());
                addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree);
                List<BomDependency> dependencies = getDependencies(gaLibrary);
                for (BomDependency dependency : dependencies) {
                    if (dependency.getScope() == ScopeType.TEST) {
                        continue;
                    }
                    if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) {
                        continue;
                    }
                    BomDependency childDependency = new BomDependency(dependency.getGroupId(),
                        dependency.getArtifactId(), dependency.getVersion());
                    addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree);
                }
            } catch (Exception ex) {
                // FIX: was System.out.println(ex) - route through the class logger like getDependencies().
                logger.error(ex.toString());
            }
        }
    }

    private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() {
        return Maven.configureResolver().withMavenCentralRepo(true);
    }

    /** Records {@code parentDependency} as an ancestor of {@code dependency}'s exact version. */
    private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency,
        Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) {
        // FIX: the original checked containsKey(dependency) (a versioned BomDependency) against a map
        // keyed by BomDependencyNoVersion and then re-fetched the entry the same way, relying on
        // cross-type comparison. computeIfAbsent with the unversioned key does both steps safely.
        HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.computeIfAbsent(
            new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()),
            key -> new HashMap<>());
        Collection<BomDependency> parents = versionToParents.computeIfAbsent(
            dependency.getVersion(), key -> new ArrayList<>());
        if (parentDependency != null) {
            parents.add(parentDependency);
        }
    }

    /**
     * Marks {@code dependency} ineligible and recursively drops every ancestor that pulled it in.
     * {@code dropDependencyReason} is null on the first (root) call.
     */
    private void makeDependencyInEligible(BomDependency dependency, String dropDependencyReason) {
        // NOTE(review): this lookup passes a versioned BomDependency into a map keyed by
        // BomDependencyNoVersion; it only works if BomDependency is a comparable subtype - confirm.
        if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) {
            HashMap<String, Collection<BomDependency>> versionToDependency =
                nameToVersionToChildrenDependencyTree.get(dependency);
            bomIneligibleDependencies.put(dependency, dropDependencyReason);
            if (dropDependencyReason == null) {
                dropDependencyReason = dependency.toString();
                logger.trace("\t\tDropping dependency {}", dependency.toString());
            } else {
                logger.trace("\t\tDropping dependency {} due to dependency {}", dependency.toString(), dropDependencyReason);
            }
            String finalDropDependencyReason = dropDependencyReason;
            versionToDependency.get(dependency.getVersion())
                .forEach(parent -> makeDependencyInEligible(parent, finalDropDependencyReason));
        }
    }

    /** Seeds the eligible set with azure-core and its direct, non-external dependencies. */
    private void pickCoreDependencyRoots() {
        BomDependency coreDependency = inputDependencies.stream()
            .filter(dependency -> dependency.getArtifactId().equals("azure-core"))
            .findFirst().get();
        coreDependencies.put(BomDependency.convertTo(coreDependency), coreDependency);
        // NOTE(review): Collectors.toMap throws on duplicate unversioned keys - presumably
        // azure-core's direct dependencies are unique per coordinate; verify.
        coreDependencies.putAll(getDependencies(coreDependency).stream()
            .collect(Collectors.toMap(dependency -> BomDependency.convertTo(dependency), dependency -> dependency)));
        this.bomEligibleDependencies.addAll(coreDependencies.values().stream()
            .filter(dependency -> !externalDependencies.contains(dependency))
            .collect(Collectors.toList()));
    }

    /**
     * Resolves a multi-version conflict for one coordinate: prefer azure-core's version when it has
     * one, otherwise the latest; every other version is made ineligible.
     */
    private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) {
        Map<String, Collection<BomDependency>> versionToDependency =
            nameToVersionToChildrenDependencyTree.get(dependencyNoVersion);
        if (versionToDependency.size() > 1) {
            List<String> versionList = versionToDependency.keySet().stream()
                .sorted(new DependencyVersionComparator()).collect(Collectors.toList());
            String eligibleVersion;
            logger.trace("Multiple version of the dependency {} included", dependencyNoVersion);
            if (coreDependencies.containsKey(dependencyNoVersion)) {
                eligibleVersion = coreDependencies.get(dependencyNoVersion).getVersion();
                logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion));
            } else {
                eligibleVersion = versionList.get(versionList.size() - 1);
                logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion));
            }
            BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(),
                dependencyNoVersion.getArtifactId(), eligibleVersion);
            if (!externalDependencies.contains(dependency)) {
                bomEligibleDependencies.add(dependency);
            }
            for (String version : versionList) {
                if (!version.equals(eligibleVersion)) {
                    makeDependencyInEligible(new BomDependency(dependency.getGroupId(),
                        dependency.getArtifactId(), version), null);
                }
            }
        }
    }

    private void resolveConflicts() {
        nameToVersionToChildrenDependencyTree.keySet().forEach(this::resolveConflict);
        bomEligibleDependencies.removeAll(bomIneligibleDependencies.keySet());
    }

    /** Promotes every artifact that only ever appeared with a single version. */
    private void filterConflicts() {
        nameToVersionToChildrenDependencyTree.forEach((key, versionToDependency) -> {
            if (versionToDependency.size() == 1) {
                BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(),
                    versionToDependency.keySet().iterator().next());
                if (!bomIneligibleDependencies.containsKey(dependency) && !externalDependencies.contains(dependency)) {
                    bomEligibleDependencies.add(dependency);
                }
            }
        });
    }

    /** Logs every input dependency that was dropped together with the recorded reason. */
    public void generateReport() {
        List<BomDependency> droppedDependencies = inputDependencies.stream()
            .filter(dependency -> bomIneligibleDependencies.containsKey(dependency))
            .collect(Collectors.toList());
        if (droppedDependencies.size() == 0) {
            return;
        }
        logger.info("We dropped the following dependencies from the input list.");
        for (BomDependency dependency : droppedDependencies) {
            logger.info("Dependency {}, Reason {}", dependency.toString(), bomIneligibleDependencies.get(dependency));
        }
    }
}
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Set<BomDependency> bomIneligibleDependencies = new HashSet<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>(); private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) { if (inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if (externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1); } private void generateReport() { Set<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.contains(dependency)).collect(Collectors.toSet()); if (droppedDependencies.size() == 0) { return; } if (errorInfo.size() > 0) { errorInfo.keySet().stream().forEach(key -> { if (droppedDependencies.contains(key)) { var conflictingDependencies = 
errorInfo.get(key).getConflictingDependencies(); var expectedDependency = errorInfo.get(key).getExpectedDependency(); if (expectedDependency != null) { logger.info("Dropped dependency {}.", key.toString(), expectedDependency); } conflictingDependencies.stream().forEach(conflictingDependency -> logger.info("\t\tIncludes dependency {}. Expected dependency {}", conflictingDependency.getActualDependency(), conflictingDependency.getExpectedDependency())); } }); } } private BomDependency getAzureCoreDependencyFromInput() { return inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); } private void pickCoreDependencyRoots() { BomDependency coreDependency = getAzureCoreDependencyFromInput(); var coreDependencies = getDependencies(coreDependency); coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency); coreDependencies.forEach(dependency -> coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency)); for(var dependency : coreDependencyNameToDependency.values()) { if(!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } } /* Create a tree map of all the input binaries into the following map. 
* {groupId_artifactId}: {v1} : {all ancestors that include this binary.} * : {v2} : {all ancestors that include this binary.} * : {v3} : {all ancestors that include this binary.} */ private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch (Exception ex) { System.out.println(ex); } } } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency 
parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { dependencyTree.computeIfAbsent(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), key -> new HashMap<>()); var value = dependencyTree.get(dependency).computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>()); if(parentDependency != null) { value.add(parentDependency); } } private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) { if (!errorInfo.containsKey(droppedDependency)) { errorInfo.put(droppedDependency, new BomDependencyErrorInfo(new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion))); } } private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) { updateErrorInfo(droppedDependency, expectedVersion); errorInfo.get(droppedDependency).addConflictingDependency(actualDependency, new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion)); } private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) { if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.add(dependency); if (dependencyReason == null) { dependencyReason = dependency; updateErrorInfo(dependency, expectedVersion); } else { updateErrorInfo(dependency, dependencyReason, expectedVersion); } BomDependency finalDependencyReason = dependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion)); } } private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = 
nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null, eligibleVersion); } } } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } }
Yes - the resolveTree actually creates the map of this nature. Basically an n-ary tree with all the ancestors pointers.
private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencies.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencies.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null); } } } }
logger.trace("Multiple version of the dependency {} included", dependencyNoVersion);
private void resolveConflict(BomDependencyNoVersion dependencyNoVersion) { Map<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependencyNoVersion); if (versionToDependency.size() > 1) { List<String> versionList = versionToDependency.keySet().stream().sorted(new DependencyVersionComparator()).collect(Collectors.toList()); String eligibleVersion; logger.trace("Multiple version of the dependency {} included", dependencyNoVersion); if (coreDependencyNameToDependency.containsKey(dependencyNoVersion)) { eligibleVersion = coreDependencyNameToDependency.get(dependencyNoVersion).getVersion(); logger.trace(String.format("\tPicking the version used by azure-core - %s:%s", dependencyNoVersion, eligibleVersion)); } else { eligibleVersion = versionList.get(versionList.size() - 1); logger.trace(String.format("\tPicking the latest version %s:%s", dependencyNoVersion, eligibleVersion)); } BomDependency dependency = new BomDependency(dependencyNoVersion.getGroupId(), dependencyNoVersion.getArtifactId(), eligibleVersion); if (!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } for (String version : versionList) { if (!version.equals(eligibleVersion)) { makeDependencyInEligible(new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), version), null, eligibleVersion); } } } }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Map<BomDependency, String> bomIneligibleDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencies = new HashMap<>(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies){ if(inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if(externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.keySet().stream().anyMatch(key -> nameToVersionToChildrenDependencyTree.get(key).size() > 1); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new 
BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if(RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch(Exception ex) { System.out.println(ex); } } } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { if (!dependencyTree.containsKey(dependency)) { dependencyTree.put(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), new HashMap<>()); } HashMap<String, Collection<BomDependency>> versionToParents = dependencyTree.get(dependency); if(!versionToParents.containsKey(dependency.getVersion())) { versionToParents.put(dependency.getVersion(), new ArrayList<>()); } if(parentDependency != null) { 
versionToParents.get(dependency.getVersion()).add(parentDependency); } } private void makeDependencyInEligible(BomDependency dependency, String dropDependencyReason) { if(nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.put(dependency, dropDependencyReason); if(dropDependencyReason == null) { dropDependencyReason = dependency.toString(); logger.trace("\t\tDropping dependency {}", dependency.toString()); } else { logger.trace("\t\tDropping dependency {} due to dependency {}", dependency.toString(), dropDependencyReason); } String finalDropDependencyReason = dropDependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDropDependencyReason)); } } private void pickCoreDependencyRoots() { BomDependency coreDependency = inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); coreDependencies.put(BomDependency.convertTo(coreDependency), coreDependency); coreDependencies.putAll(getDependencies(coreDependency).stream().collect(Collectors.toMap(dependency -> BomDependency.convertTo(dependency), dependency -> dependency))); this.bomEligibleDependencies.addAll(coreDependencies.values().stream().filter(dependency -> !externalDependencies.contains(dependency)).collect(Collectors.toList())); } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); bomEligibleDependencies.removeAll(bomIneligibleDependencies.keySet()); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new 
BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.containsKey(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } public void generateReport() { List<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.containsKey(dependency)).collect(Collectors.toList()); if (droppedDependencies.size() == 0) { return; } logger.info("We dropped the following dependencies from the input list."); for (BomDependency dependency : droppedDependencies) { logger.info("Dependency {}, Reason {}", dependency.toString(), bomIneligibleDependencies.get(dependency)); } } }
class DependencyAnalyzer { private Set<BomDependency> inputDependencies = new HashSet<>(); private Set<BomDependency> externalDependencies = new HashSet<>(); private Set<BomDependency> bomEligibleDependencies = new HashSet<>(); private Set<BomDependency> bomIneligibleDependencies = new HashSet<>(); private Map<BomDependencyNoVersion, BomDependency> coreDependencyNameToDependency = new HashMap<>(); private Map<BomDependency, BomDependencyErrorInfo> errorInfo = new HashMap(); private Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> nameToVersionToChildrenDependencyTree = new TreeMap<>(new Comparator<BomDependencyNoVersion>() { @Override public int compare(BomDependencyNoVersion o1, BomDependencyNoVersion o2) { return (o1.getGroupId() + o1.getArtifactId()).compareTo(o1.getGroupId() + o2.getArtifactId()); } }); private static Logger logger = LoggerFactory.getLogger(BomGenerator.class); DependencyAnalyzer(Collection<BomDependency> inputDependencies, Collection<BomDependency> externalDependencies) { if (inputDependencies != null) { this.inputDependencies.addAll(inputDependencies); } if (externalDependencies != null) { this.externalDependencies.addAll(externalDependencies); } } public Collection<BomDependency> getBomEligibleDependencies() { return this.bomEligibleDependencies; } public void reduce() { analyze(); generateReport(); this.bomEligibleDependencies.retainAll(this.inputDependencies); } public boolean validate() { analyze(); return nameToVersionToChildrenDependencyTree.values().stream().anyMatch(value -> value.size() > 1); } private void analyze() { pickCoreDependencyRoots(); resolveTree(); resolveConflicts(); filterConflicts(); } private void generateReport() { Set<BomDependency> droppedDependencies = inputDependencies.stream().filter(dependency -> bomIneligibleDependencies.contains(dependency)).collect(Collectors.toSet()); if (droppedDependencies.size() == 0) { return; } if (errorInfo.size() > 0) { errorInfo.keySet().stream().forEach(key 
-> { if (droppedDependencies.contains(key)) { var conflictingDependencies = errorInfo.get(key).getConflictingDependencies(); var expectedDependency = errorInfo.get(key).getExpectedDependency(); if (expectedDependency != null) { logger.info("Dropped dependency {}.", key.toString(), expectedDependency); } conflictingDependencies.stream().forEach(conflictingDependency -> logger.info("\t\tIncludes dependency {}. Expected dependency {}", conflictingDependency.getActualDependency(), conflictingDependency.getExpectedDependency())); } }); } } private BomDependency getAzureCoreDependencyFromInput() { return inputDependencies.stream().filter(dependency -> dependency.getArtifactId().equals("azure-core")).findFirst().get(); } private void pickCoreDependencyRoots() { BomDependency coreDependency = getAzureCoreDependencyFromInput(); var coreDependencies = getDependencies(coreDependency); coreDependencyNameToDependency.put(toBomDependencyNoVersion(coreDependency), coreDependency); coreDependencies.forEach(dependency -> coreDependencyNameToDependency.put(toBomDependencyNoVersion(dependency), dependency)); for(var dependency : coreDependencyNameToDependency.values()) { if(!externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } } /* Create a tree map of all the input binaries into the following map. 
* {groupId_artifactId}: {v1} : {all ancestors that include this binary.} * : {v2} : {all ancestors that include this binary.} * : {v3} : {all ancestors that include this binary.} */ private void resolveTree() { for (MavenDependency gaLibrary : inputDependencies) { try { BomDependency parentDependency = new BomDependency(gaLibrary.getGroupId(), gaLibrary.getArtifactId(), gaLibrary.getVersion()); addDependencyToDependencyTree(parentDependency, null, nameToVersionToChildrenDependencyTree); List<BomDependency> dependencies = getDependencies(gaLibrary); for (BomDependency dependency : dependencies) { if (dependency.getScope() == ScopeType.TEST) { continue; } if (RESOLVED_EXCLUSION_LIST.contains(dependency.getArtifactId())) { continue; } BomDependency childDependency = new BomDependency(dependency.getGroupId(), dependency.getArtifactId(), dependency.getVersion()); addDependencyToDependencyTree(childDependency, parentDependency, nameToVersionToChildrenDependencyTree); } } catch (Exception ex) { System.out.println(ex); } } } private static List<BomDependency> getDependencies(MavenDependency dependency) { try { MavenResolvedArtifact mavenResolvedArtifact = null; mavenResolvedArtifact = getMavenResolver() .addDependency(dependency) .resolve() .withoutTransitivity() .asSingleResolvedArtifact(); return Arrays.stream(mavenResolvedArtifact.getDependencies()).map(mavenDependency -> new BomDependency(mavenDependency.getCoordinate().getGroupId(), mavenDependency.getCoordinate().getArtifactId(), mavenDependency.getCoordinate().getVersion(), mavenDependency.getScope())).collect(Collectors.toList()); } catch (Exception ex) { logger.error(ex.toString()); } return new ArrayList<>(); } private static MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> getMavenResolver() { return Maven.configureResolver().withMavenCentralRepo(true); } private static void addDependencyToDependencyTree(BomDependency dependency, BomDependency 
parentDependency, Map<BomDependencyNoVersion, HashMap<String, Collection<BomDependency>>> dependencyTree) { dependencyTree.computeIfAbsent(new BomDependencyNoVersion(dependency.getGroupId(), dependency.getArtifactId()), key -> new HashMap<>()); var value = dependencyTree.get(dependency).computeIfAbsent(dependency.getVersion(), key -> new ArrayList<>()); if(parentDependency != null) { value.add(parentDependency); } } private void updateErrorInfo(BomDependency droppedDependency, String expectedVersion) { if (!errorInfo.containsKey(droppedDependency)) { errorInfo.put(droppedDependency, new BomDependencyErrorInfo(new BomDependency(droppedDependency.getGroupId(), droppedDependency.getArtifactId(), expectedVersion))); } } private void updateErrorInfo(BomDependency droppedDependency, BomDependency actualDependency, String expectedVersion) { updateErrorInfo(droppedDependency, expectedVersion); errorInfo.get(droppedDependency).addConflictingDependency(actualDependency, new BomDependency(actualDependency.getGroupId(), actualDependency.getArtifactId(), expectedVersion)); } private void makeDependencyInEligible(BomDependency dependency, BomDependency dependencyReason, String expectedVersion) { if (nameToVersionToChildrenDependencyTree.containsKey(dependency)) { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(dependency); bomIneligibleDependencies.add(dependency); if (dependencyReason == null) { dependencyReason = dependency; updateErrorInfo(dependency, expectedVersion); } else { updateErrorInfo(dependency, dependencyReason, expectedVersion); } BomDependency finalDependencyReason = dependencyReason; versionToDependency.get(dependency.getVersion()).forEach(parent -> makeDependencyInEligible(parent, finalDependencyReason, expectedVersion)); } } private void resolveConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach(this::resolveConflict); 
bomEligibleDependencies.removeAll(bomIneligibleDependencies); } private void filterConflicts() { nameToVersionToChildrenDependencyTree.keySet().stream().forEach( key -> { HashMap<String, Collection<BomDependency>> versionToDependency = nameToVersionToChildrenDependencyTree.get(key); if (versionToDependency.size() == 1) { BomDependency dependency = new BomDependency(key.getGroupId(), key.getArtifactId(), versionToDependency.keySet().stream().findFirst().get()); if (!bomIneligibleDependencies.contains(dependency) && !externalDependencies.contains(dependency)) { bomEligibleDependencies.add(dependency); } } }); } }
should we assume headers aren't null ?
public static String getETag(HttpHeaders headers) { if (headers == null) { return null; } return headers.getValue("ETag"); }
}
public static String getETag(HttpHeaders headers) { return headers.getValue("ETag"); }
class ModelHelper { private static final SerializerAdapter SERIALIZER = new JacksonAdapter(); private static final ClientLogger LOGGER = new ClientLogger(ModelHelper.class); private static final long MAX_FILE_PUT_RANGE_BYTES = 4 * Constants.MB; private static final int FILE_DEFAULT_NUMBER_OF_BUFFERS = 8; /** * Fills in default values for a ParallelTransferOptions where no value has been set. This will construct a new * object for safety. * * @param other The options to fill in defaults. * @return An object with defaults filled in for null values in the original. */ public static ParallelTransferOptions populateAndApplyDefaults(ParallelTransferOptions other) { other = other == null ? new ParallelTransferOptions() : other; if (other.getBlockSizeLong() != null) { StorageImplUtils.assertInBounds("ParallelTransferOptions.blockSize", other.getBlockSizeLong(), 1, MAX_FILE_PUT_RANGE_BYTES); } if (other.getMaxSingleUploadSizeLong() != null) { StorageImplUtils.assertInBounds("ParallelTransferOptions.maxSingleUploadSize", other.getMaxSingleUploadSizeLong(), 1, MAX_FILE_PUT_RANGE_BYTES); } Long blockSize = other.getBlockSizeLong(); if (blockSize == null) { blockSize = MAX_FILE_PUT_RANGE_BYTES; } Integer maxConcurrency = other.getMaxConcurrency(); if (maxConcurrency == null) { maxConcurrency = FILE_DEFAULT_NUMBER_OF_BUFFERS; } Long maxSingleUploadSize = other.getMaxSingleUploadSizeLong(); if (maxSingleUploadSize == null) { maxSingleUploadSize = MAX_FILE_PUT_RANGE_BYTES; } return new ParallelTransferOptions() .setBlockSizeLong(blockSize) .setMaxConcurrency(maxConcurrency) .setProgressReceiver(other.getProgressReceiver()) .setMaxSingleUploadSizeLong(maxSingleUploadSize); } /** * Converts an internal type to a public type. 
* * @param option {@link ShareSnapshotsDeleteOptionType} * @return {@link DeleteSnapshotsOptionType} */ public static DeleteSnapshotsOptionType toDeleteSnapshotsOptionType(ShareSnapshotsDeleteOptionType option) { if (option == null) { return null; } switch (option) { case INCLUDE: return DeleteSnapshotsOptionType.INCLUDE; case INCLUDE_WITH_LEASED: return DeleteSnapshotsOptionType.INCLUDE_LEASED; default: throw LOGGER.logExceptionAsError(new IllegalArgumentException("Invalid " + option.getClass())); } } /** * Transforms {@link ShareItemInternal} into a public {@link ShareItem}. * * @param shareItemInternal {@link ShareItemInternal} * @return {@link ShareItem} */ public static ShareItem populateShareItem(ShareItemInternal shareItemInternal) { ShareItem item = new ShareItem(); item.setName(shareItemInternal.getName()); item.setSnapshot(shareItemInternal.getSnapshot()); item.setDeleted(shareItemInternal.isDeleted()); item.setVersion(shareItemInternal.getVersion()); item.setProperties(populateShareProperties(shareItemInternal.getProperties())); item.setMetadata(shareItemInternal.getMetadata()); return item; } /** * Transforms {@link SharePropertiesInternal} into a public {@link ShareProperties}. 
* * @param sharePropertiesInternal {@link SharePropertiesInternal} * @return {@link ShareProperties} */ public static ShareProperties populateShareProperties(SharePropertiesInternal sharePropertiesInternal) { ShareProperties properties = new ShareProperties(); properties.setLastModified(sharePropertiesInternal.getLastModified()); properties.setETag(sharePropertiesInternal.getETag()); properties.setQuota(sharePropertiesInternal.getQuota()); properties.setProvisionedIops(sharePropertiesInternal.getProvisionedIops()); properties.setProvisionedIngressMBps(sharePropertiesInternal.getProvisionedIngressMBps()); properties.setProvisionedEgressMBps(sharePropertiesInternal.getProvisionedEgressMBps()); properties.setNextAllowedQuotaDowngradeTime(sharePropertiesInternal.getNextAllowedQuotaDowngradeTime()); properties.setDeletedTime(sharePropertiesInternal.getDeletedTime()); properties.setRemainingRetentionDays(sharePropertiesInternal.getRemainingRetentionDays()); properties.setAccessTier(sharePropertiesInternal.getAccessTier()); properties.setAccessTierChangeTime(sharePropertiesInternal.getAccessTierChangeTime()); properties.setAccessTierTransitionState(sharePropertiesInternal.getAccessTierTransitionState()); properties.setLeaseStatus(sharePropertiesInternal.getLeaseStatus()); properties.setLeaseState(sharePropertiesInternal.getLeaseState()); properties.setLeaseDuration(sharePropertiesInternal.getLeaseDuration()); properties.setProtocols(parseShareProtocols(sharePropertiesInternal.getEnabledProtocols())); properties.setRootSquash(sharePropertiesInternal.getRootSquash()); properties.setMetadata(sharePropertiesInternal.getMetadata()); return properties; } /** * Parses a {@code String} into a {@code ShareProtocols}. Unrecognized protocols will be ignored. * * @param str The string to parse. * @return A {@code ShareProtocols} represented by the string. 
*/ public static ShareProtocols parseShareProtocols(String str) { if (str == null) { return null; } ShareProtocols protocols = new ShareProtocols(); for (String s : str.split(",")) { switch (s) { case Constants.HeaderConstants.SMB_PROTOCOL: protocols.setSmbEnabled(true); break; case Constants.HeaderConstants.NFS_PROTOCOL: protocols.setNfsEnabled(true); break; default: } } return protocols; } public static ServicesListSharesSegmentHeaders transformListSharesHeaders(HttpHeaders headers) { if (headers == null) { return null; } try { return SERIALIZER.deserialize(headers, ServicesListSharesSegmentHeaders.class); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } public static ShareFileDownloadHeaders transformFileDownloadHeaders(HttpHeaders headers) { if (headers == null) { return null; } try { return SERIALIZER.deserialize(headers, ShareFileDownloadHeaders.class); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } public static ShareFileItemProperties transformFileProperty(FileProperty property) { if (property == null) { return null; } return new ShareFileItemProperties(property.getCreationTime(), property.getLastAccessTime(), property.getLastWriteTime(), property.getChangeTime(), property.getLastModified(), property.getEtag()); } }
class ModelHelper { private static final SerializerAdapter SERIALIZER = new JacksonAdapter(); private static final ClientLogger LOGGER = new ClientLogger(ModelHelper.class); private static final long MAX_FILE_PUT_RANGE_BYTES = 4 * Constants.MB; private static final int FILE_DEFAULT_NUMBER_OF_BUFFERS = 8; /** * Fills in default values for a ParallelTransferOptions where no value has been set. This will construct a new * object for safety. * * @param other The options to fill in defaults. * @return An object with defaults filled in for null values in the original. */ public static ParallelTransferOptions populateAndApplyDefaults(ParallelTransferOptions other) { other = other == null ? new ParallelTransferOptions() : other; if (other.getBlockSizeLong() != null) { StorageImplUtils.assertInBounds("ParallelTransferOptions.blockSize", other.getBlockSizeLong(), 1, MAX_FILE_PUT_RANGE_BYTES); } if (other.getMaxSingleUploadSizeLong() != null) { StorageImplUtils.assertInBounds("ParallelTransferOptions.maxSingleUploadSize", other.getMaxSingleUploadSizeLong(), 1, MAX_FILE_PUT_RANGE_BYTES); } Long blockSize = other.getBlockSizeLong(); if (blockSize == null) { blockSize = MAX_FILE_PUT_RANGE_BYTES; } Integer maxConcurrency = other.getMaxConcurrency(); if (maxConcurrency == null) { maxConcurrency = FILE_DEFAULT_NUMBER_OF_BUFFERS; } Long maxSingleUploadSize = other.getMaxSingleUploadSizeLong(); if (maxSingleUploadSize == null) { maxSingleUploadSize = MAX_FILE_PUT_RANGE_BYTES; } return new ParallelTransferOptions() .setBlockSizeLong(blockSize) .setMaxConcurrency(maxConcurrency) .setProgressReceiver(other.getProgressReceiver()) .setMaxSingleUploadSizeLong(maxSingleUploadSize); } /** * Converts an internal type to a public type. 
* * @param option {@link ShareSnapshotsDeleteOptionType} * @return {@link DeleteSnapshotsOptionType} */ public static DeleteSnapshotsOptionType toDeleteSnapshotsOptionType(ShareSnapshotsDeleteOptionType option) { if (option == null) { return null; } switch (option) { case INCLUDE: return DeleteSnapshotsOptionType.INCLUDE; case INCLUDE_WITH_LEASED: return DeleteSnapshotsOptionType.INCLUDE_LEASED; default: throw LOGGER.logExceptionAsError(new IllegalArgumentException("Invalid " + option.getClass())); } } /** * Transforms {@link ShareItemInternal} into a public {@link ShareItem}. * * @param shareItemInternal {@link ShareItemInternal} * @return {@link ShareItem} */ public static ShareItem populateShareItem(ShareItemInternal shareItemInternal) { ShareItem item = new ShareItem(); item.setName(shareItemInternal.getName()); item.setSnapshot(shareItemInternal.getSnapshot()); item.setDeleted(shareItemInternal.isDeleted()); item.setVersion(shareItemInternal.getVersion()); item.setProperties(populateShareProperties(shareItemInternal.getProperties())); item.setMetadata(shareItemInternal.getMetadata()); return item; } /** * Transforms {@link SharePropertiesInternal} into a public {@link ShareProperties}. 
* * @param sharePropertiesInternal {@link SharePropertiesInternal} * @return {@link ShareProperties} */ public static ShareProperties populateShareProperties(SharePropertiesInternal sharePropertiesInternal) { ShareProperties properties = new ShareProperties(); properties.setLastModified(sharePropertiesInternal.getLastModified()); properties.setETag(sharePropertiesInternal.getETag()); properties.setQuota(sharePropertiesInternal.getQuota()); properties.setProvisionedIops(sharePropertiesInternal.getProvisionedIops()); properties.setProvisionedIngressMBps(sharePropertiesInternal.getProvisionedIngressMBps()); properties.setProvisionedEgressMBps(sharePropertiesInternal.getProvisionedEgressMBps()); properties.setNextAllowedQuotaDowngradeTime(sharePropertiesInternal.getNextAllowedQuotaDowngradeTime()); properties.setDeletedTime(sharePropertiesInternal.getDeletedTime()); properties.setRemainingRetentionDays(sharePropertiesInternal.getRemainingRetentionDays()); properties.setAccessTier(sharePropertiesInternal.getAccessTier()); properties.setAccessTierChangeTime(sharePropertiesInternal.getAccessTierChangeTime()); properties.setAccessTierTransitionState(sharePropertiesInternal.getAccessTierTransitionState()); properties.setLeaseStatus(sharePropertiesInternal.getLeaseStatus()); properties.setLeaseState(sharePropertiesInternal.getLeaseState()); properties.setLeaseDuration(sharePropertiesInternal.getLeaseDuration()); properties.setProtocols(parseShareProtocols(sharePropertiesInternal.getEnabledProtocols())); properties.setRootSquash(sharePropertiesInternal.getRootSquash()); properties.setMetadata(sharePropertiesInternal.getMetadata()); return properties; } /** * Parses a {@code String} into a {@code ShareProtocols}. Unrecognized protocols will be ignored. * * @param str The string to parse. * @return A {@code ShareProtocols} represented by the string. 
*/ public static ShareProtocols parseShareProtocols(String str) { if (str == null) { return null; } ShareProtocols protocols = new ShareProtocols(); for (String s : str.split(",")) { switch (s) { case Constants.HeaderConstants.SMB_PROTOCOL: protocols.setSmbEnabled(true); break; case Constants.HeaderConstants.NFS_PROTOCOL: protocols.setNfsEnabled(true); break; default: } } return protocols; } public static ServicesListSharesSegmentHeaders transformListSharesHeaders(HttpHeaders headers) { if (headers == null) { return null; } try { return SERIALIZER.deserialize(headers, ServicesListSharesSegmentHeaders.class); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } public static ShareFileDownloadHeaders transformFileDownloadHeaders(HttpHeaders headers) { if (headers == null) { return null; } try { return SERIALIZER.deserialize(headers, ShareFileDownloadHeaders.class); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } public static ShareFileItemProperties transformFileProperty(FileProperty property) { if (property == null) { return null; } return new InternalShareFileItemProperties(property.getCreationTime(), property.getLastAccessTime(), property.getLastWriteTime(), property.getChangeTime(), property.getLastModified(), property.getEtag()); } }
that seems like a fair assumption. Will remove.
public static String getETag(HttpHeaders headers) { if (headers == null) { return null; } return headers.getValue("ETag"); }
}
public static String getETag(HttpHeaders headers) { return headers.getValue("ETag"); }
class ModelHelper { private static final SerializerAdapter SERIALIZER = new JacksonAdapter(); private static final ClientLogger LOGGER = new ClientLogger(ModelHelper.class); private static final long MAX_FILE_PUT_RANGE_BYTES = 4 * Constants.MB; private static final int FILE_DEFAULT_NUMBER_OF_BUFFERS = 8; /** * Fills in default values for a ParallelTransferOptions where no value has been set. This will construct a new * object for safety. * * @param other The options to fill in defaults. * @return An object with defaults filled in for null values in the original. */ public static ParallelTransferOptions populateAndApplyDefaults(ParallelTransferOptions other) { other = other == null ? new ParallelTransferOptions() : other; if (other.getBlockSizeLong() != null) { StorageImplUtils.assertInBounds("ParallelTransferOptions.blockSize", other.getBlockSizeLong(), 1, MAX_FILE_PUT_RANGE_BYTES); } if (other.getMaxSingleUploadSizeLong() != null) { StorageImplUtils.assertInBounds("ParallelTransferOptions.maxSingleUploadSize", other.getMaxSingleUploadSizeLong(), 1, MAX_FILE_PUT_RANGE_BYTES); } Long blockSize = other.getBlockSizeLong(); if (blockSize == null) { blockSize = MAX_FILE_PUT_RANGE_BYTES; } Integer maxConcurrency = other.getMaxConcurrency(); if (maxConcurrency == null) { maxConcurrency = FILE_DEFAULT_NUMBER_OF_BUFFERS; } Long maxSingleUploadSize = other.getMaxSingleUploadSizeLong(); if (maxSingleUploadSize == null) { maxSingleUploadSize = MAX_FILE_PUT_RANGE_BYTES; } return new ParallelTransferOptions() .setBlockSizeLong(blockSize) .setMaxConcurrency(maxConcurrency) .setProgressReceiver(other.getProgressReceiver()) .setMaxSingleUploadSizeLong(maxSingleUploadSize); } /** * Converts an internal type to a public type. 
* * @param option {@link ShareSnapshotsDeleteOptionType} * @return {@link DeleteSnapshotsOptionType} */ public static DeleteSnapshotsOptionType toDeleteSnapshotsOptionType(ShareSnapshotsDeleteOptionType option) { if (option == null) { return null; } switch (option) { case INCLUDE: return DeleteSnapshotsOptionType.INCLUDE; case INCLUDE_WITH_LEASED: return DeleteSnapshotsOptionType.INCLUDE_LEASED; default: throw LOGGER.logExceptionAsError(new IllegalArgumentException("Invalid " + option.getClass())); } } /** * Transforms {@link ShareItemInternal} into a public {@link ShareItem}. * * @param shareItemInternal {@link ShareItemInternal} * @return {@link ShareItem} */ public static ShareItem populateShareItem(ShareItemInternal shareItemInternal) { ShareItem item = new ShareItem(); item.setName(shareItemInternal.getName()); item.setSnapshot(shareItemInternal.getSnapshot()); item.setDeleted(shareItemInternal.isDeleted()); item.setVersion(shareItemInternal.getVersion()); item.setProperties(populateShareProperties(shareItemInternal.getProperties())); item.setMetadata(shareItemInternal.getMetadata()); return item; } /** * Transforms {@link SharePropertiesInternal} into a public {@link ShareProperties}. 
* * @param sharePropertiesInternal {@link SharePropertiesInternal} * @return {@link ShareProperties} */ public static ShareProperties populateShareProperties(SharePropertiesInternal sharePropertiesInternal) { ShareProperties properties = new ShareProperties(); properties.setLastModified(sharePropertiesInternal.getLastModified()); properties.setETag(sharePropertiesInternal.getETag()); properties.setQuota(sharePropertiesInternal.getQuota()); properties.setProvisionedIops(sharePropertiesInternal.getProvisionedIops()); properties.setProvisionedIngressMBps(sharePropertiesInternal.getProvisionedIngressMBps()); properties.setProvisionedEgressMBps(sharePropertiesInternal.getProvisionedEgressMBps()); properties.setNextAllowedQuotaDowngradeTime(sharePropertiesInternal.getNextAllowedQuotaDowngradeTime()); properties.setDeletedTime(sharePropertiesInternal.getDeletedTime()); properties.setRemainingRetentionDays(sharePropertiesInternal.getRemainingRetentionDays()); properties.setAccessTier(sharePropertiesInternal.getAccessTier()); properties.setAccessTierChangeTime(sharePropertiesInternal.getAccessTierChangeTime()); properties.setAccessTierTransitionState(sharePropertiesInternal.getAccessTierTransitionState()); properties.setLeaseStatus(sharePropertiesInternal.getLeaseStatus()); properties.setLeaseState(sharePropertiesInternal.getLeaseState()); properties.setLeaseDuration(sharePropertiesInternal.getLeaseDuration()); properties.setProtocols(parseShareProtocols(sharePropertiesInternal.getEnabledProtocols())); properties.setRootSquash(sharePropertiesInternal.getRootSquash()); properties.setMetadata(sharePropertiesInternal.getMetadata()); return properties; } /** * Parses a {@code String} into a {@code ShareProtocols}. Unrecognized protocols will be ignored. * * @param str The string to parse. * @return A {@code ShareProtocols} represented by the string. 
*/ public static ShareProtocols parseShareProtocols(String str) { if (str == null) { return null; } ShareProtocols protocols = new ShareProtocols(); for (String s : str.split(",")) { switch (s) { case Constants.HeaderConstants.SMB_PROTOCOL: protocols.setSmbEnabled(true); break; case Constants.HeaderConstants.NFS_PROTOCOL: protocols.setNfsEnabled(true); break; default: } } return protocols; } public static ServicesListSharesSegmentHeaders transformListSharesHeaders(HttpHeaders headers) { if (headers == null) { return null; } try { return SERIALIZER.deserialize(headers, ServicesListSharesSegmentHeaders.class); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } public static ShareFileDownloadHeaders transformFileDownloadHeaders(HttpHeaders headers) { if (headers == null) { return null; } try { return SERIALIZER.deserialize(headers, ShareFileDownloadHeaders.class); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } public static ShareFileItemProperties transformFileProperty(FileProperty property) { if (property == null) { return null; } return new ShareFileItemProperties(property.getCreationTime(), property.getLastAccessTime(), property.getLastWriteTime(), property.getChangeTime(), property.getLastModified(), property.getEtag()); } }
class ModelHelper { private static final SerializerAdapter SERIALIZER = new JacksonAdapter(); private static final ClientLogger LOGGER = new ClientLogger(ModelHelper.class); private static final long MAX_FILE_PUT_RANGE_BYTES = 4 * Constants.MB; private static final int FILE_DEFAULT_NUMBER_OF_BUFFERS = 8; /** * Fills in default values for a ParallelTransferOptions where no value has been set. This will construct a new * object for safety. * * @param other The options to fill in defaults. * @return An object with defaults filled in for null values in the original. */ public static ParallelTransferOptions populateAndApplyDefaults(ParallelTransferOptions other) { other = other == null ? new ParallelTransferOptions() : other; if (other.getBlockSizeLong() != null) { StorageImplUtils.assertInBounds("ParallelTransferOptions.blockSize", other.getBlockSizeLong(), 1, MAX_FILE_PUT_RANGE_BYTES); } if (other.getMaxSingleUploadSizeLong() != null) { StorageImplUtils.assertInBounds("ParallelTransferOptions.maxSingleUploadSize", other.getMaxSingleUploadSizeLong(), 1, MAX_FILE_PUT_RANGE_BYTES); } Long blockSize = other.getBlockSizeLong(); if (blockSize == null) { blockSize = MAX_FILE_PUT_RANGE_BYTES; } Integer maxConcurrency = other.getMaxConcurrency(); if (maxConcurrency == null) { maxConcurrency = FILE_DEFAULT_NUMBER_OF_BUFFERS; } Long maxSingleUploadSize = other.getMaxSingleUploadSizeLong(); if (maxSingleUploadSize == null) { maxSingleUploadSize = MAX_FILE_PUT_RANGE_BYTES; } return new ParallelTransferOptions() .setBlockSizeLong(blockSize) .setMaxConcurrency(maxConcurrency) .setProgressReceiver(other.getProgressReceiver()) .setMaxSingleUploadSizeLong(maxSingleUploadSize); } /** * Converts an internal type to a public type. 
* * @param option {@link ShareSnapshotsDeleteOptionType} * @return {@link DeleteSnapshotsOptionType} */ public static DeleteSnapshotsOptionType toDeleteSnapshotsOptionType(ShareSnapshotsDeleteOptionType option) { if (option == null) { return null; } switch (option) { case INCLUDE: return DeleteSnapshotsOptionType.INCLUDE; case INCLUDE_WITH_LEASED: return DeleteSnapshotsOptionType.INCLUDE_LEASED; default: throw LOGGER.logExceptionAsError(new IllegalArgumentException("Invalid " + option.getClass())); } } /** * Transforms {@link ShareItemInternal} into a public {@link ShareItem}. * * @param shareItemInternal {@link ShareItemInternal} * @return {@link ShareItem} */ public static ShareItem populateShareItem(ShareItemInternal shareItemInternal) { ShareItem item = new ShareItem(); item.setName(shareItemInternal.getName()); item.setSnapshot(shareItemInternal.getSnapshot()); item.setDeleted(shareItemInternal.isDeleted()); item.setVersion(shareItemInternal.getVersion()); item.setProperties(populateShareProperties(shareItemInternal.getProperties())); item.setMetadata(shareItemInternal.getMetadata()); return item; } /** * Transforms {@link SharePropertiesInternal} into a public {@link ShareProperties}. 
* * @param sharePropertiesInternal {@link SharePropertiesInternal} * @return {@link ShareProperties} */ public static ShareProperties populateShareProperties(SharePropertiesInternal sharePropertiesInternal) { ShareProperties properties = new ShareProperties(); properties.setLastModified(sharePropertiesInternal.getLastModified()); properties.setETag(sharePropertiesInternal.getETag()); properties.setQuota(sharePropertiesInternal.getQuota()); properties.setProvisionedIops(sharePropertiesInternal.getProvisionedIops()); properties.setProvisionedIngressMBps(sharePropertiesInternal.getProvisionedIngressMBps()); properties.setProvisionedEgressMBps(sharePropertiesInternal.getProvisionedEgressMBps()); properties.setNextAllowedQuotaDowngradeTime(sharePropertiesInternal.getNextAllowedQuotaDowngradeTime()); properties.setDeletedTime(sharePropertiesInternal.getDeletedTime()); properties.setRemainingRetentionDays(sharePropertiesInternal.getRemainingRetentionDays()); properties.setAccessTier(sharePropertiesInternal.getAccessTier()); properties.setAccessTierChangeTime(sharePropertiesInternal.getAccessTierChangeTime()); properties.setAccessTierTransitionState(sharePropertiesInternal.getAccessTierTransitionState()); properties.setLeaseStatus(sharePropertiesInternal.getLeaseStatus()); properties.setLeaseState(sharePropertiesInternal.getLeaseState()); properties.setLeaseDuration(sharePropertiesInternal.getLeaseDuration()); properties.setProtocols(parseShareProtocols(sharePropertiesInternal.getEnabledProtocols())); properties.setRootSquash(sharePropertiesInternal.getRootSquash()); properties.setMetadata(sharePropertiesInternal.getMetadata()); return properties; } /** * Parses a {@code String} into a {@code ShareProtocols}. Unrecognized protocols will be ignored. * * @param str The string to parse. * @return A {@code ShareProtocols} represented by the string. 
*/ public static ShareProtocols parseShareProtocols(String str) { if (str == null) { return null; } ShareProtocols protocols = new ShareProtocols(); for (String s : str.split(",")) { switch (s) { case Constants.HeaderConstants.SMB_PROTOCOL: protocols.setSmbEnabled(true); break; case Constants.HeaderConstants.NFS_PROTOCOL: protocols.setNfsEnabled(true); break; default: } } return protocols; } public static ServicesListSharesSegmentHeaders transformListSharesHeaders(HttpHeaders headers) { if (headers == null) { return null; } try { return SERIALIZER.deserialize(headers, ServicesListSharesSegmentHeaders.class); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } public static ShareFileDownloadHeaders transformFileDownloadHeaders(HttpHeaders headers) { if (headers == null) { return null; } try { return SERIALIZER.deserialize(headers, ShareFileDownloadHeaders.class); } catch (IOException e) { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } public static ShareFileItemProperties transformFileProperty(FileProperty property) { if (property == null) { return null; } return new InternalShareFileItemProperties(property.getCreationTime(), property.getLastAccessTime(), property.getLastWriteTime(), property.getChangeTime(), property.getLastModified(), property.getEtag()); } }
I dont think this is true, the live test pipeline should run in LIVE mode. What is the expected behaviour that you are trying to accomplish?
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* The Live test pipeline runs in recording mode, so we need to generate random groupId's. We pull this value from the test.yml file which is only used in CI and Live pipeline. */ if (getTestMode() == TestMode.RECORD && GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
The Live test pipeline runs in recording mode, so we
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final Boolean GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE = Configuration.getGlobalConfiguration() .get("CALLINGSERVER_GENERATE_RANDOM_GROUPID_IN_RECORD_MODE", true); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
Ours seems to run in Record, maybe it is configured incorrectly
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* The Live test pipeline runs in recording mode, so we need to generate random groupId's. We pull this value from the test.yml file which is only used in CI and Live pipeline. */ if (getTestMode() == TestMode.RECORD && GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
The Live test pipeline runs in recording mode, so we
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final Boolean GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE = Configuration.getGlobalConfiguration() .get("CALLINGSERVER_GENERATE_RANDOM_GROUPID_IN_RECORD_MODE", true); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
[Link to Live Pipeline test run](https://dev.azure.com/azure-sdk/internal/_build/results?buildId=960406&view=logs&j=186fb226-6090-5bc4-037b-dfee8c37ef57&t=24176779-b866-57d9-80d3-8324e431459f&l=19)
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* The Live test pipeline runs in recording mode, so we need to generate random groupId's. We pull this value from the test.yml file which is only used in CI and Live pipeline. */ if (getTestMode() == TestMode.RECORD && GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
The Live test pipeline runs in recording mode, so we
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final Boolean GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE = Configuration.getGlobalConfiguration() .get("CALLINGSERVER_GENERATE_RANDOM_GROUPID_IN_RECORD_MODE", true); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
Sorry I just double checked, you are right, it is in record mode.
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* The Live test pipeline runs in recording mode, so we need to generate random groupId's. We pull this value from the test.yml file which is only used in CI and Live pipeline. */ if (getTestMode() == TestMode.RECORD && GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
The Live test pipeline runs in recording mode, so we
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final Boolean GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE = Configuration.getGlobalConfiguration() .get("CALLINGSERVER_GENERATE_RANDOM_GROUPID_IN_RECORD_MODE", true); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
I just talked with Alan Zimmer, and it seems the tests were temporarily switched to RECORD because LIVE was causing issues in azure-core-tests. This is going to be changed back soon. I will look at removing this work around for our tests, and also forcing LIVE mode for these tests, just incase there is another change.
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* The Live test pipeline runs in recording mode, so we need to generate random groupId's. We pull this value from the test.yml file which is only used in CI and Live pipeline. */ if (getTestMode() == TestMode.RECORD && GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
The Live test pipeline runs in recording mode, so we
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final Boolean GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE = Configuration.getGlobalConfiguration() .get("CALLINGSERVER_GENERATE_RANDOM_GROUPID_IN_RECORD_MODE", true); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
Sounds good, thanks for clarifying.
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* The Live test pipeline runs in recording mode, so we need to generate random groupId's. We pull this value from the test.yml file which is only used in CI and Live pipeline. */ if (getTestMode() == TestMode.RECORD && GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
The Live test pipeline runs in recording mode, so we
protected String getGroupId(String testName) { /* If tests are running in live mode, we want them to all have unique groupId's so they do not conflict with other recording tests running in live mode. */ if (getTestMode() == TestMode.LIVE) { return UUID.randomUUID().toString(); } /* For recording tests we need to make sure the groupId matches the recorded groupId, or the call will fail. */ return UUID.nameUUIDFromBytes(testName.getBytes()).toString(); }
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final Boolean GENERATE_RANDOM_GROUP_IDENTIFIER_RECORD_MODE = Configuration.getGlobalConfiguration() .get("CALLINGSERVER_GENERATE_RANDOM_GROUPID_IN_RECORD_MODE", true); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
class CallingServerTestBase extends TestBase { protected static final TestMode TEST_MODE = initializeTestMode(); protected static final String CONNECTION_STRING = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_CONNECTION_STRING", "endpoint=https: protected static final String AZURE_TENANT_ID = Configuration.getGlobalConfiguration() .get("COMMUNICATION_LIVETEST_STATIC_RESOURCE_IDENTIFIER", "016a7064-0581-40b9-be73-6dde64d69d72"); protected static final String FROM_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String TO_PHONE_NUMBER = Configuration.getGlobalConfiguration() .get("AZURE_PHONE_NUMBER", "+15551234567"); protected static final String CALLBACK_URI = Configuration.getGlobalConfiguration() .get("CALLBACK_URI", "https: protected static final String AUDIO_FILE_URI = Configuration.getGlobalConfiguration() .get("AUDIO_FILE_URI", "https: protected static final String METADATA_URL = Configuration.getGlobalConfiguration() .get("METADATA_URL", "https: protected static final String VIDEO_URL = Configuration.getGlobalConfiguration() .get("VIDEO_URL", "https: protected static final String CONTENT_URL_404 = Configuration.getGlobalConfiguration() .get("CONTENT_URL_404", "https: private static final StringJoiner JSON_PROPERTIES_TO_REDACT = new StringJoiner("\":\"|\"", "\"", "\":\"") .add("to"); private static final Pattern JSON_PROPERTY_VALUE_REDACTION_PATTERN = Pattern.compile(String.format("(?:%s)(.*?)(?:\",|\"}
Nice catch! o_0
public void visitToken(DetailAST token) { switch (token.getType()) { case TokenTypes.CLASS_DEF: classNameStack.offer(token.findFirstToken(TokenTypes.IDENT).getText()); break; case TokenTypes.METHOD_DEF: if (!isFluentMethod(token)) { return; } checkMethodNamePrefix(token); if (token.findFirstToken(TokenTypes.LITERAL_THROWS) != null) { log(token, String.format( "Fluent Method ''%s'' must not be declared to throw any checked exceptions.", token.findFirstToken(TokenTypes.IDENT).getText())); } break; default: break; } }
"Fluent Method ''%s'' must not be declared to throw any checked exceptions.",
public void visitToken(DetailAST token) { switch (token.getType()) { case TokenTypes.CLASS_DEF: classNameStack.offer(token.findFirstToken(TokenTypes.IDENT).getText()); break; case TokenTypes.METHOD_DEF: if (!isFluentMethod(token)) { return; } checkMethodNamePrefix(token); if (token.findFirstToken(TokenTypes.LITERAL_THROWS) != null) { log(token, String.format( "Fluent Method ''%s'' must not be declared to throw any checked exceptions.", token.findFirstToken(TokenTypes.IDENT).getText())); } break; default: break; } }
class names when traversals the AST tree. */ private final Queue<String> classNameStack = Collections.asLifoQueue(new ArrayDeque<>()); /** * Adds words that methods in fluent classes should not be prefixed with. * @param avoidStartWords the starting strings that should not start with in fluent method */ public final void setAvoidStartWords(String... avoidStartWords) { Collections.addAll(this.avoidStartWords, avoidStartWords); }
class names when traversals the AST tree. */ private final Queue<String> classNameStack = Collections.asLifoQueue(new ArrayDeque<>()); /** * Adds words that methods in fluent classes should not be prefixed with. * @param avoidStartWords the starting strings that should not start with in fluent method */ public final void setAvoidStartWords(String... avoidStartWords) { Collections.addAll(this.avoidStartWords, avoidStartWords); }
Should we make it so that whenever we call `printf` we add a new line at the end?
public void testEventProcessorBuilderMissingProperties() { assertThrows(NullPointerException.class, () -> { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); }
System.out.printf("Error occurred in partition processor for partition %s, %s",
public void testEventProcessorBuilderMissingProperties() { assertThrows(NullPointerException.class, () -> { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s%n", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); }
class EventProcessorClientBuilderTest { private static final String NAMESPACE_NAME = "dummyNamespaceName"; private static final String DEFAULT_DOMAIN_NAME = "servicebus.windows.net/"; private static final String EVENT_HUB_NAME = "eventHubName"; private static final String SHARED_ACCESS_KEY_NAME = "dummySasKeyName"; private static final String SHARED_ACCESS_KEY = "dummySasKey"; private static final String ENDPOINT = getURI(ClientConstants.ENDPOINT_FORMAT, NAMESPACE_NAME, DEFAULT_DOMAIN_NAME) .toString(); private static final String CORRECT_CONNECTION_STRING = String .format("Endpoint=%s;SharedAccessKeyName=%s;SharedAccessKey=%s;EntityPath=%s", ENDPOINT, SHARED_ACCESS_KEY_NAME, SHARED_ACCESS_KEY, EVENT_HUB_NAME); private static URI getURI(String endpointFormat, String namespace, String domainName) { try { return new URI(String.format(Locale.US, endpointFormat, namespace, domainName)); } catch (URISyntaxException exception) { throw new IllegalArgumentException(String.format(Locale.US, "Invalid namespace name: %s", namespace), exception); } } @Test @Test public void testEventProcessorBuilderWithProcessEvent() { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .connectionString(CORRECT_CONNECTION_STRING) .consumerGroup("consumer-group") .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .checkpointStore(new SampleCheckpointStore()) .buildEventProcessorClient(); assertNotNull(eventProcessorClient); } @Test public void testEventProcessorBuilderWithBothSingleAndBatchConsumers() { assertThrows(IllegalArgumentException.class, () -> { EventProcessorClient eventProcessorClient = new 
EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .consumerGroup("consumer-group") .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processEventBatch(eventBatchContext -> { eventBatchContext.getEvents().forEach(event -> { System.out .println( "Partition id = " + eventBatchContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + event.getSequenceNumber()); }); }, 5, Duration.ofSeconds(1)) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); } @Test public void testEventProcessorBuilderWithNoProcessEventConsumer() { assertThrows(IllegalArgumentException.class, () -> { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .consumerGroup("consumer-group") .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); } @Test public void testEventProcessorBuilderWithProcessEventBatch() { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .connectionString(CORRECT_CONNECTION_STRING) .consumerGroup("consumer-group") .processEventBatch(eventBatchContext -> { eventBatchContext.getEvents().forEach(event -> { System.out .println("Partition id = " + eventBatchContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + event.getSequenceNumber()); }); }, 5, Duration.ofSeconds(1)) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, 
%s", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .checkpointStore(new SampleCheckpointStore()) .buildEventProcessorClient(); assertNotNull(eventProcessorClient); } }
class EventProcessorClientBuilderTest { private static final String NAMESPACE_NAME = "dummyNamespaceName"; private static final String DEFAULT_DOMAIN_NAME = "servicebus.windows.net/"; private static final String EVENT_HUB_NAME = "eventHubName"; private static final String SHARED_ACCESS_KEY_NAME = "dummySasKeyName"; private static final String SHARED_ACCESS_KEY = "dummySasKey"; private static final String ENDPOINT = getURI(ClientConstants.ENDPOINT_FORMAT, NAMESPACE_NAME, DEFAULT_DOMAIN_NAME) .toString(); private static final String CORRECT_CONNECTION_STRING = String .format("Endpoint=%s;SharedAccessKeyName=%s;SharedAccessKey=%s;EntityPath=%s", ENDPOINT, SHARED_ACCESS_KEY_NAME, SHARED_ACCESS_KEY, EVENT_HUB_NAME); private static URI getURI(String endpointFormat, String namespace, String domainName) { try { return new URI(String.format(Locale.US, endpointFormat, namespace, domainName)); } catch (URISyntaxException exception) { throw new IllegalArgumentException(String.format(Locale.US, "Invalid namespace name: %s", namespace), exception); } } @Test @Test public void testEventProcessorBuilderWithProcessEvent() { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .connectionString(CORRECT_CONNECTION_STRING) .consumerGroup("consumer-group") .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s%n", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .checkpointStore(new SampleCheckpointStore()) .buildEventProcessorClient(); assertNotNull(eventProcessorClient); } @Test public void testEventProcessorBuilderWithBothSingleAndBatchConsumers() { assertThrows(IllegalArgumentException.class, () -> { EventProcessorClient eventProcessorClient = new 
EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .consumerGroup("consumer-group") .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processEventBatch(eventBatchContext -> { eventBatchContext.getEvents().forEach(event -> { System.out .println( "Partition id = " + eventBatchContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + event.getSequenceNumber()); }); }, 5, Duration.ofSeconds(1)) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s%n", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); } @Test public void testEventProcessorBuilderWithNoProcessEventConsumer() { assertThrows(IllegalArgumentException.class, () -> { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .consumerGroup("consumer-group") .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s%n", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); } @Test public void testEventProcessorBuilderWithProcessEventBatch() { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .connectionString(CORRECT_CONNECTION_STRING) .consumerGroup("consumer-group") .processEventBatch(eventBatchContext -> { eventBatchContext.getEvents().forEach(event -> { System.out .println("Partition id = " + eventBatchContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + event.getSequenceNumber()); }); }, 5, Duration.ofSeconds(1)) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition 
%s, %s%n", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .checkpointStore(new SampleCheckpointStore()) .buildEventProcessorClient(); assertNotNull(eventProcessorClient); } }
Yes, this should have a `%n` at the end
public void testEventProcessorBuilderMissingProperties() { assertThrows(NullPointerException.class, () -> { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); }
System.out.printf("Error occurred in partition processor for partition %s, %s",
public void testEventProcessorBuilderMissingProperties() { assertThrows(NullPointerException.class, () -> { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s%n", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); }
class EventProcessorClientBuilderTest { private static final String NAMESPACE_NAME = "dummyNamespaceName"; private static final String DEFAULT_DOMAIN_NAME = "servicebus.windows.net/"; private static final String EVENT_HUB_NAME = "eventHubName"; private static final String SHARED_ACCESS_KEY_NAME = "dummySasKeyName"; private static final String SHARED_ACCESS_KEY = "dummySasKey"; private static final String ENDPOINT = getURI(ClientConstants.ENDPOINT_FORMAT, NAMESPACE_NAME, DEFAULT_DOMAIN_NAME) .toString(); private static final String CORRECT_CONNECTION_STRING = String .format("Endpoint=%s;SharedAccessKeyName=%s;SharedAccessKey=%s;EntityPath=%s", ENDPOINT, SHARED_ACCESS_KEY_NAME, SHARED_ACCESS_KEY, EVENT_HUB_NAME); private static URI getURI(String endpointFormat, String namespace, String domainName) { try { return new URI(String.format(Locale.US, endpointFormat, namespace, domainName)); } catch (URISyntaxException exception) { throw new IllegalArgumentException(String.format(Locale.US, "Invalid namespace name: %s", namespace), exception); } } @Test @Test public void testEventProcessorBuilderWithProcessEvent() { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .connectionString(CORRECT_CONNECTION_STRING) .consumerGroup("consumer-group") .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .checkpointStore(new SampleCheckpointStore()) .buildEventProcessorClient(); assertNotNull(eventProcessorClient); } @Test public void testEventProcessorBuilderWithBothSingleAndBatchConsumers() { assertThrows(IllegalArgumentException.class, () -> { EventProcessorClient eventProcessorClient = new 
EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .consumerGroup("consumer-group") .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processEventBatch(eventBatchContext -> { eventBatchContext.getEvents().forEach(event -> { System.out .println( "Partition id = " + eventBatchContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + event.getSequenceNumber()); }); }, 5, Duration.ofSeconds(1)) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); } @Test public void testEventProcessorBuilderWithNoProcessEventConsumer() { assertThrows(IllegalArgumentException.class, () -> { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .consumerGroup("consumer-group") .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); } @Test public void testEventProcessorBuilderWithProcessEventBatch() { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .connectionString(CORRECT_CONNECTION_STRING) .consumerGroup("consumer-group") .processEventBatch(eventBatchContext -> { eventBatchContext.getEvents().forEach(event -> { System.out .println("Partition id = " + eventBatchContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + event.getSequenceNumber()); }); }, 5, Duration.ofSeconds(1)) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, 
%s", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .checkpointStore(new SampleCheckpointStore()) .buildEventProcessorClient(); assertNotNull(eventProcessorClient); } }
class EventProcessorClientBuilderTest { private static final String NAMESPACE_NAME = "dummyNamespaceName"; private static final String DEFAULT_DOMAIN_NAME = "servicebus.windows.net/"; private static final String EVENT_HUB_NAME = "eventHubName"; private static final String SHARED_ACCESS_KEY_NAME = "dummySasKeyName"; private static final String SHARED_ACCESS_KEY = "dummySasKey"; private static final String ENDPOINT = getURI(ClientConstants.ENDPOINT_FORMAT, NAMESPACE_NAME, DEFAULT_DOMAIN_NAME) .toString(); private static final String CORRECT_CONNECTION_STRING = String .format("Endpoint=%s;SharedAccessKeyName=%s;SharedAccessKey=%s;EntityPath=%s", ENDPOINT, SHARED_ACCESS_KEY_NAME, SHARED_ACCESS_KEY, EVENT_HUB_NAME); private static URI getURI(String endpointFormat, String namespace, String domainName) { try { return new URI(String.format(Locale.US, endpointFormat, namespace, domainName)); } catch (URISyntaxException exception) { throw new IllegalArgumentException(String.format(Locale.US, "Invalid namespace name: %s", namespace), exception); } } @Test @Test public void testEventProcessorBuilderWithProcessEvent() { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .connectionString(CORRECT_CONNECTION_STRING) .consumerGroup("consumer-group") .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s%n", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .checkpointStore(new SampleCheckpointStore()) .buildEventProcessorClient(); assertNotNull(eventProcessorClient); } @Test public void testEventProcessorBuilderWithBothSingleAndBatchConsumers() { assertThrows(IllegalArgumentException.class, () -> { EventProcessorClient eventProcessorClient = new 
EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .consumerGroup("consumer-group") .processEvent(eventContext -> { System.out.println("Partition id = " + eventContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + eventContext.getEventData().getSequenceNumber()); }) .processEventBatch(eventBatchContext -> { eventBatchContext.getEvents().forEach(event -> { System.out .println( "Partition id = " + eventBatchContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + event.getSequenceNumber()); }); }, 5, Duration.ofSeconds(1)) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s%n", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); } @Test public void testEventProcessorBuilderWithNoProcessEventConsumer() { assertThrows(IllegalArgumentException.class, () -> { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .checkpointStore(new SampleCheckpointStore()) .consumerGroup("consumer-group") .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition %s, %s%n", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .buildEventProcessorClient(); }); } @Test public void testEventProcessorBuilderWithProcessEventBatch() { EventProcessorClient eventProcessorClient = new EventProcessorClientBuilder() .connectionString(CORRECT_CONNECTION_STRING) .consumerGroup("consumer-group") .processEventBatch(eventBatchContext -> { eventBatchContext.getEvents().forEach(event -> { System.out .println("Partition id = " + eventBatchContext.getPartitionContext().getPartitionId() + " and " + "sequence number of event = " + event.getSequenceNumber()); }); }, 5, Duration.ofSeconds(1)) .processError(errorContext -> { System.out.printf("Error occurred in partition processor for partition 
%s, %s%n", errorContext.getPartitionContext().getPartitionId(), errorContext.getThrowable()); }) .checkpointStore(new SampleCheckpointStore()) .buildEventProcessorClient(); assertNotNull(eventProcessorClient); } }
```suggestion Objects.requireNonNull(endpoint, "'endpoint' cannot be null."); ```
public CommunicationRelayAsyncClient buildAsyncClient() { Objects.requireNonNull(endpoint, "'ednpoint' cannot be null."); Objects.requireNonNull(customPolicies, "'customPolicies' cannot be null."); return new CommunicationRelayAsyncClient(createServiceImpl()); }
Objects.requireNonNull(endpoint, "'ednpoint' cannot be null.");
public CommunicationRelayAsyncClient buildAsyncClient() { Objects.requireNonNull(endpoint, "'endpoint' cannot be null."); return new CommunicationRelayAsyncClient(createServiceImpl()); }
class CommunicationRelayClientBuilder { private static final String SDK_NAME = "name"; private static final String SDK_VERSION = "version"; private static final String COMMUNICATION_IDENTITY_PROPERTIES = "azure-communication-networktravesal.properties"; private final ClientLogger logger = new ClientLogger(CommunicationRelayClientBuilder.class); private String endpoint; private AzureKeyCredential azureKeyCredential; private TokenCredential tokenCredential; private HttpClient httpClient; private HttpLogOptions httpLogOptions = new HttpLogOptions(); private HttpPipeline pipeline; private RetryPolicy retryPolicy; private Configuration configuration; private ClientOptions clientOptions; private final Map<String, String> properties = CoreUtils.getProperties(COMMUNICATION_IDENTITY_PROPERTIES); private final List<HttpPipelinePolicy> customPolicies = new ArrayList<HttpPipelinePolicy>(); /** * Set endpoint of the service * * @param endpoint url of the service * @return CommunicationRelayClientBuilder */ public CommunicationRelayClientBuilder endpoint(String endpoint) { this.endpoint = endpoint; return this; } /** * Set endpoint of the service * * @param pipeline HttpPipeline to use, if a pipeline is not * supplied, the credential and httpClient fields must be set * @return CommunicationRelayClientBuilder */ public CommunicationRelayClientBuilder pipeline(HttpPipeline pipeline) { this.pipeline = pipeline; return this; } /** * Sets the {@link TokenCredential} used to authenticate HTTP requests. * * @param tokenCredential {@link TokenCredential} used to authenticate HTTP requests. * @return The updated {@link CommunicationRelayClientBuilder} object. */ public CommunicationRelayClientBuilder credential(TokenCredential tokenCredential) { this.tokenCredential = tokenCredential; return this; } /** * Sets the {@link AzureKeyCredential} used to authenticate HTTP requests. * * @param keyCredential The {@link AzureKeyCredential} used to authenticate HTTP requests. 
* @return The updated {@link CommunicationRelayClientBuilder} object. */ public CommunicationRelayClientBuilder credential(AzureKeyCredential keyCredential) { this.azureKeyCredential = keyCredential; return this; } /** * Set endpoint and credential to use * * @param connectionString connection string for setting endpoint and initalizing CommunicationClientCredential * @return CommunicationRelayClientBuilder */ public CommunicationRelayClientBuilder connectionString(String connectionString) { Objects.requireNonNull(connectionString, "'connectionString' cannot be null."); CommunicationConnectionString connectionStringObject = new CommunicationConnectionString(connectionString); String endpoint = connectionStringObject.getEndpoint(); String accessKey = connectionStringObject.getAccessKey(); this .endpoint(endpoint) .credential(new AzureKeyCredential(accessKey)); return this; } /** * Set httpClient to use * * @param httpClient httpClient to use, overridden by the pipeline * field. * @return CommunicationRelayClientBuilder */ public CommunicationRelayClientBuilder httpClient(HttpClient httpClient) { this.httpClient = httpClient; return this; } /** * Apply additional HttpPipelinePolicy * * @param customPolicy HttpPipelinePolicy object to be applied after * AzureKeyCredentialPolicy, UserAgentPolicy, RetryPolicy, and CookiePolicy * @return CommunicationRelayClientBuilder */ public CommunicationRelayClientBuilder addPolicy(HttpPipelinePolicy customPolicy) { this.customPolicies.add(customPolicy); return this; } /** * Sets the client options for all the requests made through the client. * * @param clientOptions {@link ClientOptions}. * @return The updated {@link CommunicationRelayClientBuilder} object. */ public CommunicationRelayClientBuilder clientOptions(ClientOptions clientOptions) { this.clientOptions = clientOptions; return this; } /** * Sets the configuration object used to retrieve environment configuration values during building of the client. 
* * @param configuration Configuration store used to retrieve environment configurations. * @return the updated CommunicationRelayClientBuilder object */ public CommunicationRelayClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets the {@link HttpLogOptions} for service requests. * * @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses. * @return the updated CommunicationRelayClientBuilder object */ public CommunicationRelayClientBuilder httpLogOptions(HttpLogOptions logOptions) { this.httpLogOptions = logOptions; return this; } /** * Sets the {@link RetryPolicy} that is used when each request is sent. * * @param retryPolicy User's retry policy applied to each request. * @return The updated {@link CommunicationRelayClientBuilder} object. */ public CommunicationRelayClientBuilder retryPolicy(RetryPolicy retryPolicy) { this.retryPolicy = retryPolicy; return this; } /** * Sets the {@link CommunicationRelayServiceVersion} that is used when making API requests. * <p> * If a service version is not provided, the service version that will be used will be the latest known service * version based on the version of the client library being used. If no service version is specified, updating to a * newer version of the client library will have the result of potentially moving to a newer service version. * <p> * Targeting a specific service version may also mean that the service will return an error for newer APIs. * * @param version {@link CommunicationRelayServiceVersion} of the service to be used when making requests. * @return the updated CommunicationRelayClientBuilder object */ public CommunicationRelayClientBuilder serviceVersion(CommunicationRelayServiceVersion version) { return this; } /** * Create asynchronous client applying HMACAuthenticationPolicy, UserAgentPolicy, * RetryPolicy, and CookiePolicy. 
* Additional HttpPolicies specified by additionalPolicies will be applied after them * * @return CommunicationRelayAsyncClient instance */ /** * Create synchronous client applying HmacAuthenticationPolicy, UserAgentPolicy, * RetryPolicy, and CookiePolicy. * Additional HttpPolicies specified by additionalPolicies will be applied after them * * @return CommunicationRelayClient instance */ public CommunicationRelayClient buildClient() { Objects.requireNonNull(endpoint, "'ednpoint' cannot be null."); Objects.requireNonNull(customPolicies, "'customPolicies' cannot be null."); return new CommunicationRelayClient(buildAsyncClient()); } private CommunicationNetworkingClientImpl createServiceImpl() { HttpPipeline builderPipeline = this.pipeline; if (this.pipeline == null) { builderPipeline = createHttpPipeline(httpClient, createHttpPipelineAuthPolicy(), customPolicies); } CommunicationNetworkingClientImplBuilder clientBuilder = new CommunicationNetworkingClientImplBuilder(); clientBuilder.endpoint(endpoint) .pipeline(builderPipeline); return clientBuilder.buildClient(); } private HttpPipelinePolicy createHttpPipelineAuthPolicy() { if (this.tokenCredential != null && this.azureKeyCredential != null) { throw logger.logExceptionAsError( new IllegalArgumentException("Both 'credential' and 'accessKey' are set. 
Just one may be used.")); } if (this.tokenCredential != null) { return new BearerTokenAuthenticationPolicy( this.tokenCredential, "https: } else if (this.azureKeyCredential != null) { return new HmacAuthenticationPolicy(this.azureKeyCredential); } else { throw logger.logExceptionAsError( new IllegalArgumentException("Missing credential information while building a client.")); } } private HttpPipeline createHttpPipeline(HttpClient httpClient, HttpPipelinePolicy authorizationPolicy, List<HttpPipelinePolicy> customPolicies) { List<HttpPipelinePolicy> policies = new ArrayList<HttpPipelinePolicy>(); applyRequiredPolicies(policies, authorizationPolicy); if (customPolicies != null && customPolicies.size() > 0) { policies.addAll(customPolicies); } return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .httpClient(httpClient) .clientOptions(clientOptions) .build(); } private void applyRequiredPolicies(List<HttpPipelinePolicy> policies, HttpPipelinePolicy authorizationPolicy) { String clientName = properties.getOrDefault(SDK_NAME, "UnknownName"); String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion"); ClientOptions buildClientOptions = (clientOptions == null) ? new ClientOptions() : clientOptions; HttpLogOptions buildLogOptions = (httpLogOptions == null) ? new HttpLogOptions() : httpLogOptions; String applicationId = null; if (!CoreUtils.isNullOrEmpty(buildClientOptions.getApplicationId())) { applicationId = buildClientOptions.getApplicationId(); } else if (!CoreUtils.isNullOrEmpty(buildLogOptions.getApplicationId())) { applicationId = buildLogOptions.getApplicationId(); } policies.add(new UserAgentPolicy(applicationId, clientName, clientVersion, configuration)); policies.add(new RequestIdPolicy()); policies.add(this.retryPolicy == null ? new RetryPolicy() : this.retryPolicy); policies.add(new CookiePolicy()); policies.add(authorizationPolicy); policies.add(new HttpLoggingPolicy(httpLogOptions)); } }
class CommunicationRelayClientBuilder { private static final String SDK_NAME = "name"; private static final String SDK_VERSION = "version"; private static final String COMMUNICATION_NETWORK_TRAVERSAL_PROPERTIES = "azure-communication-networktraversal.properties"; private final ClientLogger logger = new ClientLogger(CommunicationRelayClientBuilder.class); private String endpoint; private AzureKeyCredential azureKeyCredential; private TokenCredential tokenCredential; private HttpClient httpClient; private HttpLogOptions httpLogOptions = new HttpLogOptions(); private HttpPipeline pipeline; private RetryPolicy retryPolicy; private Configuration configuration; private ClientOptions clientOptions; private String connectionString; private final Map<String, String> properties = CoreUtils.getProperties(COMMUNICATION_NETWORK_TRAVERSAL_PROPERTIES); private final List<HttpPipelinePolicy> customPolicies = new ArrayList<>(); /** * Set endpoint of the service * * @param endpoint url of the service * @return CommunicationRelayClientBuilder */ public CommunicationRelayClientBuilder endpoint(String endpoint) { this.endpoint = endpoint; return this; } /** * Set endpoint of the service * * @param pipeline HttpPipeline to use, if a pipeline is not * supplied, the credential and httpClient fields must be set * @return CommunicationRelayClientBuilder */ public CommunicationRelayClientBuilder pipeline(HttpPipeline pipeline) { this.pipeline = pipeline; return this; } /** * Sets the {@link TokenCredential} used to authenticate HTTP requests. * * @param tokenCredential {@link TokenCredential} used to authenticate HTTP requests. * @return The updated {@link CommunicationRelayClientBuilder} object. */ public CommunicationRelayClientBuilder credential(TokenCredential tokenCredential) { this.tokenCredential = tokenCredential; return this; } /** * Sets the {@link AzureKeyCredential} used to authenticate HTTP requests. 
* * @param keyCredential The {@link AzureKeyCredential} used to authenticate HTTP requests. * @return The updated {@link CommunicationRelayClientBuilder} object. */ public CommunicationRelayClientBuilder credential(AzureKeyCredential keyCredential) { this.azureKeyCredential = keyCredential; return this; } /** * Set endpoint and credential to use * * @param connectionString connection string for setting endpoint and initalizing CommunicationClientCredential * @return CommunicationRelayClientBuilder */ public CommunicationRelayClientBuilder connectionString(String connectionString) { CommunicationConnectionString connectionStringObject = new CommunicationConnectionString(connectionString); String endpoint = connectionStringObject.getEndpoint(); String accessKey = connectionStringObject.getAccessKey(); this .endpoint(endpoint) .credential(new AzureKeyCredential(accessKey)); return this; } /** * Set httpClient to use * * @param httpClient httpClient to use, overridden by the pipeline * field. * @return CommunicationRelayClientBuilder */ public CommunicationRelayClientBuilder httpClient(HttpClient httpClient) { this.httpClient = httpClient; return this; } /** * Apply additional HttpPipelinePolicy * * @param customPolicy HttpPipelinePolicy object to be applied after * AzureKeyCredentialPolicy, UserAgentPolicy, RetryPolicy, and CookiePolicy * @return CommunicationRelayClientBuilder */ public CommunicationRelayClientBuilder addPolicy(HttpPipelinePolicy customPolicy) { this.customPolicies.add(customPolicy); return this; } /** * Sets the client options for all the requests made through the client. * * @param clientOptions {@link ClientOptions}. * @return The updated {@link CommunicationRelayClientBuilder} object. */ public CommunicationRelayClientBuilder clientOptions(ClientOptions clientOptions) { this.clientOptions = clientOptions; return this; } /** * Sets the configuration object used to retrieve environment configuration values during building of the client. 
* * @param configuration Configuration store used to retrieve environment configurations. * @return the updated CommunicationRelayClientBuilder object */ public CommunicationRelayClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets the {@link HttpLogOptions} for service requests. * * @param logOptions The logging configuration to use when sending and receiving HTTP requests/responses. * @return the updated CommunicationRelayClientBuilder object */ public CommunicationRelayClientBuilder httpLogOptions(HttpLogOptions logOptions) { this.httpLogOptions = logOptions; return this; } /** * Sets the {@link RetryPolicy} that is used when each request is sent. * * @param retryPolicy User's retry policy applied to each request. * @return The updated {@link CommunicationRelayClientBuilder} object. */ public CommunicationRelayClientBuilder retryPolicy(RetryPolicy retryPolicy) { this.retryPolicy = retryPolicy; return this; } /** * Sets the {@link CommunicationRelayServiceVersion} that is used when making API requests. * <p> * If a service version is not provided, the service version that will be used will be the latest known service * version based on the version of the client library being used. If no service version is specified, updating to a * newer version of the client library will have the result of potentially moving to a newer service version. * <p> * Targeting a specific service version may also mean that the service will return an error for newer APIs. * * @param version {@link CommunicationRelayServiceVersion} of the service to be used when making requests. * @return the updated CommunicationRelayClientBuilder object */ public CommunicationRelayClientBuilder serviceVersion(CommunicationRelayServiceVersion version) { return this; } /** * Create asynchronous client applying HMACAuthenticationPolicy, UserAgentPolicy, * RetryPolicy, and CookiePolicy. 
* Additional HttpPolicies specified by additionalPolicies will be applied after them * * @return CommunicationRelayAsyncClient instance */ /** * Create synchronous client applying HmacAuthenticationPolicy, UserAgentPolicy, * RetryPolicy, and CookiePolicy. * Additional HttpPolicies specified by additionalPolicies will be applied after them * * @return CommunicationRelayClient instance */ public CommunicationRelayClient buildClient() { return new CommunicationRelayClient(buildAsyncClient()); } private CommunicationNetworkingClientImpl createServiceImpl() { HttpPipeline builderPipeline = this.pipeline; if (this.pipeline == null) { builderPipeline = createHttpPipeline(httpClient, createHttpPipelineAuthPolicy(), customPolicies); } CommunicationNetworkingClientImplBuilder clientBuilder = new CommunicationNetworkingClientImplBuilder(); clientBuilder.endpoint(endpoint) .pipeline(builderPipeline); return clientBuilder.buildClient(); } private HttpPipelinePolicy createHttpPipelineAuthPolicy() { if (this.tokenCredential != null && this.azureKeyCredential != null) { throw logger.logExceptionAsError( new IllegalArgumentException("Both 'credential' and 'accessKey' are set. 
Just one may be used.")); } if (this.tokenCredential != null) { return new BearerTokenAuthenticationPolicy( this.tokenCredential, "https: } else if (this.azureKeyCredential != null) { return new HmacAuthenticationPolicy(this.azureKeyCredential); } else { throw logger.logExceptionAsError( new IllegalArgumentException("Missing credential information while building a client.")); } } private HttpPipeline createHttpPipeline(HttpClient httpClient, HttpPipelinePolicy authorizationPolicy, List<HttpPipelinePolicy> customPolicies) { List<HttpPipelinePolicy> policies = new ArrayList<HttpPipelinePolicy>(); applyRequiredPolicies(policies, authorizationPolicy); if (customPolicies != null && customPolicies.size() > 0) { policies.addAll(customPolicies); } return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .httpClient(httpClient) .clientOptions(clientOptions) .build(); } private void applyRequiredPolicies(List<HttpPipelinePolicy> policies, HttpPipelinePolicy authorizationPolicy) { String clientName = properties.getOrDefault(SDK_NAME, "UnknownName"); String clientVersion = properties.getOrDefault(SDK_VERSION, "UnknownVersion"); ClientOptions buildClientOptions = (clientOptions == null) ? new ClientOptions() : clientOptions; HttpLogOptions buildLogOptions = (httpLogOptions == null) ? new HttpLogOptions() : httpLogOptions; String applicationId = null; if (!CoreUtils.isNullOrEmpty(buildClientOptions.getApplicationId())) { applicationId = buildClientOptions.getApplicationId(); } else if (!CoreUtils.isNullOrEmpty(buildLogOptions.getApplicationId())) { applicationId = buildLogOptions.getApplicationId(); } policies.add(new UserAgentPolicy(applicationId, clientName, clientVersion, configuration)); policies.add(new RequestIdPolicy()); policies.add(this.retryPolicy == null ? new RetryPolicy() : this.retryPolicy); policies.add(new CookiePolicy()); policies.add(authorizationPolicy); policies.add(new HttpLoggingPolicy(httpLogOptions)); } }
this system.out.println can be removed.
public void recognizeCustomFormInvalidStatus(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); invalidSourceUrlRunner((invalidSourceUrl) -> beginTrainingLabeledRunner((training, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(training, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); HttpResponseException httpResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeCustomFormsFromUrl( createdModel.getModelId(), invalidSourceUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE) .getFinalResult()); System.out.println(httpResponseException.getValue()); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) httpResponseException.getValue(); assertEquals(INVALID_SOURCE_URL_EXCEPTION_MESSAGE, errorInformation.getMessage()); })); }
System.out.println(httpResponseException.getValue());
public void recognizeCustomFormInvalidStatus(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); invalidSourceUrlRunner((invalidSourceUrl) -> beginTrainingLabeledRunner((training, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(training, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); HttpResponseException httpResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeCustomFormsFromUrl( createdModel.getModelId(), invalidSourceUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE) .getFinalResult()); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) httpResponseException.getValue(); assertEquals(INVALID_SOURCE_URL_EXCEPTION_MESSAGE, errorInformation.getMessage()); })); }
class FormRecognizerClientTest extends FormRecognizerClientTestBase { private FormRecognizerClient client; private FormRecognizerClient getFormRecognizerClient(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { return getFormRecognizerClientBuilder(httpClient, serviceVersion).buildClient(); } private FormTrainingClient getFormTrainingClient(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { return getFormTrainingClientBuilder(httpClient, serviceVersion).buildClient(); } /** * Verifies receipt data for a document using source as input stream data. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts(data, dataLength, new RecognizeReceiptsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, RECEIPT); }, RECEIPT_CONTOSO_JPG); } /** * Verifies an exception thrown for a document using null data value. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptDataNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); assertThrows(NullPointerException.class, () -> client.beginRecognizeReceipts(null, 0)); } /** * Verifies content type will be auto detected when using receipt API with input stream data overload. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptDataWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); localFilePathRunner((filePath, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts( getContentDetectionFileData(filePath), dataLength, new RecognizeReceiptsOptions() .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, RECEIPT); }, RECEIPT_CONTOSO_JPG); } /** * Verifies receipt data for a document using source as as input stream data and text content when * includeFieldElements is true. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts( data, dataLength, new RecognizeReceiptsOptions().setContentType(FormContentType.IMAGE_JPEG) .setFieldElementsIncluded(true).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, RECEIPT); }, RECEIPT_CONTOSO_JPG); } /** * Verifies receipt data from a document using PNG file data as source and including text content details. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptDataWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts(data, dataLength, new RecognizeReceiptsOptions().setContentType( FormContentType.IMAGE_PNG).setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, RECEIPT); }, RECEIPT_CONTOSO_PNG); } /** * Verifies receipt data from a document using blank PDF. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptDataWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts( data, dataLength, new RecognizeReceiptsOptions().setContentType(APPLICATION_PDF) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateBlankPdfResultData(syncPoller.getFinalResult()); }, BLANK_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptFromDataMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts( data, dataLength, new RecognizeReceiptsOptions().setContentType(APPLICATION_PDF) 
.setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateMultipageReceiptData(syncPoller.getFinalResult()); }, MULTIPAGE_RECEIPT_PDF); } /** * Verify that receipt recognition with damaged PDF file. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptFromDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); damagedPdfDataRunner((data, dataLength) -> { HttpResponseException httpResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeReceipts(data, dataLength, new RecognizeReceiptsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE) .getFinalResult()); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) httpResponseException.getValue(); assertEquals(BAD_ARGUMENT_CODE, errorInformation.getErrorCode()); }); } /** * Verifies receipt data for a document using source as file url. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner((sourceUrl) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceiptsFromUrl(sourceUrl).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, RECEIPT); }, RECEIPT_CONTOSO_JPG); } /** * Verifies encoded blank url must stay same when sent to service for a document using invalid source url with * encoded blank space as input data to recognize receipt from url API. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils @Disabled("https: public void recognizeReceiptFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); encodedBlankSpaceSourceUrlRunner(sourceUrl -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeReceiptsFromUrl(sourceUrl, new RecognizeReceiptsOptions() .setPollInterval(durationTestMode), Context.NONE)); validateExceptionSource(errorResponseException); }); } /** * Verifies that an exception is thrown for invalid source url. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); invalidSourceUrlRunner((sourceUrl) -> assertThrows(HttpResponseException.class, () -> client.beginRecognizeReceiptsFromUrl(sourceUrl, new RecognizeReceiptsOptions().setPollInterval(durationTestMode), Context.NONE))); } /** * Verifies receipt data for a document using source as file url and include form element references * when includeFieldElements is true. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptFromUrlIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceiptsFromUrl(sourceUrl, new RecognizeReceiptsOptions().setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, RECEIPT); }, RECEIPT_CONTOSO_JPG); } /** * Verifies receipt data for a document using source as PNG file url and include form element references * when includeFieldElements is true. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptSourceUrlWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceiptsFromUrl(sourceUrl, new RecognizeReceiptsOptions() .setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, RECEIPT); }, RECEIPT_CONTOSO_PNG); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils @Disabled public void recognizeReceiptFromUrlMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(receiptUrl -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceiptsFromUrl( receiptUrl, new 
RecognizeReceiptsOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateMultipageReceiptData(syncPoller.getFinalResult()); }, MULTIPAGE_INVOICE_PDF); } /** * Verifies layout/content data for a document using source as input stream data. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContent(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, FORM_JPG); } /** * Verifies an exception thrown for a document using null data value. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentResultWithNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); assertThrows(NullPointerException.class, () -> client.beginRecognizeContent(null, 0)); } /** * Verifies content type will be auto detected when using content/layout API with input stream data overload. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentResultWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); localFilePathRunner((filePath, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent( getContentDetectionFileData(filePath), dataLength, new RecognizeContentOptions() .setContentType(null).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, FORM_JPG); } /** * Verifies blank form file is still a valid file to process */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentResultWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, BLANK_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentFromDataMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), 
Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, MULTIPAGE_INVOICE_PDF); } /** * Verify that content recognition with damaged PDF file. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentFromDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); damagedPdfDataRunner((data, dataLength) -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE) .getFinalResult()); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) errorResponseException.getValue(); assertEquals(INVALID_IMAGE_ERROR_CODE, errorInformation.getErrorCode()); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentWithSelectionMarks(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, SELECTION_MARK_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentWithPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { 
SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF) .setPollInterval(durationTestMode) .setPages(Collections.singletonList("1")), Context.NONE); syncPoller.waitForCompletion(); List<FormPage> formPages = syncPoller.getFinalResult(); validateContentResultData(formPages, false); assertEquals(1, formPages.size()); }, MULTIPAGE_INVOICE_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentWithPages(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF) .setPollInterval(durationTestMode) .setPages(Arrays.asList("1", "2")), Context.NONE); syncPoller.waitForCompletion(); List<FormPage> formPages = syncPoller.getFinalResult(); validateContentResultData(formPages, false); assertEquals(2, formPages.size()); }, MULTIPAGE_INVOICE_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentWithPageRange(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF) .setPollInterval(durationTestMode) .setPages(Arrays.asList("1-2", "3")), Context.NONE); syncPoller.waitForCompletion(); List<FormPage> formPages = syncPoller.getFinalResult(); validateContentResultData(formPages, false); assertEquals(3, 
formPages.size()); }, MULTIPAGE_INVOICE_PDF); } /** * Verifies layout data for a document using source as input stream data. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentAppearance(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(FormContentType.IMAGE_JPEG) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); List<FormPage> formPages = syncPoller.getFinalResult(); validateContentResultData(formPages, false); assertEquals(TextStyleName.OTHER, formPages.get(0).getLines().get(0).getAppearance().getStyleName()); }, FORM_JPG); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentFromUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions() .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, FORM_JPG); } /** * Verifies encoded blank url must stay same when sent to service for a document using invalid source url with * encoded blank space as input data to recognize a content from url API. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils @Disabled("https: public void recognizeContentFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); encodedBlankSpaceSourceUrlRunner(sourceUrl -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions() .setPollInterval(durationTestMode), Context.NONE)); validateExceptionSource(errorResponseException); }); } /** * Verifies layout data for a pdf url */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentFromUrlWithPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, INVOICE_6_PDF); } /** * Verifies that an exception is thrown for invalid source url for recognizing content/layout information. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); invalidSourceUrlRunner((invalidSourceUrl) -> assertThrows( HttpResponseException.class, () -> client.beginRecognizeContentFromUrl(invalidSourceUrl, new RecognizeContentOptions().setPollInterval(durationTestMode), Context.NONE))); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentFromUrlMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner((formUrl) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContentFromUrl(formUrl, new RecognizeContentOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, MULTIPAGE_INVOICE_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentWithSelectionMarksFromUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, SELECTION_MARK_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeGermanContentFromUrl(HttpClient httpClient, FormRecognizerServiceVersion 
serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); testingContainerUrlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions().setPollInterval(durationTestMode) .setLanguage(FormRecognizerLanguage.DE), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); validateNetworkCallRecord("language", "de"); }, CONTENT_GERMAN_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentIncorrectLanguageFromUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); testingContainerUrlRunner(sourceUrl -> { HttpResponseException exception = assertThrows(HttpResponseException.class, () -> client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions().setPollInterval(durationTestMode) .setLanguage(FormRecognizerLanguage.fromString("language")), Context.NONE)); assertEquals(((FormRecognizerErrorInformation) exception.getValue()).getErrorCode(), "NotSupportedLanguage"); }, CONTENT_GERMAN_PDF); } /** * Verifies custom form data for a document using source as input stream data and valid labeled model Id. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(IMAGE_JPEG).setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), true, true); }), FORM_JPG); } /** * Verifies custom form data for a JPG content type with labeled data */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataWithJpgContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( 
trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, true); }), FORM_JPG); } /** * Verifies custom form data for a blank PDF content type with labeled data */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataWithBlankPdfContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, true); }), BLANK_PDF); } /** * Verifies custom form data for a document using source as input stream data and valid labeled model Id, * excluding field elements. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataExcludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, true); }), FORM_JPG); } /** * Verifies an exception thrown for a document using null form data value. 
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
// NOTE(review): every @MethodSource value in this file was truncated at the '#'; restored to the
// conventional Azure SDK supplier "TestUtils#getTestParameters" — confirm against TestUtils.
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeCustomFormLabeledDataWithNullFormData(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> {
        SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller =
            getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl,
                useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE);
        syncPoller.waitForCompletion();

        // A null InputStream must be rejected client-side.
        assertThrows(RuntimeException.class, () -> client.beginRecognizeCustomForms(
            syncPoller.getFinalResult().getModelId(), (InputStream) null, dataLength,
            new RecognizeCustomFormsOptions()
                .setContentType(APPLICATION_PDF).setFieldElementsIncluded(true)
                .setPollInterval(durationTestMode), Context.NONE));
    }), INVOICE_6_PDF);
}

/**
 * Verifies an exception thrown for a document using null model id.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeCustomFormLabeledDataWithNullModelId(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        Exception ex = assertThrows(RuntimeException.class, () -> client.beginRecognizeCustomForms(
            null, data, dataLength, new RecognizeCustomFormsOptions()
                .setContentType(APPLICATION_PDF).setFieldElementsIncluded(true)
                .setPollInterval(durationTestMode), Context.NONE));
        assertEquals(MODEL_ID_IS_REQUIRED_EXCEPTION_MESSAGE, ex.getMessage());
    }, INVOICE_6_PDF);
}

/**
 * Verifies an exception thrown for an empty model id.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeCustomFormLabeledDataWithEmptyModelId(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        Exception ex = assertThrows(RuntimeException.class, () -> client.beginRecognizeCustomForms(
            "", data, dataLength, new RecognizeCustomFormsOptions()
                .setContentType(APPLICATION_PDF).setFieldElementsIncluded(true)
                .setPollInterval(durationTestMode), Context.NONE));
        assertEquals(INVALID_UUID_EXCEPTION_MESSAGE, ex.getMessage());
    }, INVOICE_6_PDF);
}

// NOTE(review): the source contained a dangling @ParameterizedTest/@MethodSource pair here with no
// method attached (the method body appears to have been lost in transit); the orphan annotations
// were removed — verify against the upstream file whether a test method is missing here.

/**
 * Verifies content type will be auto detected when using custom form API with input stream data overload.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeCustomFormLabeledDataWithContentTypeAutoDetection(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    localFilePathRunner((filePath, dataLength) -> beginTrainingLabeledRunner(
        (trainingFilesUrl, useTrainingLabels) -> {
            SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller =
                getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl,
                    useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE);
            trainingPoller.waitForCompletion();

            // No explicit content type: the client is expected to sniff it from the stream.
            SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
                client.beginRecognizeCustomForms(trainingPoller.getFinalResult().getModelId(),
                    getContentDetectionFileData(filePath), dataLength,
                    new RecognizeCustomFormsOptions().setFieldElementsIncluded(true)
                        .setPollInterval(durationTestMode), Context.NONE);
            syncPoller.waitForCompletion();
            validateRecognizedResult(syncPoller.getFinalResult(), true, true);
        }), FORM_JPG);
}

/** Verifies custom form recognition of a multi-page document with a labeled model. */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeCustomFormMultiPageLabeled(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> beginTrainingMultipageRunner((trainingFilesUrl) -> {
        SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller =
            getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, true,
                new TrainingOptions().setPollInterval(durationTestMode), Context.NONE);
        trainingPoller.waitForCompletion();
        String modelId = trainingPoller.getFinalResult().getModelId();

        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeCustomForms(modelId, data, dataLength,
                new RecognizeCustomFormsOptions()
                    .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE);
        syncPoller.waitForCompletion();
        validateMultiPageDataLabeled(syncPoller.getFinalResult(), modelId);
    }), MULTIPAGE_INVOICE_PDF);
}

/** Verifies custom form recognition of selection marks with a labeled model. */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeCustomFormLabeledDataWithSelectionMark(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> beginSelectionMarkTrainingLabeledRunner(
        (trainingFilesUrl, useTrainingLabels) -> {
            SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller =
                getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl,
                    useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE);
            trainingPoller.waitForCompletion();

            SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
                client.beginRecognizeCustomForms(trainingPoller.getFinalResult().getModelId(), data,
                    dataLength, new RecognizeCustomFormsOptions()
                        .setContentType(APPLICATION_PDF)
                        .setFieldElementsIncluded(true)
                        .setPollInterval(durationTestMode), Context.NONE);
            syncPoller.waitForCompletion();
            validateRecognizedResult(syncPoller.getFinalResult(), true, true);
        }), SELECTION_MARK_PDF);
}

/**
 * Verifies custom form data for a document using source as input stream data and valid labeled model Id.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeCustomFormUnlabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> {
        SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller =
            getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl,
                useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE);
        trainingPoller.waitForCompletion();

        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeCustomForms(trainingPoller.getFinalResult().getModelId(), data,
                dataLength, new RecognizeCustomFormsOptions()
                    .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE);
        syncPoller.waitForCompletion();
        validateRecognizedResult(syncPoller.getFinalResult(), false, false);
    }), INVOICE_6_PDF);
}

/**
 * Verifies custom form data for a document using source as input stream data and valid include element references.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeCustomFormUnlabeledDataIncludeFieldElements(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> {
        SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller =
            getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl,
                useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE);
        trainingPoller.waitForCompletion();

        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeCustomForms(trainingPoller.getFinalResult().getModelId(), data,
                dataLength, new RecognizeCustomFormsOptions()
                    .setContentType(APPLICATION_PDF).setFieldElementsIncluded(true)
                    .setPollInterval(durationTestMode), Context.NONE);
        syncPoller.waitForCompletion();
        validateRecognizedResult(syncPoller.getFinalResult(), true, false);
    }), INVOICE_6_PDF);
}

/** Verifies custom form recognition of a multi-page document with an unlabeled model. */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeCustomFormMultiPageUnlabeled(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> beginTrainingMultipageRunner((trainingFilesUrl) -> {
        SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller =
            getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, false,
                new TrainingOptions().setPollInterval(durationTestMode), Context.NONE);
        trainingPoller.waitForCompletion();

        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeCustomForms(trainingPoller.getFinalResult().getModelId(), data,
                dataLength, new RecognizeCustomFormsOptions()
                    .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE);
        syncPoller.waitForCompletion();
        validateMultiPageDataUnlabeled(syncPoller.getFinalResult());
    }), MULTIPAGE_INVOICE_PDF);
}

/**
 * Verifies custom form data for a JPG content type with unlabeled data
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUnlabeledDataWithJpgContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, false); }), FORM_JPG); } /** * Verifies custom form data for a blank PDF content type with unlabeled data */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUnlabeledDataWithBlankPdfContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = 
client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, false); }), BLANK_PDF); } /** * Verifies custom form data for an URL document data without labeled data */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlUnlabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, false); }), FORM_JPG); } /** * Verifies custom form data for an URL document data without labeled data and include element references */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlUnlabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { 
SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions() .setFieldElementsIncluded(true).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), true, false); }), FORM_JPG); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlMultiPageUnlabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); testingContainerUrlRunner(fileUrl -> beginTrainingMultipageRunner((trainingFilesUrl) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, false, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateMultiPageDataUnlabeled(syncPoller.getFinalResult()); }), MULTIPAGE_INVOICE_PDF); } /** * Verifies that an exception is thrown for invalid training data source. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); HttpResponseException httpResponseException = assertThrows( HttpResponseException.class, () -> client.beginRecognizeCustomFormsFromUrl( createdModel.getModelId(), INVALID_URL, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE).getFinalResult()); final FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) httpResponseException.getValue(); assertEquals(INVALID_SOURCE_URL_ERROR_CODE, errorInformation.getErrorCode()); }); } /** * Verifies an exception thrown for a null model id when recognizing custom form from URL. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormFromUrlLabeledDataWithNullModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> { Exception ex = assertThrows(RuntimeException.class, () -> client.beginRecognizeCustomFormsFromUrl( null, fileUrl, new RecognizeCustomFormsOptions().setPollInterval(durationTestMode), Context.NONE)); assertEquals(MODEL_ID_IS_REQUIRED_EXCEPTION_MESSAGE, ex.getMessage()); }, FORM_JPG); } /** * Verifies an exception thrown for an empty model id for recognizing custom forms from URL. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormFromUrlLabeledDataWithEmptyModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> beginTrainingMultipageRunner((trainingFilesUrl) -> { IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, () -> client.beginRecognizeCustomFormsFromUrl("", fileUrl, new RecognizeCustomFormsOptions().setPollInterval(durationTestMode), Context.NONE)); assertEquals(INVALID_UUID_EXCEPTION_MESSAGE, ex.getMessage()); }), FORM_JPG); } /** * Verifies custom form data for an URL document data with labeled data and include element references */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlLabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions() .setFieldElementsIncluded(true).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), true, true); }), FORM_JPG); } /** * Verifies custom form data for an URL document data with labeled data */ @ParameterizedTest(name = 
DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlLabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { urlRunner(fileUrl -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { client = getFormRecognizerClient(httpClient, serviceVersion); SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, true); }), FORM_JPG); } /** * Verify custom form for an URL of multi-page labeled data */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlMultiPageLabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> beginTrainingMultipageRunner((trainingFilesUrl) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, true, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); String modelId = trainingPoller.getFinalResult().getModelId(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( modelId, fileUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE); 
syncPoller.waitForCompletion(); validateMultiPageDataLabeled(syncPoller.getFinalResult(), modelId); }), MULTIPAGE_INVOICE_PDF); } /** * Verifies encoded blank url must stay same when sent to service for a document using invalid source url with \ * encoded blank space as input data to recognize a custom form from url API. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils @Disabled("https: public void recognizeCustomFormFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); encodedBlankSpaceSourceUrlRunner(sourceUrl -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client .beginRecognizeCustomFormsFromUrl(NON_EXIST_MODEL_ID, sourceUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE)); validateExceptionSource(errorResponseException); }); } /** * Verify that custom forom with invalid model id. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlNonExistModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeCustomFormsFromUrl(NON_EXIST_MODEL_ID, fileUrl, new RecognizeCustomFormsOptions().setPollInterval(durationTestMode), Context.NONE)); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) errorResponseException.getValue(); assertEquals(INVALID_MODEL_ID_ERROR_CODE, errorInformation.getErrorCode()); }, FORM_JPG); } /** * Verify that custom form with damaged PDF file. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); damagedPdfDataRunner((data, dataLength) -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); HttpResponseException httpResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeCustomForms(trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE).getFinalResult()); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) httpResponseException.getValue(); assertEquals("Invalid input file.", errorInformation.getMessage()); })); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlLabeledDataWithSelectionMark(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { urlRunner(fileUrl -> beginSelectionMarkTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { client = getFormRecognizerClient(httpClient, serviceVersion); SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = 
client.beginRecognizeCustomFormsFromUrl(trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions().setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), true, true); }), SELECTION_MARK_PDF); } /** * Verifies recognized form type when labeled model used for recognition and model name is provided by user. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void checkRecognizeFormTypeLabeledWithModelName( HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode).setModelName("model1"), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion) .getFormRecognizerClient(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller1 = formRecognizerClient.beginRecognizeCustomForms( createdModel.getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller1.waitForCompletion(); final RecognizedForm recognizedForm = syncPoller1.getFinalResult().stream().findFirst().get(); assertEquals("custom:model1", recognizedForm.getFormType()); assertNotNull(recognizedForm.getFormTypeConfidence()); final CustomFormSubmodel submodel = createdModel.getSubmodels().get(0); assertEquals("custom:model1", 
submodel.getFormType()); formTrainingClient.deleteModel(createdModel.getModelId()); }); }, FORM_JPG); } /** * Verifies recognized form type when labeled model used for recognition and model name is not provided by user. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void checkRecognizedFormTypeLabeledModel( HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion) .getFormRecognizerClient(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller1 = formRecognizerClient.beginRecognizeCustomForms( createdModel.getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller1.waitForCompletion(); final RecognizedForm recognizedForm = syncPoller1.getFinalResult().stream().findFirst().get(); assertEquals("custom:" + createdModel.getModelId(), recognizedForm.getFormType()); assertNotNull(recognizedForm.getFormTypeConfidence()); final CustomFormSubmodel submodel = createdModel.getSubmodels().get(0); assertEquals("custom:" + createdModel.getModelId(), submodel.getFormType()); formTrainingClient.deleteModel(createdModel.getModelId()); }); }, FORM_JPG); } /** * Verifies recognized form type when unlabeled model used for recognition and model name is not provided by user. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void checkRecognizedFormTypeUnlabeledModel( HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion) .getFormRecognizerClient(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller1 = formRecognizerClient.beginRecognizeCustomForms( createdModel.getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller1.waitForCompletion(); final RecognizedForm recognizedForm = syncPoller1.getFinalResult().stream().findFirst().get(); assertEquals("form-0", recognizedForm.getFormType()); final CustomFormSubmodel submodel = createdModel.getSubmodels().get(0); assertEquals("form-0", submodel.getFormType()); formTrainingClient.deleteModel(createdModel.getModelId()); }); }, FORM_JPG); } /** * Verifies recognized form type when unlabeled model used for recognition and model name is provided by user. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void checkRecognizedFormTypeUnlabeledModelWithModelName( HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode).setModelName("model1"), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion) .getFormRecognizerClient(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller1 = formRecognizerClient.beginRecognizeCustomForms( createdModel.getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller1.waitForCompletion(); final RecognizedForm recognizedForm = syncPoller1.getFinalResult().stream().findFirst().get(); assertEquals("form-0", recognizedForm.getFormType()); final CustomFormSubmodel submodel = createdModel.getSubmodels().get(0); assertEquals("form-0", submodel.getFormType()); formTrainingClient.deleteModel(createdModel.getModelId()); }); }, FORM_JPG); } /** * Verifies recognized form type when using composed model for recognition when display name is not provided by user. 
*/
// NOTE(review): throughout this file the @MethodSource / @Disabled string literals appear
// truncated by text extraction (no closing quote). Presumably the full @MethodSource value is
// "com.azure.ai.formrecognizer.TestUtils#getTestParameters" -- confirm against the repository.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void checkRecognizeFormTypeComposedModel(
    HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> {
            // Train the first labeled model.
            SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller =
                formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels,
                    new TrainingOptions().setPollInterval(durationTestMode), Context.NONE);
            syncPoller.waitForCompletion();
            CustomFormModel createdModel = syncPoller.getFinalResult();

            // Train a second labeled model from the same training files.
            SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller1 =
                formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels,
                    new TrainingOptions().setPollInterval(durationTestMode), Context.NONE);
            syncPoller1.waitForCompletion();
            CustomFormModel createdModel1 = syncPoller1.getFinalResult();

            // Compose the two trained models into a single composed model.
            SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller2 =
                formTrainingClient.beginCreateComposedModel(
                    Arrays.asList(createdModel.getModelId(), createdModel1.getModelId()),
                    new CreateComposedModelOptions(),
                    Context.NONE).setPollInterval(durationTestMode);
            syncPoller2.waitForCompletion();
            CustomFormModel composedModel = syncPoller2.getFinalResult();

            FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion)
                .getFormRecognizerClient();
            // Recognize a custom form using the composed model.
            SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller3 =
                formRecognizerClient.beginRecognizeCustomForms(
                    composedModel.getModelId(),
                    data,
                    dataLength,
                    new RecognizeCustomFormsOptions()
                        .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode),
                    Context.NONE);
            syncPoller3.waitForCompletion();

            final RecognizedForm recognizedForm = syncPoller3.getFinalResult().stream().findFirst().get();
            // The recognized form type must reference one of the two submodels ("custom:<modelId>").
            if (recognizedForm.getFormType().equals("custom:" + createdModel1.getModelId())
                || recognizedForm.getFormType().equals("custom:" + createdModel.getModelId())) {
                assertTrue(true);
            } else {
                fail();
            }
            assertNotNull(recognizedForm.getFormTypeConfidence());

            // Each submodel's form type is "custom:" prefixed with its own model id.
            composedModel.getSubmodels()
                .forEach(customFormSubmodel -> {
                    if (createdModel.getModelId().equals(customFormSubmodel.getModelId())) {
                        assertEquals("custom:" + createdModel.getModelId(), customFormSubmodel.getFormType());
                    } else {
                        assertEquals("custom:" + createdModel1.getModelId(), customFormSubmodel.getFormType());
                    }
                });

            // Clean up every model created by this test.
            formTrainingClient.deleteModel(createdModel.getModelId());
            formTrainingClient.deleteModel(createdModel1.getModelId());
            formTrainingClient.deleteModel(composedModel.getModelId());
        });
    }, FORM_JPG);
}

/**
 * Verifies recognized form type when using composed model for recognition when model name is provided by user.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void checkRecognizeFormTypeComposedModelWithModelName(
    HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> {
            // Train two labeled models, each with an explicit user-supplied model name.
            SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller =
                formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels,
                    new TrainingOptions().setPollInterval(durationTestMode).setModelName("model1"),
                    Context.NONE);
            syncPoller.waitForCompletion();
            CustomFormModel createdModel = syncPoller.getFinalResult();

            SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller1 =
                formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels,
                    new TrainingOptions().setPollInterval(durationTestMode).setModelName("model2"),
                    Context.NONE);
            syncPoller1.waitForCompletion();
            CustomFormModel createdModel1 = syncPoller1.getFinalResult();

            // Compose the two models under a user-supplied composed-model name.
            SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller2 =
                formTrainingClient.beginCreateComposedModel(
                    Arrays.asList(createdModel.getModelId(), createdModel1.getModelId()),
                    new CreateComposedModelOptions().setModelName("composedModelName"),
                    Context.NONE).setPollInterval(durationTestMode);
            syncPoller2.waitForCompletion();
            CustomFormModel composedModel = syncPoller2.getFinalResult();

            FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion)
                .getFormRecognizerClient();
            SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller3 =
                formRecognizerClient.beginRecognizeCustomForms(
                    composedModel.getModelId(),
                    data,
                    dataLength,
                    new RecognizeCustomFormsOptions()
                        .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode),
                    Context.NONE);
            syncPoller3.waitForCompletion();

            final RecognizedForm recognizedForm = syncPoller3.getFinalResult().stream().findFirst().get();
            // With named models the recognized form type is "<composedModelName>:<modelName>".
            String expectedFormType1 = "composedModelName:model1";
            String expectedFormType2 = "composedModelName:model2";
            assertTrue(expectedFormType1.equals(recognizedForm.getFormType())
                || expectedFormType2.equals(recognizedForm.getFormType()));
            assertNotNull(recognizedForm.getFormTypeConfidence());

            formTrainingClient.deleteModel(createdModel.getModelId());
            formTrainingClient.deleteModel(createdModel1.getModelId());
            formTrainingClient.deleteModel(composedModel.getModelId());
        });
    }, FORM_JPG);
}

/**
 * Verifies business card data for a document using source as input stream data.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeBusinessCards(data, dataLength,
                new RecognizeBusinessCardsOptions().setContentType(FormContentType.IMAGE_JPEG), Context.NONE)
                .setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        // Field elements not requested, hence the 'false' argument.
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, BUSINESS_CARD);
    }, BUSINESS_CARD_JPG);
}

/**
 * Verifies an exception thrown for a document using null data value.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardDataNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    // A null input stream must fail fast with NullPointerException.
    assertThrows(NullPointerException.class, () -> client.beginRecognizeBusinessCards(
        null, 0));
}

/**
 * Verifies content type will be auto detected when using business card API with input stream data overload.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardDataWithContentTypeAutoDetection(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    localFilePathRunner((filePath, dataLength) -> {
        // No explicit content type set; the client must auto-detect it.
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeBusinessCards(getContentDetectionFileData(filePath), dataLength,
                new RecognizeBusinessCardsOptions(), Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, BUSINESS_CARD);
    }, BUSINESS_CARD_JPG);
}

/**
 * Verifies business card data for a document using source as input stream data and text content when
 * includeFieldElements is true.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardDataIncludeFieldElements(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeBusinessCards(data, dataLength,
                new RecognizeBusinessCardsOptions().setContentType(FormContentType.IMAGE_JPEG)
                    .setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), true, BUSINESS_CARD);
    }, BUSINESS_CARD_JPG);
}

/**
 * Verifies business card data from a document using PNG file data as source and including text content details.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardDataWithPngFile(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeBusinessCards(data, dataLength,
                new RecognizeBusinessCardsOptions().setContentType(
                    FormContentType.IMAGE_PNG).setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), true, BUSINESS_CARD);
    }, BUSINESS_CARD_PNG);
}

/**
 * Verifies business card data from a document using blank PDF.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardDataWithBlankPdf(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeBusinessCards(data, dataLength,
                new RecognizeBusinessCardsOptions().setContentType(APPLICATION_PDF), Context.NONE)
                .setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        // A blank PDF should still succeed, just with empty recognition results.
        validateBlankPdfResultData(syncPoller.getFinalResult());
    }, BLANK_PDF);
}

/**
 * Verify that business card recognition with damaged PDF file.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardFromDamagedPdf(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    damagedPdfDataRunner((data, dataLength) -> {
        // A corrupted PDF must surface as an HttpResponseException with a BadArgument error code.
        HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
            () -> client.beginRecognizeBusinessCards(data, dataLength,
                new RecognizeBusinessCardsOptions().setContentType(APPLICATION_PDF), Context.NONE)
                .setPollInterval(durationTestMode).getFinalResult());
        FormRecognizerErrorInformation errorInformation =
            (FormRecognizerErrorInformation) httpResponseException.getValue();
        assertEquals(BAD_ARGUMENT_CODE, errorInformation.getErrorCode());
    });
}

/**
 * Verify business card recognition with multipage pdf.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeMultipageBusinessCard(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeBusinessCards(data, dataLength,
                new RecognizeBusinessCardsOptions()
                    .setContentType(APPLICATION_PDF)
                    .setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validateMultipageBusinessData(syncPoller.getFinalResult());
    }, MULTIPAGE_BUSINESS_CARD_PDF);
}

/**
 * Verifies business card data for a document using source as file url.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    urlRunner((sourceUrl) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeBusinessCardsFromUrl(sourceUrl,
                new RecognizeBusinessCardsOptions(), Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, BUSINESS_CARD);
    }, BUSINESS_CARD_JPG);
}

/**
 * Verifies encoded blank url must stay same when sent to service for a document using invalid source url with
 * encoded blank space as input data to recognize business card from url API.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
// NOTE(review): @Disabled argument (an issue-tracking URL) appears truncated by extraction.
@Disabled("https:
public void recognizeBusinessCardFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    encodedBlankSpaceSourceUrlRunner(sourceUrl -> {
        HttpResponseException errorResponseException = assertThrows(HttpResponseException.class,
            () -> client.beginRecognizeBusinessCardsFromUrl(sourceUrl,
                new RecognizeBusinessCardsOptions(), Context.NONE).setPollInterval(durationTestMode));
        // The encoded URL must be forwarded unchanged; verified via the exception's source.
        validateExceptionSource(errorResponseException);
    });
}

/**
 * Verifies that an exception is thrown for invalid source url.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardInvalidSourceUrl(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    invalidSourceUrlRunner((sourceUrl) -> assertThrows(HttpResponseException.class,
        () -> client.beginRecognizeBusinessCardsFromUrl(sourceUrl,
            new RecognizeBusinessCardsOptions(), Context.NONE).setPollInterval(durationTestMode)));
}

/**
 * Verifies business card data for a document using source as file url and include form element references
 * when includeFieldElements is true.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardFromUrlIncludeFieldElements(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    urlRunner(sourceUrl -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeBusinessCardsFromUrl(sourceUrl,
                new RecognizeBusinessCardsOptions().setFieldElementsIncluded(true), Context.NONE)
                .setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), true, BUSINESS_CARD);
    }, BUSINESS_CARD_JPG);
}

/**
 * Verifies business card data for a document using source as PNG file url and include form element references
 * when includeFieldElements is true.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeBusinessCardSourceUrlWithPngFile(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    urlRunner(sourceUrl -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeBusinessCardsFromUrl(sourceUrl,
                new RecognizeBusinessCardsOptions().setFieldElementsIncluded(true), Context.NONE)
                .setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), true, BUSINESS_CARD);
    }, BUSINESS_CARD_PNG);
}

/**
 * Verify business card recognition with multipage pdf url.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeMultipageBusinessCardUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    urlRunner(sourceUrl -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeBusinessCardsFromUrl(sourceUrl,
                new RecognizeBusinessCardsOptions()
                    .setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validateMultipageBusinessData(syncPoller.getFinalResult());
    }, MULTIPAGE_BUSINESS_CARD_PDF);
}

/**
 * Verify locale parameter passed when specified by user.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void receiptValidLocale(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    localFilePathRunner((filePath, dataLength) -> {
        client.beginRecognizeReceipts(
            getContentDetectionFileData(filePath),
            dataLength,
            new RecognizeReceiptsOptions().setPollInterval(durationTestMode)
                .setLocale(FormRecognizerLocale.EN_US),
            Context.NONE);
        // Assert the "locale" query parameter was actually sent on the wire.
        validateNetworkCallRecord("locale", "en-US");
    }, RECEIPT_CONTOSO_JPG);
}

/**
 * Verifies invoice data recognition for a document using source as input stream data.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeInvoiceData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeInvoices(data, dataLength,
                new RecognizeInvoicesOptions().setContentType(APPLICATION_PDF),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, INVOICE);
    }, INVOICE_PDF);
}

/**
 * Verifies content type will be auto detected when using invoice API with input stream data overload.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeInvoiceDataWithContentTypeAutoDetection(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    localFilePathRunner((filePath, dataLength) -> {
        // No explicit content type set; the client must auto-detect it.
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeInvoices(getContentDetectionFileData(filePath), dataLength,
                new RecognizeInvoicesOptions(), Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, INVOICE);
    }, INVOICE_PDF);
}

/**
 * Verifies invoice data for a document using source as input stream data and text content when
 * includeFieldElements is true.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeInvoiceDataIncludeFieldElements(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeInvoices(data, dataLength,
                new RecognizeInvoicesOptions()
                    .setContentType(APPLICATION_PDF)
                    .setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), true, INVOICE);
    }, INVOICE_PDF);
}

/**
 * Verifies invoice data from a document using blank PDF.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeInvoiceDataWithBlankPdf(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeInvoices(data, dataLength,
                new RecognizeInvoicesOptions().setContentType(APPLICATION_PDF),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        // A blank PDF should still succeed, just with empty recognition results.
        validateBlankPdfResultData(syncPoller.getFinalResult());
    }, BLANK_PDF);
}

/**
 * Verify that invoice recognition with damaged PDF file.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeInvoiceFromDamagedPdf(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    damagedPdfDataRunner((data, dataLength) -> {
        // A corrupted PDF must surface as an HttpResponseException with a BadArgument error code.
        HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
            () -> client.beginRecognizeInvoices(data, dataLength,
                new RecognizeInvoicesOptions().setContentType(APPLICATION_PDF),
                Context.NONE).setPollInterval(durationTestMode).getFinalResult());
        FormRecognizerErrorInformation errorInformation =
            (FormRecognizerErrorInformation) httpResponseException.getValue();
        assertEquals(BAD_ARGUMENT_CODE, errorInformation.getErrorCode());
    });
}

/**
 * Verify invoice data recognition with multipage pdf.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeMultipageInvoice(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeInvoices(data, dataLength,
                new RecognizeInvoicesOptions()
                    .setContentType(APPLICATION_PDF)
                    .setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validateMultipageInvoiceData(syncPoller.getFinalResult());
    }, MULTIPAGE_VENDOR_INVOICE_PDF);
}

/**
 * Verifies invoice card data for a document using source as file url.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeInvoiceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    urlRunner((sourceUrl) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeInvoicesFromUrl(sourceUrl,
                new RecognizeInvoicesOptions(),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, INVOICE);
    }, INVOICE_PDF);
}

/**
 * Verifies encoded blank url must stay same when sent to service for a document using invalid source url with
 * encoded blank space as input data to recognize invoice card from url API.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
// NOTE(review): @Disabled argument (an issue-tracking URL) appears truncated by extraction.
@Disabled("https:
public void recognizeInvoiceFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    encodedBlankSpaceSourceUrlRunner(sourceUrl -> {
        HttpResponseException errorResponseException = assertThrows(HttpResponseException.class,
            () -> client.beginRecognizeInvoicesFromUrl(sourceUrl,
                new RecognizeInvoicesOptions(),
                Context.NONE).setPollInterval(durationTestMode));
        // The encoded URL must be forwarded unchanged; verified via the exception's source.
        validateExceptionSource(errorResponseException);
    });
}

/**
 * Verifies that an exception is thrown for invalid source url.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeInvoiceInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    invalidSourceUrlRunner((sourceUrl) -> assertThrows(HttpResponseException.class,
        () -> client.beginRecognizeInvoicesFromUrl(sourceUrl,
            new RecognizeInvoicesOptions(),
            Context.NONE).setPollInterval(durationTestMode)));
}

/**
 * Verifies invoice data for a document using source as file url and include form element references
 * when includeFieldElements is true.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeInvoiceFromUrlIncludeFieldElements(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    urlRunner(sourceUrl -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeInvoicesFromUrl(sourceUrl,
                new RecognizeInvoicesOptions().setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), true, INVOICE);
    }, INVOICE_PDF);
}

/**
 * Verify locale parameter passed when specified by user.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void invoiceValidLocale(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    localFilePathRunner((filePath, dataLength) -> {
        client.beginRecognizeInvoices(
            getContentDetectionFileData(filePath),
            dataLength,
            new RecognizeInvoicesOptions().setLocale(FormRecognizerLocale.EN_US),
            Context.NONE).setPollInterval(durationTestMode);
        // Assert the "locale" query parameter was actually sent on the wire.
        validateNetworkCallRecord("locale", "en-US");
    }, INVOICE_PDF);
}

/**
 * Verifies license card data from a document using file data as source.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeLicenseCardData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocuments(data, dataLength,
                new RecognizeIdentityDocumentOptions().setContentType(FormContentType.IMAGE_JPEG), Context.NONE)
                .setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, IDENTITY);
    }, LICENSE_CARD_JPG);
}

/**
 * Verifies an exception thrown for a document using null data value.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeIDDocumentDataNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    // A null input stream must fail fast with NullPointerException.
    assertThrows(NullPointerException.class, () -> client.beginRecognizeIdentityDocuments(
        null, 0));
}

/**
 * Verifies content type will be auto detected when using identity document API with input stream data overload.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeLicenseDataWithContentTypeAutoDetection(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    localFilePathRunner((filePath, dataLength) -> {
        // No explicit content type set; the client must auto-detect it.
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocuments(
                getContentDetectionFileData(filePath),
                dataLength,
                new RecognizeIdentityDocumentOptions(),
                Context.NONE
            ).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, IDENTITY);
    }, LICENSE_CARD_JPG);
}

/**
 * Verifies identity document data from a document using file data as source and including element reference details.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeLicenseDataIncludeFieldElements(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocuments(data, dataLength,
                new RecognizeIdentityDocumentOptions()
                    .setContentType(FormContentType.IMAGE_JPEG)
                    .setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), true, IDENTITY);
    }, LICENSE_CARD_JPG);
}

/**
 * Verifies identity document data from a document using blank PDF.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeIDDocumentWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocuments(
                data,
                dataLength,
                new RecognizeIdentityDocumentOptions()
                    .setContentType(FormContentType.APPLICATION_PDF),
                Context.NONE
            ).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        // A blank PDF yields no recognized identity documents.
        assertEquals(0, syncPoller.getFinalResult().size());
    }, BLANK_PDF);
}

/**
 * Verify that identity document recognition with damaged PDF file.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeIDDocumentFromDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    damagedPdfDataRunner((data, dataLength) -> {
        // A corrupted PDF must surface as an HttpResponseException with a BadArgument error code.
        HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
            () -> client.beginRecognizeIdentityDocuments(
                data,
                dataLength,
                new RecognizeIdentityDocumentOptions()
                    .setContentType(FormContentType.APPLICATION_PDF),
                Context.NONE
            ).setPollInterval(durationTestMode).getFinalResult());
        FormRecognizerErrorInformation errorInformation =
            (FormRecognizerErrorInformation) httpResponseException.getValue();
        assertEquals(BAD_ARGUMENT_CODE, errorInformation.getErrorCode());
    });
}

/**
 * Verifies identity document data for a document using source as file url.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeLicenseSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    urlRunner(sourceUrl -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocumentsFromUrl(sourceUrl,
                new RecognizeIdentityDocumentOptions(),
                Context.NONE
            ).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, IDENTITY);
    }, LICENSE_CARD_JPG);
}

/**
 * Verifies that an exception is thrown for invalid source url.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeIDDocumentInvalidSourceUrl(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    invalidSourceUrlRunner((invalidSourceUrl) -> {
        // An unreachable image URL must surface as an InvalidImageURL service error.
        HttpResponseException errorResponseException = assertThrows(HttpResponseException.class,
            () -> client.beginRecognizeIdentityDocumentsFromUrl(
                invalidSourceUrl,
                new RecognizeIdentityDocumentOptions(),
                Context.NONE
            ).setPollInterval(durationTestMode).getFinalResult());
        FormRecognizerErrorInformation errorInformation =
            (FormRecognizerErrorInformation) errorResponseException.getValue();
        assertEquals(INVALID_IMAGE_URL_ERROR_CODE, errorInformation.getErrorCode());
    });
}

/**
 * Verifies license identity data for a document using source as file url and include content when
 * includeFieldElements is true.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils
public void recognizeIDDocumentFromUrlIncludeFieldElements(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    urlRunner(sourceUrl -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocumentsFromUrl(sourceUrl,
                new RecognizeIdentityDocumentOptions().setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), true, IDENTITY);
    }, LICENSE_CARD_JPG);
}
}
class FormRecognizerClientTest extends FormRecognizerClientTestBase { private FormRecognizerClient client; private FormRecognizerClient getFormRecognizerClient(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { return getFormRecognizerClientBuilder(httpClient, serviceVersion).buildClient(); } private FormTrainingClient getFormTrainingClient(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { return getFormTrainingClientBuilder(httpClient, serviceVersion).buildClient(); } /** * Verifies receipt data for a document using source as input stream data. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts(data, dataLength, new RecognizeReceiptsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, RECEIPT); }, RECEIPT_CONTOSO_JPG); } /** * Verifies an exception thrown for a document using null data value. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptDataNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); assertThrows(NullPointerException.class, () -> client.beginRecognizeReceipts(null, 0)); } /** * Verifies content type will be auto detected when using receipt API with input stream data overload. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptDataWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); localFilePathRunner((filePath, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts( getContentDetectionFileData(filePath), dataLength, new RecognizeReceiptsOptions() .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, RECEIPT); }, RECEIPT_CONTOSO_JPG); } /** * Verifies receipt data for a document using source as as input stream data and text content when * includeFieldElements is true. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts( data, dataLength, new RecognizeReceiptsOptions().setContentType(FormContentType.IMAGE_JPEG) .setFieldElementsIncluded(true).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, RECEIPT); }, RECEIPT_CONTOSO_JPG); } /** * Verifies receipt data from a document using PNG file data as source and including text content details. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptDataWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts(data, dataLength, new RecognizeReceiptsOptions().setContentType( FormContentType.IMAGE_PNG).setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, RECEIPT); }, RECEIPT_CONTOSO_PNG); } /** * Verifies receipt data from a document using blank PDF. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptDataWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts( data, dataLength, new RecognizeReceiptsOptions().setContentType(APPLICATION_PDF) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateBlankPdfResultData(syncPoller.getFinalResult()); }, BLANK_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptFromDataMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceipts( data, dataLength, new RecognizeReceiptsOptions().setContentType(APPLICATION_PDF) 
.setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateMultipageReceiptData(syncPoller.getFinalResult()); }, MULTIPAGE_RECEIPT_PDF); } /** * Verify that receipt recognition with damaged PDF file. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptFromDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); damagedPdfDataRunner((data, dataLength) -> { HttpResponseException httpResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeReceipts(data, dataLength, new RecognizeReceiptsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE) .getFinalResult()); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) httpResponseException.getValue(); assertEquals(BAD_ARGUMENT_CODE, errorInformation.getErrorCode()); }); } /** * Verifies receipt data for a document using source as file url. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner((sourceUrl) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceiptsFromUrl(sourceUrl).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, RECEIPT); }, RECEIPT_CONTOSO_JPG); } /** * Verifies encoded blank url must stay same when sent to service for a document using invalid source url with * encoded blank space as input data to recognize receipt from url API. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils @Disabled("https: public void recognizeReceiptFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); encodedBlankSpaceSourceUrlRunner(sourceUrl -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeReceiptsFromUrl(sourceUrl, new RecognizeReceiptsOptions() .setPollInterval(durationTestMode), Context.NONE)); validateExceptionSource(errorResponseException); }); } /** * Verifies that an exception is thrown for invalid source url. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); invalidSourceUrlRunner((sourceUrl) -> assertThrows(HttpResponseException.class, () -> client.beginRecognizeReceiptsFromUrl(sourceUrl, new RecognizeReceiptsOptions().setPollInterval(durationTestMode), Context.NONE))); } /** * Verifies receipt data for a document using source as file url and include form element references * when includeFieldElements is true. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptFromUrlIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceiptsFromUrl(sourceUrl, new RecognizeReceiptsOptions().setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, RECEIPT); }, RECEIPT_CONTOSO_JPG); } /** * Verifies receipt data for a document using source as PNG file url and include form element references * when includeFieldElements is true. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeReceiptSourceUrlWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceiptsFromUrl(sourceUrl, new RecognizeReceiptsOptions() .setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, RECEIPT); }, RECEIPT_CONTOSO_PNG); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils @Disabled public void recognizeReceiptFromUrlMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(receiptUrl -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeReceiptsFromUrl( receiptUrl, new 
RecognizeReceiptsOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateMultipageReceiptData(syncPoller.getFinalResult()); }, MULTIPAGE_INVOICE_PDF); } /** * Verifies layout/content data for a document using source as input stream data. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContent(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, FORM_JPG); } /** * Verifies an exception thrown for a document using null data value. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentResultWithNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); assertThrows(NullPointerException.class, () -> client.beginRecognizeContent(null, 0)); } /** * Verifies content type will be auto detected when using content/layout API with input stream data overload. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentResultWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); localFilePathRunner((filePath, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent( getContentDetectionFileData(filePath), dataLength, new RecognizeContentOptions() .setContentType(null).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, FORM_JPG); } /** * Verifies blank form file is still a valid file to process */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentResultWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, BLANK_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentFromDataMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), 
Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, MULTIPAGE_INVOICE_PDF); } /** * Verify that content recognition with damaged PDF file. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentFromDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); damagedPdfDataRunner((data, dataLength) -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE) .getFinalResult()); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) errorResponseException.getValue(); assertEquals(INVALID_IMAGE_ERROR_CODE, errorInformation.getErrorCode()); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentWithSelectionMarks(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, SELECTION_MARK_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentWithPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { 
SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF) .setPollInterval(durationTestMode) .setPages(Collections.singletonList("1")), Context.NONE); syncPoller.waitForCompletion(); List<FormPage> formPages = syncPoller.getFinalResult(); validateContentResultData(formPages, false); assertEquals(1, formPages.size()); }, MULTIPAGE_INVOICE_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentWithPages(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF) .setPollInterval(durationTestMode) .setPages(Arrays.asList("1", "2")), Context.NONE); syncPoller.waitForCompletion(); List<FormPage> formPages = syncPoller.getFinalResult(); validateContentResultData(formPages, false); assertEquals(2, formPages.size()); }, MULTIPAGE_INVOICE_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentWithPageRange(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(APPLICATION_PDF) .setPollInterval(durationTestMode) .setPages(Arrays.asList("1-2", "3")), Context.NONE); syncPoller.waitForCompletion(); List<FormPage> formPages = syncPoller.getFinalResult(); validateContentResultData(formPages, false); assertEquals(3, 
formPages.size()); }, MULTIPAGE_INVOICE_PDF); } /** * Verifies layout data for a document using source as input stream data. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentAppearance(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContent(data, dataLength, new RecognizeContentOptions() .setContentType(FormContentType.IMAGE_JPEG) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); List<FormPage> formPages = syncPoller.getFinalResult(); validateContentResultData(formPages, false); assertEquals(TextStyleName.OTHER, formPages.get(0).getLines().get(0).getAppearance().getStyleName()); }, FORM_JPG); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentFromUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions() .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, FORM_JPG); } /** * Verifies encoded blank url must stay same when sent to service for a document using invalid source url with * encoded blank space as input data to recognize a content from url API. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils @Disabled("https: public void recognizeContentFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); encodedBlankSpaceSourceUrlRunner(sourceUrl -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions() .setPollInterval(durationTestMode), Context.NONE)); validateExceptionSource(errorResponseException); }); } /** * Verifies layout data for a pdf url */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentFromUrlWithPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, INVOICE_6_PDF); } /** * Verifies that an exception is thrown for invalid source url for recognizing content/layout information. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); invalidSourceUrlRunner((invalidSourceUrl) -> assertThrows( HttpResponseException.class, () -> client.beginRecognizeContentFromUrl(invalidSourceUrl, new RecognizeContentOptions().setPollInterval(durationTestMode), Context.NONE))); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentFromUrlMultiPage(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner((formUrl) -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContentFromUrl(formUrl, new RecognizeContentOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, MULTIPAGE_INVOICE_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentWithSelectionMarksFromUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); }, SELECTION_MARK_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeGermanContentFromUrl(HttpClient httpClient, FormRecognizerServiceVersion 
serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); testingContainerUrlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<FormPage>> syncPoller = client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions().setPollInterval(durationTestMode) .setLanguage(FormRecognizerLanguage.DE), Context.NONE); syncPoller.waitForCompletion(); validateContentResultData(syncPoller.getFinalResult(), false); validateNetworkCallRecord("language", "de"); }, CONTENT_GERMAN_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeContentIncorrectLanguageFromUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); testingContainerUrlRunner(sourceUrl -> { HttpResponseException exception = assertThrows(HttpResponseException.class, () -> client.beginRecognizeContentFromUrl(sourceUrl, new RecognizeContentOptions().setPollInterval(durationTestMode) .setLanguage(FormRecognizerLanguage.fromString("language")), Context.NONE)); assertEquals(((FormRecognizerErrorInformation) exception.getValue()).getErrorCode(), "NotSupportedLanguage"); }, CONTENT_GERMAN_PDF); } /** * Verifies custom form data for a document using source as input stream data and valid labeled model Id. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(IMAGE_JPEG).setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), true, true); }), FORM_JPG); } /** * Verifies custom form data for a JPG content type with labeled data */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataWithJpgContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( 
trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, true); }), FORM_JPG); } /** * Verifies custom form data for a blank PDF content type with labeled data */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataWithBlankPdfContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, true); }), BLANK_PDF); } /** * Verifies custom form data for a document using source as input stream data and valid labeled model Id, * excluding field elements. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataExcludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, true); }), FORM_JPG); } /** * Verifies an exception thrown for a document using null form data value. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataWithNullFormData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); assertThrows(RuntimeException.class, () -> client.beginRecognizeCustomForms( syncPoller.getFinalResult().getModelId(), (InputStream) null, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE)); }), INVOICE_6_PDF ); } /** * Verifies an exception thrown for a document using null model id. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataWithNullModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { Exception ex = assertThrows(RuntimeException.class, () -> client.beginRecognizeCustomForms( null, data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE)); assertEquals(MODEL_ID_IS_REQUIRED_EXCEPTION_MESSAGE, ex.getMessage()); }, INVOICE_6_PDF); } /** * Verifies an exception thrown for an empty model id. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataWithEmptyModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { Exception ex = assertThrows(RuntimeException.class, () -> client.beginRecognizeCustomForms( "", data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE)); assertEquals(INVALID_UUID_EXCEPTION_MESSAGE, ex.getMessage()); }, INVOICE_6_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils /** * Verifies content type will be auto detected when using custom form API with input stream data overload. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); localFilePathRunner((filePath, dataLength) -> beginTrainingLabeledRunner( (trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms(trainingPoller.getFinalResult().getModelId(), getContentDetectionFileData(filePath), dataLength, new RecognizeCustomFormsOptions().setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); 
validateRecognizedResult(syncPoller.getFinalResult(), true, true); }), FORM_JPG); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormMultiPageLabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingMultipageRunner((trainingFilesUrl) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, true, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); String modelId = trainingPoller.getFinalResult().getModelId(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( modelId, data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateMultiPageDataLabeled(syncPoller.getFinalResult(), modelId); }), MULTIPAGE_INVOICE_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormLabeledDataWithSelectionMark(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginSelectionMarkTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = 
client.beginRecognizeCustomForms(trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF) .setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), true, true); }), SELECTION_MARK_PDF); } /** * Verifies custom form data for a document using source as input stream data and valid labeled model Id. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUnlabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, false); }), INVOICE_6_PDF); } /** * Verifies custom form data for a document using source as input stream data and valid include element references */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUnlabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, 
serviceVersion); dataRunner((data, dataLength) -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), true, false); }), INVOICE_6_PDF); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormMultiPageUnlabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingMultipageRunner((trainingFilesUrl) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, false, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateMultiPageDataUnlabeled(syncPoller.getFinalResult()); }), MULTIPAGE_INVOICE_PDF); } /** * Verifies custom form data for a JPG content type with unlabeled data */ 
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUnlabeledDataWithJpgContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, false); }), FORM_JPG); } /** * Verifies custom form data for a blank PDF content type with unlabeled data */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUnlabeledDataWithBlankPdfContentType(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = 
client.beginRecognizeCustomForms( trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, false); }), BLANK_PDF); } /** * Verifies custom form data for an URL document data without labeled data */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlUnlabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, false); }), FORM_JPG); } /** * Verifies custom form data for an URL document data without labeled data and include element references */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlUnlabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { 
SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions() .setFieldElementsIncluded(true).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), true, false); }), FORM_JPG); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlMultiPageUnlabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); testingContainerUrlRunner(fileUrl -> beginTrainingMultipageRunner((trainingFilesUrl) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, false, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateMultiPageDataUnlabeled(syncPoller.getFinalResult()); }), MULTIPAGE_INVOICE_PDF); } /** * Verifies that an exception is thrown for invalid training data source. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); HttpResponseException httpResponseException = assertThrows( HttpResponseException.class, () -> client.beginRecognizeCustomFormsFromUrl( createdModel.getModelId(), INVALID_URL, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE).getFinalResult()); final FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) httpResponseException.getValue(); assertEquals(INVALID_SOURCE_URL_ERROR_CODE, errorInformation.getErrorCode()); }); } /** * Verifies an exception thrown for a null model id when recognizing custom form from URL. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormFromUrlLabeledDataWithNullModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> { Exception ex = assertThrows(RuntimeException.class, () -> client.beginRecognizeCustomFormsFromUrl( null, fileUrl, new RecognizeCustomFormsOptions().setPollInterval(durationTestMode), Context.NONE)); assertEquals(MODEL_ID_IS_REQUIRED_EXCEPTION_MESSAGE, ex.getMessage()); }, FORM_JPG); } /** * Verifies an exception thrown for an empty model id for recognizing custom forms from URL. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormFromUrlLabeledDataWithEmptyModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> beginTrainingMultipageRunner((trainingFilesUrl) -> { IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, () -> client.beginRecognizeCustomFormsFromUrl("", fileUrl, new RecognizeCustomFormsOptions().setPollInterval(durationTestMode), Context.NONE)); assertEquals(INVALID_UUID_EXCEPTION_MESSAGE, ex.getMessage()); }), FORM_JPG); } /** * Verifies custom form data for an URL document data with labeled data and include element references */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlLabeledDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions() .setFieldElementsIncluded(true).setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), true, true); }), FORM_JPG); } /** * Verifies custom form data for an URL document data with labeled data */ @ParameterizedTest(name = 
DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlLabeledData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { urlRunner(fileUrl -> beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { client = getFormRecognizerClient(httpClient, serviceVersion); SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), false, true); }), FORM_JPG); } /** * Verify custom form for an URL of multi-page labeled data */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlMultiPageLabeled(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> beginTrainingMultipageRunner((trainingFilesUrl) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, true, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); String modelId = trainingPoller.getFinalResult().getModelId(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeCustomFormsFromUrl( modelId, fileUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE); 
syncPoller.waitForCompletion(); validateMultiPageDataLabeled(syncPoller.getFinalResult(), modelId); }), MULTIPAGE_INVOICE_PDF); } /** * Verifies encoded blank url must stay same when sent to service for a document using invalid source url with \ * encoded blank space as input data to recognize a custom form from url API. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils @Disabled("https: public void recognizeCustomFormFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); encodedBlankSpaceSourceUrlRunner(sourceUrl -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client .beginRecognizeCustomFormsFromUrl(NON_EXIST_MODEL_ID, sourceUrl, new RecognizeCustomFormsOptions() .setPollInterval(durationTestMode), Context.NONE)); validateExceptionSource(errorResponseException); }); } /** * Verify that custom forom with invalid model id. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlNonExistModelId(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(fileUrl -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeCustomFormsFromUrl(NON_EXIST_MODEL_ID, fileUrl, new RecognizeCustomFormsOptions().setPollInterval(durationTestMode), Context.NONE)); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) errorResponseException.getValue(); assertEquals(INVALID_MODEL_ID_ERROR_CODE, errorInformation.getErrorCode()); }, FORM_JPG); } /** * Verify that custom form with damaged PDF file. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); damagedPdfDataRunner((data, dataLength) -> beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); HttpResponseException httpResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeCustomForms(trainingPoller.getFinalResult().getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(APPLICATION_PDF).setPollInterval(durationTestMode), Context.NONE).getFinalResult()); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) httpResponseException.getValue(); assertEquals("Invalid input file.", errorInformation.getMessage()); })); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeCustomFormUrlLabeledDataWithSelectionMark(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { urlRunner(fileUrl -> beginSelectionMarkTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { client = getFormRecognizerClient(httpClient, serviceVersion); SyncPoller<FormRecognizerOperationResult, CustomFormModel> trainingPoller = getFormTrainingClient(httpClient, serviceVersion).beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); trainingPoller.waitForCompletion(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = 
client.beginRecognizeCustomFormsFromUrl(trainingPoller.getFinalResult().getModelId(), fileUrl, new RecognizeCustomFormsOptions().setFieldElementsIncluded(true) .setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); validateRecognizedResult(syncPoller.getFinalResult(), true, true); }), SELECTION_MARK_PDF); } /** * Verifies recognized form type when labeled model used for recognition and model name is provided by user. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void checkRecognizeFormTypeLabeledWithModelName( HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode).setModelName("model1"), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion) .getFormRecognizerClient(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller1 = formRecognizerClient.beginRecognizeCustomForms( createdModel.getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller1.waitForCompletion(); final RecognizedForm recognizedForm = syncPoller1.getFinalResult().stream().findFirst().get(); assertEquals("custom:model1", recognizedForm.getFormType()); assertNotNull(recognizedForm.getFormTypeConfidence()); final CustomFormSubmodel submodel = createdModel.getSubmodels().get(0); assertEquals("custom:model1", 
submodel.getFormType()); formTrainingClient.deleteModel(createdModel.getModelId()); }); }, FORM_JPG); } /** * Verifies recognized form type when labeled model used for recognition and model name is not provided by user. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void checkRecognizedFormTypeLabeledModel( HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion) .getFormRecognizerClient(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller1 = formRecognizerClient.beginRecognizeCustomForms( createdModel.getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller1.waitForCompletion(); final RecognizedForm recognizedForm = syncPoller1.getFinalResult().stream().findFirst().get(); assertEquals("custom:" + createdModel.getModelId(), recognizedForm.getFormType()); assertNotNull(recognizedForm.getFormTypeConfidence()); final CustomFormSubmodel submodel = createdModel.getSubmodels().get(0); assertEquals("custom:" + createdModel.getModelId(), submodel.getFormType()); formTrainingClient.deleteModel(createdModel.getModelId()); }); }, FORM_JPG); } /** * Verifies recognized form type when unlabeled model used for recognition and model name is not provided by user. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void checkRecognizedFormTypeUnlabeledModel( HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion) .getFormRecognizerClient(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller1 = formRecognizerClient.beginRecognizeCustomForms( createdModel.getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller1.waitForCompletion(); final RecognizedForm recognizedForm = syncPoller1.getFinalResult().stream().findFirst().get(); assertEquals("form-0", recognizedForm.getFormType()); final CustomFormSubmodel submodel = createdModel.getSubmodels().get(0); assertEquals("form-0", submodel.getFormType()); formTrainingClient.deleteModel(createdModel.getModelId()); }); }, FORM_JPG); } /** * Verifies recognized form type when unlabeled model used for recognition and model name is provided by user. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void checkRecognizedFormTypeUnlabeledModelWithModelName( HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { beginTrainingUnlabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode).setModelName("model1"), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion) .getFormRecognizerClient(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller1 = formRecognizerClient.beginRecognizeCustomForms( createdModel.getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller1.waitForCompletion(); final RecognizedForm recognizedForm = syncPoller1.getFinalResult().stream().findFirst().get(); assertEquals("form-0", recognizedForm.getFormType()); final CustomFormSubmodel submodel = createdModel.getSubmodels().get(0); assertEquals("form-0", submodel.getFormType()); formTrainingClient.deleteModel(createdModel.getModelId()); }); }, FORM_JPG); } /** * Verifies recognized form type when using composed model for recognition when display name is not provided by user. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void checkRecognizeFormTypeComposedModel( HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller1 = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode), Context.NONE); syncPoller1.waitForCompletion(); CustomFormModel createdModel1 = syncPoller1.getFinalResult(); SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller2 = formTrainingClient.beginCreateComposedModel( Arrays.asList(createdModel.getModelId(), createdModel1.getModelId()), new CreateComposedModelOptions(), Context.NONE).setPollInterval(durationTestMode); syncPoller2.waitForCompletion(); CustomFormModel composedModel = syncPoller2.getFinalResult(); FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion) .getFormRecognizerClient(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller3 = formRecognizerClient.beginRecognizeCustomForms( composedModel.getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller3.waitForCompletion(); final RecognizedForm recognizedForm = syncPoller3.getFinalResult().stream().findFirst().get(); if 
(recognizedForm.getFormType().equals("custom:" + createdModel1.getModelId()) || recognizedForm.getFormType().equals("custom:" + createdModel.getModelId())) { assertTrue(true); } else { fail(); } assertNotNull(recognizedForm.getFormTypeConfidence()); composedModel.getSubmodels() .forEach(customFormSubmodel -> { if (createdModel.getModelId().equals(customFormSubmodel.getModelId())) { assertEquals("custom:" + createdModel.getModelId(), customFormSubmodel.getFormType()); } else { assertEquals("custom:" + createdModel1.getModelId(), customFormSubmodel.getFormType()); } }); formTrainingClient.deleteModel(createdModel.getModelId()); formTrainingClient.deleteModel(createdModel1.getModelId()); formTrainingClient.deleteModel(composedModel.getModelId()); }); }, FORM_JPG); } /** * Verifies recognized form type when using composed model for recognition when model name is provided by user. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void checkRecognizeFormTypeComposedModelWithModelName( HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { final FormTrainingClient formTrainingClient = getFormTrainingClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { beginTrainingLabeledRunner((trainingFilesUrl, useTrainingLabels) -> { SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode).setModelName("model1"), Context.NONE); syncPoller.waitForCompletion(); CustomFormModel createdModel = syncPoller.getFinalResult(); SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller1 = formTrainingClient.beginTraining(trainingFilesUrl, useTrainingLabels, new TrainingOptions().setPollInterval(durationTestMode).setModelName("model2"), Context.NONE); syncPoller1.waitForCompletion(); CustomFormModel createdModel1 = syncPoller1.getFinalResult(); 
SyncPoller<FormRecognizerOperationResult, CustomFormModel> syncPoller2 = formTrainingClient.beginCreateComposedModel( Arrays.asList(createdModel.getModelId(), createdModel1.getModelId()), new CreateComposedModelOptions().setModelName("composedModelName"), Context.NONE).setPollInterval(durationTestMode); syncPoller2.waitForCompletion(); CustomFormModel composedModel = syncPoller2.getFinalResult(); FormRecognizerClient formRecognizerClient = getFormTrainingClient(httpClient, serviceVersion) .getFormRecognizerClient(); SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller3 = formRecognizerClient.beginRecognizeCustomForms( composedModel.getModelId(), data, dataLength, new RecognizeCustomFormsOptions() .setContentType(FormContentType.IMAGE_JPEG).setPollInterval(durationTestMode), Context.NONE); syncPoller3.waitForCompletion(); final RecognizedForm recognizedForm = syncPoller3.getFinalResult().stream().findFirst().get(); String expectedFormType1 = "composedModelName:model1"; String expectedFormType2 = "composedModelName:model2"; assertTrue(expectedFormType1.equals(recognizedForm.getFormType()) || expectedFormType2.equals(recognizedForm.getFormType())); assertNotNull(recognizedForm.getFormTypeConfidence()); formTrainingClient.deleteModel(createdModel.getModelId()); formTrainingClient.deleteModel(createdModel1.getModelId()); formTrainingClient.deleteModel(composedModel.getModelId()); }); }, FORM_JPG); } /** * Verifies business card data for a document using source as input stream data. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeBusinessCards(data, dataLength, new RecognizeBusinessCardsOptions().setContentType(FormContentType.IMAGE_JPEG), Context.NONE) .setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, BUSINESS_CARD); }, BUSINESS_CARD_JPG); } /** * Verifies an exception thrown for a document using null data value. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardDataNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); assertThrows(NullPointerException.class, () -> client.beginRecognizeBusinessCards( null, 0)); } /** * Verifies content type will be auto detected when using business card API with input stream data overload. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardDataWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); localFilePathRunner((filePath, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeBusinessCards(getContentDetectionFileData(filePath), dataLength, new RecognizeBusinessCardsOptions(), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, BUSINESS_CARD); }, BUSINESS_CARD_JPG); } /** * Verifies business card data for a document using source as as input stream data and text content when * includeFieldElements is true. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeBusinessCards(data, dataLength, new RecognizeBusinessCardsOptions().setContentType(FormContentType.IMAGE_JPEG) .setFieldElementsIncluded(true), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, BUSINESS_CARD); }, BUSINESS_CARD_JPG); } /** * Verifies business card data from a document using PNG file data as source and including text content details. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardDataWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeBusinessCards(data, dataLength, new RecognizeBusinessCardsOptions().setContentType( FormContentType.IMAGE_PNG).setFieldElementsIncluded(true), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, BUSINESS_CARD); }, BUSINESS_CARD_PNG); } /** * Verifies business card data from a document using blank PDF. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardDataWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeBusinessCards(data, dataLength, new RecognizeBusinessCardsOptions().setContentType(APPLICATION_PDF), Context.NONE) .setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validateBlankPdfResultData(syncPoller.getFinalResult()); }, BLANK_PDF); } /** * Verify that business card recognition with damaged PDF file. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardFromDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); damagedPdfDataRunner((data, dataLength) -> { HttpResponseException httpResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeBusinessCards(data, dataLength, new RecognizeBusinessCardsOptions().setContentType(APPLICATION_PDF), Context.NONE) .setPollInterval(durationTestMode).getFinalResult()); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) httpResponseException.getValue(); assertEquals(BAD_ARGUMENT_CODE, errorInformation.getErrorCode()); }); } /** * Verify business card recognition with multipage pdf. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeMultipageBusinessCard(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeBusinessCards(data, dataLength, new RecognizeBusinessCardsOptions() .setContentType(APPLICATION_PDF) .setFieldElementsIncluded(true), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validateMultipageBusinessData(syncPoller.getFinalResult()); }, MULTIPAGE_BUSINESS_CARD_PDF); } /** * Verifies business card data for a document using source as file url. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner((sourceUrl) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeBusinessCardsFromUrl(sourceUrl, new RecognizeBusinessCardsOptions(), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, BUSINESS_CARD); }, BUSINESS_CARD_JPG); } /** * Verifies encoded blank url must stay same when sent to service for a document using invalid source url with * encoded blank space as input data to recognize business card from url API. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils @Disabled("https: public void recognizeBusinessCardFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); encodedBlankSpaceSourceUrlRunner(sourceUrl -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeBusinessCardsFromUrl(sourceUrl, new RecognizeBusinessCardsOptions(), Context.NONE).setPollInterval(durationTestMode)); validateExceptionSource(errorResponseException); }); } /** * Verifies that an exception is thrown for invalid source url. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); invalidSourceUrlRunner((sourceUrl) -> assertThrows(HttpResponseException.class, () -> client.beginRecognizeBusinessCardsFromUrl(sourceUrl, new RecognizeBusinessCardsOptions(), Context.NONE).setPollInterval(durationTestMode))); } /** * Verifies business card data for a document using source as file url and include form element references * when includeFieldElements is true. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardFromUrlIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeBusinessCardsFromUrl(sourceUrl, new RecognizeBusinessCardsOptions().setFieldElementsIncluded(true), Context.NONE) .setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, BUSINESS_CARD); }, BUSINESS_CARD_JPG); } /** * Verifies business card data for a document using source as PNG file url and include form element references * when includeFieldElements is true. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeBusinessCardSourceUrlWithPngFile(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeBusinessCardsFromUrl(sourceUrl, new RecognizeBusinessCardsOptions().setFieldElementsIncluded(true), Context.NONE) .setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, BUSINESS_CARD); }, BUSINESS_CARD_PNG); } /** * Verify business card recognition with multipage pdf url. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeMultipageBusinessCardUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeBusinessCardsFromUrl(sourceUrl, new RecognizeBusinessCardsOptions() .setFieldElementsIncluded(true), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validateMultipageBusinessData(syncPoller.getFinalResult()); }, MULTIPAGE_BUSINESS_CARD_PDF); } /** * Verify locale parameter passed when specified by user. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void receiptValidLocale(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); localFilePathRunner((filePath, dataLength) -> { client.beginRecognizeReceipts( getContentDetectionFileData(filePath), dataLength, new RecognizeReceiptsOptions().setPollInterval(durationTestMode) .setLocale(FormRecognizerLocale.EN_US), Context.NONE); validateNetworkCallRecord("locale", "en-US"); }, RECEIPT_CONTOSO_JPG); } /** * Verifies invoice data recognition for a document using source as input stream data. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeInvoiceData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeInvoices(data, dataLength, new RecognizeInvoicesOptions().setContentType(APPLICATION_PDF), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, INVOICE); }, INVOICE_PDF); } /** * Verifies content type will be auto detected when using invoice API with input stream data overload. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeInvoiceDataWithContentTypeAutoDetection(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); localFilePathRunner((filePath, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeInvoices(getContentDetectionFileData(filePath), dataLength, new RecognizeInvoicesOptions(), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, INVOICE); }, INVOICE_PDF); } /** * Verifies invoice data for a document using source as as input stream data and text content when * includeFieldElements is true. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeInvoiceDataIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeInvoices(data, dataLength, new RecognizeInvoicesOptions() .setContentType(APPLICATION_PDF) .setFieldElementsIncluded(true), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, INVOICE); }, INVOICE_PDF); } /** * Verifies invoice data from a document using blank PDF. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeInvoiceDataWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeInvoices(data, dataLength, new RecognizeInvoicesOptions().setContentType(APPLICATION_PDF), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validateBlankPdfResultData(syncPoller.getFinalResult()); }, BLANK_PDF); } /** * Verify that invoice recognition with damaged PDF file. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeInvoiceFromDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); damagedPdfDataRunner((data, dataLength) -> { HttpResponseException httpResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeInvoices(data, dataLength, new RecognizeInvoicesOptions().setContentType(APPLICATION_PDF), Context.NONE).setPollInterval(durationTestMode).getFinalResult()); FormRecognizerErrorInformation errorInformation = (FormRecognizerErrorInformation) httpResponseException.getValue(); assertEquals(BAD_ARGUMENT_CODE, errorInformation.getErrorCode()); }); } /** * Verify invoice data recognition with multipage pdf. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeMultipageInvoice(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); dataRunner((data, dataLength) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeInvoices(data, dataLength, new RecognizeInvoicesOptions() .setContentType(APPLICATION_PDF) .setFieldElementsIncluded(true), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validateMultipageInvoiceData(syncPoller.getFinalResult()); }, MULTIPAGE_VENDOR_INVOICE_PDF); } /** * Verifies invoice card data for a document using source as file url. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeInvoiceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner((sourceUrl) -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeInvoicesFromUrl(sourceUrl, new RecognizeInvoicesOptions(), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), false, INVOICE); }, INVOICE_PDF); } /** * Verifies encoded blank url must stay same when sent to service for a document using invalid source url with * encoded blank space as input data to recognize invoice card from url API. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils @Disabled("https: public void recognizeInvoiceFromUrlWithEncodedBlankSpaceSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); encodedBlankSpaceSourceUrlRunner(sourceUrl -> { HttpResponseException errorResponseException = assertThrows(HttpResponseException.class, () -> client.beginRecognizeInvoicesFromUrl(sourceUrl, new RecognizeInvoicesOptions(), Context.NONE).setPollInterval(durationTestMode)); validateExceptionSource(errorResponseException); }); } /** * Verifies that an exception is thrown for invalid source url. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeInvoiceInvalidSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); invalidSourceUrlRunner((sourceUrl) -> assertThrows(HttpResponseException.class, () -> client.beginRecognizeInvoicesFromUrl(sourceUrl, new RecognizeInvoicesOptions(), Context.NONE).setPollInterval(durationTestMode))); } /** * Verifies invoice data for a document using source as file url and include form element references * when includeFieldElements is true. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void recognizeInvoiceFromUrlIncludeFieldElements(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); urlRunner(sourceUrl -> { SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller = client.beginRecognizeInvoicesFromUrl(sourceUrl, new RecognizeInvoicesOptions().setFieldElementsIncluded(true), Context.NONE).setPollInterval(durationTestMode); syncPoller.waitForCompletion(); validatePrebuiltResultData(syncPoller.getFinalResult(), true, INVOICE); }, INVOICE_PDF); } /** * Verify locale parameter passed when specified by user. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("com.azure.ai.formrecognizer.TestUtils public void invoiceValidLocale(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) { client = getFormRecognizerClient(httpClient, serviceVersion); localFilePathRunner((filePath, dataLength) -> { client.beginRecognizeInvoices( getContentDetectionFileData(filePath), dataLength, new RecognizeInvoicesOptions().setLocale(FormRecognizerLocale.EN_US), Context.NONE).setPollInterval(durationTestMode); validateNetworkCallRecord("locale", "en-US"); }, INVOICE_PDF); } /** * Verifies license card data from a document using file data as source. 
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeLicenseCardData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocuments(data, dataLength,
                new RecognizeIdentityDocumentOptions().setContentType(FormContentType.IMAGE_JPEG),
                Context.NONE)
                .setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, IDENTITY);
    }, LICENSE_CARD_JPG);
}

/**
 * Verifies an exception thrown for a document using null data value.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeIDDocumentDataNullData(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    // Passing null stream data must fail fast with NullPointerException.
    assertThrows(NullPointerException.class, () -> client.beginRecognizeIdentityDocuments(null, 0));
}

/**
 * Verifies content type will be auto detected when using custom form API with input stream data overload.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeLicenseDataWithContentTypeAutoDetection(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    localFilePathRunner((filePath, dataLength) -> {
        // No content type set on purpose — the client must detect it from the data.
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocuments(
                getContentDetectionFileData(filePath),
                dataLength,
                new RecognizeIdentityDocumentOptions(),
                Context.NONE
            ).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, IDENTITY);
    }, LICENSE_CARD_JPG);
}

/**
 * Verifies identity document data from a document using file data as source and including element reference details.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeLicenseDataIncludeFieldElements(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocuments(data, dataLength,
                new RecognizeIdentityDocumentOptions()
                    .setContentType(FormContentType.IMAGE_JPEG)
                    .setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), true, IDENTITY);
    }, LICENSE_CARD_JPG);
}

/**
 * Verifies identity document data from a document using blank PDF.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeIDDocumentWithBlankPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    dataRunner((data, dataLength) -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocuments(
                data,
                dataLength,
                new RecognizeIdentityDocumentOptions()
                    .setContentType(FormContentType.APPLICATION_PDF),
                Context.NONE
            ).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        // A blank PDF contains no identity documents at all.
        assertEquals(0, syncPoller.getFinalResult().size());
    }, BLANK_PDF);
}

/**
 * Verify that identity document recognition with damaged PDF file.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeIDDocumentFromDamagedPdf(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    damagedPdfDataRunner((data, dataLength) -> {
        // A corrupt PDF must surface as an HttpResponseException with BadArgument error code.
        HttpResponseException httpResponseException = assertThrows(HttpResponseException.class,
            () -> client.beginRecognizeIdentityDocuments(
                data,
                dataLength,
                new RecognizeIdentityDocumentOptions()
                    .setContentType(FormContentType.APPLICATION_PDF),
                Context.NONE
            ).setPollInterval(durationTestMode).getFinalResult());
        FormRecognizerErrorInformation errorInformation =
            (FormRecognizerErrorInformation) httpResponseException.getValue();
        assertEquals(BAD_ARGUMENT_CODE, errorInformation.getErrorCode());
    });
}

/**
 * Verifies business card data for a document using source as file url.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeLicenseSourceUrl(HttpClient httpClient, FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    urlRunner(sourceUrl -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocumentsFromUrl(sourceUrl,
                new RecognizeIdentityDocumentOptions(),
                Context.NONE
            ).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), false, IDENTITY);
    }, LICENSE_CARD_JPG);
}

/**
 * Verifies that an exception is thrown for invalid source url.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeIDDocumentInvalidSourceUrl(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    invalidSourceUrlRunner((invalidSourceUrl) -> {
        HttpResponseException errorResponseException = assertThrows(HttpResponseException.class,
            () -> client.beginRecognizeIdentityDocumentsFromUrl(
                invalidSourceUrl,
                new RecognizeIdentityDocumentOptions(),
                Context.NONE
            ).setPollInterval(durationTestMode).getFinalResult());
        FormRecognizerErrorInformation errorInformation =
            (FormRecognizerErrorInformation) errorResponseException.getValue();
        assertEquals(INVALID_IMAGE_URL_ERROR_CODE, errorInformation.getErrorCode());
    });
}

/**
 * Verifies license identity data for a document using source as file url and include content when
 * includeFieldElements is true.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.formrecognizer.TestUtils#getTestParameters")
public void recognizeIDDocumentFromUrlIncludeFieldElements(HttpClient httpClient,
    FormRecognizerServiceVersion serviceVersion) {
    client = getFormRecognizerClient(httpClient, serviceVersion);
    urlRunner(sourceUrl -> {
        SyncPoller<FormRecognizerOperationResult, List<RecognizedForm>> syncPoller =
            client.beginRecognizeIdentityDocumentsFromUrl(sourceUrl,
                new RecognizeIdentityDocumentOptions().setFieldElementsIncluded(true),
                Context.NONE).setPollInterval(durationTestMode);
        syncPoller.waitForCompletion();
        validatePrebuiltResultData(syncPoller.getFinalResult(), true, IDENTITY);
    }, LICENSE_CARD_JPG);
}
}
I'd suggest adding a warning message to the log to warn users who still have this property configured.
/**
 * Post-processes the Spring environment before the application starts: mirrors the Key Vault and
 * certificate-path settings into JVM system properties (so the JCA provider can read them), wires
 * dummy key/trust stores when the Key Vault keystore type is configured, registers the Key Vault
 * JCA security providers, and optionally disables HTTPS hostname verification.
 *
 * @param environment the configurable Spring environment being prepared
 * @param application the Spring application being started (unused here)
 */
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) {
    // Properties the Key Vault JCA layer reads from System properties rather than Spring config.
    String[] mirroredPropertyNames = {
        "azure.keyvault.uri",
        "azure.keyvault.tenant-id",
        "azure.keyvault.client-id",
        "azure.keyvault.client-secret",
        "azure.keyvault.managed-identity",
        "azure.keyvault.jca.certificates-refresh-interval",
        "azure.keyvault.jca.refresh-certificates-when-have-un-trust-certificate",
        "azure.cert-path.well-known",
        "azure.cert-path.custom"
    };
    for (String propertyName : mirroredPropertyNames) {
        putEnvironmentPropertyToSystemProperty(environment, propertyName);
    }

    MutablePropertySources sources = environment.getPropertySources();

    // When the key store is backed by Key Vault, point server.ssl.key-store at a dummy
    // classpath resource; embedded Tomcat additionally needs the "DKS" store type.
    if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.key-store-type"))) {
        Properties keyStoreProperties = new Properties();
        keyStoreProperties.put("server.ssl.key-store", "classpath:keyvault.dummy");
        if (hasEmbedTomcat()) {
            keyStoreProperties.put("server.ssl.key-store-type", "DKS");
        }
        sources.addFirst(new PropertiesPropertySource("KeyStorePropertySource", keyStoreProperties));
    }

    // Same treatment for the trust store.
    if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.trust-store-type"))) {
        Properties trustStoreProperties = new Properties();
        trustStoreProperties.put("server.ssl.trust-store", "classpath:keyvault.dummy");
        if (hasEmbedTomcat()) {
            trustStoreProperties.put("server.ssl.trust-store-type", "DKS");
        }
        sources.addFirst(new PropertiesPropertySource("TrustStorePropertySource", trustStoreProperties));
    }

    // Register the Key Vault JCA provider with the highest precedence.
    Security.insertProviderAt(new KeyVaultJcaProvider(), 1);
    if (overrideTrustManagerFactory(environment)) {
        Security.insertProviderAt(new KeyVaultTrustManagerFactoryProvider(), 1);
    }
    if (disableHostnameVerification(environment)) {
        // Accept every hostname; only active when explicitly requested via configuration.
        HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true);
    }
}
// Mirror the managed-identity setting into a system property for the JCA layer.
// NOTE(review): per the review comment above, consider logging a warning when users
// still have this property configured — TODO confirm whether it is deprecated.
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity");
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.tenant-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-secret"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.certificates-refresh-interval"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.refresh-certificates-when-have-un-trust-certificate"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.well-known"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.custom"); MutablePropertySources propertySources = environment.getPropertySources(); if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.key-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.key-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.key-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("KeyStorePropertySource", properties)); } if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.trust-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.trust-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.trust-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("TrustStorePropertySource", properties)); } Security.insertProviderAt(new KeyVaultJcaProvider(), 1); if (overrideTrustManagerFactory(environment)) { Security.insertProviderAt(new KeyVaultTrustManagerFactoryProvider(), 1); } if (disableHostnameVerification(environment)) { 
HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true); } }
class KeyVaultCertificatesEnvironmentPostProcessor implements EnvironmentPostProcessor { @Override /** * The method is use to make the properties in "application.properties" readable in azure-security-keyvault-jca. * * "application.properties" is analyzed by Spring, and azure-security-keyvault-jca does not depends on Spring. * Put the properties into System.getProperties() can make them readable in azure-security-keyvault-jca. */ private void putEnvironmentPropertyToSystemProperty(ConfigurableEnvironment environment, String key) { Optional.of(key) .map(environment::getProperty) .filter(StringUtils::hasText) .ifPresent(value -> System.getProperties().put(key, value)); } private boolean hasEmbedTomcat() { try { Class.forName("org.apache.tomcat.InstanceManager"); return true; } catch (ClassNotFoundException ex) { return false; } } static boolean overrideTrustManagerFactory(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.overrideTrustManagerFactory") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.override-trust-manager-factory"); } private static boolean disableHostnameVerification(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.disableHostnameVerification") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.disable-hostname-verification"); } private static boolean environmentPropertyIsTrue(ConfigurableEnvironment environment, String key) { return Optional.of(key) .map(environment::getProperty) .map(Boolean::parseBoolean) .orElse(false); } }
class KeyVaultCertificatesEnvironmentPostProcessor implements EnvironmentPostProcessor { @Override /** * The method is use to make the properties in "application.properties" readable in azure-security-keyvault-jca. * * "application.properties" is analyzed by Spring, and azure-security-keyvault-jca does not depends on Spring. * Put the properties into System.getProperties() can make them readable in azure-security-keyvault-jca. */ private void putEnvironmentPropertyToSystemProperty(ConfigurableEnvironment environment, String key) { Optional.of(key) .map(environment::getProperty) .filter(StringUtils::hasText) .ifPresent(value -> System.getProperties().put(key, value)); } private boolean hasEmbedTomcat() { try { Class.forName("org.apache.tomcat.InstanceManager"); return true; } catch (ClassNotFoundException ex) { return false; } } static boolean overrideTrustManagerFactory(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.overrideTrustManagerFactory") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.override-trust-manager-factory"); } private static boolean disableHostnameVerification(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.disableHostnameVerification") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.disable-hostname-verification"); } private static boolean environmentPropertyIsTrue(ConfigurableEnvironment environment, String key) { return Optional.of(key) .map(environment::getProperty) .map(Boolean::parseBoolean) .orElse(false); } }
is there any side effect with this remove?
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.tenant-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-secret"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.certificates-refresh-interval"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.refresh-certificates-when-have-un-trust-certificate"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.well-known"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.custom"); MutablePropertySources propertySources = environment.getPropertySources(); if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.key-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.key-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.key-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("KeyStorePropertySource", properties)); } if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.trust-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.trust-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.trust-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("TrustStorePropertySource", properties)); } Security.insertProviderAt(new KeyVaultJcaProvider(), 1); if (overrideTrustManagerFactory(environment)) { Security.insertProviderAt(new KeyVaultTrustManagerFactoryProvider(), 1); } if (disableHostnameVerification(environment)) { 
HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true); } }
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri");
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.tenant-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-secret"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.certificates-refresh-interval"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.refresh-certificates-when-have-un-trust-certificate"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.well-known"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.custom"); MutablePropertySources propertySources = environment.getPropertySources(); if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.key-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.key-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.key-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("KeyStorePropertySource", properties)); } if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.trust-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.trust-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.trust-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("TrustStorePropertySource", properties)); } Security.insertProviderAt(new KeyVaultJcaProvider(), 1); if (overrideTrustManagerFactory(environment)) { Security.insertProviderAt(new KeyVaultTrustManagerFactoryProvider(), 1); } if (disableHostnameVerification(environment)) { 
HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true); } }
class KeyVaultCertificatesEnvironmentPostProcessor implements EnvironmentPostProcessor { @Override /** * The method is use to make the properties in "application.properties" readable in azure-security-keyvault-jca. * * "application.properties" is analyzed by Spring, and azure-security-keyvault-jca does not depends on Spring. * Put the properties into System.getProperties() can make them readable in azure-security-keyvault-jca. */ private void putEnvironmentPropertyToSystemProperty(ConfigurableEnvironment environment, String key) { Optional.of(key) .map(environment::getProperty) .filter(StringUtils::hasText) .ifPresent(value -> System.getProperties().put(key, value)); } private boolean hasEmbedTomcat() { try { Class.forName("org.apache.tomcat.InstanceManager"); return true; } catch (ClassNotFoundException ex) { return false; } } static boolean overrideTrustManagerFactory(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.overrideTrustManagerFactory") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.override-trust-manager-factory"); } private static boolean disableHostnameVerification(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.disableHostnameVerification") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.disable-hostname-verification"); } private static boolean environmentPropertyIsTrue(ConfigurableEnvironment environment, String key) { return Optional.of(key) .map(environment::getProperty) .map(Boolean::parseBoolean) .orElse(false); } }
class KeyVaultCertificatesEnvironmentPostProcessor implements EnvironmentPostProcessor { @Override /** * The method is use to make the properties in "application.properties" readable in azure-security-keyvault-jca. * * "application.properties" is analyzed by Spring, and azure-security-keyvault-jca does not depends on Spring. * Put the properties into System.getProperties() can make them readable in azure-security-keyvault-jca. */ private void putEnvironmentPropertyToSystemProperty(ConfigurableEnvironment environment, String key) { Optional.of(key) .map(environment::getProperty) .filter(StringUtils::hasText) .ifPresent(value -> System.getProperties().put(key, value)); } private boolean hasEmbedTomcat() { try { Class.forName("org.apache.tomcat.InstanceManager"); return true; } catch (ClassNotFoundException ex) { return false; } } static boolean overrideTrustManagerFactory(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.overrideTrustManagerFactory") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.override-trust-manager-factory"); } private static boolean disableHostnameVerification(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.disableHostnameVerification") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.disable-hostname-verification"); } private static boolean environmentPropertyIsTrue(ConfigurableEnvironment environment, String key) { return Optional.of(key) .map(environment::getProperty) .map(Boolean::parseBoolean) .orElse(false); } }
Maybe it's not necessary, because: 1. If customer use `azure-spring-boot-starter-keyvault-certificates`, `azure.keyvault.aad-authentication-url` never exist in `KeyVaultProperties.java`, so customer never know this property exists. Refs: https://github.com/Azure/azure-sdk-for-java/commits/7989020420f8354b9f6c1597d68c06acddab63d8/sdk/spring/azure-spring-boot-starter-keyvault-certificates/src/main/java/com/azure/spring/security/keyvault/certificates/starter/KeyVaultProperties.java 2. If customer use `azure-security-keyvault-jca`, this already written in changelog: Refs: https://github.com/Azure/azure-sdk-for-java/blob/7989020420f8354b9f6c1597d68c06acddab63d8/sdk/keyvault/azure-security-keyvault-jca/CHANGELOG.md#breaking-changes
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.tenant-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-secret"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.certificates-refresh-interval"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.refresh-certificates-when-have-un-trust-certificate"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.well-known"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.custom"); MutablePropertySources propertySources = environment.getPropertySources(); if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.key-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.key-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.key-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("KeyStorePropertySource", properties)); } if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.trust-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.trust-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.trust-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("TrustStorePropertySource", properties)); } Security.insertProviderAt(new KeyVaultJcaProvider(), 1); if (overrideTrustManagerFactory(environment)) { Security.insertProviderAt(new KeyVaultTrustManagerFactoryProvider(), 1); } if (disableHostnameVerification(environment)) { 
HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true); } }
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity");
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.tenant-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-secret"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.certificates-refresh-interval"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.refresh-certificates-when-have-un-trust-certificate"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.well-known"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.custom"); MutablePropertySources propertySources = environment.getPropertySources(); if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.key-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.key-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.key-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("KeyStorePropertySource", properties)); } if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.trust-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.trust-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.trust-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("TrustStorePropertySource", properties)); } Security.insertProviderAt(new KeyVaultJcaProvider(), 1); if (overrideTrustManagerFactory(environment)) { Security.insertProviderAt(new KeyVaultTrustManagerFactoryProvider(), 1); } if (disableHostnameVerification(environment)) { 
HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true); } }
class KeyVaultCertificatesEnvironmentPostProcessor implements EnvironmentPostProcessor { @Override /** * The method is use to make the properties in "application.properties" readable in azure-security-keyvault-jca. * * "application.properties" is analyzed by Spring, and azure-security-keyvault-jca does not depends on Spring. * Put the properties into System.getProperties() can make them readable in azure-security-keyvault-jca. */ private void putEnvironmentPropertyToSystemProperty(ConfigurableEnvironment environment, String key) { Optional.of(key) .map(environment::getProperty) .filter(StringUtils::hasText) .ifPresent(value -> System.getProperties().put(key, value)); } private boolean hasEmbedTomcat() { try { Class.forName("org.apache.tomcat.InstanceManager"); return true; } catch (ClassNotFoundException ex) { return false; } } static boolean overrideTrustManagerFactory(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.overrideTrustManagerFactory") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.override-trust-manager-factory"); } private static boolean disableHostnameVerification(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.disableHostnameVerification") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.disable-hostname-verification"); } private static boolean environmentPropertyIsTrue(ConfigurableEnvironment environment, String key) { return Optional.of(key) .map(environment::getProperty) .map(Boolean::parseBoolean) .orElse(false); } }
class KeyVaultCertificatesEnvironmentPostProcessor implements EnvironmentPostProcessor { @Override /** * The method is use to make the properties in "application.properties" readable in azure-security-keyvault-jca. * * "application.properties" is analyzed by Spring, and azure-security-keyvault-jca does not depends on Spring. * Put the properties into System.getProperties() can make them readable in azure-security-keyvault-jca. */ private void putEnvironmentPropertyToSystemProperty(ConfigurableEnvironment environment, String key) { Optional.of(key) .map(environment::getProperty) .filter(StringUtils::hasText) .ifPresent(value -> System.getProperties().put(key, value)); } private boolean hasEmbedTomcat() { try { Class.forName("org.apache.tomcat.InstanceManager"); return true; } catch (ClassNotFoundException ex) { return false; } } static boolean overrideTrustManagerFactory(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.overrideTrustManagerFactory") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.override-trust-manager-factory"); } private static boolean disableHostnameVerification(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.disableHostnameVerification") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.disable-hostname-verification"); } private static boolean environmentPropertyIsTrue(ConfigurableEnvironment environment, String key) { return Optional.of(key) .map(environment::getProperty) .map(Boolean::parseBoolean) .orElse(false); } }
No side effect, I think.
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.tenant-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-secret"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.certificates-refresh-interval"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.refresh-certificates-when-have-un-trust-certificate"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.well-known"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.custom"); MutablePropertySources propertySources = environment.getPropertySources(); if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.key-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.key-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.key-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("KeyStorePropertySource", properties)); } if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.trust-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.trust-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.trust-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("TrustStorePropertySource", properties)); } Security.insertProviderAt(new KeyVaultJcaProvider(), 1); if (overrideTrustManagerFactory(environment)) { Security.insertProviderAt(new KeyVaultTrustManagerFactoryProvider(), 1); } if (disableHostnameVerification(environment)) { 
HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true); } }
putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri");
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.uri"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.tenant-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-id"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.client-secret"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.managed-identity"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.certificates-refresh-interval"); putEnvironmentPropertyToSystemProperty(environment, "azure.keyvault.jca.refresh-certificates-when-have-un-trust-certificate"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.well-known"); putEnvironmentPropertyToSystemProperty(environment, "azure.cert-path.custom"); MutablePropertySources propertySources = environment.getPropertySources(); if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.key-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.key-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.key-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("KeyStorePropertySource", properties)); } if (KeyVaultKeyStore.KEY_STORE_TYPE.equals(environment.getProperty("server.ssl.trust-store-type"))) { Properties properties = new Properties(); properties.put("server.ssl.trust-store", "classpath:keyvault.dummy"); if (hasEmbedTomcat()) { properties.put("server.ssl.trust-store-type", "DKS"); } propertySources.addFirst(new PropertiesPropertySource("TrustStorePropertySource", properties)); } Security.insertProviderAt(new KeyVaultJcaProvider(), 1); if (overrideTrustManagerFactory(environment)) { Security.insertProviderAt(new KeyVaultTrustManagerFactoryProvider(), 1); } if (disableHostnameVerification(environment)) { 
HttpsURLConnection.setDefaultHostnameVerifier((hostname, session) -> true); } }
class KeyVaultCertificatesEnvironmentPostProcessor implements EnvironmentPostProcessor { @Override /** * The method is use to make the properties in "application.properties" readable in azure-security-keyvault-jca. * * "application.properties" is analyzed by Spring, and azure-security-keyvault-jca does not depends on Spring. * Put the properties into System.getProperties() can make them readable in azure-security-keyvault-jca. */ private void putEnvironmentPropertyToSystemProperty(ConfigurableEnvironment environment, String key) { Optional.of(key) .map(environment::getProperty) .filter(StringUtils::hasText) .ifPresent(value -> System.getProperties().put(key, value)); } private boolean hasEmbedTomcat() { try { Class.forName("org.apache.tomcat.InstanceManager"); return true; } catch (ClassNotFoundException ex) { return false; } } static boolean overrideTrustManagerFactory(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.overrideTrustManagerFactory") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.override-trust-manager-factory"); } private static boolean disableHostnameVerification(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.disableHostnameVerification") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.disable-hostname-verification"); } private static boolean environmentPropertyIsTrue(ConfigurableEnvironment environment, String key) { return Optional.of(key) .map(environment::getProperty) .map(Boolean::parseBoolean) .orElse(false); } }
class KeyVaultCertificatesEnvironmentPostProcessor implements EnvironmentPostProcessor { @Override /** * The method is use to make the properties in "application.properties" readable in azure-security-keyvault-jca. * * "application.properties" is analyzed by Spring, and azure-security-keyvault-jca does not depends on Spring. * Put the properties into System.getProperties() can make them readable in azure-security-keyvault-jca. */ private void putEnvironmentPropertyToSystemProperty(ConfigurableEnvironment environment, String key) { Optional.of(key) .map(environment::getProperty) .filter(StringUtils::hasText) .ifPresent(value -> System.getProperties().put(key, value)); } private boolean hasEmbedTomcat() { try { Class.forName("org.apache.tomcat.InstanceManager"); return true; } catch (ClassNotFoundException ex) { return false; } } static boolean overrideTrustManagerFactory(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.overrideTrustManagerFactory") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.override-trust-manager-factory"); } private static boolean disableHostnameVerification(ConfigurableEnvironment environment) { return environmentPropertyIsTrue(environment, "azure.keyvault.jca.disableHostnameVerification") || environmentPropertyIsTrue(environment, "azure.keyvault.jca.disable-hostname-verification"); } private static boolean environmentPropertyIsTrue(ConfigurableEnvironment environment, String key) { return Optional.of(key) .map(environment::getProperty) .map(Boolean::parseBoolean) .orElse(false); } }
Does the generated xml string have the value of `RULE_VALUE_ATTRIBUTE_XML `?
public String serialize(Object object, SerializerEncoding encoding) throws IOException { final String contents = jacksonAdapter.serialize(object, encoding); final Class<?> clazz = object.getClass(); if (!CreateQueueBody.class.equals(clazz) && !CreateRuleBody.class.equals(clazz) && !CreateSubscriptionBody.class.equals(clazz)) { return contents; } final Matcher namespaceMatcher = NAMESPACE_PATTERN.matcher(contents); if (!namespaceMatcher.find()) { logger.warning("Could not find {} in {}", NAMESPACE_PATTERN.pattern(), contents); return contents; } final String namespace = namespaceMatcher.group("namespace"); String replaced = contents .replaceAll(namespace + ":", "") .replace("xmlns:" + namespace + "=", "xmlns="); if (!CreateRuleBody.class.equals(clazz)) { return replaced; } final Matcher filterValue = FILTER_VALUE_PATTERN.matcher(replaced); if (filterValue.find()) { replaced = filterValue.replaceAll(RULE_VALUE_ATTRIBUTE_XML); } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_VALUE_PATTERN.pattern(), contents); } final Matcher filterType = FILTER_ACTION_PATTERN.matcher(replaced); if (filterType.find()) { return filterType.replaceAll("<$1 xmlns:ns0=\"http: } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_ACTION_PATTERN.pattern(), contents); return replaced; } }
final Matcher filterValue = FILTER_VALUE_PATTERN.matcher(replaced);
public String serialize(Object object, SerializerEncoding encoding) throws IOException { final String contents = jacksonAdapter.serialize(object, encoding); final Class<?> clazz = object.getClass(); if (!CreateQueueBody.class.equals(clazz) && !CreateRuleBody.class.equals(clazz) && !CreateSubscriptionBody.class.equals(clazz)) { return contents; } final Matcher namespaceMatcher = NAMESPACE_PATTERN.matcher(contents); if (!namespaceMatcher.find()) { logger.warning("Could not find {} in {}", NAMESPACE_PATTERN.pattern(), contents); return contents; } final String namespace = namespaceMatcher.group("namespace"); String replaced = contents .replaceAll(namespace + ":", "") .replace("xmlns:" + namespace + "=", "xmlns="); if (!CreateRuleBody.class.equals(clazz)) { return replaced; } if (CreateRuleBody.class.equals(clazz)) { final Matcher filterValue = FILTER_VALUE_PATTERN.matcher(replaced); if (filterValue.find()) { replaced = filterValue.replaceAll(RULE_VALUE_ATTRIBUTE_XML); } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_VALUE_PATTERN.pattern(), contents); } } final Matcher filterType = FILTER_ACTION_PATTERN.matcher(replaced); if (filterType.find()) { return filterType.replaceAll("<$1 xmlns:ns0=\"http: } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_ACTION_PATTERN.pattern(), contents); return replaced; } }
class ServiceBusManagementSerializer implements SerializerAdapter { private static final String MINIMUM_DATETIME_FORMATTED = ">0001-01-01T00:00:00Z</"; private static final Pattern MINIMUM_DATETIME_PATTERN = Pattern.compile(">0001-01-01T00:00:00</", Pattern.MULTILINE); private static final Pattern NAMESPACE_PATTERN = Pattern.compile( "xmlns:(?<namespace>\\w+)=\"http: Pattern.MULTILINE); private static final Pattern FILTER_ACTION_PATTERN = Pattern.compile("<(Filter|Action) type=", Pattern.MULTILINE); private static final Pattern FILTER_VALUE_PATTERN = Pattern.compile("<(Value)", Pattern.MULTILINE); private static final String RULE_VALUE_ATTRIBUTE_XML = "<$1 xmlns:d6p1=\"http: private final JacksonAdapter jacksonAdapter = new JacksonAdapter(); private final ClientLogger logger = new ClientLogger(ServiceBusManagementSerializer.class); @Override @Override public String serializeRaw(Object object) { return jacksonAdapter.serializeRaw(object); } @Override public String serializeList(List<?> list, CollectionFormat format) { return jacksonAdapter.serializeList(list, format); } public <T> T deserialize(String value, Type type) throws IOException { final Matcher matcher = MINIMUM_DATETIME_PATTERN.matcher(value); final String serializedString; if (matcher.find(0)) { logger.verbose("Found instances of '{}' to replace. 
Value: {}", MINIMUM_DATETIME_PATTERN.pattern(), value); serializedString = matcher.replaceAll(MINIMUM_DATETIME_FORMATTED); } else { serializedString = value; } return jacksonAdapter.deserialize(serializedString, type, SerializerEncoding.XML); } @Override @SuppressWarnings("unchecked") public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException { if (encoding != SerializerEncoding.XML) { return jacksonAdapter.deserialize(value, type, encoding); } if (Object.class == type) { return (T) value; } else { return (T) deserialize(value, type); } } @Override public <T> T deserialize(HttpHeaders headers, Type type) throws IOException { return jacksonAdapter.deserialize(headers, type); } }
class ServiceBusManagementSerializer implements SerializerAdapter { private static final String MINIMUM_DATETIME_FORMATTED = ">0001-01-01T00:00:00Z</"; private static final Pattern MINIMUM_DATETIME_PATTERN = Pattern.compile(">0001-01-01T00:00:00</", Pattern.MULTILINE); private static final Pattern NAMESPACE_PATTERN = Pattern.compile( "xmlns:(?<namespace>\\w+)=\"http: Pattern.MULTILINE); private static final Pattern FILTER_ACTION_PATTERN = Pattern.compile("<(Filter|Action) type=", Pattern.MULTILINE); private static final Pattern FILTER_VALUE_PATTERN = Pattern.compile("<(Value)", Pattern.MULTILINE); private static final String RULE_VALUE_ATTRIBUTE_XML = "<$1 xmlns:d6p1=\"http: private final JacksonAdapter jacksonAdapter = new JacksonAdapter(); private final ClientLogger logger = new ClientLogger(ServiceBusManagementSerializer.class); @Override @Override public String serializeRaw(Object object) { return jacksonAdapter.serializeRaw(object); } @Override public String serializeList(List<?> list, CollectionFormat format) { return jacksonAdapter.serializeList(list, format); } public <T> T deserialize(String value, Type type) throws IOException { final Matcher matcher = MINIMUM_DATETIME_PATTERN.matcher(value); final String serializedString; if (matcher.find(0)) { logger.verbose("Found instances of '{}' to replace. 
Value: {}", MINIMUM_DATETIME_PATTERN.pattern(), value); serializedString = matcher.replaceAll(MINIMUM_DATETIME_FORMATTED); } else { serializedString = value; } return jacksonAdapter.deserialize(serializedString, type, SerializerEncoding.XML); } @Override @SuppressWarnings("unchecked") public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException { if (encoding != SerializerEncoding.XML) { return jacksonAdapter.deserialize(value, type, encoding); } if (Object.class == type) { return (T) value; } else { return (T) deserialize(value, type); } } @Override public <T> T deserialize(HttpHeaders headers, Type type) throws IOException { return jacksonAdapter.deserialize(headers, type); } }
Better put this new code logic in a `if` statement because this applies to only RuleFilter
public String serialize(Object object, SerializerEncoding encoding) throws IOException { final String contents = jacksonAdapter.serialize(object, encoding); final Class<?> clazz = object.getClass(); if (!CreateQueueBody.class.equals(clazz) && !CreateRuleBody.class.equals(clazz) && !CreateSubscriptionBody.class.equals(clazz)) { return contents; } final Matcher namespaceMatcher = NAMESPACE_PATTERN.matcher(contents); if (!namespaceMatcher.find()) { logger.warning("Could not find {} in {}", NAMESPACE_PATTERN.pattern(), contents); return contents; } final String namespace = namespaceMatcher.group("namespace"); String replaced = contents .replaceAll(namespace + ":", "") .replace("xmlns:" + namespace + "=", "xmlns="); if (!CreateRuleBody.class.equals(clazz)) { return replaced; } final Matcher filterValue = FILTER_VALUE_PATTERN.matcher(replaced); if (filterValue.find()) { replaced = filterValue.replaceAll(RULE_VALUE_ATTRIBUTE_XML); } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_VALUE_PATTERN.pattern(), contents); } final Matcher filterType = FILTER_ACTION_PATTERN.matcher(replaced); if (filterType.find()) { return filterType.replaceAll("<$1 xmlns:ns0=\"http: } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_ACTION_PATTERN.pattern(), contents); return replaced; } }
final Matcher filterValue = FILTER_VALUE_PATTERN.matcher(replaced);
public String serialize(Object object, SerializerEncoding encoding) throws IOException { final String contents = jacksonAdapter.serialize(object, encoding); final Class<?> clazz = object.getClass(); if (!CreateQueueBody.class.equals(clazz) && !CreateRuleBody.class.equals(clazz) && !CreateSubscriptionBody.class.equals(clazz)) { return contents; } final Matcher namespaceMatcher = NAMESPACE_PATTERN.matcher(contents); if (!namespaceMatcher.find()) { logger.warning("Could not find {} in {}", NAMESPACE_PATTERN.pattern(), contents); return contents; } final String namespace = namespaceMatcher.group("namespace"); String replaced = contents .replaceAll(namespace + ":", "") .replace("xmlns:" + namespace + "=", "xmlns="); if (!CreateRuleBody.class.equals(clazz)) { return replaced; } if (CreateRuleBody.class.equals(clazz)) { final Matcher filterValue = FILTER_VALUE_PATTERN.matcher(replaced); if (filterValue.find()) { replaced = filterValue.replaceAll(RULE_VALUE_ATTRIBUTE_XML); } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_VALUE_PATTERN.pattern(), contents); } } final Matcher filterType = FILTER_ACTION_PATTERN.matcher(replaced); if (filterType.find()) { return filterType.replaceAll("<$1 xmlns:ns0=\"http: } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_ACTION_PATTERN.pattern(), contents); return replaced; } }
class ServiceBusManagementSerializer implements SerializerAdapter { private static final String MINIMUM_DATETIME_FORMATTED = ">0001-01-01T00:00:00Z</"; private static final Pattern MINIMUM_DATETIME_PATTERN = Pattern.compile(">0001-01-01T00:00:00</", Pattern.MULTILINE); private static final Pattern NAMESPACE_PATTERN = Pattern.compile( "xmlns:(?<namespace>\\w+)=\"http: Pattern.MULTILINE); private static final Pattern FILTER_ACTION_PATTERN = Pattern.compile("<(Filter|Action) type=", Pattern.MULTILINE); private static final Pattern FILTER_VALUE_PATTERN = Pattern.compile("<(Value)", Pattern.MULTILINE); private static final String RULE_VALUE_ATTRIBUTE_XML = "<$1 xmlns:d6p1=\"http: private final JacksonAdapter jacksonAdapter = new JacksonAdapter(); private final ClientLogger logger = new ClientLogger(ServiceBusManagementSerializer.class); @Override @Override public String serializeRaw(Object object) { return jacksonAdapter.serializeRaw(object); } @Override public String serializeList(List<?> list, CollectionFormat format) { return jacksonAdapter.serializeList(list, format); } public <T> T deserialize(String value, Type type) throws IOException { final Matcher matcher = MINIMUM_DATETIME_PATTERN.matcher(value); final String serializedString; if (matcher.find(0)) { logger.verbose("Found instances of '{}' to replace. 
Value: {}", MINIMUM_DATETIME_PATTERN.pattern(), value); serializedString = matcher.replaceAll(MINIMUM_DATETIME_FORMATTED); } else { serializedString = value; } return jacksonAdapter.deserialize(serializedString, type, SerializerEncoding.XML); } @Override @SuppressWarnings("unchecked") public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException { if (encoding != SerializerEncoding.XML) { return jacksonAdapter.deserialize(value, type, encoding); } if (Object.class == type) { return (T) value; } else { return (T) deserialize(value, type); } } @Override public <T> T deserialize(HttpHeaders headers, Type type) throws IOException { return jacksonAdapter.deserialize(headers, type); } }
class ServiceBusManagementSerializer implements SerializerAdapter { private static final String MINIMUM_DATETIME_FORMATTED = ">0001-01-01T00:00:00Z</"; private static final Pattern MINIMUM_DATETIME_PATTERN = Pattern.compile(">0001-01-01T00:00:00</", Pattern.MULTILINE); private static final Pattern NAMESPACE_PATTERN = Pattern.compile( "xmlns:(?<namespace>\\w+)=\"http: Pattern.MULTILINE); private static final Pattern FILTER_ACTION_PATTERN = Pattern.compile("<(Filter|Action) type=", Pattern.MULTILINE); private static final Pattern FILTER_VALUE_PATTERN = Pattern.compile("<(Value)", Pattern.MULTILINE); private static final String RULE_VALUE_ATTRIBUTE_XML = "<$1 xmlns:d6p1=\"http: private final JacksonAdapter jacksonAdapter = new JacksonAdapter(); private final ClientLogger logger = new ClientLogger(ServiceBusManagementSerializer.class); @Override @Override public String serializeRaw(Object object) { return jacksonAdapter.serializeRaw(object); } @Override public String serializeList(List<?> list, CollectionFormat format) { return jacksonAdapter.serializeList(list, format); } public <T> T deserialize(String value, Type type) throws IOException { final Matcher matcher = MINIMUM_DATETIME_PATTERN.matcher(value); final String serializedString; if (matcher.find(0)) { logger.verbose("Found instances of '{}' to replace. 
Value: {}", MINIMUM_DATETIME_PATTERN.pattern(), value); serializedString = matcher.replaceAll(MINIMUM_DATETIME_FORMATTED); } else { serializedString = value; } return jacksonAdapter.deserialize(serializedString, type, SerializerEncoding.XML); } @Override @SuppressWarnings("unchecked") public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException { if (encoding != SerializerEncoding.XML) { return jacksonAdapter.deserialize(value, type, encoding); } if (Object.class == type) { return (T) value; } else { return (T) deserialize(value, type); } } @Override public <T> T deserialize(HttpHeaders headers, Type type) throws IOException { return jacksonAdapter.deserialize(headers, type); } }
According to your Comment, fixed in new version.
public String serialize(Object object, SerializerEncoding encoding) throws IOException { final String contents = jacksonAdapter.serialize(object, encoding); final Class<?> clazz = object.getClass(); if (!CreateQueueBody.class.equals(clazz) && !CreateRuleBody.class.equals(clazz) && !CreateSubscriptionBody.class.equals(clazz)) { return contents; } final Matcher namespaceMatcher = NAMESPACE_PATTERN.matcher(contents); if (!namespaceMatcher.find()) { logger.warning("Could not find {} in {}", NAMESPACE_PATTERN.pattern(), contents); return contents; } final String namespace = namespaceMatcher.group("namespace"); String replaced = contents .replaceAll(namespace + ":", "") .replace("xmlns:" + namespace + "=", "xmlns="); if (!CreateRuleBody.class.equals(clazz)) { return replaced; } final Matcher filterValue = FILTER_VALUE_PATTERN.matcher(replaced); if (filterValue.find()) { replaced = filterValue.replaceAll(RULE_VALUE_ATTRIBUTE_XML); } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_VALUE_PATTERN.pattern(), contents); } final Matcher filterType = FILTER_ACTION_PATTERN.matcher(replaced); if (filterType.find()) { return filterType.replaceAll("<$1 xmlns:ns0=\"http: } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_ACTION_PATTERN.pattern(), contents); return replaced; } }
final Matcher filterValue = FILTER_VALUE_PATTERN.matcher(replaced);
public String serialize(Object object, SerializerEncoding encoding) throws IOException { final String contents = jacksonAdapter.serialize(object, encoding); final Class<?> clazz = object.getClass(); if (!CreateQueueBody.class.equals(clazz) && !CreateRuleBody.class.equals(clazz) && !CreateSubscriptionBody.class.equals(clazz)) { return contents; } final Matcher namespaceMatcher = NAMESPACE_PATTERN.matcher(contents); if (!namespaceMatcher.find()) { logger.warning("Could not find {} in {}", NAMESPACE_PATTERN.pattern(), contents); return contents; } final String namespace = namespaceMatcher.group("namespace"); String replaced = contents .replaceAll(namespace + ":", "") .replace("xmlns:" + namespace + "=", "xmlns="); if (!CreateRuleBody.class.equals(clazz)) { return replaced; } if (CreateRuleBody.class.equals(clazz)) { final Matcher filterValue = FILTER_VALUE_PATTERN.matcher(replaced); if (filterValue.find()) { replaced = filterValue.replaceAll(RULE_VALUE_ATTRIBUTE_XML); } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_VALUE_PATTERN.pattern(), contents); } } final Matcher filterType = FILTER_ACTION_PATTERN.matcher(replaced); if (filterType.find()) { return filterType.replaceAll("<$1 xmlns:ns0=\"http: } else { logger.warning("Could not find filter name pattern '{}' in {}.", FILTER_ACTION_PATTERN.pattern(), contents); return replaced; } }
class ServiceBusManagementSerializer implements SerializerAdapter { private static final String MINIMUM_DATETIME_FORMATTED = ">0001-01-01T00:00:00Z</"; private static final Pattern MINIMUM_DATETIME_PATTERN = Pattern.compile(">0001-01-01T00:00:00</", Pattern.MULTILINE); private static final Pattern NAMESPACE_PATTERN = Pattern.compile( "xmlns:(?<namespace>\\w+)=\"http: Pattern.MULTILINE); private static final Pattern FILTER_ACTION_PATTERN = Pattern.compile("<(Filter|Action) type=", Pattern.MULTILINE); private static final Pattern FILTER_VALUE_PATTERN = Pattern.compile("<(Value)", Pattern.MULTILINE); private static final String RULE_VALUE_ATTRIBUTE_XML = "<$1 xmlns:d6p1=\"http: private final JacksonAdapter jacksonAdapter = new JacksonAdapter(); private final ClientLogger logger = new ClientLogger(ServiceBusManagementSerializer.class); @Override @Override public String serializeRaw(Object object) { return jacksonAdapter.serializeRaw(object); } @Override public String serializeList(List<?> list, CollectionFormat format) { return jacksonAdapter.serializeList(list, format); } public <T> T deserialize(String value, Type type) throws IOException { final Matcher matcher = MINIMUM_DATETIME_PATTERN.matcher(value); final String serializedString; if (matcher.find(0)) { logger.verbose("Found instances of '{}' to replace. 
Value: {}", MINIMUM_DATETIME_PATTERN.pattern(), value); serializedString = matcher.replaceAll(MINIMUM_DATETIME_FORMATTED); } else { serializedString = value; } return jacksonAdapter.deserialize(serializedString, type, SerializerEncoding.XML); } @Override @SuppressWarnings("unchecked") public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException { if (encoding != SerializerEncoding.XML) { return jacksonAdapter.deserialize(value, type, encoding); } if (Object.class == type) { return (T) value; } else { return (T) deserialize(value, type); } } @Override public <T> T deserialize(HttpHeaders headers, Type type) throws IOException { return jacksonAdapter.deserialize(headers, type); } }
class ServiceBusManagementSerializer implements SerializerAdapter { private static final String MINIMUM_DATETIME_FORMATTED = ">0001-01-01T00:00:00Z</"; private static final Pattern MINIMUM_DATETIME_PATTERN = Pattern.compile(">0001-01-01T00:00:00</", Pattern.MULTILINE); private static final Pattern NAMESPACE_PATTERN = Pattern.compile( "xmlns:(?<namespace>\\w+)=\"http: Pattern.MULTILINE); private static final Pattern FILTER_ACTION_PATTERN = Pattern.compile("<(Filter|Action) type=", Pattern.MULTILINE); private static final Pattern FILTER_VALUE_PATTERN = Pattern.compile("<(Value)", Pattern.MULTILINE); private static final String RULE_VALUE_ATTRIBUTE_XML = "<$1 xmlns:d6p1=\"http: private final JacksonAdapter jacksonAdapter = new JacksonAdapter(); private final ClientLogger logger = new ClientLogger(ServiceBusManagementSerializer.class); @Override @Override public String serializeRaw(Object object) { return jacksonAdapter.serializeRaw(object); } @Override public String serializeList(List<?> list, CollectionFormat format) { return jacksonAdapter.serializeList(list, format); } public <T> T deserialize(String value, Type type) throws IOException { final Matcher matcher = MINIMUM_DATETIME_PATTERN.matcher(value); final String serializedString; if (matcher.find(0)) { logger.verbose("Found instances of '{}' to replace. 
Value: {}", MINIMUM_DATETIME_PATTERN.pattern(), value); serializedString = matcher.replaceAll(MINIMUM_DATETIME_FORMATTED); } else { serializedString = value; } return jacksonAdapter.deserialize(serializedString, type, SerializerEncoding.XML); } @Override @SuppressWarnings("unchecked") public <T> T deserialize(String value, Type type, SerializerEncoding encoding) throws IOException { if (encoding != SerializerEncoding.XML) { return jacksonAdapter.deserialize(value, type, encoding); } if (Object.class == type) { return (T) value; } else { return (T) deserialize(value, type); } } @Override public <T> T deserialize(HttpHeaders headers, Type type) throws IOException { return jacksonAdapter.deserialize(headers, type); } }
This should be uncommented?
public static void main(String[] args) { final MetricsAdvisorAdministrationClient advisorAdministrationClient = new MetricsAdvisorAdministrationClientBuilder() .endpoint("https: .credential(new MetricsAdvisorKeyCredential("subscription_key", "api_key")) .buildClient(); List<String> emails = new ArrayList<>(); emails.add("alertme@alertme.com"); System.out.printf("Creating NotificationHook%n"); NotificationHook emailNotificationHookToCreate = new EmailNotificationHook("email notification Hook1") .setDescription("my email notification Hook") .setEmailsToAlert(emails) .setExternalLink("https: NotificationHook notificationHook = advisorAdministrationClient.createHook(emailNotificationHookToCreate); System.out.printf("Created notification Hook: %s%n", notificationHook.getId()); System.out.printf("Fetching notification Hook: %s%n", notificationHook.getId()); notificationHook = advisorAdministrationClient.getHook(notificationHook.getId()); EmailNotificationHook createdEmailHook = (EmailNotificationHook) notificationHook; System.out.printf("Email Hook Id: %s%n", createdEmailHook.getId()); System.out.printf("Email Hook Name: %s%n", createdEmailHook.getName()); System.out.printf("Email Hook Description: %s%n", createdEmailHook.getDescription()); System.out.printf("Email Hook External Link: %s%n", createdEmailHook.getExternalLink()); System.out.printf("Email Hook Emails: %s%n", String.join(",", createdEmailHook.getEmailsToAlert())); System.out.printf("Updating notification Hook: %s%n", notificationHook.getId()); EmailNotificationHook emailHookToUpdate = (EmailNotificationHook) notificationHook; final List<String> existingEmails = emailHookToUpdate.getEmailsToAlert(); final List<String> emailsToUpdate = new ArrayList<>(existingEmails); emailsToUpdate.remove("alertme@alertme.com"); emailsToUpdate.add("alertme2@alertme.com"); emailsToUpdate.add("alertme3@alertme.com"); emailHookToUpdate .setEmailsToAlert(emailsToUpdate); notificationHook = 
advisorAdministrationClient.updateHook(emailHookToUpdate); System.out.printf("Updated notification Hook: %s%n", notificationHook.getId()); System.out.printf("Updated Email Hook Emails: %s%n", String.join(",", ((EmailNotificationHook) notificationHook).getEmailsToAlert())); System.out.printf("Deleting Notification Hook: %s%n", notificationHook.getId()); advisorAdministrationClient.deleteHook(notificationHook.getId()); System.out.printf("Deleted Notification Hook%n"); System.out.printf("Creating web NotificationHook%n"); NotificationHook webNotificationHookToCreate = new WebNotificationHook("web notification Hook", "https: .setDescription("my web notification Hook") .setUserCredentials("web-user", "web-user-pwd!") .setExternalLink("https: advisorAdministrationClient.createHook(webNotificationHookToCreate); System.out.printf("Created web notification Hook: %s%n", notificationHook.getId()); }
public static void main(String[] args) { final MetricsAdvisorAdministrationClient advisorAdministrationClient = new MetricsAdvisorAdministrationClientBuilder() .endpoint("https: .credential(new MetricsAdvisorKeyCredential("subscription_key", "api_key")) .buildClient(); List<String> emails = new ArrayList<>(); emails.add("alertme@alertme.com"); System.out.printf("Creating NotificationHook%n"); NotificationHook emailNotificationHookToCreate = new EmailNotificationHook("email notification Hook1") .setDescription("my email notification Hook") .setEmailsToAlert(emails) .setExternalLink("https: NotificationHook notificationHook = advisorAdministrationClient.createHook(emailNotificationHookToCreate); System.out.printf("Created notification Hook: %s%n", notificationHook.getId()); System.out.printf("Fetching notification Hook: %s%n", notificationHook.getId()); notificationHook = advisorAdministrationClient.getHook(notificationHook.getId()); EmailNotificationHook createdEmailHook = (EmailNotificationHook) notificationHook; System.out.printf("Email Hook Id: %s%n", createdEmailHook.getId()); System.out.printf("Email Hook Name: %s%n", createdEmailHook.getName()); System.out.printf("Email Hook Description: %s%n", createdEmailHook.getDescription()); System.out.printf("Email Hook External Link: %s%n", createdEmailHook.getExternalLink()); System.out.printf("Email Hook Emails: %s%n", String.join(",", createdEmailHook.getEmailsToAlert())); System.out.printf("Updating notification Hook: %s%n", notificationHook.getId()); EmailNotificationHook emailHookToUpdate = (EmailNotificationHook) notificationHook; final List<String> existingEmails = emailHookToUpdate.getEmailsToAlert(); final List<String> emailsToUpdate = new ArrayList<>(existingEmails); emailsToUpdate.remove("alertme@alertme.com"); emailsToUpdate.add("alertme2@alertme.com"); emailsToUpdate.add("alertme3@alertme.com"); emailHookToUpdate .setEmailsToAlert(emailsToUpdate); notificationHook = 
advisorAdministrationClient.updateHook(emailHookToUpdate); System.out.printf("Updated notification Hook: %s%n", notificationHook.getId()); System.out.printf("Updated Email Hook Emails: %s%n", String.join(",", ((EmailNotificationHook) notificationHook).getEmailsToAlert())); System.out.printf("Deleting Notification Hook: %s%n", notificationHook.getId()); advisorAdministrationClient.deleteHook(notificationHook.getId()); System.out.printf("Deleted Notification Hook%n"); System.out.printf("Creating web NotificationHook%n"); NotificationHook webNotificationHookToCreate = new WebNotificationHook("web notification Hook", "https: .setDescription("my web notification Hook") .setUserCredentials("web-user", "web-user-pwd!") .setExternalLink("https: advisorAdministrationClient.createHook(webNotificationHookToCreate); System.out.printf("Created web notification Hook: %s%n", notificationHook.getId()); System.out.printf("Listing hooks%n"); PagedIterable<NotificationHook> hooksIterable = advisorAdministrationClient.listHooks(); for (NotificationHook notificationHookItem : hooksIterable) { if (notificationHookItem instanceof EmailNotificationHook) { EmailNotificationHook emailHook = (EmailNotificationHook) notificationHookItem; System.out.printf("Email Hook Id: %s%n", emailHook.getId()); System.out.printf("Email Hook Name: %s%n", emailHook.getName()); System.out.printf("Email Hook Description: %s%n", emailHook.getDescription()); System.out.printf("Email Hook External Link: %s%n", emailHook.getExternalLink()); System.out.printf("Email Hook Emails: %s%n", String.join(",", emailHook.getEmailsToAlert())); } else if (notificationHookItem instanceof WebNotificationHook) { WebNotificationHook webHook = (WebNotificationHook) notificationHookItem; System.out.printf("Web Hook Id: %s%n", webHook.getId()); System.out.printf("Web Hook Name: %s%n", webHook.getName()); System.out.printf("Web Hook Description: %s%n", webHook.getDescription()); System.out.printf("Web Hook External Link: %s%n", 
webHook.getExternalLink()); System.out.printf("Web Hook Endpoint: %s%n", webHook.getEndpoint()); System.out.printf("Web Hook Headers: %s%n", webHook.getHttpHeaders()); } } }
class HookSample { }
class HookSample { }
I think the log should be warn level
public void setEnableFullList(Boolean enableFullList) { logger.info(" 'azure.activedirectory.user-group.enable-full-list' property is deprecated." + " 'azure.activedirectory.user-group.allowed-group-ids: all' to instead!"); this.enableFullList = enableFullList; }
logger.info(" 'azure.activedirectory.user-group.enable-full-list' property is deprecated."
public void setEnableFullList(Boolean enableFullList) { logger.warn(" 'azure.activedirectory.user-group.enable-full-list' property detected! " + "Use 'azure.activedirectory.user-group.allowed-group-ids: all' instead!"); this.enableFullList = enableFullList; }
class UserGroupProperties { private final Log logger = LogFactory.getLog(UserGroupProperties.class); /** * Expected UserGroups that an authority will be granted to if found in the response from the MemeberOf Graph * API Call. */ private List<String> allowedGroupNames = new ArrayList<>(); private Set<String> allowedGroupIds = new HashSet<>(); /** * enableFullList is used to control whether to list all group id, default is false */ private Boolean enableFullList = false; public Set<String> getAllowedGroupIds() { return allowedGroupIds; } /** * Set the allowed group ids. * * @param allowedGroupIds Allowed group ids. */ public void setAllowedGroupIds(Set<String> allowedGroupIds) { this.allowedGroupIds = allowedGroupIds; } public List<String> getAllowedGroupNames() { return allowedGroupNames; } public void setAllowedGroupNames(List<String> allowedGroupNames) { this.allowedGroupNames = allowedGroupNames; } @Deprecated @DeprecatedConfigurationProperty( reason = "enable-full-list is not easy to understand.", replacement = "allowed-group-ids: all") public Boolean getEnableFullList() { return enableFullList; } @Deprecated @DeprecatedConfigurationProperty( reason = "In order to distinguish between allowed-group-ids and allowed-group-names, set allowed-groups " + "deprecated.", replacement = "azure.activedirectory.user-group.allowed-group-names") public List<String> getAllowedGroups() { return allowedGroupNames; } @Deprecated public void setAllowedGroups(List<String> allowedGroups) { logger.info(" 'azure.activedirectory.user-group.allowed-groups' property is deprecated." + " 'azure.activedirectory.user-group.allowed-group-names' to instead!"); this.allowedGroupNames = allowedGroups; } }
class UserGroupProperties { private final Log logger = LogFactory.getLog(UserGroupProperties.class); /** * Expected UserGroups that an authority will be granted to if found in the response from the MemeberOf Graph * API Call. */ private List<String> allowedGroupNames = new ArrayList<>(); private Set<String> allowedGroupIds = new HashSet<>(); /** * enableFullList is used to control whether to list all group id, default is false */ private Boolean enableFullList = false; public Set<String> getAllowedGroupIds() { return allowedGroupIds; } /** * Set the allowed group ids. * * @param allowedGroupIds Allowed group ids. */ public void setAllowedGroupIds(Set<String> allowedGroupIds) { this.allowedGroupIds = allowedGroupIds; } public List<String> getAllowedGroupNames() { return allowedGroupNames; } public void setAllowedGroupNames(List<String> allowedGroupNames) { this.allowedGroupNames = allowedGroupNames; } @Deprecated @DeprecatedConfigurationProperty( reason = "enable-full-list is not easy to understand.", replacement = "allowed-group-ids: all") public Boolean getEnableFullList() { return enableFullList; } @Deprecated @Deprecated @DeprecatedConfigurationProperty( reason = "In order to distinguish between allowed-group-ids and allowed-group-names, set allowed-groups " + "deprecated.", replacement = "azure.activedirectory.user-group.allowed-group-names") public List<String> getAllowedGroups() { return allowedGroupNames; } @Deprecated public void setAllowedGroups(List<String> allowedGroups) { logger.warn(" 'azure.activedirectory.user-group.allowed-groups' property detected! " + " Use 'azure" + ".activedirectory.user-group.allowed-group-names' instead!"); this.allowedGroupNames = allowedGroups; } }
`Use *** instead! `
public void setAllowedGroups(List<String> allowedGroups) { logger.info(" 'azure.activedirectory.user-group.allowed-groups' property is deprecated." + " 'azure.activedirectory.user-group.allowed-group-names' to instead!"); this.allowedGroupNames = allowedGroups; }
" 'azure.activedirectory.user-group.allowed-group-names' to instead!");
public void setAllowedGroups(List<String> allowedGroups) { logger.warn(" 'azure.activedirectory.user-group.allowed-groups' property detected! " + " Use 'azure" + ".activedirectory.user-group.allowed-group-names' instead!"); this.allowedGroupNames = allowedGroups; }
class UserGroupProperties { private final Log logger = LogFactory.getLog(UserGroupProperties.class); /** * Expected UserGroups that an authority will be granted to if found in the response from the MemeberOf Graph * API Call. */ private List<String> allowedGroupNames = new ArrayList<>(); private Set<String> allowedGroupIds = new HashSet<>(); /** * enableFullList is used to control whether to list all group id, default is false */ private Boolean enableFullList = false; public Set<String> getAllowedGroupIds() { return allowedGroupIds; } /** * Set the allowed group ids. * * @param allowedGroupIds Allowed group ids. */ public void setAllowedGroupIds(Set<String> allowedGroupIds) { this.allowedGroupIds = allowedGroupIds; } public List<String> getAllowedGroupNames() { return allowedGroupNames; } public void setAllowedGroupNames(List<String> allowedGroupNames) { this.allowedGroupNames = allowedGroupNames; } @Deprecated @DeprecatedConfigurationProperty( reason = "enable-full-list is not easy to understand.", replacement = "allowed-group-ids: all") public Boolean getEnableFullList() { return enableFullList; } public void setEnableFullList(Boolean enableFullList) { logger.info(" 'azure.activedirectory.user-group.enable-full-list' property is deprecated." + " 'azure.activedirectory.user-group.allowed-group-ids: all' to instead!"); this.enableFullList = enableFullList; } @Deprecated @DeprecatedConfigurationProperty( reason = "In order to distinguish between allowed-group-ids and allowed-group-names, set allowed-groups " + "deprecated.", replacement = "azure.activedirectory.user-group.allowed-group-names") public List<String> getAllowedGroups() { return allowedGroupNames; } @Deprecated }
class UserGroupProperties { private final Log logger = LogFactory.getLog(UserGroupProperties.class); /** * Expected UserGroups that an authority will be granted to if found in the response from the MemeberOf Graph * API Call. */ private List<String> allowedGroupNames = new ArrayList<>(); private Set<String> allowedGroupIds = new HashSet<>(); /** * enableFullList is used to control whether to list all group id, default is false */ private Boolean enableFullList = false; public Set<String> getAllowedGroupIds() { return allowedGroupIds; } /** * Set the allowed group ids. * * @param allowedGroupIds Allowed group ids. */ public void setAllowedGroupIds(Set<String> allowedGroupIds) { this.allowedGroupIds = allowedGroupIds; } public List<String> getAllowedGroupNames() { return allowedGroupNames; } public void setAllowedGroupNames(List<String> allowedGroupNames) { this.allowedGroupNames = allowedGroupNames; } @Deprecated @DeprecatedConfigurationProperty( reason = "enable-full-list is not easy to understand.", replacement = "allowed-group-ids: all") public Boolean getEnableFullList() { return enableFullList; } @Deprecated public void setEnableFullList(Boolean enableFullList) { logger.warn(" 'azure.activedirectory.user-group.enable-full-list' property detected! " + "Use 'azure.activedirectory.user-group.allowed-group-ids: all' instead!"); this.enableFullList = enableFullList; } @Deprecated @DeprecatedConfigurationProperty( reason = "In order to distinguish between allowed-group-ids and allowed-group-names, set allowed-groups " + "deprecated.", replacement = "azure.activedirectory.user-group.allowed-group-names") public List<String> getAllowedGroups() { return allowedGroupNames; } @Deprecated }
I'm good with this change as it helps prevent deeply escaping into classes outside of our ownership, but I'd love to get the thoughts of @srnagar and @JonathanGiles .
/**
 * Recursively escapes unescaped {@code '.'} characters in the keys of every {@link Map}
 * reachable from {@code value}, so flattened property names round-trip correctly.
 * <p>
 * Recursion stops at {@code null}, primitives, enums, common immutable JDK leaf types, and any
 * type outside the {@code com.azure} packages (we cannot assume reflective access to classes we
 * do not own — e.g. {@code java.lang.reflect} is not opened to this module).
 *
 * @param value the object graph to process; may be {@code null}.
 * @param logger logger used to surface reflection failures.
 * @throws RuntimeException wrapping {@link IllegalAccessException} if a field cannot be read.
 */
@SuppressWarnings("unchecked")
private static void escapeMapKeys(Object value, ClientLogger logger) {
    if (value == null) {
        return;
    }

    // Leaf types that can never contain a Map needing escaping.
    if (value.getClass().isPrimitive()
        || value.getClass().isEnum()
        || value instanceof OffsetDateTime
        || value instanceof Duration
        || value instanceof String) {
        return;
    }

    if (value instanceof Map<?, ?>) {
        // Snapshot the key set because the map is mutated while iterating.
        for (String key : new HashSet<>(((Map<String, Object>) value).keySet())) {
            if (key.contains(".")) {
                String newKey = UNESCAPED_PERIOD_PATTERN.matcher(key).replaceAll("\\\\.");
                Object val = ((Map<String, Object>) value).remove(key);
                ((Map<String, Object>) value).put(newKey, val);
            }
        }
        for (Object val : ((Map<?, ?>) value).values()) {
            escapeMapKeys(val, logger);
        }
        return;
    }

    if (value instanceof List<?>) {
        for (Object val : ((List<?>) value)) {
            escapeMapKeys(val, logger);
        }
        return;
    }

    // FIX: the package check was previously re-evaluated on every loop iteration (it is
    // loop-invariant) with a 'return' buried in the loop body, and Class.getPackage() can
    // return null (e.g. array types or classes in the default package), which would have
    // thrown a NullPointerException. Hoist and null-guard it before touching any field.
    Package pkg = value.getClass().getPackage();
    if (pkg == null || !pkg.getName().contains("com.azure")) {
        return;
    }

    for (Field f : getAllDeclaredFields(value.getClass())) {
        f.setAccessible(true);
        try {
            escapeMapKeys(f.get(value), logger);
        } catch (IllegalAccessException e) {
            throw logger.logExceptionAsError(new RuntimeException(e));
        }
    }
}
if (value.getClass().getPackage().getName().contains("com.azure")) {
/**
 * Walks the object graph rooted at {@code value} and escapes unescaped {@code '.'} characters
 * in every {@link Map} key it finds, recursing through maps, lists, and the declared fields of
 * other objects.
 *
 * @param value root of the graph to process; {@code null} is a no-op.
 * @param logger logger used to report reflective access failures.
 */
private static void escapeMapKeys(Object value, ClientLogger logger) {
    if (value == null) {
        return;
    }

    Class<?> clazz = value.getClass();

    // Leaf types: nothing inside them can hold a map key.
    if (clazz.isPrimitive()
        || clazz.isEnum()
        || value instanceof OffsetDateTime
        || value instanceof Duration
        || value instanceof String
        || value instanceof ExpandableStringEnum) {
        return;
    }

    if (value instanceof Map<?, ?>) {
        @SuppressWarnings("unchecked")
        Map<String, Object> map = (Map<String, Object>) value;
        // Iterate over a snapshot of the keys since entries are replaced in place.
        for (String key : new HashSet<>(map.keySet())) {
            if (key.contains(".")) {
                String escapedKey = UNESCAPED_PERIOD_PATTERN.matcher(key).replaceAll("\\\\.");
                map.put(escapedKey, map.remove(key));
            }
        }
        for (Object child : map.values()) {
            escapeMapKeys(child, logger);
        }
        return;
    }

    if (value instanceof List<?>) {
        for (Object element : (List<?>) value) {
            escapeMapKeys(element, logger);
        }
        return;
    }

    for (Field field : getAllDeclaredFields(clazz)) {
        field.setAccessible(true);
        try {
            escapeMapKeys(field.get(value), logger);
        } catch (IllegalAccessException e) {
            throw logger.logExceptionAsError(new RuntimeException(e));
        }
    }
}
class is annotated with @JsonFlatten add the serializer. boolean hasJsonFlattenOnClass = beanDesc.getClassAnnotations().has(JsonFlatten.class); boolean hasJsonFlattenOnProperty = beanDesc.findProperties().stream() .filter(BeanPropertyDefinition::hasField) .map(BeanPropertyDefinition::getField) .anyMatch(field -> field.hasAnnotation(JsonFlatten.class)); if (hasJsonFlattenOnClass || hasJsonFlattenOnProperty) { return new FlatteningSerializer(beanDesc, serializer, mapper); }
class is annotated with @JsonFlatten add the serializer. boolean hasJsonFlattenOnClass = beanDesc.getClassAnnotations().has(JsonFlatten.class); boolean hasJsonFlattenOnProperty = beanDesc.findProperties().stream() .filter(BeanPropertyDefinition::hasField) .map(BeanPropertyDefinition::getField) .anyMatch(field -> field.hasAnnotation(JsonFlatten.class)); if (hasJsonFlattenOnClass || hasJsonFlattenOnProperty) { return new FlatteningSerializer(beanDesc, serializer, mapper); }
I guess I'd need more context here - what is the downside to not doing this? By default magic strings make me a little anxious, so I'd prefer to not have this here if possible.
private static void escapeMapKeys(Object value, ClientLogger logger) { if (value == null) { return; } if (value.getClass().isPrimitive() || value.getClass().isEnum() || value instanceof OffsetDateTime || value instanceof Duration || value instanceof String) { return; } if (value instanceof Map<?, ?>) { for (String key : new HashSet<>(((Map<String, Object>) value).keySet())) { if (key.contains(".")) { String newKey = UNESCAPED_PERIOD_PATTERN.matcher(key).replaceAll("\\\\."); Object val = ((Map<String, Object>) value).remove(key); ((Map<String, Object>) value).put(newKey, val); } } for (Object val : ((Map<?, ?>) value).values()) { escapeMapKeys(val, logger); } return; } if (value instanceof List<?>) { for (Object val : ((List<?>) value)) { escapeMapKeys(val, logger); } return; } for (Field f : getAllDeclaredFields(value.getClass())) { if (value.getClass().getPackage().getName().contains("com.azure")) { f.setAccessible(true); } else { return; } try { escapeMapKeys(f.get(value), logger); } catch (IllegalAccessException e) { throw logger.logExceptionAsError(new RuntimeException(e)); } } }
if (value.getClass().getPackage().getName().contains("com.azure")) {
private static void escapeMapKeys(Object value, ClientLogger logger) { if (value == null) { return; } if (value.getClass().isPrimitive() || value.getClass().isEnum() || value instanceof OffsetDateTime || value instanceof Duration || value instanceof String || value instanceof ExpandableStringEnum) { return; } if (value instanceof Map<?, ?>) { for (String key : new HashSet<>(((Map<String, Object>) value).keySet())) { if (key.contains(".")) { String newKey = UNESCAPED_PERIOD_PATTERN.matcher(key).replaceAll("\\\\."); Object val = ((Map<String, Object>) value).remove(key); ((Map<String, Object>) value).put(newKey, val); } } for (Object val : ((Map<?, ?>) value).values()) { escapeMapKeys(val, logger); } return; } if (value instanceof List<?>) { for (Object val : ((List<?>) value)) { escapeMapKeys(val, logger); } return; } for (Field f : getAllDeclaredFields(value.getClass())) { f.setAccessible(true); try { escapeMapKeys(f.get(value), logger); } catch (IllegalAccessException e) { throw logger.logExceptionAsError(new RuntimeException(e)); } } }
class is annotated with @JsonFlatten add the serializer. boolean hasJsonFlattenOnClass = beanDesc.getClassAnnotations().has(JsonFlatten.class); boolean hasJsonFlattenOnProperty = beanDesc.findProperties().stream() .filter(BeanPropertyDefinition::hasField) .map(BeanPropertyDefinition::getField) .anyMatch(field -> field.hasAnnotation(JsonFlatten.class)); if (hasJsonFlattenOnClass || hasJsonFlattenOnProperty) { return new FlatteningSerializer(beanDesc, serializer, mapper); }
class is annotated with @JsonFlatten add the serializer. boolean hasJsonFlattenOnClass = beanDesc.getClassAnnotations().has(JsonFlatten.class); boolean hasJsonFlattenOnProperty = beanDesc.findProperties().stream() .filter(BeanPropertyDefinition::hasField) .map(BeanPropertyDefinition::getField) .anyMatch(field -> field.hasAnnotation(JsonFlatten.class)); if (hasJsonFlattenOnClass || hasJsonFlattenOnProperty) { return new FlatteningSerializer(beanDesc, serializer, mapper); }
It seems like this check is added as an optimization to short circuit the recursion. Will this gain us any significant perf improvements? If not, it may not be necessary to over-optimize here to minimize custom logic specific to package names.
private static void escapeMapKeys(Object value, ClientLogger logger) { if (value == null) { return; } if (value.getClass().isPrimitive() || value.getClass().isEnum() || value instanceof OffsetDateTime || value instanceof Duration || value instanceof String) { return; } if (value instanceof Map<?, ?>) { for (String key : new HashSet<>(((Map<String, Object>) value).keySet())) { if (key.contains(".")) { String newKey = UNESCAPED_PERIOD_PATTERN.matcher(key).replaceAll("\\\\."); Object val = ((Map<String, Object>) value).remove(key); ((Map<String, Object>) value).put(newKey, val); } } for (Object val : ((Map<?, ?>) value).values()) { escapeMapKeys(val, logger); } return; } if (value instanceof List<?>) { for (Object val : ((List<?>) value)) { escapeMapKeys(val, logger); } return; } for (Field f : getAllDeclaredFields(value.getClass())) { if (value.getClass().getPackage().getName().contains("com.azure")) { f.setAccessible(true); } else { return; } try { escapeMapKeys(f.get(value), logger); } catch (IllegalAccessException e) { throw logger.logExceptionAsError(new RuntimeException(e)); } } }
if (value.getClass().getPackage().getName().contains("com.azure")) {
private static void escapeMapKeys(Object value, ClientLogger logger) { if (value == null) { return; } if (value.getClass().isPrimitive() || value.getClass().isEnum() || value instanceof OffsetDateTime || value instanceof Duration || value instanceof String || value instanceof ExpandableStringEnum) { return; } if (value instanceof Map<?, ?>) { for (String key : new HashSet<>(((Map<String, Object>) value).keySet())) { if (key.contains(".")) { String newKey = UNESCAPED_PERIOD_PATTERN.matcher(key).replaceAll("\\\\."); Object val = ((Map<String, Object>) value).remove(key); ((Map<String, Object>) value).put(newKey, val); } } for (Object val : ((Map<?, ?>) value).values()) { escapeMapKeys(val, logger); } return; } if (value instanceof List<?>) { for (Object val : ((List<?>) value)) { escapeMapKeys(val, logger); } return; } for (Field f : getAllDeclaredFields(value.getClass())) { f.setAccessible(true); try { escapeMapKeys(f.get(value), logger); } catch (IllegalAccessException e) { throw logger.logExceptionAsError(new RuntimeException(e)); } } }
class is annotated with @JsonFlatten add the serializer. boolean hasJsonFlattenOnClass = beanDesc.getClassAnnotations().has(JsonFlatten.class); boolean hasJsonFlattenOnProperty = beanDesc.findProperties().stream() .filter(BeanPropertyDefinition::hasField) .map(BeanPropertyDefinition::getField) .anyMatch(field -> field.hasAnnotation(JsonFlatten.class)); if (hasJsonFlattenOnClass || hasJsonFlattenOnProperty) { return new FlatteningSerializer(beanDesc, serializer, mapper); }
class is annotated with @JsonFlatten add the serializer. boolean hasJsonFlattenOnClass = beanDesc.getClassAnnotations().has(JsonFlatten.class); boolean hasJsonFlattenOnProperty = beanDesc.findProperties().stream() .filter(BeanPropertyDefinition::hasField) .map(BeanPropertyDefinition::getField) .anyMatch(field -> field.hasAnnotation(JsonFlatten.class)); if (hasJsonFlattenOnClass || hasJsonFlattenOnProperty) { return new FlatteningSerializer(beanDesc, serializer, mapper); }
It's both an optimization and a safe-guard to prevent attempting to flatten out classes that we don't have reflective access. Before this was added, during CI there was an attempt to flatten Java built-in classes such as `Constructor` which resulted in an exception being thrown as `java.lang.reflect` doesn't open to `com.azure.core`.
private static void escapeMapKeys(Object value, ClientLogger logger) { if (value == null) { return; } if (value.getClass().isPrimitive() || value.getClass().isEnum() || value instanceof OffsetDateTime || value instanceof Duration || value instanceof String) { return; } if (value instanceof Map<?, ?>) { for (String key : new HashSet<>(((Map<String, Object>) value).keySet())) { if (key.contains(".")) { String newKey = UNESCAPED_PERIOD_PATTERN.matcher(key).replaceAll("\\\\."); Object val = ((Map<String, Object>) value).remove(key); ((Map<String, Object>) value).put(newKey, val); } } for (Object val : ((Map<?, ?>) value).values()) { escapeMapKeys(val, logger); } return; } if (value instanceof List<?>) { for (Object val : ((List<?>) value)) { escapeMapKeys(val, logger); } return; } for (Field f : getAllDeclaredFields(value.getClass())) { if (value.getClass().getPackage().getName().contains("com.azure")) { f.setAccessible(true); } else { return; } try { escapeMapKeys(f.get(value), logger); } catch (IllegalAccessException e) { throw logger.logExceptionAsError(new RuntimeException(e)); } } }
if (value.getClass().getPackage().getName().contains("com.azure")) {
private static void escapeMapKeys(Object value, ClientLogger logger) { if (value == null) { return; } if (value.getClass().isPrimitive() || value.getClass().isEnum() || value instanceof OffsetDateTime || value instanceof Duration || value instanceof String || value instanceof ExpandableStringEnum) { return; } if (value instanceof Map<?, ?>) { for (String key : new HashSet<>(((Map<String, Object>) value).keySet())) { if (key.contains(".")) { String newKey = UNESCAPED_PERIOD_PATTERN.matcher(key).replaceAll("\\\\."); Object val = ((Map<String, Object>) value).remove(key); ((Map<String, Object>) value).put(newKey, val); } } for (Object val : ((Map<?, ?>) value).values()) { escapeMapKeys(val, logger); } return; } if (value instanceof List<?>) { for (Object val : ((List<?>) value)) { escapeMapKeys(val, logger); } return; } for (Field f : getAllDeclaredFields(value.getClass())) { f.setAccessible(true); try { escapeMapKeys(f.get(value), logger); } catch (IllegalAccessException e) { throw logger.logExceptionAsError(new RuntimeException(e)); } } }
class is annotated with @JsonFlatten add the serializer. boolean hasJsonFlattenOnClass = beanDesc.getClassAnnotations().has(JsonFlatten.class); boolean hasJsonFlattenOnProperty = beanDesc.findProperties().stream() .filter(BeanPropertyDefinition::hasField) .map(BeanPropertyDefinition::getField) .anyMatch(field -> field.hasAnnotation(JsonFlatten.class)); if (hasJsonFlattenOnClass || hasJsonFlattenOnProperty) { return new FlatteningSerializer(beanDesc, serializer, mapper); }
class is annotated with @JsonFlatten add the serializer. boolean hasJsonFlattenOnClass = beanDesc.getClassAnnotations().has(JsonFlatten.class); boolean hasJsonFlattenOnProperty = beanDesc.findProperties().stream() .filter(BeanPropertyDefinition::hasField) .map(BeanPropertyDefinition::getField) .anyMatch(field -> field.hasAnnotation(JsonFlatten.class)); if (hasJsonFlattenOnClass || hasJsonFlattenOnProperty) { return new FlatteningSerializer(beanDesc, serializer, mapper); }
Yes, like [here](https://dev.azure.com/azure-sdk/public/_build/results?buildId=1010037&view=logs&j=cc8b1317-ae45-5611-4652-6b3ff0c8aa54&t=eb4c291c-cc36-5a28-3a59-910c03bf3803&l=36953) wherein we were trying to access the `clazz` property on the `ExpandableStringEnum` class.
private static void escapeMapKeys(Object value, ClientLogger logger) { if (value == null) { return; } if (value.getClass().isPrimitive() || value.getClass().isEnum() || value instanceof OffsetDateTime || value instanceof Duration || value instanceof String) { return; } if (value instanceof Map<?, ?>) { for (String key : new HashSet<>(((Map<String, Object>) value).keySet())) { if (key.contains(".")) { String newKey = UNESCAPED_PERIOD_PATTERN.matcher(key).replaceAll("\\\\."); Object val = ((Map<String, Object>) value).remove(key); ((Map<String, Object>) value).put(newKey, val); } } for (Object val : ((Map<?, ?>) value).values()) { escapeMapKeys(val, logger); } return; } if (value instanceof List<?>) { for (Object val : ((List<?>) value)) { escapeMapKeys(val, logger); } return; } for (Field f : getAllDeclaredFields(value.getClass())) { if (value.getClass().getPackage().getName().contains("com.azure")) { f.setAccessible(true); } else { return; } try { escapeMapKeys(f.get(value), logger); } catch (IllegalAccessException e) { throw logger.logExceptionAsError(new RuntimeException(e)); } } }
if (value.getClass().getPackage().getName().contains("com.azure")) {
private static void escapeMapKeys(Object value, ClientLogger logger) { if (value == null) { return; } if (value.getClass().isPrimitive() || value.getClass().isEnum() || value instanceof OffsetDateTime || value instanceof Duration || value instanceof String || value instanceof ExpandableStringEnum) { return; } if (value instanceof Map<?, ?>) { for (String key : new HashSet<>(((Map<String, Object>) value).keySet())) { if (key.contains(".")) { String newKey = UNESCAPED_PERIOD_PATTERN.matcher(key).replaceAll("\\\\."); Object val = ((Map<String, Object>) value).remove(key); ((Map<String, Object>) value).put(newKey, val); } } for (Object val : ((Map<?, ?>) value).values()) { escapeMapKeys(val, logger); } return; } if (value instanceof List<?>) { for (Object val : ((List<?>) value)) { escapeMapKeys(val, logger); } return; } for (Field f : getAllDeclaredFields(value.getClass())) { f.setAccessible(true); try { escapeMapKeys(f.get(value), logger); } catch (IllegalAccessException e) { throw logger.logExceptionAsError(new RuntimeException(e)); } } }
class is annotated with @JsonFlatten add the serializer. boolean hasJsonFlattenOnClass = beanDesc.getClassAnnotations().has(JsonFlatten.class); boolean hasJsonFlattenOnProperty = beanDesc.findProperties().stream() .filter(BeanPropertyDefinition::hasField) .map(BeanPropertyDefinition::getField) .anyMatch(field -> field.hasAnnotation(JsonFlatten.class)); if (hasJsonFlattenOnClass || hasJsonFlattenOnProperty) { return new FlatteningSerializer(beanDesc, serializer, mapper); }
class is annotated with @JsonFlatten add the serializer. boolean hasJsonFlattenOnClass = beanDesc.getClassAnnotations().has(JsonFlatten.class); boolean hasJsonFlattenOnProperty = beanDesc.findProperties().stream() .filter(BeanPropertyDefinition::hasField) .map(BeanPropertyDefinition::getField) .anyMatch(field -> field.hasAnnotation(JsonFlatten.class)); if (hasJsonFlattenOnClass || hasJsonFlattenOnProperty) { return new FlatteningSerializer(beanDesc, serializer, mapper); }
Same here.
public DefaultServiceBusTopicClientFactory(String connectionString) { super(connectionString); this.serviceBusClientBuilder = new ServiceBusClientBuilder().connectionString(connectionString); }
this.serviceBusClientBuilder = new ServiceBusClientBuilder().connectionString(connectionString);
/**
 * Creates a factory bound to the given Service Bus connection string, defaulting the transport
 * to plain AMQP. Delegates to the two-argument constructor.
 *
 * @param connectionString the Service Bus connection string used by every client this factory
 *     creates.
 */
public DefaultServiceBusTopicClientFactory(String connectionString) { this(connectionString, AmqpTransportType.AMQP); }
class DefaultServiceBusTopicClientFactory extends AbstractServiceBusSenderFactory implements ServiceBusTopicClientFactory { private final ServiceBusClientBuilder serviceBusClientBuilder; private final Map<Tuple<String, String>, ServiceBusProcessorClient> topicProcessorMap = new ConcurrentHashMap<>(); private final Map<String, ServiceBusSenderAsyncClient> topicSenderMap = new ConcurrentHashMap<>(); public DefaultServiceBusTopicClientFactory(String connectionString, AmqpTransportType amqpTransportType) { this(connectionString); this.serviceBusClientBuilder.transportType(amqpTransportType); } @Override public ServiceBusProcessorClient getOrCreateProcessor( String topic, String subscription, ServiceBusClientConfig clientConfig, ServiceBusMessageProcessor<ServiceBusReceivedMessageContext, ServiceBusErrorContext> messageProcessor) { return this.topicProcessorMap.computeIfAbsent(Tuple.of(topic, subscription), t -> createProcessor(t.getFirst(), t.getSecond(), clientConfig, messageProcessor)); } @Override public ServiceBusSenderAsyncClient getOrCreateSender(String name) { return this.topicSenderMap.computeIfAbsent(name, this::createTopicSender); } private ServiceBusProcessorClient createProcessor(String topic, String subscription, ServiceBusClientConfig config, ServiceBusMessageProcessor<ServiceBusReceivedMessageContext, ServiceBusErrorContext> messageProcessor) { if (config.isSessionsEnabled()) { return serviceBusClientBuilder.sessionProcessor() .topicName(topic) .subscriptionName(subscription) .receiveMode(ServiceBusReceiveMode.PEEK_LOCK) .maxConcurrentCalls(1) .maxConcurrentSessions(config.getConcurrency()) .prefetchCount(config.getPrefetchCount()) .disableAutoComplete() .processMessage(messageProcessor.processMessage()) .processError(messageProcessor.processError()) .buildProcessorClient(); } else { return serviceBusClientBuilder.processor() .topicName(topic) .subscriptionName(subscription) .receiveMode(ServiceBusReceiveMode.PEEK_LOCK) 
.maxConcurrentCalls(config.getConcurrency()) .prefetchCount(config.getPrefetchCount()) .disableAutoComplete() .processMessage(messageProcessor.processMessage()) .processError(messageProcessor.processError()) .buildProcessorClient(); } } private ServiceBusSenderAsyncClient createTopicSender(String name) { return serviceBusClientBuilder.sender().topicName(name).buildAsyncClient(); } }
class DefaultServiceBusTopicClientFactory extends AbstractServiceBusSenderFactory implements ServiceBusTopicClientFactory { private final ServiceBusClientBuilder serviceBusClientBuilder; private final Map<Tuple<String, String>, ServiceBusProcessorClient> topicProcessorMap = new ConcurrentHashMap<>(); private final Map<String, ServiceBusSenderAsyncClient> topicSenderMap = new ConcurrentHashMap<>(); public DefaultServiceBusTopicClientFactory(String connectionString, AmqpTransportType amqpTransportType) { super(connectionString); this.serviceBusClientBuilder = new ServiceBusClientBuilder().connectionString(connectionString); this.serviceBusClientBuilder.transportType(amqpTransportType); } @Override public ServiceBusProcessorClient getOrCreateProcessor( String topic, String subscription, ServiceBusClientConfig clientConfig, ServiceBusMessageProcessor<ServiceBusReceivedMessageContext, ServiceBusErrorContext> messageProcessor) { return this.topicProcessorMap.computeIfAbsent(Tuple.of(topic, subscription), t -> createProcessor(t.getFirst(), t.getSecond(), clientConfig, messageProcessor)); } @Override public ServiceBusSenderAsyncClient getOrCreateSender(String name) { return this.topicSenderMap.computeIfAbsent(name, this::createTopicSender); } private ServiceBusProcessorClient createProcessor(String topic, String subscription, ServiceBusClientConfig config, ServiceBusMessageProcessor<ServiceBusReceivedMessageContext, ServiceBusErrorContext> messageProcessor) { if (config.isSessionsEnabled()) { return serviceBusClientBuilder.sessionProcessor() .topicName(topic) .subscriptionName(subscription) .receiveMode(ServiceBusReceiveMode.PEEK_LOCK) .maxConcurrentCalls(1) .maxConcurrentSessions(config.getConcurrency()) .prefetchCount(config.getPrefetchCount()) .disableAutoComplete() .processMessage(messageProcessor.processMessage()) .processError(messageProcessor.processError()) .buildProcessorClient(); } else { return serviceBusClientBuilder.processor() .topicName(topic) 
.subscriptionName(subscription) .receiveMode(ServiceBusReceiveMode.PEEK_LOCK) .maxConcurrentCalls(config.getConcurrency()) .prefetchCount(config.getPrefetchCount()) .disableAutoComplete() .processMessage(messageProcessor.processMessage()) .processError(messageProcessor.processError()) .buildProcessorClient(); } } private ServiceBusSenderAsyncClient createTopicSender(String name) { return serviceBusClientBuilder.sender().topicName(name).buildAsyncClient(); } }
I would create a helper method to avoid repeating this check: ```suggestion if (isTracerEnabled()) { ``` and define the helper method: ```java boolean isTracerEnabled() { return pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled(); } ```
/**
 * Wires tracing and client-telemetry side effects around the page-producing flux: a span is
 * opened on subscribe and closed on complete/error, and per-page latency and request-charge
 * telemetry is recorded when client telemetry is enabled.
 *
 * @param pagedFluxOptions options carrying tracer, client, and operation metadata.
 * @param context reactor context propagated into the tracer span.
 * @return the decorated flux of feed responses.
 */
private Flux<FeedResponse<T>> byPage(CosmosPagedFluxOptions pagedFluxOptions, Context context) {
    final AtomicReference<Context> parentContext = new AtomicReference<>(Context.NONE);
    AtomicReference<Instant> startTime = new AtomicReference<>();
    return this.optionsFluxFunction.apply(pagedFluxOptions).doOnSubscribe(ignoredValue -> {
        if (isTracerEnabled(pagedFluxOptions)) {
            parentContext.set(pagedFluxOptions.getTracerProvider().startSpan(pagedFluxOptions.getTracerSpanName(),
                pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getServiceEndpoint(), context));
        }
        startTime.set(Instant.now());
    }).doOnComplete(() -> {
        if (isTracerEnabled(pagedFluxOptions)) {
            pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.complete(),
                HttpConstants.StatusCodes.OK);
        }
    }).doOnError(throwable -> {
        if (isTracerEnabled(pagedFluxOptions)) {
            pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.error(throwable),
                TracerProvider.ERROR_CODE);
        }
        if (pagedFluxOptions.getCosmosAsyncClient() != null
            && Configs.isClientTelemetryEnabled(
                BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient()))
            && throwable instanceof CosmosException) {
            CosmosException cosmosException = (CosmosException) throwable;
            // compareAndSet ensures telemetry is recorded at most once per diagnostics instance.
            if (this.cosmosDiagnosticsAccessor
                .isDiagnosticsCapturedInPagedFlux(cosmosException.getDiagnostics()).compareAndSet(false, true)) {
                fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), 0, pagedFluxOptions.getContainerId(),
                    pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getOperationType(),
                    pagedFluxOptions.getResourceType(),
                    BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(),
                    (float) cosmosException.getRequestCharge(), Duration.between(startTime.get(), Instant.now()));
            }
        }
        startTime.set(Instant.now());
    }).doOnNext(feedResponse -> {
        if (feedResponseConsumer != null) {
            feedResponseConsumer.accept(feedResponse);
        }
        if (pagedFluxOptions.getCosmosAsyncClient() != null
            && Configs.isClientTelemetryEnabled(
                BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient()))) {
            // compareAndSet ensures telemetry is recorded at most once per diagnostics instance.
            if (this.cosmosDiagnosticsAccessor
                .isDiagnosticsCapturedInPagedFlux(feedResponse.getCosmosDiagnostics()).compareAndSet(false, true)) {
                fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), HttpConstants.StatusCodes.OK,
                    pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(),
                    pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(),
                    BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(),
                    (float) feedResponse.getRequestCharge(), Duration.between(startTime.get(), Instant.now()));
                startTime.set(Instant.now());
            }
        }
    });
}

/**
 * True when a tracer provider is configured and enabled for these options. Extracted to avoid
 * repeating the null-then-enabled check in every signal handler above.
 */
private static boolean isTracerEnabled(CosmosPagedFluxOptions pagedFluxOptions) {
    return pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled();
}
if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) {
then call it with each feedResponse if (feedResponseConsumer != null) { feedResponseConsumer.accept(feedResponse); }
class CosmosPagedFlux<T> extends ContinuablePagedFlux<String, T, FeedResponse<T>> { private final Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction; private final Consumer<FeedResponse<T>> feedResponseConsumer; private ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor cosmosDiagnosticsAccessor; CosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction) { this.optionsFluxFunction = optionsFluxFunction; this.feedResponseConsumer = null; this.cosmosDiagnosticsAccessor = ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor(); } CosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction, Consumer<FeedResponse<T>> feedResponseConsumer) { this.optionsFluxFunction = optionsFluxFunction; this.feedResponseConsumer = feedResponseConsumer; this.cosmosDiagnosticsAccessor = ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor(); } /** * Handle for invoking "side-effects" on each FeedResponse returned by CosmosPagedFlux * * @param newFeedResponseConsumer handler * @return CosmosPagedFlux instance with attached handler */ public CosmosPagedFlux<T> handle(Consumer<FeedResponse<T>> newFeedResponseConsumer) { if (this.feedResponseConsumer != null) { return new CosmosPagedFlux<T>( this.optionsFluxFunction, this.feedResponseConsumer.andThen(newFeedResponseConsumer)); } else { return new CosmosPagedFlux<T>(this.optionsFluxFunction, newFeedResponseConsumer); } } @Override public Flux<FeedResponse<T>> byPage() { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(String continuationToken) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setRequestContinuation(continuationToken); return 
FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(int preferredPageSize) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setMaxItemCount(preferredPageSize); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(String continuationToken, int preferredPageSize) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setRequestContinuation(continuationToken); cosmosPagedFluxOptions.setMaxItemCount(preferredPageSize); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } /** * Subscribe to consume all items of type {@code T} in the sequence respectively. This is recommended for most * common scenarios. This will seamlessly fetch next page when required and provide with a {@link Flux} of items. * * @param coreSubscriber The subscriber for this {@link CosmosPagedFlux} */ @Override public void subscribe(CoreSubscriber<? 
super T> coreSubscriber) { Flux<FeedResponse<T>> pagedResponse = this.byPage(); pagedResponse.flatMap(tFeedResponse -> { IterableStream<T> elements = tFeedResponse.getElements(); if (elements == null) { return Flux.empty(); } return Flux.fromIterable(elements); }).subscribe(coreSubscriber); } private Flux<FeedResponse<T>> byPage(CosmosPagedFluxOptions pagedFluxOptions, Context context) { final AtomicReference<Context> parentContext = new AtomicReference<>(Context.NONE); AtomicReference<Instant> startTime = new AtomicReference<>(); return this.optionsFluxFunction.apply(pagedFluxOptions).doOnSubscribe(ignoredValue -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { parentContext.set(pagedFluxOptions.getTracerProvider().startSpan(pagedFluxOptions.getTracerSpanName(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getServiceEndpoint(), context)); } startTime.set(Instant.now()); }).doOnComplete(() -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.complete(), HttpConstants.StatusCodes.OK); } }).doOnError(throwable -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.error(throwable), TracerProvider.ERROR_CODE); } if (pagedFluxOptions.getCosmosAsyncClient() != null && Configs.isClientTelemetryEnabled(BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient())) && throwable instanceof CosmosException) { CosmosException cosmosException = (CosmosException) throwable; if (this.cosmosDiagnosticsAccessor.isDiagnosticsCapturedInPagedFlux(cosmosException.getDiagnostics()).compareAndSet(false, true)) { fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), 0, pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(), 
pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(), BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(), (float) cosmosException.getRequestCharge(), Duration.between(startTime.get(), Instant.now())); } } startTime.set(Instant.now()); }).doOnNext(feedResponse -> { if (pagedFluxOptions.getCosmosAsyncClient() != null && Configs.isClientTelemetryEnabled(BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient()))) { if (this.cosmosDiagnosticsAccessor.isDiagnosticsCapturedInPagedFlux(feedResponse.getCosmosDiagnostics()).compareAndSet(false, true)) { fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), HttpConstants.StatusCodes.OK, pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(), BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(), (float) feedResponse.getRequestCharge(), Duration.between(startTime.get(), Instant.now())); startTime.set(Instant.now()); }; } }); } private void fillClientTelemetry(CosmosAsyncClient cosmosAsyncClient, int statusCode, String containerId, String databaseId, OperationType operationType, ResourceType resourceType, ConsistencyLevel consistencyLevel, float requestCharge, Duration latency) { ClientTelemetry telemetry = BridgeInternal.getContextClient(cosmosAsyncClient).getClientTelemetry(); ReportPayload reportPayloadLatency = createReportPayload(cosmosAsyncClient, statusCode, containerId, databaseId , operationType, resourceType, consistencyLevel, ClientTelemetry.REQUEST_LATENCY_NAME, ClientTelemetry.REQUEST_LATENCY_UNIT); ConcurrentDoubleHistogram latencyHistogram = telemetry.getClientTelemetryInfo().getOperationInfoMap().get(reportPayloadLatency); if (latencyHistogram != null) { ClientTelemetry.recordValue(latencyHistogram, latency.toNanos() / 1000); } else { if (statusCode == HttpConstants.StatusCodes.OK) { 
latencyHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_LATENCY_MAX_MICRO_SEC, ClientTelemetry.REQUEST_LATENCY_SUCCESS_PRECISION); } else { latencyHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_LATENCY_MAX_MICRO_SEC, ClientTelemetry.REQUEST_LATENCY_FAILURE_PRECISION); } latencyHistogram.setAutoResize(true); ClientTelemetry.recordValue(latencyHistogram, latency.toNanos() / 1000); telemetry.getClientTelemetryInfo().getOperationInfoMap().put(reportPayloadLatency, latencyHistogram); } ReportPayload reportPayloadRequestCharge = createReportPayload(cosmosAsyncClient, statusCode, containerId, databaseId , operationType, resourceType, consistencyLevel, ClientTelemetry.REQUEST_CHARGE_NAME, ClientTelemetry.REQUEST_CHARGE_UNIT); ConcurrentDoubleHistogram requestChargeHistogram = telemetry.getClientTelemetryInfo().getOperationInfoMap().get(reportPayloadRequestCharge); if (requestChargeHistogram != null) { ClientTelemetry.recordValue(requestChargeHistogram, requestCharge); } else { requestChargeHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_CHARGE_MAX, ClientTelemetry.REQUEST_CHARGE_PRECISION); requestChargeHistogram.setAutoResize(true); ClientTelemetry.recordValue(requestChargeHistogram, requestCharge); telemetry.getClientTelemetryInfo().getOperationInfoMap().put(reportPayloadRequestCharge, requestChargeHistogram); } } static { ImplementationBridgeHelpers.CosmosPageFluxHelper.setCosmosPageFluxAccessor( new ImplementationBridgeHelpers.CosmosPageFluxHelper.CosmosPageFluxAccessor() { @Override public <T> CosmosPagedFlux<T> getCosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction) { return new CosmosPagedFlux<>(optionsFluxFunction); } }); } private ReportPayload createReportPayload(CosmosAsyncClient cosmosAsyncClient, int statusCode, String containerId, String databaseId, OperationType operationType, ResourceType resourceType, ConsistencyLevel consistencyLevel, String metricsName, String 
unitName) { ReportPayload reportPayload = new ReportPayload(metricsName, unitName); reportPayload.setConsistency(consistencyLevel == null ? BridgeInternal.getContextClient(cosmosAsyncClient).getConsistencyLevel() : consistencyLevel); reportPayload.setDatabaseName(databaseId); reportPayload.setContainerName(containerId); reportPayload.setOperation(operationType); reportPayload.setResource(resourceType); reportPayload.setStatusCode(statusCode); return reportPayload; } }
class CosmosPagedFlux<T> extends ContinuablePagedFlux<String, T, FeedResponse<T>> { private final Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction; private final Consumer<FeedResponse<T>> feedResponseConsumer; private ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor cosmosDiagnosticsAccessor; CosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction) { this.optionsFluxFunction = optionsFluxFunction; this.feedResponseConsumer = null; this.cosmosDiagnosticsAccessor = ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor(); } CosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction, Consumer<FeedResponse<T>> feedResponseConsumer) { this.optionsFluxFunction = optionsFluxFunction; this.feedResponseConsumer = feedResponseConsumer; this.cosmosDiagnosticsAccessor = ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor(); } /** * Handle for invoking "side-effects" on each FeedResponse returned by CosmosPagedFlux * * @param newFeedResponseConsumer handler * @return CosmosPagedFlux instance with attached handler */ public CosmosPagedFlux<T> handle(Consumer<FeedResponse<T>> newFeedResponseConsumer) { if (this.feedResponseConsumer != null) { return new CosmosPagedFlux<T>( this.optionsFluxFunction, this.feedResponseConsumer.andThen(newFeedResponseConsumer)); } else { return new CosmosPagedFlux<T>(this.optionsFluxFunction, newFeedResponseConsumer); } } @Override public Flux<FeedResponse<T>> byPage() { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(String continuationToken) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setRequestContinuation(continuationToken); return 
FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(int preferredPageSize) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setMaxItemCount(preferredPageSize); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(String continuationToken, int preferredPageSize) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setRequestContinuation(continuationToken); cosmosPagedFluxOptions.setMaxItemCount(preferredPageSize); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } /** * Subscribe to consume all items of type {@code T} in the sequence respectively. This is recommended for most * common scenarios. This will seamlessly fetch next page when required and provide with a {@link Flux} of items. * * @param coreSubscriber The subscriber for this {@link CosmosPagedFlux} */ @Override public void subscribe(CoreSubscriber<? 
super T> coreSubscriber) { Flux<FeedResponse<T>> pagedResponse = this.byPage(); pagedResponse.flatMap(tFeedResponse -> { IterableStream<T> elements = tFeedResponse.getElements(); if (elements == null) { return Flux.empty(); } return Flux.fromIterable(elements); }).subscribe(coreSubscriber); } private Flux<FeedResponse<T>> byPage(CosmosPagedFluxOptions pagedFluxOptions, Context context) { final AtomicReference<Context> parentContext = new AtomicReference<>(Context.NONE); AtomicReference<Instant> startTime = new AtomicReference<>(); return this.optionsFluxFunction.apply(pagedFluxOptions).doOnSubscribe(ignoredValue -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { parentContext.set(pagedFluxOptions.getTracerProvider().startSpan(pagedFluxOptions.getTracerSpanName(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getServiceEndpoint(), context)); } startTime.set(Instant.now()); }).doOnComplete(() -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.complete(), HttpConstants.StatusCodes.OK); } }).doOnError(throwable -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.error(throwable), TracerProvider.ERROR_CODE); } if (pagedFluxOptions.getCosmosAsyncClient() != null && Configs.isClientTelemetryEnabled(BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient())) && throwable instanceof CosmosException) { CosmosException cosmosException = (CosmosException) throwable; if (this.cosmosDiagnosticsAccessor.isDiagnosticsCapturedInPagedFlux(cosmosException.getDiagnostics()).compareAndSet(false, true)) { fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), 0, pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(), 
pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(), BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(), (float) cosmosException.getRequestCharge(), Duration.between(startTime.get(), Instant.now())); } } startTime.set(Instant.now()); }).doOnNext(feedResponse -> { if (pagedFluxOptions.getCosmosAsyncClient() != null && Configs.isClientTelemetryEnabled(BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient()))) { if (this.cosmosDiagnosticsAccessor.isDiagnosticsCapturedInPagedFlux(feedResponse.getCosmosDiagnostics()).compareAndSet(false, true)) { fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), HttpConstants.StatusCodes.OK, pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(), BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(), (float) feedResponse.getRequestCharge(), Duration.between(startTime.get(), Instant.now())); startTime.set(Instant.now()); }; } }); } private void fillClientTelemetry(CosmosAsyncClient cosmosAsyncClient, int statusCode, String containerId, String databaseId, OperationType operationType, ResourceType resourceType, ConsistencyLevel consistencyLevel, float requestCharge, Duration latency) { ClientTelemetry telemetry = BridgeInternal.getContextClient(cosmosAsyncClient).getClientTelemetry(); ReportPayload reportPayloadLatency = createReportPayload(cosmosAsyncClient, statusCode, containerId, databaseId , operationType, resourceType, consistencyLevel, ClientTelemetry.REQUEST_LATENCY_NAME, ClientTelemetry.REQUEST_LATENCY_UNIT); ConcurrentDoubleHistogram latencyHistogram = telemetry.getClientTelemetryInfo().getOperationInfoMap().get(reportPayloadLatency); if (latencyHistogram != null) { ClientTelemetry.recordValue(latencyHistogram, latency.toNanos() / 1000); } else { if (statusCode == HttpConstants.StatusCodes.OK) { 
latencyHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_LATENCY_MAX_MICRO_SEC, ClientTelemetry.REQUEST_LATENCY_SUCCESS_PRECISION); } else { latencyHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_LATENCY_MAX_MICRO_SEC, ClientTelemetry.REQUEST_LATENCY_FAILURE_PRECISION); } latencyHistogram.setAutoResize(true); ClientTelemetry.recordValue(latencyHistogram, latency.toNanos() / 1000); telemetry.getClientTelemetryInfo().getOperationInfoMap().put(reportPayloadLatency, latencyHistogram); } ReportPayload reportPayloadRequestCharge = createReportPayload(cosmosAsyncClient, statusCode, containerId, databaseId , operationType, resourceType, consistencyLevel, ClientTelemetry.REQUEST_CHARGE_NAME, ClientTelemetry.REQUEST_CHARGE_UNIT); ConcurrentDoubleHistogram requestChargeHistogram = telemetry.getClientTelemetryInfo().getOperationInfoMap().get(reportPayloadRequestCharge); if (requestChargeHistogram != null) { ClientTelemetry.recordValue(requestChargeHistogram, requestCharge); } else { requestChargeHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_CHARGE_MAX, ClientTelemetry.REQUEST_CHARGE_PRECISION); requestChargeHistogram.setAutoResize(true); ClientTelemetry.recordValue(requestChargeHistogram, requestCharge); telemetry.getClientTelemetryInfo().getOperationInfoMap().put(reportPayloadRequestCharge, requestChargeHistogram); } } static { ImplementationBridgeHelpers.CosmosPageFluxHelper.setCosmosPageFluxAccessor( new ImplementationBridgeHelpers.CosmosPageFluxHelper.CosmosPageFluxAccessor() { @Override public <T> CosmosPagedFlux<T> getCosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction) { return new CosmosPagedFlux<>(optionsFluxFunction); } }); } private ReportPayload createReportPayload(CosmosAsyncClient cosmosAsyncClient, int statusCode, String containerId, String databaseId, OperationType operationType, ResourceType resourceType, ConsistencyLevel consistencyLevel, String metricsName, String 
unitName) { ReportPayload reportPayload = new ReportPayload(metricsName, unitName); reportPayload.setConsistency(consistencyLevel == null ? BridgeInternal.getContextClient(cosmosAsyncClient).getConsistencyLevel() : consistencyLevel); reportPayload.setDatabaseName(databaseId); reportPayload.setContainerName(containerId); reportPayload.setOperation(operationType); reportPayload.setResource(resourceType); reportPayload.setStatusCode(statusCode); return reportPayload; } }
its just two condition so should be fine, if it increase i will add in future
private Flux<FeedResponse<T>> byPage(CosmosPagedFluxOptions pagedFluxOptions, Context context) { final AtomicReference<Context> parentContext = new AtomicReference<>(Context.NONE); AtomicReference<Instant> startTime = new AtomicReference<>(); return this.optionsFluxFunction.apply(pagedFluxOptions).doOnSubscribe(ignoredValue -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { parentContext.set(pagedFluxOptions.getTracerProvider().startSpan(pagedFluxOptions.getTracerSpanName(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getServiceEndpoint(), context)); } startTime.set(Instant.now()); }).doOnComplete(() -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.complete(), HttpConstants.StatusCodes.OK); } }).doOnError(throwable -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.error(throwable), TracerProvider.ERROR_CODE); } if (pagedFluxOptions.getCosmosAsyncClient() != null && Configs.isClientTelemetryEnabled(BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient())) && throwable instanceof CosmosException) { CosmosException cosmosException = (CosmosException) throwable; if (this.cosmosDiagnosticsAccessor.isDiagnosticsCapturedInPagedFlux(cosmosException.getDiagnostics()).compareAndSet(false, true)) { fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), 0, pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(), BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(), (float) cosmosException.getRequestCharge(), Duration.between(startTime.get(), Instant.now())); } } startTime.set(Instant.now()); 
}).doOnNext(feedResponse -> { if (feedResponseConsumer != null) { feedResponseConsumer.accept(feedResponse); } if (pagedFluxOptions.getCosmosAsyncClient() != null && Configs.isClientTelemetryEnabled(BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient()))) { if (this.cosmosDiagnosticsAccessor.isDiagnosticsCapturedInPagedFlux(feedResponse.getCosmosDiagnostics()).compareAndSet(false, true)) { fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), HttpConstants.StatusCodes.OK, pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(), BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(), (float) feedResponse.getRequestCharge(), Duration.between(startTime.get(), Instant.now())); startTime.set(Instant.now()); }; } }); }
if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) {
then call it with each feedResponse if (feedResponseConsumer != null) { feedResponseConsumer.accept(feedResponse); }
class CosmosPagedFlux<T> extends ContinuablePagedFlux<String, T, FeedResponse<T>> { private final Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction; private final Consumer<FeedResponse<T>> feedResponseConsumer; private ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor cosmosDiagnosticsAccessor; CosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction) { this.optionsFluxFunction = optionsFluxFunction; this.feedResponseConsumer = null; this.cosmosDiagnosticsAccessor = ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor(); } CosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction, Consumer<FeedResponse<T>> feedResponseConsumer) { this.optionsFluxFunction = optionsFluxFunction; this.feedResponseConsumer = feedResponseConsumer; this.cosmosDiagnosticsAccessor = ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor(); } /** * Handle for invoking "side-effects" on each FeedResponse returned by CosmosPagedFlux * * @param newFeedResponseConsumer handler * @return CosmosPagedFlux instance with attached handler */ public CosmosPagedFlux<T> handle(Consumer<FeedResponse<T>> newFeedResponseConsumer) { if (this.feedResponseConsumer != null) { return new CosmosPagedFlux<T>( this.optionsFluxFunction, this.feedResponseConsumer.andThen(newFeedResponseConsumer)); } else { return new CosmosPagedFlux<T>(this.optionsFluxFunction, newFeedResponseConsumer); } } @Override public Flux<FeedResponse<T>> byPage() { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(String continuationToken) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setRequestContinuation(continuationToken); return 
FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(int preferredPageSize) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setMaxItemCount(preferredPageSize); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(String continuationToken, int preferredPageSize) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setRequestContinuation(continuationToken); cosmosPagedFluxOptions.setMaxItemCount(preferredPageSize); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } /** * Subscribe to consume all items of type {@code T} in the sequence respectively. This is recommended for most * common scenarios. This will seamlessly fetch next page when required and provide with a {@link Flux} of items. * * @param coreSubscriber The subscriber for this {@link CosmosPagedFlux} */ @Override public void subscribe(CoreSubscriber<? 
super T> coreSubscriber) { Flux<FeedResponse<T>> pagedResponse = this.byPage(); pagedResponse.flatMap(tFeedResponse -> { IterableStream<T> elements = tFeedResponse.getElements(); if (elements == null) { return Flux.empty(); } return Flux.fromIterable(elements); }).subscribe(coreSubscriber); } private Flux<FeedResponse<T>> byPage(CosmosPagedFluxOptions pagedFluxOptions, Context context) { final AtomicReference<Context> parentContext = new AtomicReference<>(Context.NONE); AtomicReference<Instant> startTime = new AtomicReference<>(); return this.optionsFluxFunction.apply(pagedFluxOptions).doOnSubscribe(ignoredValue -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { parentContext.set(pagedFluxOptions.getTracerProvider().startSpan(pagedFluxOptions.getTracerSpanName(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getServiceEndpoint(), context)); } startTime.set(Instant.now()); }).doOnComplete(() -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.complete(), HttpConstants.StatusCodes.OK); } }).doOnError(throwable -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.error(throwable), TracerProvider.ERROR_CODE); } if (pagedFluxOptions.getCosmosAsyncClient() != null && Configs.isClientTelemetryEnabled(BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient())) && throwable instanceof CosmosException) { CosmosException cosmosException = (CosmosException) throwable; if (this.cosmosDiagnosticsAccessor.isDiagnosticsCapturedInPagedFlux(cosmosException.getDiagnostics()).compareAndSet(false, true)) { fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), 0, pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(), 
pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(), BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(), (float) cosmosException.getRequestCharge(), Duration.between(startTime.get(), Instant.now())); } } startTime.set(Instant.now()); }).doOnNext(feedResponse -> { if (pagedFluxOptions.getCosmosAsyncClient() != null && Configs.isClientTelemetryEnabled(BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient()))) { if (this.cosmosDiagnosticsAccessor.isDiagnosticsCapturedInPagedFlux(feedResponse.getCosmosDiagnostics()).compareAndSet(false, true)) { fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), HttpConstants.StatusCodes.OK, pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(), BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(), (float) feedResponse.getRequestCharge(), Duration.between(startTime.get(), Instant.now())); startTime.set(Instant.now()); }; } }); } private void fillClientTelemetry(CosmosAsyncClient cosmosAsyncClient, int statusCode, String containerId, String databaseId, OperationType operationType, ResourceType resourceType, ConsistencyLevel consistencyLevel, float requestCharge, Duration latency) { ClientTelemetry telemetry = BridgeInternal.getContextClient(cosmosAsyncClient).getClientTelemetry(); ReportPayload reportPayloadLatency = createReportPayload(cosmosAsyncClient, statusCode, containerId, databaseId , operationType, resourceType, consistencyLevel, ClientTelemetry.REQUEST_LATENCY_NAME, ClientTelemetry.REQUEST_LATENCY_UNIT); ConcurrentDoubleHistogram latencyHistogram = telemetry.getClientTelemetryInfo().getOperationInfoMap().get(reportPayloadLatency); if (latencyHistogram != null) { ClientTelemetry.recordValue(latencyHistogram, latency.toNanos() / 1000); } else { if (statusCode == HttpConstants.StatusCodes.OK) { 
latencyHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_LATENCY_MAX_MICRO_SEC, ClientTelemetry.REQUEST_LATENCY_SUCCESS_PRECISION); } else { latencyHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_LATENCY_MAX_MICRO_SEC, ClientTelemetry.REQUEST_LATENCY_FAILURE_PRECISION); } latencyHistogram.setAutoResize(true); ClientTelemetry.recordValue(latencyHistogram, latency.toNanos() / 1000); telemetry.getClientTelemetryInfo().getOperationInfoMap().put(reportPayloadLatency, latencyHistogram); } ReportPayload reportPayloadRequestCharge = createReportPayload(cosmosAsyncClient, statusCode, containerId, databaseId , operationType, resourceType, consistencyLevel, ClientTelemetry.REQUEST_CHARGE_NAME, ClientTelemetry.REQUEST_CHARGE_UNIT); ConcurrentDoubleHistogram requestChargeHistogram = telemetry.getClientTelemetryInfo().getOperationInfoMap().get(reportPayloadRequestCharge); if (requestChargeHistogram != null) { ClientTelemetry.recordValue(requestChargeHistogram, requestCharge); } else { requestChargeHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_CHARGE_MAX, ClientTelemetry.REQUEST_CHARGE_PRECISION); requestChargeHistogram.setAutoResize(true); ClientTelemetry.recordValue(requestChargeHistogram, requestCharge); telemetry.getClientTelemetryInfo().getOperationInfoMap().put(reportPayloadRequestCharge, requestChargeHistogram); } } static { ImplementationBridgeHelpers.CosmosPageFluxHelper.setCosmosPageFluxAccessor( new ImplementationBridgeHelpers.CosmosPageFluxHelper.CosmosPageFluxAccessor() { @Override public <T> CosmosPagedFlux<T> getCosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction) { return new CosmosPagedFlux<>(optionsFluxFunction); } }); } private ReportPayload createReportPayload(CosmosAsyncClient cosmosAsyncClient, int statusCode, String containerId, String databaseId, OperationType operationType, ResourceType resourceType, ConsistencyLevel consistencyLevel, String metricsName, String 
unitName) { ReportPayload reportPayload = new ReportPayload(metricsName, unitName); reportPayload.setConsistency(consistencyLevel == null ? BridgeInternal.getContextClient(cosmosAsyncClient).getConsistencyLevel() : consistencyLevel); reportPayload.setDatabaseName(databaseId); reportPayload.setContainerName(containerId); reportPayload.setOperation(operationType); reportPayload.setResource(resourceType); reportPayload.setStatusCode(statusCode); return reportPayload; } }
class CosmosPagedFlux<T> extends ContinuablePagedFlux<String, T, FeedResponse<T>> { private final Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction; private final Consumer<FeedResponse<T>> feedResponseConsumer; private ImplementationBridgeHelpers.CosmosDiagnosticsHelper.CosmosDiagnosticsAccessor cosmosDiagnosticsAccessor; CosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction) { this.optionsFluxFunction = optionsFluxFunction; this.feedResponseConsumer = null; this.cosmosDiagnosticsAccessor = ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor(); } CosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction, Consumer<FeedResponse<T>> feedResponseConsumer) { this.optionsFluxFunction = optionsFluxFunction; this.feedResponseConsumer = feedResponseConsumer; this.cosmosDiagnosticsAccessor = ImplementationBridgeHelpers.CosmosDiagnosticsHelper.getCosmosDiagnosticsAccessor(); } /** * Handle for invoking "side-effects" on each FeedResponse returned by CosmosPagedFlux * * @param newFeedResponseConsumer handler * @return CosmosPagedFlux instance with attached handler */ public CosmosPagedFlux<T> handle(Consumer<FeedResponse<T>> newFeedResponseConsumer) { if (this.feedResponseConsumer != null) { return new CosmosPagedFlux<T>( this.optionsFluxFunction, this.feedResponseConsumer.andThen(newFeedResponseConsumer)); } else { return new CosmosPagedFlux<T>(this.optionsFluxFunction, newFeedResponseConsumer); } } @Override public Flux<FeedResponse<T>> byPage() { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(String continuationToken) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setRequestContinuation(continuationToken); return 
FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(int preferredPageSize) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setMaxItemCount(preferredPageSize); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } @Override public Flux<FeedResponse<T>> byPage(String continuationToken, int preferredPageSize) { CosmosPagedFluxOptions cosmosPagedFluxOptions = new CosmosPagedFluxOptions(); cosmosPagedFluxOptions.setRequestContinuation(continuationToken); cosmosPagedFluxOptions.setMaxItemCount(preferredPageSize); return FluxUtil.fluxContext(context -> byPage(cosmosPagedFluxOptions, context)); } /** * Subscribe to consume all items of type {@code T} in the sequence respectively. This is recommended for most * common scenarios. This will seamlessly fetch next page when required and provide with a {@link Flux} of items. * * @param coreSubscriber The subscriber for this {@link CosmosPagedFlux} */ @Override public void subscribe(CoreSubscriber<? 
super T> coreSubscriber) { Flux<FeedResponse<T>> pagedResponse = this.byPage(); pagedResponse.flatMap(tFeedResponse -> { IterableStream<T> elements = tFeedResponse.getElements(); if (elements == null) { return Flux.empty(); } return Flux.fromIterable(elements); }).subscribe(coreSubscriber); } private Flux<FeedResponse<T>> byPage(CosmosPagedFluxOptions pagedFluxOptions, Context context) { final AtomicReference<Context> parentContext = new AtomicReference<>(Context.NONE); AtomicReference<Instant> startTime = new AtomicReference<>(); return this.optionsFluxFunction.apply(pagedFluxOptions).doOnSubscribe(ignoredValue -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { parentContext.set(pagedFluxOptions.getTracerProvider().startSpan(pagedFluxOptions.getTracerSpanName(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getServiceEndpoint(), context)); } startTime.set(Instant.now()); }).doOnComplete(() -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.complete(), HttpConstants.StatusCodes.OK); } }).doOnError(throwable -> { if (pagedFluxOptions.getTracerProvider() != null && pagedFluxOptions.getTracerProvider().isEnabled()) { pagedFluxOptions.getTracerProvider().endSpan(parentContext.get(), Signal.error(throwable), TracerProvider.ERROR_CODE); } if (pagedFluxOptions.getCosmosAsyncClient() != null && Configs.isClientTelemetryEnabled(BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient())) && throwable instanceof CosmosException) { CosmosException cosmosException = (CosmosException) throwable; if (this.cosmosDiagnosticsAccessor.isDiagnosticsCapturedInPagedFlux(cosmosException.getDiagnostics()).compareAndSet(false, true)) { fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), 0, pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(), 
pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(), BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(), (float) cosmosException.getRequestCharge(), Duration.between(startTime.get(), Instant.now())); } } startTime.set(Instant.now()); }).doOnNext(feedResponse -> { if (pagedFluxOptions.getCosmosAsyncClient() != null && Configs.isClientTelemetryEnabled(BridgeInternal.isClientTelemetryEnabled(pagedFluxOptions.getCosmosAsyncClient()))) { if (this.cosmosDiagnosticsAccessor.isDiagnosticsCapturedInPagedFlux(feedResponse.getCosmosDiagnostics()).compareAndSet(false, true)) { fillClientTelemetry(pagedFluxOptions.getCosmosAsyncClient(), HttpConstants.StatusCodes.OK, pagedFluxOptions.getContainerId(), pagedFluxOptions.getDatabaseId(), pagedFluxOptions.getOperationType(), pagedFluxOptions.getResourceType(), BridgeInternal.getContextClient(pagedFluxOptions.getCosmosAsyncClient()).getConsistencyLevel(), (float) feedResponse.getRequestCharge(), Duration.between(startTime.get(), Instant.now())); startTime.set(Instant.now()); }; } }); } private void fillClientTelemetry(CosmosAsyncClient cosmosAsyncClient, int statusCode, String containerId, String databaseId, OperationType operationType, ResourceType resourceType, ConsistencyLevel consistencyLevel, float requestCharge, Duration latency) { ClientTelemetry telemetry = BridgeInternal.getContextClient(cosmosAsyncClient).getClientTelemetry(); ReportPayload reportPayloadLatency = createReportPayload(cosmosAsyncClient, statusCode, containerId, databaseId , operationType, resourceType, consistencyLevel, ClientTelemetry.REQUEST_LATENCY_NAME, ClientTelemetry.REQUEST_LATENCY_UNIT); ConcurrentDoubleHistogram latencyHistogram = telemetry.getClientTelemetryInfo().getOperationInfoMap().get(reportPayloadLatency); if (latencyHistogram != null) { ClientTelemetry.recordValue(latencyHistogram, latency.toNanos() / 1000); } else { if (statusCode == HttpConstants.StatusCodes.OK) { 
latencyHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_LATENCY_MAX_MICRO_SEC, ClientTelemetry.REQUEST_LATENCY_SUCCESS_PRECISION); } else { latencyHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_LATENCY_MAX_MICRO_SEC, ClientTelemetry.REQUEST_LATENCY_FAILURE_PRECISION); } latencyHistogram.setAutoResize(true); ClientTelemetry.recordValue(latencyHistogram, latency.toNanos() / 1000); telemetry.getClientTelemetryInfo().getOperationInfoMap().put(reportPayloadLatency, latencyHistogram); } ReportPayload reportPayloadRequestCharge = createReportPayload(cosmosAsyncClient, statusCode, containerId, databaseId , operationType, resourceType, consistencyLevel, ClientTelemetry.REQUEST_CHARGE_NAME, ClientTelemetry.REQUEST_CHARGE_UNIT); ConcurrentDoubleHistogram requestChargeHistogram = telemetry.getClientTelemetryInfo().getOperationInfoMap().get(reportPayloadRequestCharge); if (requestChargeHistogram != null) { ClientTelemetry.recordValue(requestChargeHistogram, requestCharge); } else { requestChargeHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.REQUEST_CHARGE_MAX, ClientTelemetry.REQUEST_CHARGE_PRECISION); requestChargeHistogram.setAutoResize(true); ClientTelemetry.recordValue(requestChargeHistogram, requestCharge); telemetry.getClientTelemetryInfo().getOperationInfoMap().put(reportPayloadRequestCharge, requestChargeHistogram); } } static { ImplementationBridgeHelpers.CosmosPageFluxHelper.setCosmosPageFluxAccessor( new ImplementationBridgeHelpers.CosmosPageFluxHelper.CosmosPageFluxAccessor() { @Override public <T> CosmosPagedFlux<T> getCosmosPagedFlux(Function<CosmosPagedFluxOptions, Flux<FeedResponse<T>>> optionsFluxFunction) { return new CosmosPagedFlux<>(optionsFluxFunction); } }); } private ReportPayload createReportPayload(CosmosAsyncClient cosmosAsyncClient, int statusCode, String containerId, String databaseId, OperationType operationType, ResourceType resourceType, ConsistencyLevel consistencyLevel, String metricsName, String 
unitName) { ReportPayload reportPayload = new ReportPayload(metricsName, unitName); reportPayload.setConsistency(consistencyLevel == null ? BridgeInternal.getContextClient(cosmosAsyncClient).getConsistencyLevel() : consistencyLevel); reportPayload.setDatabaseName(databaseId); reportPayload.setContainerName(containerId); reportPayload.setOperation(operationType); reportPayload.setResource(resourceType); reportPayload.setStatusCode(statusCode); return reportPayload; } }
Maybe we should still keep this change.
/**
 * Verifies that ten concurrent subscribers share a single in-flight token refresh:
 * the remote call gets one second slower per invocation, so if every subscriber
 * triggered its own refresh the worst observed latency would exceed 2000ms.
 */
public void testOnlyOneThreadRefreshesToken() throws Exception {
    SimpleTokenCache cache = new SimpleTokenCache(() -> incrementalRemoteGetTokenAsync(new AtomicInteger(1)));
    CountDownLatch latch = new CountDownLatch(1);
    AtomicLong maxMillis = new AtomicLong(0);
    Flux.range(1, 10)
        .flatMap(i -> Mono.just(OffsetDateTime.now())
            .publishOn(Schedulers.parallel())
            .flatMap(start -> cache.getToken()
                .map(t -> Duration.between(start, OffsetDateTime.now()).toMillis())
                // FIX: the original check-then-set was racy across parallel workers;
                // accumulateAndGet updates the maximum atomically.
                .doOnNext(millis -> maxMillis.accumulateAndGet(millis, Math::max))))
        .doOnComplete(latch::countDown)
        .subscribe();
    // FIX: bounded wait — fail the test instead of hanging the suite forever if the
    // flux never completes (the original latch.await() had no timeout).
    Assertions.assertTrue(latch.await(30, java.util.concurrent.TimeUnit.SECONDS),
        "Flux did not complete within 30 seconds");
    long maxMs = maxMillis.get();
    Assertions.assertTrue(maxMs > 1000,
        () -> String.format("maxMillis was less than 1000ms. Was %d.", maxMs));
    Assertions.assertTrue(maxMs < 2000,
        () -> String.format("maxMillis was greater than 2000ms. Was %d.", maxMs));
}
.publishOn(Schedulers.parallel())
/**
 * Ten subscribers hit the cache concurrently; because the stubbed remote call gets one
 * second slower on each invocation, a shared single refresh keeps the slowest observed
 * call between one and two seconds.
 */
public void testOnlyOneThreadRefreshesToken() throws Exception {
    SimpleTokenCache tokenCache = new SimpleTokenCache(() -> incrementalRemoteGetTokenAsync(new AtomicInteger(1)));
    CountDownLatch done = new CountDownLatch(1);
    AtomicLong slowest = new AtomicLong(0);
    Flux.range(1, 10)
        .flatMap(ignored -> {
            Mono<OffsetDateTime> begin = Mono.just(OffsetDateTime.now()).publishOn(Schedulers.parallel());
            return begin.flatMap(startedAt -> tokenCache.getToken()
                .map(token -> Duration.between(startedAt, OffsetDateTime.now()).toMillis())
                .doOnNext(elapsed -> {
                    if (elapsed > slowest.get()) {
                        slowest.set(elapsed);
                    }
                }));
        })
        .doOnComplete(done::countDown)
        .subscribe();
    done.await();
    long maxMs = slowest.get();
    Assertions.assertTrue(maxMs > 1000,
        () -> String.format("maxMillis was less than 1000ms. Was %d.", maxMs));
    Assertions.assertTrue(maxMs < 2000,
        () -> String.format("maxMillis was greater than 2000ms. Was %d.", maxMs));
}
class TokenCacheTests { private static final Random RANDOM = new Random(); @Test @Test public void testLongRunningWontOverflow() throws Exception { AtomicLong refreshes = new AtomicLong(0); SimpleTokenCache cache = new SimpleTokenCache(() -> { refreshes.incrementAndGet(); return remoteGetTokenThatExpiresSoonAsync(1000, 0); }); VirtualTimeScheduler virtualTimeScheduler = VirtualTimeScheduler.create(); CountDownLatch latch = new CountDownLatch(1); Flux.interval(Duration.ofMillis(100), virtualTimeScheduler) .take(100) .flatMap(i -> Mono.just(OffsetDateTime.now()) .subscribeOn(Schedulers.parallel()) .flatMap(start -> cache.getToken() .map(t -> Duration.between(start, OffsetDateTime.now()).toMillis()) .doOnNext(millis -> { }))) .doOnComplete(latch::countDown) .subscribe(); virtualTimeScheduler.advanceTimeBy(Duration.ofSeconds(40)); latch.await(); Assertions.assertTrue(refreshes.get() <= 11); } private Mono<AccessToken> remoteGetTokenAsync(long delayInMillis) { return Mono.delay(Duration.ofMillis(delayInMillis)) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)))); } private Mono<AccessToken> remoteGetTokenThatExpiresSoonAsync(long delayInMillis, long validityInMillis) { return Mono.delay(Duration.ofMillis(delayInMillis)) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)), validityInMillis)); } private Mono<AccessToken> incrementalRemoteGetTokenAsync(AtomicInteger latency) { return Mono.delay(Duration.ofSeconds(latency.getAndIncrement())) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)))); } private static class Token extends AccessToken { private String token; private OffsetDateTime expiry; @Override public String getToken() { return token; } Token(String token) { this(token, 5000); } Token(String token, long validityInMillis) { super(token, OffsetDateTime.now().plus(Duration.ofMillis(validityInMillis))); this.token = token; this.expiry = OffsetDateTime.now().plus(Duration.ofMillis(validityInMillis)); } @Override public OffsetDateTime 
getExpiresAt() { return expiry; } @Override public boolean isExpired() { return OffsetDateTime.now().isAfter(expiry); } } }
class TokenCacheTests { private static final Random RANDOM = new Random(); @Test @Test public void testLongRunningWontOverflow() throws Exception { AtomicLong refreshes = new AtomicLong(0); SimpleTokenCache cache = new SimpleTokenCache(() -> { refreshes.incrementAndGet(); return remoteGetTokenThatExpiresSoonAsync(1000, 0); }); VirtualTimeScheduler virtualTimeScheduler = VirtualTimeScheduler.create(); CountDownLatch latch = new CountDownLatch(1); Flux.interval(Duration.ofMillis(100), virtualTimeScheduler) .take(100) .flatMap(i -> Mono.just(OffsetDateTime.now()) .subscribeOn(Schedulers.parallel()) .flatMap(start -> cache.getToken() .map(t -> Duration.between(start, OffsetDateTime.now()).toMillis()) .doOnNext(millis -> { }))) .doOnComplete(latch::countDown) .subscribe(); virtualTimeScheduler.advanceTimeBy(Duration.ofSeconds(40)); latch.await(); Assertions.assertTrue(refreshes.get() <= 11); } private Mono<AccessToken> remoteGetTokenAsync(long delayInMillis) { return Mono.delay(Duration.ofMillis(delayInMillis)) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)))); } private Mono<AccessToken> remoteGetTokenThatExpiresSoonAsync(long delayInMillis, long validityInMillis) { return Mono.delay(Duration.ofMillis(delayInMillis)) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)), validityInMillis)); } private Mono<AccessToken> incrementalRemoteGetTokenAsync(AtomicInteger latency) { return Mono.delay(Duration.ofSeconds(latency.getAndIncrement())) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)))); } private static class Token extends AccessToken { private String token; private OffsetDateTime expiry; @Override public String getToken() { return token; } Token(String token) { this(token, 5000); } Token(String token, long validityInMillis) { super(token, OffsetDateTime.now().plus(Duration.ofMillis(validityInMillis))); this.token = token; this.expiry = OffsetDateTime.now().plus(Duration.ofMillis(validityInMillis)); } @Override public OffsetDateTime 
getExpiresAt() { return expiry; } @Override public boolean isExpired() { return OffsetDateTime.now().isAfter(expiry); } } }
same here
/**
 * Runs 100 token requests on a virtual-time clock against a cache whose tokens expire
 * immediately, and asserts the cache performs at most ~11 refreshes instead of one per
 * request.
 */
public void testLongRunningWontOverflow() throws Exception {
    AtomicLong refreshes = new AtomicLong(0);
    SimpleTokenCache cache = new SimpleTokenCache(() -> {
        refreshes.incrementAndGet();
        // Zero validity forces the expired path on every access.
        return remoteGetTokenThatExpiresSoonAsync(1000, 0);
    });
    VirtualTimeScheduler virtualTimeScheduler = VirtualTimeScheduler.create();
    try {
        CountDownLatch latch = new CountDownLatch(1);
        Flux.interval(Duration.ofMillis(100), virtualTimeScheduler)
            .take(100)
            .flatMap(i -> Mono.just(OffsetDateTime.now())
                .subscribeOn(Schedulers.parallel())
                .flatMap(start -> cache.getToken()
                    .map(t -> Duration.between(start, OffsetDateTime.now()).toMillis())
                    .doOnNext(millis -> { })))
            .doOnComplete(latch::countDown)
            .subscribe();
        virtualTimeScheduler.advanceTimeBy(Duration.ofSeconds(40));
        // FIX: bounded wait — the original latch.await() could hang the suite forever.
        Assertions.assertTrue(latch.await(30, java.util.concurrent.TimeUnit.SECONDS),
            "Flux did not complete within 30 seconds");
        Assertions.assertTrue(refreshes.get() <= 11);
    } finally {
        // FIX: dispose the scheduler so it does not leak into subsequent tests.
        virtualTimeScheduler.dispose();
    }
}
.subscribeOn(Schedulers.parallel())
/**
 * Drives 100 virtual-time ticks through the token cache and checks that the number of
 * remote refreshes stays bounded (at most 11) even though every returned token is
 * already expired.
 */
public void testLongRunningWontOverflow() throws Exception {
    AtomicLong refreshCount = new AtomicLong(0);
    SimpleTokenCache tokenCache = new SimpleTokenCache(() -> {
        refreshCount.incrementAndGet();
        return remoteGetTokenThatExpiresSoonAsync(1000, 0);
    });
    VirtualTimeScheduler clock = VirtualTimeScheduler.create();
    CountDownLatch done = new CountDownLatch(1);
    Flux.interval(Duration.ofMillis(100), clock)
        .take(100)
        .flatMap(tick -> Mono.just(OffsetDateTime.now())
            .subscribeOn(Schedulers.parallel())
            .flatMap(startedAt -> tokenCache.getToken()
                .map(token -> Duration.between(startedAt, OffsetDateTime.now()).toMillis())
                .doOnNext(elapsed -> { })))
        .doOnComplete(done::countDown)
        .subscribe();
    clock.advanceTimeBy(Duration.ofSeconds(40));
    done.await();
    Assertions.assertTrue(refreshCount.get() <= 11);
}
class TokenCacheTests { private static final Random RANDOM = new Random(); @Test public void testOnlyOneThreadRefreshesToken() throws Exception { SimpleTokenCache cache = new SimpleTokenCache(() -> incrementalRemoteGetTokenAsync(new AtomicInteger(1))); CountDownLatch latch = new CountDownLatch(1); AtomicLong maxMillis = new AtomicLong(0); Flux.range(1, 10) .flatMap(i -> Mono.just(OffsetDateTime.now()) .publishOn(Schedulers.parallel()) .flatMap(start -> cache.getToken() .map(t -> Duration.between(start, OffsetDateTime.now()).toMillis()) .doOnNext(millis -> { if (millis > maxMillis.get()) { maxMillis.set(millis); } }))) .doOnComplete(latch::countDown) .subscribe(); latch.await(); long maxMs = maxMillis.get(); Assertions.assertTrue(maxMs > 1000, () -> String.format("maxMillis was less than 1000ms. Was %d.", maxMs)); Assertions.assertTrue(maxMs < 2000, () -> String.format("maxMillis was greater than 2000ms. Was %d.", maxMs)); } @Test private Mono<AccessToken> remoteGetTokenAsync(long delayInMillis) { return Mono.delay(Duration.ofMillis(delayInMillis)) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)))); } private Mono<AccessToken> remoteGetTokenThatExpiresSoonAsync(long delayInMillis, long validityInMillis) { return Mono.delay(Duration.ofMillis(delayInMillis)) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)), validityInMillis)); } private Mono<AccessToken> incrementalRemoteGetTokenAsync(AtomicInteger latency) { return Mono.delay(Duration.ofSeconds(latency.getAndIncrement())) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)))); } private static class Token extends AccessToken { private String token; private OffsetDateTime expiry; @Override public String getToken() { return token; } Token(String token) { this(token, 5000); } Token(String token, long validityInMillis) { super(token, OffsetDateTime.now().plus(Duration.ofMillis(validityInMillis))); this.token = token; this.expiry = OffsetDateTime.now().plus(Duration.ofMillis(validityInMillis)); } 
@Override public OffsetDateTime getExpiresAt() { return expiry; } @Override public boolean isExpired() { return OffsetDateTime.now().isAfter(expiry); } } }
class TokenCacheTests { private static final Random RANDOM = new Random(); @Test public void testOnlyOneThreadRefreshesToken() throws Exception { SimpleTokenCache cache = new SimpleTokenCache(() -> incrementalRemoteGetTokenAsync(new AtomicInteger(1))); CountDownLatch latch = new CountDownLatch(1); AtomicLong maxMillis = new AtomicLong(0); Flux.range(1, 10) .flatMap(i -> Mono.just(OffsetDateTime.now()) .publishOn(Schedulers.parallel()) .flatMap(start -> cache.getToken() .map(t -> Duration.between(start, OffsetDateTime.now()).toMillis()) .doOnNext(millis -> { if (millis > maxMillis.get()) { maxMillis.set(millis); } }))) .doOnComplete(latch::countDown) .subscribe(); latch.await(); long maxMs = maxMillis.get(); Assertions.assertTrue(maxMs > 1000, () -> String.format("maxMillis was less than 1000ms. Was %d.", maxMs)); Assertions.assertTrue(maxMs < 2000, () -> String.format("maxMillis was greater than 2000ms. Was %d.", maxMs)); } @Test private Mono<AccessToken> remoteGetTokenAsync(long delayInMillis) { return Mono.delay(Duration.ofMillis(delayInMillis)) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)))); } private Mono<AccessToken> remoteGetTokenThatExpiresSoonAsync(long delayInMillis, long validityInMillis) { return Mono.delay(Duration.ofMillis(delayInMillis)) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)), validityInMillis)); } private Mono<AccessToken> incrementalRemoteGetTokenAsync(AtomicInteger latency) { return Mono.delay(Duration.ofSeconds(latency.getAndIncrement())) .map(l -> new Token(Integer.toString(RANDOM.nextInt(100)))); } private static class Token extends AccessToken { private String token; private OffsetDateTime expiry; @Override public String getToken() { return token; } Token(String token) { this(token, 5000); } Token(String token, long validityInMillis) { super(token, OffsetDateTime.now().plus(Duration.ofMillis(validityInMillis))); this.token = token; this.expiry = OffsetDateTime.now().plus(Duration.ofMillis(validityInMillis)); } 
@Override public OffsetDateTime getExpiresAt() { return expiry; } @Override public boolean isExpired() { return OffsetDateTime.now().isAfter(expiry); } } }
Can `ArmChallengeAuthenticationPolicy` be removed from the `experimental` package? It has already been copied over to `azure-core-management`. cc: @weidongxu-microsoft, @g2vinay
/**
 * Handles an ARM authentication challenge: on a 401 response carrying a
 * WWW-Authenticate header with a {@code claims} parameter, re-acquires a token with
 * those claims and re-sets the Authorization header.
 *
 * @param context the pipeline call context
 * @param response the HTTP response that may carry a challenge
 * @return {@code true} if the request was re-authorized and should be retried
 */
public Mono<Boolean> authorizeRequestOnChallenge(HttpPipelineCallContext context, HttpResponse response) {
    return Mono.defer(() -> {
        String authHeader = response.getHeaderValue(WWW_AUTHENTICATE);
        // Only a 401 with a WWW-Authenticate header can be re-authorized.
        if (response.getStatusCode() != 401 || authHeader == null) {
            return Mono.just(false);
        }
        for (AuthenticationChallenge authenticationChallenge : parseChallenges(authHeader)) {
            Map<String, String> extractedChallengeParams =
                parseChallengeParams(authenticationChallenge.getChallengeParameters());
            String encodedClaims = extractedChallengeParams.get(CLAIMS_PARAMETER);
            if (encodedClaims == null) {
                continue;
            }
            String claims = new String(Base64.getUrlDecoder().decode(encodedClaims), StandardCharsets.UTF_8);
            // FIX: use Optional.orElse instead of Optional.get() guarded by catching
            // NoSuchElementException — same fallback to the configured scopes, no
            // exception-driven control flow.
            String[] scopes = (String[]) context.getData(ARM_SCOPES_KEY).orElse(this.scopes);
            scopes = getScopes(context, scopes);
            // FIX: setAuthorizationHeader completes empty (Mono<Void>), so the original
            // .flatMap(b -> Mono.just(true)) never ran its mapper and the returned Mono
            // completed without emitting; thenReturn(true) emits on completion.
            return setAuthorizationHeader(context,
                new TokenRequestContext().addScopes(scopes).setClaims(claims))
                .thenReturn(true);
        }
        return Mono.just(false);
    });
}
if (response.getStatusCode() == 401 && authHeader != null) {
/**
 * Re-authorizes a request after an ARM claims challenge: decodes the {@code claims}
 * challenge parameter from the 401's WWW-Authenticate header and refreshes the
 * Authorization header with a token scoped to those claims.
 *
 * @param context the pipeline call context
 * @param response the HTTP response that may carry a challenge
 * @return {@code true} if the request was re-authorized and should be retried
 */
public Mono<Boolean> authorizeRequestOnChallenge(HttpPipelineCallContext context, HttpResponse response) {
    return Mono.defer(() -> {
        String authHeader = response.getHeaderValue(WWW_AUTHENTICATE);
        // Guard clause: anything other than a 401 with WWW-Authenticate is not a challenge.
        if (response.getStatusCode() != 401 || authHeader == null) {
            return Mono.just(false);
        }
        for (AuthenticationChallenge authenticationChallenge : parseChallenges(authHeader)) {
            Map<String, String> extractedChallengeParams =
                parseChallengeParams(authenticationChallenge.getChallengeParameters());
            String encodedClaims = extractedChallengeParams.get(CLAIMS_PARAMETER);
            if (encodedClaims == null) {
                continue;
            }
            String claims = new String(Base64.getUrlDecoder().decode(encodedClaims), StandardCharsets.UTF_8);
            // FIX: replaces Optional.get() + catch (NoSuchElementException) with orElse —
            // identical fallback to the configured scopes without exception control flow.
            String[] scopes = (String[]) context.getData(ARM_SCOPES_KEY).orElse(this.scopes);
            scopes = getScopes(context, scopes);
            // FIX: flatMap on the empty-completing Mono<Void> from setAuthorizationHeader
            // never emitted true; thenReturn(true) emits once the header is set.
            return setAuthorizationHeader(context,
                new TokenRequestContext().addScopes(scopes).setClaims(claims))
                .thenReturn(true);
        }
        return Mono.just(false);
    });
}
class ArmChallengeAuthenticationPolicy extends BearerTokenAuthenticationChallengePolicy { private static final Pattern AUTHENTICATION_CHALLENGE_PATTERN = Pattern.compile("(\\w+) ((?:\\w+=\".*?\"(?:, )?)+)(?:, )?"); private static final Pattern AUTHENTICATION_CHALLENGE_PARAMS_PATTERN = Pattern.compile("(?:(\\w+)=\"([^\"\"]*)\")+"); private static final String CLAIMS_PARAMETER = "claims"; private final String[] scopes; private final AzureEnvironment environment; private static final String ARM_SCOPES_KEY = "ARMScopes"; /** * Creates ArmChallengeAuthenticationPolicy. * * @param credential the token credential to authenticate the request * @param environment the environment with endpoints for authentication * @param scopes the scopes used in credential, using default scopes when empty */ public ArmChallengeAuthenticationPolicy(TokenCredential credential, AzureEnvironment environment, String... scopes) { super(credential, scopes); this.scopes = scopes; this.environment = environment; } @Override public Mono<Void> authorizeRequest(HttpPipelineCallContext context) { return Mono.defer(() -> { String[] scopes = this.scopes; scopes = getScopes(context, scopes); context.setData(ARM_SCOPES_KEY, scopes); return setAuthorizationHeader(context, new TokenRequestContext().addScopes(scopes)); }); } @Override private String[] getScopes(HttpPipelineCallContext context, String[] scopes) { if (CoreUtils.isNullOrEmpty(scopes)) { scopes = new String[1]; scopes[0] = ARMScopeHelper.getDefaultScopeFromRequest(context.getHttpRequest(), environment); } return scopes; } List<AuthenticationChallenge> parseChallenges(String header) { Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header); List<AuthenticationChallenge> challenges = new ArrayList<>(); while (matcher.find()) { challenges.add(new AuthenticationChallenge(matcher.group(1), matcher.group(2))); } return challenges; } Map<String, String> parseChallengeParams(String challengeParams) { Matcher matcher = 
AUTHENTICATION_CHALLENGE_PARAMS_PATTERN.matcher(challengeParams); Map<String, String> challengeParameters = new HashMap<>(); while (matcher.find()) { challengeParameters.put(matcher.group(1), matcher.group(2)); } return challengeParameters; } }
class ArmChallengeAuthenticationPolicy extends BearerTokenAuthenticationChallengePolicy { private static final Pattern AUTHENTICATION_CHALLENGE_PATTERN = Pattern.compile("(\\w+) ((?:\\w+=\".*?\"(?:, )?)+)(?:, )?"); private static final Pattern AUTHENTICATION_CHALLENGE_PARAMS_PATTERN = Pattern.compile("(?:(\\w+)=\"([^\"\"]*)\")+"); private static final String CLAIMS_PARAMETER = "claims"; private final String[] scopes; private final AzureEnvironment environment; private static final String ARM_SCOPES_KEY = "ARMScopes"; /** * Creates ArmChallengeAuthenticationPolicy. * * @param credential the token credential to authenticate the request * @param environment the environment with endpoints for authentication * @param scopes the scopes used in credential, using default scopes when empty */ public ArmChallengeAuthenticationPolicy(TokenCredential credential, AzureEnvironment environment, String... scopes) { super(credential, scopes); this.scopes = scopes; this.environment = environment; } @Override public Mono<Void> authorizeRequest(HttpPipelineCallContext context) { return Mono.defer(() -> { String[] scopes = this.scopes; scopes = getScopes(context, scopes); context.setData(ARM_SCOPES_KEY, scopes); return setAuthorizationHeader(context, new TokenRequestContext().addScopes(scopes)); }); } @Override private String[] getScopes(HttpPipelineCallContext context, String[] scopes) { if (CoreUtils.isNullOrEmpty(scopes)) { scopes = new String[1]; scopes[0] = ARMScopeHelper.getDefaultScopeFromRequest( context.getHttpRequest(), environment); } return scopes; } List<AuthenticationChallenge> parseChallenges(String header) { Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header); List<AuthenticationChallenge> challenges = new ArrayList<>(); while (matcher.find()) { challenges.add(new AuthenticationChallenge(matcher.group(1), matcher.group(2))); } return challenges; } Map<String, String> parseChallengeParams(String challengeParams) { Matcher matcher = 
AUTHENTICATION_CHALLENGE_PARAMS_PATTERN.matcher(challengeParams); Map<String, String> challengeParameters = new HashMap<>(); while (matcher.find()) { challengeParameters.put(matcher.group(1), matcher.group(2)); } return challengeParameters; } }
Yes, it can be removed.
/**
 * Handles an ARM claims challenge on a 401 response: decodes the challenge's
 * {@code claims} parameter and re-acquires the token with those claims before retry.
 *
 * @param context the pipeline call context
 * @param response the HTTP response that may carry a challenge
 * @return {@code true} if the request was re-authorized and should be retried
 */
public Mono<Boolean> authorizeRequestOnChallenge(HttpPipelineCallContext context, HttpResponse response) {
    return Mono.defer(() -> {
        String authHeader = response.getHeaderValue(WWW_AUTHENTICATE);
        if (response.getStatusCode() != 401 || authHeader == null) {
            return Mono.just(false);
        }
        for (AuthenticationChallenge authenticationChallenge : parseChallenges(authHeader)) {
            Map<String, String> extractedChallengeParams =
                parseChallengeParams(authenticationChallenge.getChallengeParameters());
            String encodedClaims = extractedChallengeParams.get(CLAIMS_PARAMETER);
            if (encodedClaims == null) {
                continue;
            }
            String claims = new String(Base64.getUrlDecoder().decode(encodedClaims), StandardCharsets.UTF_8);
            // FIX: Optional.orElse replaces Optional.get() + caught NoSuchElementException.
            String[] scopes = (String[]) context.getData(ARM_SCOPES_KEY).orElse(this.scopes);
            scopes = getScopes(context, scopes);
            // FIX: .flatMap on an empty-completing Mono<Void> never emitted; thenReturn
            // emits true once the Authorization header has been set.
            return setAuthorizationHeader(context,
                new TokenRequestContext().addScopes(scopes).setClaims(claims))
                .thenReturn(true);
        }
        return Mono.just(false);
    });
}
if (response.getStatusCode() == 401 && authHeader != null) {
/**
 * Re-authorizes a challenged request: for a 401 with a WWW-Authenticate header whose
 * challenge carries a {@code claims} parameter, refreshes the token with those claims.
 *
 * @param context the pipeline call context
 * @param response the HTTP response that may carry a challenge
 * @return {@code true} if the request was re-authorized and should be retried
 */
public Mono<Boolean> authorizeRequestOnChallenge(HttpPipelineCallContext context, HttpResponse response) {
    return Mono.defer(() -> {
        String authHeader = response.getHeaderValue(WWW_AUTHENTICATE);
        if (response.getStatusCode() != 401 || authHeader == null) {
            return Mono.just(false);
        }
        for (AuthenticationChallenge authenticationChallenge : parseChallenges(authHeader)) {
            Map<String, String> extractedChallengeParams =
                parseChallengeParams(authenticationChallenge.getChallengeParameters());
            String encodedClaims = extractedChallengeParams.get(CLAIMS_PARAMETER);
            if (encodedClaims == null) {
                continue;
            }
            String claims = new String(Base64.getUrlDecoder().decode(encodedClaims), StandardCharsets.UTF_8);
            // FIX: idiomatic Optional fallback instead of .get() with a caught
            // NoSuchElementException.
            String[] scopes = (String[]) context.getData(ARM_SCOPES_KEY).orElse(this.scopes);
            scopes = getScopes(context, scopes);
            // FIX: thenReturn(true) — the original flatMap on a Mono<Void> completed
            // empty without ever emitting true.
            return setAuthorizationHeader(context,
                new TokenRequestContext().addScopes(scopes).setClaims(claims))
                .thenReturn(true);
        }
        return Mono.just(false);
    });
}
class ArmChallengeAuthenticationPolicy extends BearerTokenAuthenticationChallengePolicy { private static final Pattern AUTHENTICATION_CHALLENGE_PATTERN = Pattern.compile("(\\w+) ((?:\\w+=\".*?\"(?:, )?)+)(?:, )?"); private static final Pattern AUTHENTICATION_CHALLENGE_PARAMS_PATTERN = Pattern.compile("(?:(\\w+)=\"([^\"\"]*)\")+"); private static final String CLAIMS_PARAMETER = "claims"; private final String[] scopes; private final AzureEnvironment environment; private static final String ARM_SCOPES_KEY = "ARMScopes"; /** * Creates ArmChallengeAuthenticationPolicy. * * @param credential the token credential to authenticate the request * @param environment the environment with endpoints for authentication * @param scopes the scopes used in credential, using default scopes when empty */ public ArmChallengeAuthenticationPolicy(TokenCredential credential, AzureEnvironment environment, String... scopes) { super(credential, scopes); this.scopes = scopes; this.environment = environment; } @Override public Mono<Void> authorizeRequest(HttpPipelineCallContext context) { return Mono.defer(() -> { String[] scopes = this.scopes; scopes = getScopes(context, scopes); context.setData(ARM_SCOPES_KEY, scopes); return setAuthorizationHeader(context, new TokenRequestContext().addScopes(scopes)); }); } @Override private String[] getScopes(HttpPipelineCallContext context, String[] scopes) { if (CoreUtils.isNullOrEmpty(scopes)) { scopes = new String[1]; scopes[0] = ARMScopeHelper.getDefaultScopeFromRequest(context.getHttpRequest(), environment); } return scopes; } List<AuthenticationChallenge> parseChallenges(String header) { Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header); List<AuthenticationChallenge> challenges = new ArrayList<>(); while (matcher.find()) { challenges.add(new AuthenticationChallenge(matcher.group(1), matcher.group(2))); } return challenges; } Map<String, String> parseChallengeParams(String challengeParams) { Matcher matcher = 
AUTHENTICATION_CHALLENGE_PARAMS_PATTERN.matcher(challengeParams); Map<String, String> challengeParameters = new HashMap<>(); while (matcher.find()) { challengeParameters.put(matcher.group(1), matcher.group(2)); } return challengeParameters; } }
class ArmChallengeAuthenticationPolicy extends BearerTokenAuthenticationChallengePolicy { private static final Pattern AUTHENTICATION_CHALLENGE_PATTERN = Pattern.compile("(\\w+) ((?:\\w+=\".*?\"(?:, )?)+)(?:, )?"); private static final Pattern AUTHENTICATION_CHALLENGE_PARAMS_PATTERN = Pattern.compile("(?:(\\w+)=\"([^\"\"]*)\")+"); private static final String CLAIMS_PARAMETER = "claims"; private final String[] scopes; private final AzureEnvironment environment; private static final String ARM_SCOPES_KEY = "ARMScopes"; /** * Creates ArmChallengeAuthenticationPolicy. * * @param credential the token credential to authenticate the request * @param environment the environment with endpoints for authentication * @param scopes the scopes used in credential, using default scopes when empty */ public ArmChallengeAuthenticationPolicy(TokenCredential credential, AzureEnvironment environment, String... scopes) { super(credential, scopes); this.scopes = scopes; this.environment = environment; } @Override public Mono<Void> authorizeRequest(HttpPipelineCallContext context) { return Mono.defer(() -> { String[] scopes = this.scopes; scopes = getScopes(context, scopes); context.setData(ARM_SCOPES_KEY, scopes); return setAuthorizationHeader(context, new TokenRequestContext().addScopes(scopes)); }); } @Override private String[] getScopes(HttpPipelineCallContext context, String[] scopes) { if (CoreUtils.isNullOrEmpty(scopes)) { scopes = new String[1]; scopes[0] = ARMScopeHelper.getDefaultScopeFromRequest( context.getHttpRequest(), environment); } return scopes; } List<AuthenticationChallenge> parseChallenges(String header) { Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header); List<AuthenticationChallenge> challenges = new ArrayList<>(); while (matcher.find()) { challenges.add(new AuthenticationChallenge(matcher.group(1), matcher.group(2))); } return challenges; } Map<String, String> parseChallengeParams(String challengeParams) { Matcher matcher = 
AUTHENTICATION_CHALLENGE_PARAMS_PATTERN.matcher(challengeParams); Map<String, String> challengeParameters = new HashMap<>(); while (matcher.find()) { challengeParameters.put(matcher.group(1), matcher.group(2)); } return challengeParameters; } }
yeah, we can remove it.
public Mono<Boolean> authorizeRequestOnChallenge(HttpPipelineCallContext context, HttpResponse response) { return Mono.defer(() -> { String authHeader = response.getHeaderValue(WWW_AUTHENTICATE); if (response.getStatusCode() == 401 && authHeader != null) { List<AuthenticationChallenge> challenges = parseChallenges(authHeader); for (AuthenticationChallenge authenticationChallenge : challenges) { Map<String, String> extractedChallengeParams = parseChallengeParams(authenticationChallenge.getChallengeParameters()); if (extractedChallengeParams.containsKey(CLAIMS_PARAMETER)) { String claims = new String(Base64.getUrlDecoder() .decode(extractedChallengeParams.get(CLAIMS_PARAMETER)), StandardCharsets.UTF_8); String[] scopes; try { scopes = (String[]) context.getData(ARM_SCOPES_KEY).get(); } catch (NoSuchElementException e) { scopes = this.scopes; } scopes = getScopes(context, scopes); return setAuthorizationHeader(context, new TokenRequestContext() .addScopes(scopes).setClaims(claims)) .flatMap(b -> Mono.just(true)); } } } return Mono.just(false); }); }
if (response.getStatusCode() == 401 && authHeader != null) {
public Mono<Boolean> authorizeRequestOnChallenge(HttpPipelineCallContext context, HttpResponse response) { return Mono.defer(() -> { String authHeader = response.getHeaderValue(WWW_AUTHENTICATE); if (!(response.getStatusCode() == 401 && authHeader != null)) { return Mono.just(false); } else { List<AuthenticationChallenge> challenges = parseChallenges(authHeader); for (AuthenticationChallenge authenticationChallenge : challenges) { Map<String, String> extractedChallengeParams = parseChallengeParams(authenticationChallenge.getChallengeParameters()); if (extractedChallengeParams.containsKey(CLAIMS_PARAMETER)) { String claims = new String(Base64.getUrlDecoder() .decode(extractedChallengeParams.get(CLAIMS_PARAMETER)), StandardCharsets.UTF_8); String[] scopes; try { scopes = (String[]) context.getData(ARM_SCOPES_KEY).get(); } catch (NoSuchElementException e) { scopes = this.scopes; } scopes = getScopes(context, scopes); return setAuthorizationHeader(context, new TokenRequestContext() .addScopes(scopes).setClaims(claims)) .flatMap(b -> Mono.just(true)); } } return Mono.just(false); } }); }
class ArmChallengeAuthenticationPolicy extends BearerTokenAuthenticationChallengePolicy { private static final Pattern AUTHENTICATION_CHALLENGE_PATTERN = Pattern.compile("(\\w+) ((?:\\w+=\".*?\"(?:, )?)+)(?:, )?"); private static final Pattern AUTHENTICATION_CHALLENGE_PARAMS_PATTERN = Pattern.compile("(?:(\\w+)=\"([^\"\"]*)\")+"); private static final String CLAIMS_PARAMETER = "claims"; private final String[] scopes; private final AzureEnvironment environment; private static final String ARM_SCOPES_KEY = "ARMScopes"; /** * Creates ArmChallengeAuthenticationPolicy. * * @param credential the token credential to authenticate the request * @param environment the environment with endpoints for authentication * @param scopes the scopes used in credential, using default scopes when empty */ public ArmChallengeAuthenticationPolicy(TokenCredential credential, AzureEnvironment environment, String... scopes) { super(credential, scopes); this.scopes = scopes; this.environment = environment; } @Override public Mono<Void> authorizeRequest(HttpPipelineCallContext context) { return Mono.defer(() -> { String[] scopes = this.scopes; scopes = getScopes(context, scopes); context.setData(ARM_SCOPES_KEY, scopes); return setAuthorizationHeader(context, new TokenRequestContext().addScopes(scopes)); }); } @Override private String[] getScopes(HttpPipelineCallContext context, String[] scopes) { if (CoreUtils.isNullOrEmpty(scopes)) { scopes = new String[1]; scopes[0] = ARMScopeHelper.getDefaultScopeFromRequest(context.getHttpRequest(), environment); } return scopes; } List<AuthenticationChallenge> parseChallenges(String header) { Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header); List<AuthenticationChallenge> challenges = new ArrayList<>(); while (matcher.find()) { challenges.add(new AuthenticationChallenge(matcher.group(1), matcher.group(2))); } return challenges; } Map<String, String> parseChallengeParams(String challengeParams) { Matcher matcher = 
AUTHENTICATION_CHALLENGE_PARAMS_PATTERN.matcher(challengeParams); Map<String, String> challengeParameters = new HashMap<>(); while (matcher.find()) { challengeParameters.put(matcher.group(1), matcher.group(2)); } return challengeParameters; } }
class ArmChallengeAuthenticationPolicy extends BearerTokenAuthenticationChallengePolicy { private static final Pattern AUTHENTICATION_CHALLENGE_PATTERN = Pattern.compile("(\\w+) ((?:\\w+=\".*?\"(?:, )?)+)(?:, )?"); private static final Pattern AUTHENTICATION_CHALLENGE_PARAMS_PATTERN = Pattern.compile("(?:(\\w+)=\"([^\"\"]*)\")+"); private static final String CLAIMS_PARAMETER = "claims"; private final String[] scopes; private final AzureEnvironment environment; private static final String ARM_SCOPES_KEY = "ARMScopes"; /** * Creates ArmChallengeAuthenticationPolicy. * * @param credential the token credential to authenticate the request * @param environment the environment with endpoints for authentication * @param scopes the scopes used in credential, using default scopes when empty */ public ArmChallengeAuthenticationPolicy(TokenCredential credential, AzureEnvironment environment, String... scopes) { super(credential, scopes); this.scopes = scopes; this.environment = environment; } @Override public Mono<Void> authorizeRequest(HttpPipelineCallContext context) { return Mono.defer(() -> { String[] scopes = this.scopes; scopes = getScopes(context, scopes); context.setData(ARM_SCOPES_KEY, scopes); return setAuthorizationHeader(context, new TokenRequestContext().addScopes(scopes)); }); } @Override private String[] getScopes(HttpPipelineCallContext context, String[] scopes) { if (CoreUtils.isNullOrEmpty(scopes)) { scopes = new String[1]; scopes[0] = ARMScopeHelper.getDefaultScopeFromRequest( context.getHttpRequest(), environment); } return scopes; } List<AuthenticationChallenge> parseChallenges(String header) { Matcher matcher = AUTHENTICATION_CHALLENGE_PATTERN.matcher(header); List<AuthenticationChallenge> challenges = new ArrayList<>(); while (matcher.find()) { challenges.add(new AuthenticationChallenge(matcher.group(1), matcher.group(2))); } return challenges; } Map<String, String> parseChallengeParams(String challengeParams) { Matcher matcher = 
AUTHENTICATION_CHALLENGE_PARAMS_PATTERN.matcher(challengeParams); Map<String, String> challengeParameters = new HashMap<>(); while (matcher.find()) { challengeParameters.put(matcher.group(1), matcher.group(2)); } return challengeParameters; } }
Given its concurrent safe map, I think there won't be any race with calling clear+computeIfAbsent concurrently.
public static UrlBuilder parse(String url) { /* * Parsing the URL string into a UrlBuilder is a non-trivial operation and many calls into RestProxy will use * the same root URL string. To save CPU costs we retain a parsed version of the URL string in memory. Given * that UrlBuilder is mutable we must return a cloned version of the cached UrlBuilder. */ String concurrentSafeUrl = (url == null) ? "" : url; if (PARSED_URLS.size() >= MAX_CACHE_SIZE) { PARSED_URLS.clear(); } return PARSED_URLS.computeIfAbsent(concurrentSafeUrl, u -> new UrlBuilder().with(u, UrlTokenizerState.SCHEME_OR_HOST)).copy(); }
PARSED_URLS.clear();
public static UrlBuilder parse(String url) { /* * Parsing the URL string into a UrlBuilder is a non-trivial operation and many calls into RestProxy will use * the same root URL string. To save CPU costs we retain a parsed version of the URL string in memory. Given * that UrlBuilder is mutable we must return a cloned version of the cached UrlBuilder. */ String concurrentSafeUrl = (url == null) ? "" : url; if (PARSED_URLS.size() >= MAX_CACHE_SIZE) { PARSED_URLS.clear(); } return PARSED_URLS.computeIfAbsent(concurrentSafeUrl, u -> new UrlBuilder().with(u, UrlTokenizerState.SCHEME_OR_HOST)).copy(); }
class UrlBuilder { private static final Map<String, UrlBuilder> PARSED_URLS = new ConcurrentHashMap<>(); private static final int MAX_CACHE_SIZE = 10000; private String scheme; private String host; private String port; private String path; private final Map<String, String> query = new LinkedHashMap<>(); /** * Set the scheme/protocol that will be used to build the final URL. * * @param scheme The scheme/protocol that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setScheme(String scheme) { if (scheme == null || scheme.isEmpty()) { this.scheme = null; } else { with(scheme, UrlTokenizerState.SCHEME); } return this; } /** * Get the scheme/protocol that has been assigned to this UrlBuilder. * * @return the scheme/protocol that has been assigned to this UrlBuilder. */ public String getScheme() { return scheme; } /** * Set the host that will be used to build the final URL. * * @param host The host that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setHost(String host) { if (host == null || host.isEmpty()) { this.host = null; } else { with(host, UrlTokenizerState.SCHEME_OR_HOST); } return this; } /** * Get the host that has been assigned to this UrlBuilder. * * @return the host that has been assigned to this UrlBuilder. */ public String getHost() { return host; } /** * Set the port that will be used to build the final URL. * * @param port The port that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPort(String port) { if (port == null || port.isEmpty()) { this.port = null; } else { with(port, UrlTokenizerState.PORT); } return this; } /** * Set the port that will be used to build the final URL. * * @param port The port that will be used to build the final URL. 
* @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPort(int port) { return setPort(Integer.toString(port)); } /** * Get the port that has been assigned to this UrlBuilder. * * @return the port that has been assigned to this UrlBuilder. */ public Integer getPort() { return port == null ? null : Integer.valueOf(port); } /** * Set the path that will be used to build the final URL. * * @param path The path that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPath(String path) { if (path == null || path.isEmpty()) { this.path = null; } else { with(path, UrlTokenizerState.PATH); } return this; } /** * Get the path that has been assigned to this UrlBuilder. * * @return the path that has been assigned to this UrlBuilder. */ public String getPath() { return path; } /** * Set the provided query parameter name and encoded value to query string for the final URL. * * @param queryParameterName The name of the query parameter. * @param queryParameterEncodedValue The encoded value of the query parameter. * @return The provided query parameter name and encoded value to query string for the final URL. */ public UrlBuilder setQueryParameter(String queryParameterName, String queryParameterEncodedValue) { query.put(queryParameterName, queryParameterEncodedValue); return this; } /** * Set the query that will be used to build the final URL. * * @param query The query that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setQuery(String query) { if (query == null || query.isEmpty()) { this.query.clear(); } else { with(query, UrlTokenizerState.QUERY); } return this; } /** * Get the query that has been assigned to this UrlBuilder. * * @return the query that has been assigned to this UrlBuilder. 
*/ public Map<String, String> getQuery() { return query; } /** * Returns the query string currently configured in this UrlBuilder instance. * @return A String containing the currently configured query string. */ public String getQueryString() { if (query.isEmpty()) { return ""; } StringBuilder queryBuilder = new StringBuilder("?"); for (Map.Entry<String, String> entry : query.entrySet()) { if (queryBuilder.length() > 1) { queryBuilder.append("&"); } queryBuilder.append(entry.getKey()); queryBuilder.append("="); queryBuilder.append(entry.getValue()); } return queryBuilder.toString(); } private UrlBuilder with(String text, UrlTokenizerState startState) { final UrlTokenizer tokenizer = new UrlTokenizer(text, startState); while (tokenizer.next()) { final UrlToken token = tokenizer.current(); final String tokenText = token.text(); final UrlTokenType tokenType = token.type(); switch (tokenType) { case SCHEME: scheme = emptyToNull(tokenText); break; case HOST: host = emptyToNull(tokenText); break; case PORT: port = emptyToNull(tokenText); break; case PATH: final String tokenPath = emptyToNull(tokenText); if (path == null || path.equals("/") || !tokenPath.equals("/")) { path = tokenPath; } break; case QUERY: String queryString = emptyToNull(tokenText); if (queryString != null) { if (queryString.startsWith("?")) { queryString = queryString.substring(1); } for (String entry : queryString.split("&")) { String[] nameValue = entry.split("="); if (nameValue.length == 2) { setQueryParameter(nameValue[0], nameValue[1]); } else { setQueryParameter(nameValue[0], ""); } } } break; default: break; } } return this; } /** * Get the URL that is being built. * * @return The URL that is being built. * @throws MalformedURLException if the URL is not fully formed. */ public URL toUrl() throws MalformedURLException { return new URL(toString()); } /** * Get the string representation of the URL that is being built. * * @return The string representation of the URL that is being built. 
*/ @Override public String toString() { final StringBuilder result = new StringBuilder(); final boolean isAbsolutePath = path != null && (path.startsWith("http: if (!isAbsolutePath) { if (scheme != null) { result.append(scheme); if (!scheme.endsWith(": result.append(": } } if (host != null) { result.append(host); } } if (port != null) { result.append(":"); result.append(port); } if (path != null) { if (result.length() != 0 && !path.startsWith("/")) { result.append('/'); } result.append(path); } result.append(getQueryString()); return result.toString(); } /** * Returns the map of parsed URLs and their {@link UrlBuilder UrlBuilders} * @return the map of parsed URLs and their {@link UrlBuilder UrlBuilders} */ static Map<String, UrlBuilder> getParsedUrls() { return PARSED_URLS; } /** * Parses the passed {@code url} string into a UrlBuilder. * * @param url The URL string to parse. * @return The UrlBuilder that was created from parsing the passed URL string. */ /** * Parse a UrlBuilder from the provided URL object. * * @param url The URL object to parse. * @return The UrlBuilder that was parsed from the URL object. */ public static UrlBuilder parse(URL url) { final UrlBuilder result = new UrlBuilder(); if (url != null) { final String protocol = url.getProtocol(); if (protocol != null && !protocol.isEmpty()) { result.setScheme(protocol); } final String host = url.getHost(); if (host != null && !host.isEmpty()) { result.setHost(host); } final int port = url.getPort(); if (port != -1) { result.setPort(port); } final String path = url.getPath(); if (path != null && !path.isEmpty()) { result.setPath(path); } final String query = url.getQuery(); if (query != null && !query.isEmpty()) { result.setQuery(query); } } return result; } private static String emptyToNull(String value) { return value == null || value.isEmpty() ? 
null : value; } private UrlBuilder copy() { UrlBuilder copy = new UrlBuilder(); copy.scheme = this.scheme; copy.host = this.host; copy.path = this.path; copy.port = this.port; copy.query.putAll(this.query); return copy; } }
class UrlBuilder { private static final Map<String, UrlBuilder> PARSED_URLS = new ConcurrentHashMap<>(); private static final int MAX_CACHE_SIZE = 10000; private String scheme; private String host; private String port; private String path; private final Map<String, String> query = new LinkedHashMap<>(); /** * Set the scheme/protocol that will be used to build the final URL. * * @param scheme The scheme/protocol that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setScheme(String scheme) { if (scheme == null || scheme.isEmpty()) { this.scheme = null; } else { with(scheme, UrlTokenizerState.SCHEME); } return this; } /** * Get the scheme/protocol that has been assigned to this UrlBuilder. * * @return the scheme/protocol that has been assigned to this UrlBuilder. */ public String getScheme() { return scheme; } /** * Set the host that will be used to build the final URL. * * @param host The host that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setHost(String host) { if (host == null || host.isEmpty()) { this.host = null; } else { with(host, UrlTokenizerState.SCHEME_OR_HOST); } return this; } /** * Get the host that has been assigned to this UrlBuilder. * * @return the host that has been assigned to this UrlBuilder. */ public String getHost() { return host; } /** * Set the port that will be used to build the final URL. * * @param port The port that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPort(String port) { if (port == null || port.isEmpty()) { this.port = null; } else { with(port, UrlTokenizerState.PORT); } return this; } /** * Set the port that will be used to build the final URL. * * @param port The port that will be used to build the final URL. 
* @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPort(int port) { return setPort(Integer.toString(port)); } /** * Get the port that has been assigned to this UrlBuilder. * * @return the port that has been assigned to this UrlBuilder. */ public Integer getPort() { return port == null ? null : Integer.valueOf(port); } /** * Set the path that will be used to build the final URL. * * @param path The path that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPath(String path) { if (path == null || path.isEmpty()) { this.path = null; } else { with(path, UrlTokenizerState.PATH); } return this; } /** * Get the path that has been assigned to this UrlBuilder. * * @return the path that has been assigned to this UrlBuilder. */ public String getPath() { return path; } /** * Set the provided query parameter name and encoded value to query string for the final URL. * * @param queryParameterName The name of the query parameter. * @param queryParameterEncodedValue The encoded value of the query parameter. * @return The provided query parameter name and encoded value to query string for the final URL. */ public UrlBuilder setQueryParameter(String queryParameterName, String queryParameterEncodedValue) { query.put(queryParameterName, queryParameterEncodedValue); return this; } /** * Set the query that will be used to build the final URL. * * @param query The query that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setQuery(String query) { if (query == null || query.isEmpty()) { this.query.clear(); } else { with(query, UrlTokenizerState.QUERY); } return this; } /** * Get the query that has been assigned to this UrlBuilder. * * @return the query that has been assigned to this UrlBuilder. 
*/ public Map<String, String> getQuery() { return query; } /** * Returns the query string currently configured in this UrlBuilder instance. * @return A String containing the currently configured query string. */ public String getQueryString() { if (query.isEmpty()) { return ""; } StringBuilder queryBuilder = new StringBuilder("?"); for (Map.Entry<String, String> entry : query.entrySet()) { if (queryBuilder.length() > 1) { queryBuilder.append("&"); } queryBuilder.append(entry.getKey()); queryBuilder.append("="); queryBuilder.append(entry.getValue()); } return queryBuilder.toString(); } private UrlBuilder with(String text, UrlTokenizerState startState) { final UrlTokenizer tokenizer = new UrlTokenizer(text, startState); while (tokenizer.next()) { final UrlToken token = tokenizer.current(); final String tokenText = token.text(); final UrlTokenType tokenType = token.type(); switch (tokenType) { case SCHEME: scheme = emptyToNull(tokenText); break; case HOST: host = emptyToNull(tokenText); break; case PORT: port = emptyToNull(tokenText); break; case PATH: final String tokenPath = emptyToNull(tokenText); if (path == null || path.equals("/") || !tokenPath.equals("/")) { path = tokenPath; } break; case QUERY: String queryString = emptyToNull(tokenText); if (queryString != null) { if (queryString.startsWith("?")) { queryString = queryString.substring(1); } for (String entry : queryString.split("&")) { String[] nameValue = entry.split("="); if (nameValue.length == 2) { setQueryParameter(nameValue[0], nameValue[1]); } else { setQueryParameter(nameValue[0], ""); } } } break; default: break; } } return this; } /** * Get the URL that is being built. * * @return The URL that is being built. * @throws MalformedURLException if the URL is not fully formed. */ public URL toUrl() throws MalformedURLException { return new URL(toString()); } /** * Get the string representation of the URL that is being built. * * @return The string representation of the URL that is being built. 
*/ @Override public String toString() { final StringBuilder result = new StringBuilder(); final boolean isAbsolutePath = path != null && (path.startsWith("http: if (!isAbsolutePath) { if (scheme != null) { result.append(scheme); if (!scheme.endsWith(": result.append(": } } if (host != null) { result.append(host); } } if (port != null) { result.append(":"); result.append(port); } if (path != null) { if (result.length() != 0 && !path.startsWith("/")) { result.append('/'); } result.append(path); } result.append(getQueryString()); return result.toString(); } /** * Returns the map of parsed URLs and their {@link UrlBuilder UrlBuilders} * @return the map of parsed URLs and their {@link UrlBuilder UrlBuilders} */ static Map<String, UrlBuilder> getParsedUrls() { return PARSED_URLS; } /** * Parses the passed {@code url} string into a UrlBuilder. * * @param url The URL string to parse. * @return The UrlBuilder that was created from parsing the passed URL string. */ /** * Parse a UrlBuilder from the provided URL object. * * @param url The URL object to parse. * @return The UrlBuilder that was parsed from the URL object. */ public static UrlBuilder parse(URL url) { final UrlBuilder result = new UrlBuilder(); if (url != null) { final String protocol = url.getProtocol(); if (protocol != null && !protocol.isEmpty()) { result.setScheme(protocol); } final String host = url.getHost(); if (host != null && !host.isEmpty()) { result.setHost(host); } final int port = url.getPort(); if (port != -1) { result.setPort(port); } final String path = url.getPath(); if (path != null && !path.isEmpty()) { result.setPath(path); } final String query = url.getQuery(); if (query != null && !query.isEmpty()) { result.setQuery(query); } } return result; } private static String emptyToNull(String value) { return value == null || value.isEmpty() ? 
null : value; } private UrlBuilder copy() { UrlBuilder copy = new UrlBuilder(); copy.scheme = this.scheme; copy.host = this.host; copy.path = this.path; copy.port = this.port; copy.query.putAll(this.query); return copy; } }
Don't throw RuntimeException directly. Use `IllegalArgumentException`
private void updateSettingValue() { try { super.setValue(writeFeatureFlagConfigurationSetting(this)); } catch (IOException exception) { LOGGER.logExceptionAsError(new RuntimeException("Can't parse Feature Flag configuration setting value.")); } }
LOGGER.logExceptionAsError(new RuntimeException("Can't parse Feature Flag configuration setting value."));
private void updateSettingValue() { try { super.setValue(writeFeatureFlagConfigurationSetting(this)); } catch (IOException exception) { LOGGER.logExceptionAsError(new IllegalArgumentException( "Can't parse Feature Flag configuration setting value.", exception)); } }
class FeatureFlagConfigurationSetting extends ConfigurationSetting { private static final ClientLogger LOGGER = new ClientLogger(FeatureFlagConfigurationSetting.class); private static final String FEATURE_FLAG_CONTENT_TYPE = "application/vnd.microsoft.appconfig.ff+json;charset=utf-8"; private String featureId; private boolean isEnabled; private String description; private String displayName; private List<FeatureFlagFilter> clientFilters; /** * A prefix is used to construct a feature flag configuration setting's key. */ public static final String KEY_PREFIX = ".appconfig.featureflag/"; /** * The constructor for a feature flag configuration setting. * * @param featureId A feature flag identification value that used to construct in setting's key. The key of setting * is {@code KEY_PREFIX} concatenate {@code featureId}. * @param isEnabled A boolean value to turn on/off the feature flag setting. */ public FeatureFlagConfigurationSetting(String featureId, boolean isEnabled) { this.featureId = featureId; this.isEnabled = isEnabled; super.setKey(KEY_PREFIX + featureId); super.setContentType(FEATURE_FLAG_CONTENT_TYPE); } /** * Sets the key of this setting. * * @param key The key to associate with this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setKey(String key) { super.setKey(key); return this; } /** * Sets the value of this setting. * * @param value The value to associate with this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. 
*/ @Override public FeatureFlagConfigurationSetting setValue(String value) { super.setValue(value); final FeatureFlagConfigurationSetting updatedSetting = readFeatureFlagConfigurationSettingValue(value); this.featureId = updatedSetting.getFeatureId(); this.description = updatedSetting.getDescription(); this.isEnabled = updatedSetting.isEnabled(); this.displayName = updatedSetting.getDisplayName(); if (updatedSetting.getClientFilters() != null) { this.clientFilters = StreamSupport.stream(updatedSetting.getClientFilters().spliterator(), false) .collect(Collectors.toList()); } else { this.clientFilters = null; } return this; } /** * Sets the label of this configuration setting. {@link * set. * * @param label The label of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setLabel(String label) { super.setLabel(label); return this; } /** * Sets the content type. By default, the content type is null. * * @param contentType The content type of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setContentType(String contentType) { super.setContentType(contentType); return this; } /** * Sets the ETag for this configuration setting. * * @param etag The ETag for the configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setETag(String etag) { super.setETag(etag); return this; } /** * Sets the tags for this configuration setting. * * @param tags The tags to add to this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setTags(Map<String, String> tags) { super.setTags(tags); return this; } /** * Get the feature ID of this configuration setting. 
* * @return the feature ID of this configuration setting. */ public String getFeatureId() { return featureId; } /** * Set the feature ID of this configuration setting. * * @param featureId the feature ID of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ public FeatureFlagConfigurationSetting setFeatureId(String featureId) { this.featureId = featureId; super.setKey(KEY_PREFIX + featureId); updateSettingValue(); return this; } /** * Get the boolean indicator to show if the setting is turn on or off. * * @return the boolean indicator to show if the setting is turn on or off. */ public boolean isEnabled() { return this.isEnabled; } /** * Set the boolean indicator to show if the setting is turn on or off. * * @param isEnabled the boolean indicator to show if the setting is turn on or off. * @return The updated {@link FeatureFlagConfigurationSetting} object. */ public FeatureFlagConfigurationSetting setEnabled(boolean isEnabled) { this.isEnabled = isEnabled; updateSettingValue(); return this; } /** * Get the description of this configuration setting. * * @return the description of this configuration setting. */ public String getDescription() { return description; } /** * Set the description of this configuration setting. * * @param description the description of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ public FeatureFlagConfigurationSetting setDescription(String description) { this.description = description; updateSettingValue(); return this; } /** * Get the display name of this configuration setting. * * @return the display name of this configuration setting. */ public String getDisplayName() { return displayName; } /** * Set the display name of this configuration setting. * * @param displayName the display name of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. 
*/ public FeatureFlagConfigurationSetting setDisplayName(String displayName) { this.displayName = displayName; updateSettingValue(); return this; } /** * Gets the feature flag filters of this configuration setting. * * @return the feature flag filters of this configuration setting. */ public List<FeatureFlagFilter> getClientFilters() { return clientFilters; } /** * Sets the feature flag filters of this configuration setting. * * @param clientFilters the feature flag filters of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ public FeatureFlagConfigurationSetting setClientFilters(List<FeatureFlagFilter> clientFilters) { this.clientFilters = clientFilters; updateSettingValue(); return this; } /** * Add a feature flag filter to this configuration setting. * * @param clientFilter a feature flag filter to add to this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ public FeatureFlagConfigurationSetting addClientFilter(FeatureFlagFilter clientFilter) { clientFilters.add(clientFilter); updateSettingValue(); return this; } }
class FeatureFlagConfigurationSetting extends ConfigurationSetting { private static final ClientLogger LOGGER = new ClientLogger(FeatureFlagConfigurationSetting.class); private static final String FEATURE_FLAG_CONTENT_TYPE = "application/vnd.microsoft.appconfig.ff+json;charset=utf-8"; private String featureId; private boolean isEnabled; private String description; private String displayName; private List<FeatureFlagFilter> clientFilters; /** * A prefix is used to construct a feature flag configuration setting's key. */ public static final String KEY_PREFIX = ".appconfig.featureflag/"; /** * The constructor for a feature flag configuration setting. * * @param featureId A feature flag identification value that used to construct in setting's key. The key of setting * is {@code KEY_PREFIX} concatenate {@code featureId}. * @param isEnabled A boolean value to turn on/off the feature flag setting. */ public FeatureFlagConfigurationSetting(String featureId, boolean isEnabled) { this.featureId = featureId; this.isEnabled = isEnabled; super.setKey(KEY_PREFIX + featureId); super.setContentType(FEATURE_FLAG_CONTENT_TYPE); } /** * Sets the key of this setting. * * @param key The key to associate with this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setKey(String key) { super.setKey(key); return this; } /** * Sets the value of this setting. * * @param value The value to associate with this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. 
*/ @Override public FeatureFlagConfigurationSetting setValue(String value) { super.setValue(value); final FeatureFlagConfigurationSetting updatedSetting = readFeatureFlagConfigurationSettingValue(value); this.featureId = updatedSetting.getFeatureId(); this.description = updatedSetting.getDescription(); this.isEnabled = updatedSetting.isEnabled(); this.displayName = updatedSetting.getDisplayName(); if (updatedSetting.getClientFilters() != null) { this.clientFilters = StreamSupport.stream(updatedSetting.getClientFilters().spliterator(), false) .collect(Collectors.toList()); } else { this.clientFilters = null; } return this; } /** * Sets the label of this configuration setting. {@link * set. * * @param label The label of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setLabel(String label) { super.setLabel(label); return this; } /** * Sets the content type. By default, the content type is null. * * @param contentType The content type of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setContentType(String contentType) { super.setContentType(contentType); return this; } /** * Sets the ETag for this configuration setting. * * @param etag The ETag for the configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setETag(String etag) { super.setETag(etag); return this; } /** * Sets the tags for this configuration setting. * * @param tags The tags to add to this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ @Override public FeatureFlagConfigurationSetting setTags(Map<String, String> tags) { super.setTags(tags); return this; } /** * Get the feature ID of this configuration setting. 
* * @return the feature ID of this configuration setting. */ public String getFeatureId() { return featureId; } /** * Set the feature ID of this configuration setting. * * @param featureId the feature ID of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setFeatureId(String featureId) { this.featureId = featureId; super.setKey(KEY_PREFIX + featureId); updateSettingValue(); return this; } /** * Get the boolean indicator to show if the setting is turn on or off. * * @return the boolean indicator to show if the setting is turn on or off. */ public boolean isEnabled() { return this.isEnabled; } /** * Set the boolean indicator to show if the setting is turn on or off. * * @param isEnabled the boolean indicator to show if the setting is turn on or off. * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setEnabled(boolean isEnabled) { this.isEnabled = isEnabled; updateSettingValue(); return this; } /** * Get the description of this configuration setting. * * @return the description of this configuration setting. */ public String getDescription() { return description; } /** * Set the description of this configuration setting. * * @param description the description of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setDescription(String description) { this.description = description; updateSettingValue(); return this; } /** * Get the display name of this configuration setting. * * @return the display name of this configuration setting. 
*/ public String getDisplayName() { return displayName; } /** * Set the display name of this configuration setting. * * @param displayName the display name of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setDisplayName(String displayName) { this.displayName = displayName; updateSettingValue(); return this; } /** * Gets the feature flag filters of this configuration setting. * * @return the feature flag filters of this configuration setting. */ public List<FeatureFlagFilter> getClientFilters() { return clientFilters; } /** * Sets the feature flag filters of this configuration setting. * * @param clientFilters the feature flag filters of this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. * @throws IllegalArgumentException if the setting's {@code value} is an invalid JSON format. */ public FeatureFlagConfigurationSetting setClientFilters(List<FeatureFlagFilter> clientFilters) { this.clientFilters = clientFilters; updateSettingValue(); return this; } /** * Add a feature flag filter to this configuration setting. * * @param clientFilter a feature flag filter to add to this configuration setting. * * @return The updated {@link FeatureFlagConfigurationSetting} object. */ public FeatureFlagConfigurationSetting addClientFilter(FeatureFlagFilter clientFilter) { clientFilters.add(clientFilter); updateSettingValue(); return this; } }
Yes, there won't be an issue as we are using concurrent hashmap.
public static UrlBuilder parse(String url) { /* * Parsing the URL string into a UrlBuilder is a non-trivial operation and many calls into RestProxy will use * the same root URL string. To save CPU costs we retain a parsed version of the URL string in memory. Given * that UrlBuilder is mutable we must return a cloned version of the cached UrlBuilder. */ String concurrentSafeUrl = (url == null) ? "" : url; if (PARSED_URLS.size() >= MAX_CACHE_SIZE) { PARSED_URLS.clear(); } return PARSED_URLS.computeIfAbsent(concurrentSafeUrl, u -> new UrlBuilder().with(u, UrlTokenizerState.SCHEME_OR_HOST)).copy(); }
PARSED_URLS.clear();
public static UrlBuilder parse(String url) { /* * Parsing the URL string into a UrlBuilder is a non-trivial operation and many calls into RestProxy will use * the same root URL string. To save CPU costs we retain a parsed version of the URL string in memory. Given * that UrlBuilder is mutable we must return a cloned version of the cached UrlBuilder. */ String concurrentSafeUrl = (url == null) ? "" : url; if (PARSED_URLS.size() >= MAX_CACHE_SIZE) { PARSED_URLS.clear(); } return PARSED_URLS.computeIfAbsent(concurrentSafeUrl, u -> new UrlBuilder().with(u, UrlTokenizerState.SCHEME_OR_HOST)).copy(); }
class UrlBuilder { private static final Map<String, UrlBuilder> PARSED_URLS = new ConcurrentHashMap<>(); private static final int MAX_CACHE_SIZE = 10000; private String scheme; private String host; private String port; private String path; private final Map<String, String> query = new LinkedHashMap<>(); /** * Set the scheme/protocol that will be used to build the final URL. * * @param scheme The scheme/protocol that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setScheme(String scheme) { if (scheme == null || scheme.isEmpty()) { this.scheme = null; } else { with(scheme, UrlTokenizerState.SCHEME); } return this; } /** * Get the scheme/protocol that has been assigned to this UrlBuilder. * * @return the scheme/protocol that has been assigned to this UrlBuilder. */ public String getScheme() { return scheme; } /** * Set the host that will be used to build the final URL. * * @param host The host that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setHost(String host) { if (host == null || host.isEmpty()) { this.host = null; } else { with(host, UrlTokenizerState.SCHEME_OR_HOST); } return this; } /** * Get the host that has been assigned to this UrlBuilder. * * @return the host that has been assigned to this UrlBuilder. */ public String getHost() { return host; } /** * Set the port that will be used to build the final URL. * * @param port The port that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPort(String port) { if (port == null || port.isEmpty()) { this.port = null; } else { with(port, UrlTokenizerState.PORT); } return this; } /** * Set the port that will be used to build the final URL. * * @param port The port that will be used to build the final URL. 
* @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPort(int port) { return setPort(Integer.toString(port)); } /** * Get the port that has been assigned to this UrlBuilder. * * @return the port that has been assigned to this UrlBuilder. */ public Integer getPort() { return port == null ? null : Integer.valueOf(port); } /** * Set the path that will be used to build the final URL. * * @param path The path that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPath(String path) { if (path == null || path.isEmpty()) { this.path = null; } else { with(path, UrlTokenizerState.PATH); } return this; } /** * Get the path that has been assigned to this UrlBuilder. * * @return the path that has been assigned to this UrlBuilder. */ public String getPath() { return path; } /** * Set the provided query parameter name and encoded value to query string for the final URL. * * @param queryParameterName The name of the query parameter. * @param queryParameterEncodedValue The encoded value of the query parameter. * @return The provided query parameter name and encoded value to query string for the final URL. */ public UrlBuilder setQueryParameter(String queryParameterName, String queryParameterEncodedValue) { query.put(queryParameterName, queryParameterEncodedValue); return this; } /** * Set the query that will be used to build the final URL. * * @param query The query that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setQuery(String query) { if (query == null || query.isEmpty()) { this.query.clear(); } else { with(query, UrlTokenizerState.QUERY); } return this; } /** * Get the query that has been assigned to this UrlBuilder. * * @return the query that has been assigned to this UrlBuilder. 
*/ public Map<String, String> getQuery() { return query; } /** * Returns the query string currently configured in this UrlBuilder instance. * @return A String containing the currently configured query string. */ public String getQueryString() { if (query.isEmpty()) { return ""; } StringBuilder queryBuilder = new StringBuilder("?"); for (Map.Entry<String, String> entry : query.entrySet()) { if (queryBuilder.length() > 1) { queryBuilder.append("&"); } queryBuilder.append(entry.getKey()); queryBuilder.append("="); queryBuilder.append(entry.getValue()); } return queryBuilder.toString(); } private UrlBuilder with(String text, UrlTokenizerState startState) { final UrlTokenizer tokenizer = new UrlTokenizer(text, startState); while (tokenizer.next()) { final UrlToken token = tokenizer.current(); final String tokenText = token.text(); final UrlTokenType tokenType = token.type(); switch (tokenType) { case SCHEME: scheme = emptyToNull(tokenText); break; case HOST: host = emptyToNull(tokenText); break; case PORT: port = emptyToNull(tokenText); break; case PATH: final String tokenPath = emptyToNull(tokenText); if (path == null || path.equals("/") || !tokenPath.equals("/")) { path = tokenPath; } break; case QUERY: String queryString = emptyToNull(tokenText); if (queryString != null) { if (queryString.startsWith("?")) { queryString = queryString.substring(1); } for (String entry : queryString.split("&")) { String[] nameValue = entry.split("="); if (nameValue.length == 2) { setQueryParameter(nameValue[0], nameValue[1]); } else { setQueryParameter(nameValue[0], ""); } } } break; default: break; } } return this; } /** * Get the URL that is being built. * * @return The URL that is being built. * @throws MalformedURLException if the URL is not fully formed. */ public URL toUrl() throws MalformedURLException { return new URL(toString()); } /** * Get the string representation of the URL that is being built. * * @return The string representation of the URL that is being built. 
*/ @Override public String toString() { final StringBuilder result = new StringBuilder(); final boolean isAbsolutePath = path != null && (path.startsWith("http: if (!isAbsolutePath) { if (scheme != null) { result.append(scheme); if (!scheme.endsWith(": result.append(": } } if (host != null) { result.append(host); } } if (port != null) { result.append(":"); result.append(port); } if (path != null) { if (result.length() != 0 && !path.startsWith("/")) { result.append('/'); } result.append(path); } result.append(getQueryString()); return result.toString(); } /** * Returns the map of parsed URLs and their {@link UrlBuilder UrlBuilders} * @return the map of parsed URLs and their {@link UrlBuilder UrlBuilders} */ static Map<String, UrlBuilder> getParsedUrls() { return PARSED_URLS; } /** * Parses the passed {@code url} string into a UrlBuilder. * * @param url The URL string to parse. * @return The UrlBuilder that was created from parsing the passed URL string. */ /** * Parse a UrlBuilder from the provided URL object. * * @param url The URL object to parse. * @return The UrlBuilder that was parsed from the URL object. */ public static UrlBuilder parse(URL url) { final UrlBuilder result = new UrlBuilder(); if (url != null) { final String protocol = url.getProtocol(); if (protocol != null && !protocol.isEmpty()) { result.setScheme(protocol); } final String host = url.getHost(); if (host != null && !host.isEmpty()) { result.setHost(host); } final int port = url.getPort(); if (port != -1) { result.setPort(port); } final String path = url.getPath(); if (path != null && !path.isEmpty()) { result.setPath(path); } final String query = url.getQuery(); if (query != null && !query.isEmpty()) { result.setQuery(query); } } return result; } private static String emptyToNull(String value) { return value == null || value.isEmpty() ? 
null : value; } private UrlBuilder copy() { UrlBuilder copy = new UrlBuilder(); copy.scheme = this.scheme; copy.host = this.host; copy.path = this.path; copy.port = this.port; copy.query.putAll(this.query); return copy; } }
class UrlBuilder { private static final Map<String, UrlBuilder> PARSED_URLS = new ConcurrentHashMap<>(); private static final int MAX_CACHE_SIZE = 10000; private String scheme; private String host; private String port; private String path; private final Map<String, String> query = new LinkedHashMap<>(); /** * Set the scheme/protocol that will be used to build the final URL. * * @param scheme The scheme/protocol that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setScheme(String scheme) { if (scheme == null || scheme.isEmpty()) { this.scheme = null; } else { with(scheme, UrlTokenizerState.SCHEME); } return this; } /** * Get the scheme/protocol that has been assigned to this UrlBuilder. * * @return the scheme/protocol that has been assigned to this UrlBuilder. */ public String getScheme() { return scheme; } /** * Set the host that will be used to build the final URL. * * @param host The host that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setHost(String host) { if (host == null || host.isEmpty()) { this.host = null; } else { with(host, UrlTokenizerState.SCHEME_OR_HOST); } return this; } /** * Get the host that has been assigned to this UrlBuilder. * * @return the host that has been assigned to this UrlBuilder. */ public String getHost() { return host; } /** * Set the port that will be used to build the final URL. * * @param port The port that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPort(String port) { if (port == null || port.isEmpty()) { this.port = null; } else { with(port, UrlTokenizerState.PORT); } return this; } /** * Set the port that will be used to build the final URL. * * @param port The port that will be used to build the final URL. 
* @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPort(int port) { return setPort(Integer.toString(port)); } /** * Get the port that has been assigned to this UrlBuilder. * * @return the port that has been assigned to this UrlBuilder. */ public Integer getPort() { return port == null ? null : Integer.valueOf(port); } /** * Set the path that will be used to build the final URL. * * @param path The path that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setPath(String path) { if (path == null || path.isEmpty()) { this.path = null; } else { with(path, UrlTokenizerState.PATH); } return this; } /** * Get the path that has been assigned to this UrlBuilder. * * @return the path that has been assigned to this UrlBuilder. */ public String getPath() { return path; } /** * Set the provided query parameter name and encoded value to query string for the final URL. * * @param queryParameterName The name of the query parameter. * @param queryParameterEncodedValue The encoded value of the query parameter. * @return The provided query parameter name and encoded value to query string for the final URL. */ public UrlBuilder setQueryParameter(String queryParameterName, String queryParameterEncodedValue) { query.put(queryParameterName, queryParameterEncodedValue); return this; } /** * Set the query that will be used to build the final URL. * * @param query The query that will be used to build the final URL. * @return This UrlBuilder so that multiple setters can be chained together. */ public UrlBuilder setQuery(String query) { if (query == null || query.isEmpty()) { this.query.clear(); } else { with(query, UrlTokenizerState.QUERY); } return this; } /** * Get the query that has been assigned to this UrlBuilder. * * @return the query that has been assigned to this UrlBuilder. 
*/ public Map<String, String> getQuery() { return query; } /** * Returns the query string currently configured in this UrlBuilder instance. * @return A String containing the currently configured query string. */ public String getQueryString() { if (query.isEmpty()) { return ""; } StringBuilder queryBuilder = new StringBuilder("?"); for (Map.Entry<String, String> entry : query.entrySet()) { if (queryBuilder.length() > 1) { queryBuilder.append("&"); } queryBuilder.append(entry.getKey()); queryBuilder.append("="); queryBuilder.append(entry.getValue()); } return queryBuilder.toString(); } private UrlBuilder with(String text, UrlTokenizerState startState) { final UrlTokenizer tokenizer = new UrlTokenizer(text, startState); while (tokenizer.next()) { final UrlToken token = tokenizer.current(); final String tokenText = token.text(); final UrlTokenType tokenType = token.type(); switch (tokenType) { case SCHEME: scheme = emptyToNull(tokenText); break; case HOST: host = emptyToNull(tokenText); break; case PORT: port = emptyToNull(tokenText); break; case PATH: final String tokenPath = emptyToNull(tokenText); if (path == null || path.equals("/") || !tokenPath.equals("/")) { path = tokenPath; } break; case QUERY: String queryString = emptyToNull(tokenText); if (queryString != null) { if (queryString.startsWith("?")) { queryString = queryString.substring(1); } for (String entry : queryString.split("&")) { String[] nameValue = entry.split("="); if (nameValue.length == 2) { setQueryParameter(nameValue[0], nameValue[1]); } else { setQueryParameter(nameValue[0], ""); } } } break; default: break; } } return this; } /** * Get the URL that is being built. * * @return The URL that is being built. * @throws MalformedURLException if the URL is not fully formed. */ public URL toUrl() throws MalformedURLException { return new URL(toString()); } /** * Get the string representation of the URL that is being built. * * @return The string representation of the URL that is being built. 
*/ @Override public String toString() { final StringBuilder result = new StringBuilder(); final boolean isAbsolutePath = path != null && (path.startsWith("http: if (!isAbsolutePath) { if (scheme != null) { result.append(scheme); if (!scheme.endsWith(": result.append(": } } if (host != null) { result.append(host); } } if (port != null) { result.append(":"); result.append(port); } if (path != null) { if (result.length() != 0 && !path.startsWith("/")) { result.append('/'); } result.append(path); } result.append(getQueryString()); return result.toString(); } /** * Returns the map of parsed URLs and their {@link UrlBuilder UrlBuilders} * @return the map of parsed URLs and their {@link UrlBuilder UrlBuilders} */ static Map<String, UrlBuilder> getParsedUrls() { return PARSED_URLS; } /** * Parses the passed {@code url} string into a UrlBuilder. * * @param url The URL string to parse. * @return The UrlBuilder that was created from parsing the passed URL string. */ /** * Parse a UrlBuilder from the provided URL object. * * @param url The URL object to parse. * @return The UrlBuilder that was parsed from the URL object. */ public static UrlBuilder parse(URL url) { final UrlBuilder result = new UrlBuilder(); if (url != null) { final String protocol = url.getProtocol(); if (protocol != null && !protocol.isEmpty()) { result.setScheme(protocol); } final String host = url.getHost(); if (host != null && !host.isEmpty()) { result.setHost(host); } final int port = url.getPort(); if (port != -1) { result.setPort(port); } final String path = url.getPath(); if (path != null && !path.isEmpty()) { result.setPath(path); } final String query = url.getQuery(); if (query != null && !query.isEmpty()) { result.setQuery(query); } } return result; } private static String emptyToNull(String value) { return value == null || value.isEmpty() ? 
null : value; } private UrlBuilder copy() { UrlBuilder copy = new UrlBuilder(); copy.scheme = this.scheme; copy.host = this.host; copy.path = this.path; copy.port = this.port; copy.query.putAll(this.query); return copy; } }