index
int64
0
0
repo_id
stringlengths
9
205
file_path
stringlengths
31
246
content
stringlengths
1
12.2M
__index_level_0__
int64
0
10k
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks/startup/MiraklDocSchemaStartupCheckPrinter.java
package com.paypal.observability.mirakldocschecks.startup; import com.paypal.observability.miraklschemadiffs.startup.AbstractMiraklSchemaStartupCheckPrinter; import com.paypal.observability.startupchecks.model.StartupCheckProvider; import org.springframework.stereotype.Component;

/**
 * Startup check printer for the Mirakl document schema check.
 * Reuses the generic schema-diff printing logic from the abstract parent and only
 * binds it to {@code MiraklDocSchemaStartupCheckProvider}.
 */
@Component
public class MiraklDocSchemaStartupCheckPrinter extends AbstractMiraklSchemaStartupCheckPrinter {

	// NOTE(review): the "unchecked" suppression is inherited from the parent's
	// generic signature — presumably required by the abstract class; confirm before removing.
	@SuppressWarnings("unchecked")
	@Override
	public Class<? extends StartupCheckProvider> getAssociatedStartupCheck() {
		return MiraklDocSchemaStartupCheckProvider.class;
	}

}
5,700
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks/startup/MiraklDocSchemaStartupCheckProvider.java
package com.paypal.observability.mirakldocschecks.startup; import com.paypal.observability.mirakldocschecks.services.MiraklDocSchemaCheckerService; import com.paypal.observability.miraklschemadiffs.startup.converters.MiraklSchemaStartupCheckConverter; import com.paypal.observability.miraklschemadiffs.model.report.MiraklSchemaDiffReport; import com.paypal.observability.startupchecks.model.StartupCheck; import com.paypal.observability.startupchecks.model.StartupCheckProvider; import org.springframework.stereotype.Component; @Component public class MiraklDocSchemaStartupCheckProvider implements StartupCheckProvider { public static final String STATUS_CHECK_DETAILS_DIFF_KEY = "diffs"; private final MiraklDocSchemaCheckerService miraklDocSchemaCheckerService; private final MiraklSchemaStartupCheckConverter miraklSchemaStartupCheckConverter; public MiraklDocSchemaStartupCheckProvider(final MiraklDocSchemaCheckerService miraklDocSchemaCheckerService, final MiraklSchemaStartupCheckConverter miraklSchemaStartupCheckConverter) { this.miraklDocSchemaCheckerService = miraklDocSchemaCheckerService; this.miraklSchemaStartupCheckConverter = miraklSchemaStartupCheckConverter; } @Override public StartupCheck check() { final MiraklSchemaDiffReport miraklDocSchemaDiffReport = miraklDocSchemaCheckerService.checkMiraklDocs(); return miraklSchemaStartupCheckConverter.startupCheckFrom(miraklDocSchemaDiffReport); } @Override public String getName() { return "miraklDocSchemaCheck"; } }
5,701
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks/services/MiraklDocSchemaCheckerServiceImpl.java
package com.paypal.observability.mirakldocschecks.services;

import com.mirakl.client.mmp.operator.domain.documents.MiraklDocumentsConfiguration;
import com.paypal.observability.mirakldocschecks.connectors.MiraklDocSchemaConnector;
import com.paypal.observability.mirakldocschecks.repository.MiraklDocSchemaRepository;
import com.paypal.observability.mirakldocschecks.repository.model.MiraklDocSchemaYaml;
import com.paypal.observability.mirakldocschecks.services.converters.MiraklDocSchemaConnectorConverter;
import com.paypal.observability.mirakldocschecks.services.converters.MiraklDocSchemaRepositoryConverter;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchema;
import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiff;
import com.paypal.observability.miraklschemadiffs.model.report.MiraklSchemaDiffReport;
import com.paypal.observability.miraklschemadiffs.service.MiraklSchemaComparator;
import com.paypal.observability.miraklschemadiffs.service.MiraklSchemaDiffReportBuilder;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.util.List;

/**
 * Checks that the document schema configured in Mirakl matches the schema this
 * connector expects (loaded from a local YAML definition). Only active when the
 * automated-KYC feature toggle is on; otherwise an empty diff report is returned.
 */
@Service
public class MiraklDocSchemaCheckerServiceImpl implements MiraklDocSchemaCheckerService {

	@Value("${hmc.toggle-features.automated-kyc}")
	protected boolean isKycAutomated;

	private final MiraklSchemaComparator miraklSchemaComparator;

	private final MiraklSchemaDiffReportBuilder miraklSchemaDiffReportBuilder;

	private final MiraklDocSchemaConnector miraklDocSchemaConnector;

	private final MiraklDocSchemaRepository miraklDocSchemaRepository;

	private final MiraklDocSchemaRepositoryConverter miraklDocSchemaRepositoryConverter;

	private final MiraklDocSchemaConnectorConverter miraklDocSchemaConnectorConverter;

	public MiraklDocSchemaCheckerServiceImpl(final MiraklSchemaComparator miraklSchemaComparator,
			final MiraklSchemaDiffReportBuilder miraklSchemaDiffReportBuilder,
			final MiraklDocSchemaConnector miraklDocSchemaConnector,
			final MiraklDocSchemaRepository miraklDocSchemaRepository,
			final MiraklDocSchemaRepositoryConverter miraklDocSchemaRepositoryConverter,
			final MiraklDocSchemaConnectorConverter miraklDocSchemaConnectorConverter) {
		this.miraklSchemaComparator = miraklSchemaComparator;
		this.miraklSchemaDiffReportBuilder = miraklSchemaDiffReportBuilder;
		this.miraklDocSchemaConnector = miraklDocSchemaConnector;
		this.miraklDocSchemaRepository = miraklDocSchemaRepository;
		this.miraklDocSchemaRepositoryConverter = miraklDocSchemaRepositoryConverter;
		this.miraklDocSchemaConnectorConverter = miraklDocSchemaConnectorConverter;
	}

	@Override
	public MiraklSchemaDiffReport checkMiraklDocs() {
		if (!isKycAutomated) {
			// Manual KYC: document schema is not used, report no differences.
			return new MiraklSchemaDiffReport();
		}
		return getMiraklDocSchemaDiffReport();
	}

	/**
	 * Loads the expected schema from the local repository, fetches the actual one
	 * from Mirakl, compares them and builds the diff report.
	 */
	private MiraklSchemaDiffReport getMiraklDocSchemaDiffReport() {
		final MiraklSchemaYamlHolder expected = loadExpectedSchema();
		final List<MiraklDocumentsConfiguration> remoteDocuments = miraklDocSchemaConnector
				.getShopDocumentConfigurations();
		final MiraklSchema actualSchema = miraklDocSchemaConnectorConverter.from(remoteDocuments);
		final MiraklSchemaDiff schemaDiff = miraklSchemaComparator.compareSchemas(expected.schema, actualSchema);
		return miraklSchemaDiffReportBuilder.getSchemaReport(schemaDiff);
	}

	// Loads and converts the locally-defined expected document schema.
	private MiraklSchemaYamlHolder loadExpectedSchema() {
		final MiraklDocSchemaYaml schemaYaml = miraklDocSchemaRepository.loadCustomFieldsSchema();
		return new MiraklSchemaYamlHolder(miraklDocSchemaRepositoryConverter.from(schemaYaml));
	}

	// Tiny wrapper keeping the expected schema together with a descriptive name.
	private static final class MiraklSchemaYamlHolder {

		private final MiraklSchema schema;

		private MiraklSchemaYamlHolder(final MiraklSchema schema) {
			this.schema = schema;
		}

	}

}
5,702
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks/services/MiraklDocSchemaCheckerService.java
package com.paypal.observability.mirakldocschecks.services; import com.paypal.observability.miraklschemadiffs.model.report.MiraklSchemaDiffReport;

/**
 * Service that compares the Mirakl document schema against the locally expected
 * definition.
 */
public interface MiraklDocSchemaCheckerService {

	/**
	 * Runs the document schema comparison.
	 * @return a report describing the differences found (empty report if none).
	 */
	MiraklSchemaDiffReport checkMiraklDocs();

}
5,703
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks/services
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks/services/converters/MiraklDocSchemaConnectorConverter.java
package com.paypal.observability.mirakldocschecks.services.converters;

import com.mirakl.client.mmp.operator.domain.documents.MiraklDocumentsConfiguration;
import com.paypal.observability.mirakldocschecks.model.MiraklDoc;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchema;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem;
import org.mapstruct.Mapper;

import java.util.List;
import java.util.stream.Collectors;

/**
 * MapStruct converter from the Mirakl SDK document configuration objects to the
 * internal {@link MiraklSchema} representation used by the schema-diff engine.
 */
@Mapper(componentModel = "spring")
public interface MiraklDocSchemaConnectorConverter {

	MiraklDoc from(MiraklDocumentsConfiguration miraklDocumentsConfiguration);

	/**
	 * Converts the full list of Mirakl document configurations into a schema whose
	 * item type is {@link MiraklDoc}.
	 */
	default MiraklSchema from(final List<MiraklDocumentsConfiguration> miraklDocumentsConfigurations) {
		//@formatter:off
		final List<MiraklSchemaItem> items = miraklDocumentsConfigurations.stream()
				.map(configuration -> (MiraklSchemaItem) from(configuration))
				.collect(Collectors.toList());
		//@formatter:on
		return new MiraklSchema(items, MiraklDoc.class);
	}

}
5,704
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks/services
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/mirakldocschecks/services/converters/MiraklDocSchemaRepositoryConverter.java
package com.paypal.observability.mirakldocschecks.services.converters;

import com.paypal.observability.mirakldocschecks.model.MiraklDoc;
import com.paypal.observability.mirakldocschecks.repository.model.MiraklDocSchemaYaml;
import com.paypal.observability.mirakldocschecks.repository.model.MiraklDocYaml;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchema;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem;
import org.mapstruct.Mapper;

import java.util.List;
import java.util.stream.Collectors;

/**
 * MapStruct converter from the YAML document schema definition (the locally
 * expected schema) to the internal {@link MiraklSchema} representation.
 */
@Mapper(componentModel = "spring")
public interface MiraklDocSchemaRepositoryConverter {

	List<MiraklDoc> from(List<MiraklDocYaml> miraklDocYamls);

	MiraklDoc from(MiraklDocYaml miraklDocYaml);

	/**
	 * Converts the whole YAML schema into a {@link MiraklSchema} whose item type is
	 * {@link MiraklDoc}.
	 */
	default MiraklSchema from(final MiraklDocSchemaYaml miraklDocSchemaYaml) {
		final List<MiraklDoc> documents = from(miraklDocSchemaYaml.getDocuments());
		//@formatter:off
		final List<MiraklSchemaItem> items = documents.stream()
				.map(MiraklSchemaItem.class::cast)
				.collect(Collectors.toList());
		//@formatter:on
		return new MiraklSchema(items, MiraklDoc.class);
	}

}
5,705
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext/model/LoggingTransaction.java
package com.paypal.observability.loggingcontext.model; import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * A business transaction attached to the logging context, identified by id, type
 * and subtype, and serializable to JSON for structured logging.
 */
public interface LoggingTransaction {

	String getId();

	String getType();

	String getSubtype();

	/**
	 * @return this transaction rendered as a Jackson JSON object node.
	 */
	ObjectNode toJson();

}
5,706
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext/service/TransactionContextRunnable.java
package com.paypal.observability.loggingcontext.service;

/**
 * A unit of work to be executed inside a logging transaction context.
 * Declared to throw {@link Throwable} so callers can wrap arbitrary code; the
 * Sonar rule forbidding generic throwables is intentionally suppressed.
 */
@FunctionalInterface
public interface TransactionContextRunnable {

	@SuppressWarnings("java:S112")
	void run() throws Throwable;

}
5,707
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext/service/LoggingContextHolder.java
package com.paypal.observability.loggingcontext.service;

import com.paypal.observability.loggingcontext.model.LoggingTransaction;
import org.springframework.core.NamedThreadLocal;
import org.springframework.stereotype.Component;

import java.util.Optional;

/**
 * Thread-local holder for the current {@link LoggingTransaction}. Each thread sees
 * its own transaction; callers must remove it when the transaction ends to avoid
 * leaking state across pooled threads.
 */
@Component
public class LoggingContextHolder {

	private final ThreadLocal<LoggingTransaction> businessTransactionInfoHolder = new NamedThreadLocal<>(
			"loggingTransactionContext");

	/**
	 * @return the transaction bound to the current thread, if any.
	 */
	public Optional<LoggingTransaction> getCurrentBusinessTransaction() {
		final LoggingTransaction current = businessTransactionInfoHolder.get();
		return Optional.ofNullable(current);
	}

	/**
	 * Binds (or replaces) the transaction on the current thread.
	 */
	public void refreshBusinessTransaction(final LoggingTransaction loggingTransaction) {
		businessTransactionInfoHolder.set(loggingTransaction);
	}

	/**
	 * Unbinds the transaction from the current thread.
	 */
	public void closeBusinessTransaction() {
		businessTransactionInfoHolder.remove();
	}

	protected ThreadLocal<LoggingTransaction> getBusinessTransactionInfoHolder() {
		return businessTransactionInfoHolder;
	}

}
5,708
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext/service/LoggingContextServiceImpl.java
package com.paypal.observability.loggingcontext.service;

import com.paypal.observability.loggingcontext.model.LoggingTransaction;
import com.paypal.observability.loggingcontext.service.serializer.LoggingTransactionSerializer;
import org.slf4j.MDC;
import org.springframework.stereotype.Component;

import java.util.Optional;

/**
 * Default {@link LoggingContextService}: keeps the current logging transaction in
 * a {@link LoggingContextHolder} and mirrors its serialized form into the SLF4J
 * {@link MDC} under {@value #KEY_BUSINESS_TRANSACTION} so it appears in every log
 * line emitted by the current thread.
 */
@Component
public class LoggingContextServiceImpl implements LoggingContextService {

	public static final String KEY_BUSINESS_TRANSACTION = "businessTransaction";

	private final LoggingContextHolder loggingTransactionContext;

	private final LoggingTransactionSerializer loggingTransactionSerializer;

	public LoggingContextServiceImpl(final LoggingContextHolder loggingTransactionContext,
			final LoggingTransactionSerializer loggingTransactionSerializer) {
		this.loggingTransactionContext = loggingTransactionContext;
		this.loggingTransactionSerializer = loggingTransactionSerializer;
	}

	@Override
	public Optional<LoggingTransaction> getCurrentLoggingTransaction() {
		return loggingTransactionContext.getCurrentBusinessTransaction();
	}

	@Override
	public void updateLoggingTransaction(final LoggingTransaction loggingTransaction) {
		loggingTransactionContext.refreshBusinessTransaction(loggingTransaction);
		MDC.put(KEY_BUSINESS_TRANSACTION, loggingTransactionSerializer.serialize(loggingTransaction));
	}

	@Override
	public void closeLoggingTransaction() {
		loggingTransactionContext.closeBusinessTransaction();
		// NOTE: clears the whole MDC, not just our key — any other MDC entries set
		// on this thread are dropped as well.
		MDC.clear();
	}

	@Override
	public void executeInLoggingContext(final TransactionContextRunnable runnable,
			final LoggingTransaction loggingTransaction) throws Throwable {
		// Reuse an already-open transaction if present; otherwise install the one given.
		final LoggingTransaction currentTransaction = loggingTransactionContext.getCurrentBusinessTransaction()
				.orElse(loggingTransaction);
		updateLoggingTransaction(currentTransaction);
		try {
			runnable.run();
		}
		finally {
			// Bug fix: always close the context, even when the runnable throws.
			// Previously an exception skipped closeLoggingTransaction(), leaking the
			// transaction and its MDC entry into the next task run on this (likely
			// pooled) thread.
			closeLoggingTransaction();
		}
	}

}
5,709
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext/service/LoggingContextService.java
package com.paypal.observability.loggingcontext.service; import com.paypal.observability.loggingcontext.model.LoggingTransaction; import java.util.Optional;

/**
 * Manages the logging transaction bound to the current thread and exposed to the
 * logging framework.
 */
public interface LoggingContextService {

	/**
	 * @return the transaction currently bound to this thread, if any.
	 */
	Optional<LoggingTransaction> getCurrentLoggingTransaction();

	/**
	 * Binds (or replaces) the given transaction on this thread.
	 */
	void updateLoggingTransaction(LoggingTransaction loggingTransaction);

	/**
	 * Unbinds the current transaction from this thread.
	 */
	void closeLoggingTransaction();

	/**
	 * Runs the given work inside the given logging transaction context.
	 * Declared to throw {@link Throwable} to accommodate arbitrary wrapped code.
	 */
	@SuppressWarnings("java:S112")
	void executeInLoggingContext(TransactionContextRunnable runnable, LoggingTransaction loggingTransaction)
			throws Throwable;

}
5,710
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext/service
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext/service/serializer/LoggingTransactionSerializer.java
package com.paypal.observability.loggingcontext.service.serializer; import com.paypal.observability.loggingcontext.model.LoggingTransaction;

/**
 * Serializes a {@link LoggingTransaction} into the string form placed in the
 * logging context (MDC).
 */
public interface LoggingTransactionSerializer {

	String serialize(LoggingTransaction loggingTransaction);

}
5,711
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext/service
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/loggingcontext/service/serializer/DefaultLoggingTransactionSerializer.java
package com.paypal.observability.loggingcontext.service.serializer;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.paypal.observability.loggingcontext.model.LoggingTransaction;
import org.springframework.stereotype.Component;

/**
 * Default serializer: renders the transaction's JSON node as a compact string.
 * Serialization is best-effort — on failure an empty string is returned so that
 * logging-context bookkeeping never breaks the business operation.
 */
@Component
public class DefaultLoggingTransactionSerializer implements LoggingTransactionSerializer {

	// ObjectMapper is thread-safe once configured; reuse a single cached instance
	// instead of allocating a new mapper on every serialize() call.
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	@Override
	public String serialize(final LoggingTransaction loggingTransaction) {
		try {
			return OBJECT_MAPPER.writeValueAsString(loggingTransaction.toJson());
		}
		catch (final JsonProcessingException e) {
			// Deliberate swallow: a broken MDC value must not fail the caller.
			return "";
		}
	}

}
5,712
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/batchjoblogging
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/batchjoblogging/model/BatchJobLoggingTransaction.java
package com.paypal.observability.batchjoblogging.model; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import com.paypal.observability.loggingcontext.model.LoggingTransaction; import lombok.Data; import lombok.NoArgsConstructor; @Data @NoArgsConstructor public class BatchJobLoggingTransaction implements LoggingTransaction { public static final String TRANSACTION_TYPE = "BatchJob"; private String id; private String subtype; private String itemType; private String itemId; public String getType() { return TRANSACTION_TYPE; } public BatchJobLoggingTransaction(final String id, final String itemType) { this.id = id; this.itemType = itemType; } @Override public ObjectNode toJson() { final ObjectMapper objectMapper = new ObjectMapper(); objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); return objectMapper.valueToTree(this); } }
5,713
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/batchjoblogging
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/batchjoblogging/service/BatchJobLoggingContextServiceImpl.java
package com.paypal.observability.batchjoblogging.service; import com.paypal.jobsystem.batchjob.model.BatchJobContext; import com.paypal.jobsystem.batchjob.model.BatchJobItem; import com.paypal.observability.batchjoblogging.model.BatchJobLoggingTransaction; import com.paypal.observability.loggingcontext.model.LoggingTransaction; import com.paypal.observability.loggingcontext.service.LoggingContextService; import org.springframework.stereotype.Component; @Component public class BatchJobLoggingContextServiceImpl implements BatchJobLoggingContextService { private final LoggingContextService loggingContextService; public BatchJobLoggingContextServiceImpl(final LoggingContextService loggingContextService) { this.loggingContextService = loggingContextService; } @Override public void refreshBatchJobInformation(final BatchJobContext batchJobContext) { final BatchJobLoggingTransaction businessTransactionInfo = getOrCreateBatchJobBusinessTransactionInfo( batchJobContext); businessTransactionInfo.setItemType(null); businessTransactionInfo.setItemId(null); businessTransactionInfo.setSubtype(batchJobContext.getJobName()); loggingContextService.updateLoggingTransaction(businessTransactionInfo); } @Override public void refreshBatchJobInformation(final BatchJobContext batchJobContext, final BatchJobItem<?> item) { final BatchJobLoggingTransaction businessTransactionInfo = getOrCreateBatchJobBusinessTransactionInfo( batchJobContext); businessTransactionInfo.setItemType(item.getItemType()); businessTransactionInfo.setItemId(item.getItemId()); loggingContextService.updateLoggingTransaction(businessTransactionInfo); } @Override public void removeBatchJobItemInformation() { final BatchJobLoggingTransaction businessTransactionInfo = getOrCreateBatchJobBusinessTransactionInfo(); if (businessTransactionInfo != null) { businessTransactionInfo.setItemType(null); businessTransactionInfo.setItemId(null); loggingContextService.updateLoggingTransaction(businessTransactionInfo); } } @Override 
public void removeBatchJobInformation() { loggingContextService.closeLoggingTransaction(); } private BatchJobLoggingTransaction getOrCreateBatchJobBusinessTransactionInfo( final BatchJobContext batchJobContext) { //@formatter:off return (BatchJobLoggingTransaction) loggingContextService.getCurrentLoggingTransaction() .orElseGet(() -> createNewJobTransaction(batchJobContext)); //@formatter:on } private BatchJobLoggingTransaction createNewJobTransaction(final BatchJobContext batchJobContext) { final BatchJobLoggingTransaction newTransaction = new BatchJobLoggingTransaction(batchJobContext.getJobUuid(), batchJobContext.getJobName()); loggingContextService.updateLoggingTransaction(newTransaction); return newTransaction; } private BatchJobLoggingTransaction getOrCreateBatchJobBusinessTransactionInfo() { final LoggingTransaction loggingTransaction = loggingContextService.getCurrentLoggingTransaction().orElse(null); return loggingTransaction != null ? (BatchJobLoggingTransaction) loggingTransaction : null; } }
5,714
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/batchjoblogging
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/batchjoblogging/service/BatchJobLoggingContextService.java
package com.paypal.observability.batchjoblogging.service; import com.paypal.jobsystem.batchjob.model.BatchJobContext; import com.paypal.jobsystem.batchjob.model.BatchJobItem;

/**
 * Manages the batch-job logging context: job-level and item-level information
 * attached to log lines while a batch job runs.
 */
public interface BatchJobLoggingContextService {

	/**
	 * Refreshes job-level logging information from the given context.
	 */
	void refreshBatchJobInformation(BatchJobContext batchJobContext);

	/**
	 * Refreshes logging information for the currently-processed item.
	 */
	void refreshBatchJobInformation(BatchJobContext batchJobContext, BatchJobItem<?> item);

	/**
	 * Clears item-level information once an item finishes processing.
	 */
	void removeBatchJobItemInformation();

	/**
	 * Closes the whole job logging transaction when the job ends.
	 */
	void removeBatchJobInformation();

}
5,715
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/batchjoblogging
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/batchjoblogging/listeners/BatchJobLoggingListener.java
package com.paypal.observability.batchjoblogging.listeners; import com.paypal.jobsystem.batchjob.model.BatchJobContext; import com.paypal.jobsystem.batchjob.model.BatchJobItem; import com.paypal.jobsystem.batchjob.model.BatchJobItemValidationResult; import com.paypal.jobsystem.batchjob.support.AbstractBatchJobProcessingListener; import com.paypal.observability.batchjoblogging.service.BatchJobLoggingContextService; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; import java.util.Collection;

/**
 * Logging batch job item processing listener.
 *
 * Emits progress/outcome log lines for every batch job lifecycle event, and keeps
 * the batch-job logging context in sync (job info on start, item info around each
 * processed item, cleared on job completion/failure).
 */
@Slf4j
@Component
public class BatchJobLoggingListener extends AbstractBatchJobProcessingListener {

	private final BatchJobLoggingContextService batchJobLoggingContextService;

	public BatchJobLoggingListener(final BatchJobLoggingContextService batchJobLoggingContextService) {
		this.batchJobLoggingContextService = batchJobLoggingContextService;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void beforeItemExtraction(final BatchJobContext ctx) {
		log.info("Starting extraction of items to be processed");
	}

	/**
	 * {@inheritDoc}
	 */
	// S3655 suppressed: isPresent() is guaranteed by the branch condition before get().
	@SuppressWarnings("java:S3655")
	@Override
	public void onItemExtractionSuccessful(final BatchJobContext ctx,
			final Collection<BatchJobItem<?>> extractedItems) {
		// Three cases: partial extraction with unknown loss count, partial extraction
		// with a known loss count, and full extraction.
		if (ctx.isPartialItemExtraction() && ctx.getNumberOfItemsNotSuccessfullyExtracted().isEmpty()) {
			log.warn(
					"Some of the items to be processed couldn't be retrieved. "
							+ "Only the following number of items were retrieved and are going to be processed {}",
					ctx.getNumberOfItemsToBeProcessed());
		}
		else if (ctx.isPartialItemExtraction() && ctx.getNumberOfItemsNotSuccessfullyExtracted().isPresent()) {
			log.warn(
					"Retrieved the following number of items to be processed: {}. "
							+ "Additionally there are {} items that couldn't be retrieved and can't be processed",
					ctx.getNumberOfItemsToBeProcessed(), ctx.getNumberOfItemsNotSuccessfullyExtracted().get());
		}
		else {
			log.info("Retrieved the following number of items to be processed: {}",
					ctx.getNumberOfItemsToBeProcessed());
		}
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void onItemExtractionFailure(final BatchJobContext ctx, final Exception e) {
		log.error("Failed retrieval of items", e);
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void beforeProcessingItem(final BatchJobContext ctx, final BatchJobItem<?> item) {
		// Attach item-level info to the logging context before any processing logs.
		batchJobLoggingContextService.refreshBatchJobInformation(ctx, item);
		log.info("Processing item of type {} with id: {}", item.getItemType(), item.getItemId());
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void onItemProcessingFailure(final BatchJobContext ctx, final BatchJobItem<?> item, final Exception e) {
		log.error("Failed processing item of type %s with id: %s".formatted(item.getItemType(), item.getItemId()), e);
		logBatchProgress(ctx);
		batchJobLoggingContextService.removeBatchJobItemInformation();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void onItemProcessingSuccess(final BatchJobContext ctx, final BatchJobItem<?> item) {
		log.info("Processed successfully item of type {} with id: {}", item.getItemType(), item.getItemId());
		logBatchProgress(ctx);
		batchJobLoggingContextService.removeBatchJobItemInformation();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void onItemProcessingValidationFailure(final BatchJobContext ctx, final BatchJobItem<?> item,
			final BatchJobItemValidationResult validationResult) {
		log.warn("Validation of item of type {} with id: {} has failed with the following message: {}",
				item.getItemType(), item.getItemId(), validationResult.getReason().orElse(""));
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void onBatchJobStarted(final BatchJobContext ctx) {
		batchJobLoggingContextService.refreshBatchJobInformation(ctx);
		log.info("Starting processing of job");
	}

	@Override
	public void onBatchJobFinished(final BatchJobContext ctx) {
		log.info("Finished processing of job");
		batchJobLoggingContextService.removeBatchJobInformation();
	}

	@Override
	public void onBatchJobFailure(final BatchJobContext ctx, final Exception e) {
		log.error("Job failed", e);
		batchJobLoggingContextService.removeBatchJobInformation();
	}

	// S3655 suppressed: isPresent() is guaranteed by the branch condition before get().
	@SuppressWarnings("java:S3655")
	private void logBatchProgress(final BatchJobContext ctx) {
		log.info("{} items processed successfully. {} items failed. {} items remaining",
				ctx.getNumberOfItemsProcessed(), ctx.getNumberOfItemsFailed(), ctx.getNumberOfItemsRemaining());
		if (ctx.getNumberOfItemsRemaining() == 0 && ctx.isPartialItemExtraction()
				&& ctx.getNumberOfItemsNotSuccessfullyExtracted().isEmpty()) {
			log.warn("Not all items were able to be retrieved during the extraction phase,"
					+ " so there are additional items that couldn't be processed since they weren't retrieved.");
		}
		else if (ctx.getNumberOfItemsRemaining() == 0 && ctx.isPartialItemExtraction()
				&& ctx.getNumberOfItemsNotSuccessfullyExtracted().isPresent()) {
			log.warn("Additionally there were {} items that couldn't be retrieved during the extraction phase,"
					+ " so they were not processed.", ctx.getNumberOfItemsNotSuccessfullyExtracted().get());
		}
	}

}
5,716
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/connectors/HyperwalletAPIHealthCheckConnector.java
package com.paypal.observability.hyperwalletapichecks.connectors; import com.hyperwallet.clientsdk.model.HyperwalletProgram;

/**
 * Connector used by the Hyperwallet health check to probe the API by fetching a
 * program.
 */
public interface HyperwalletAPIHealthCheckConnector {

	HyperwalletProgram getProgram();

}
5,717
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/connectors/HyperwalletAPIHealthCheckConnectorImpl.java
package com.paypal.observability.hyperwalletapichecks.connectors; import com.hyperwallet.clientsdk.model.HyperwalletProgram; import com.paypal.infrastructure.hyperwallet.services.UserHyperwalletSDKService; import org.springframework.stereotype.Component; @Component public class HyperwalletAPIHealthCheckConnectorImpl implements HyperwalletAPIHealthCheckConnector { private final UserHyperwalletSDKService userHyperwalletSDKService; public HyperwalletAPIHealthCheckConnectorImpl(final UserHyperwalletSDKService userHyperwalletSDKService) { this.userHyperwalletSDKService = userHyperwalletSDKService; } @Override public HyperwalletProgram getProgram() { return userHyperwalletSDKService.getRootProgram(); } }
5,718
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/model/HyperwalletAPICheck.java
package com.paypal.observability.hyperwalletapichecks.model;

import lombok.Builder;
import lombok.Value;

/**
 * Result of a Hyperwallet API health check: the resolved status, the checked
 * endpoint location and, when the check failed, an error description.
 */
@Builder
@Value
public class HyperwalletAPICheck {

	// Defaults to DOWN so a check that never completes is reported as unhealthy.
	@Builder.Default
	private HyperwalletAPICheckStatus hyperwalletAPICheckStatus = HyperwalletAPICheckStatus.DOWN;

	private String error;

	private String location;

	/**
	 * @return {@code true} when the status is anything other than {@code DOWN}.
	 */
	public boolean isHealthy() {
		// Idiom fix: enums are compared by identity. Unlike the previous
		// equals()-based check, this also cannot throw an NPE if the status was
		// explicitly built as null (null is treated as not-DOWN, matching the
		// "anything other than DOWN" contract).
		return hyperwalletAPICheckStatus != HyperwalletAPICheckStatus.DOWN;
	}

}
5,719
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/model/HyperwalletAPICheckStatus.java
package com.paypal.observability.hyperwalletapichecks.model;

/**
 * Outcome of a Hyperwallet API health check.
 */
public enum HyperwalletAPICheckStatus {

	UP, DOWN

}
5,720
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/actuator/HyperwalletAPIHealthCheckHealthIndicator.java
package com.paypal.observability.hyperwalletapichecks.actuator; import com.paypal.observability.hyperwalletapichecks.actuator.converters.HyperwalletAPIHealthCheckActuatorConverter; import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheck; import com.paypal.observability.hyperwalletapichecks.services.HyperwalletHealthCheckService; import org.springframework.boot.actuate.health.Health; import org.springframework.boot.actuate.health.HealthIndicator; import org.springframework.stereotype.Component; @Component public class HyperwalletAPIHealthCheckHealthIndicator implements HealthIndicator { private final HyperwalletHealthCheckService hyperwalletHealthCheckService; private final HyperwalletAPIHealthCheckActuatorConverter hyperwalletAPIHealthCheckActuatorConverter; public HyperwalletAPIHealthCheckHealthIndicator(final HyperwalletHealthCheckService hyperwalletHealthCheckService, final HyperwalletAPIHealthCheckActuatorConverter hyperwalletAPIHealthCheckActuatorConverter) { this.hyperwalletHealthCheckService = hyperwalletHealthCheckService; this.hyperwalletAPIHealthCheckActuatorConverter = hyperwalletAPIHealthCheckActuatorConverter; } @Override public Health health() { final HyperwalletAPICheck hyperwalletAPICheck = hyperwalletHealthCheckService.check(); return hyperwalletAPIHealthCheckActuatorConverter.from(hyperwalletAPICheck); } }
5,721
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/actuator
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/actuator/converters/HyperwalletAPIHealthCheckActuatorConverter.java
package com.paypal.observability.hyperwalletapichecks.actuator.converters; import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheck; import org.springframework.boot.actuate.health.Health;

/**
 * Converts a {@link HyperwalletAPICheck} into a Spring Actuator {@link Health}
 * result.
 */
public interface HyperwalletAPIHealthCheckActuatorConverter {

	Health from(HyperwalletAPICheck hyperwalletAPICheck);

}
5,722
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/actuator
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/actuator/converters/HyperwalletAPIHealthCheckActuatorConverterImpl.java
package com.paypal.observability.hyperwalletapichecks.actuator.converters; import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheck; import org.springframework.boot.actuate.health.Health; import org.springframework.stereotype.Component; @Component public class HyperwalletAPIHealthCheckActuatorConverterImpl implements HyperwalletAPIHealthCheckActuatorConverter { @Override public Health from(final HyperwalletAPICheck hyperwalletAPICheck) { //@formatter:off if (hyperwalletAPICheck.isHealthy()) { return Health.up() .withDetail("location", hyperwalletAPICheck.getLocation()) .build(); } else { return Health.down() .withDetail("error", hyperwalletAPICheck.getError()) .withDetail("location", hyperwalletAPICheck.getLocation()) .build(); } //@formatter:on } }
5,723
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/startup/HyperwalletHealthCheckStartupProvider.java
package com.paypal.observability.hyperwalletapichecks.startup; import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheck; import com.paypal.observability.hyperwalletapichecks.services.HyperwalletHealthCheckService; import com.paypal.observability.hyperwalletapichecks.startup.converters.HyperwalletHealthStartupCheckConverter; import com.paypal.observability.startupchecks.model.StartupCheck; import com.paypal.observability.startupchecks.model.StartupCheckProvider; import org.springframework.stereotype.Component; @Component public class HyperwalletHealthCheckStartupProvider implements StartupCheckProvider { private final HyperwalletHealthCheckService hyperwalletHealthCheckService; private final HyperwalletHealthStartupCheckConverter miraklHealthCheckConverter; public HyperwalletHealthCheckStartupProvider(final HyperwalletHealthCheckService hyperwalletHealthCheckService, final HyperwalletHealthStartupCheckConverter miraklHealthCheckConverter) { this.hyperwalletHealthCheckService = hyperwalletHealthCheckService; this.miraklHealthCheckConverter = miraklHealthCheckConverter; } @Override public StartupCheck check() { final HyperwalletAPICheck hyperwalletAPICheck = hyperwalletHealthCheckService.check(); return miraklHealthCheckConverter.from(hyperwalletAPICheck); } @Override public String getName() { return "hyperwalletHealthCheck"; } }
5,724
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/startup/HyperwalletHealthCheckStartupCheckPrinter.java
package com.paypal.observability.hyperwalletapichecks.startup;

import com.paypal.observability.startupchecks.model.StartupCheckPrinter;
import com.paypal.observability.startupchecks.model.StartupCheckProvider;
import org.springframework.stereotype.Component;

/**
 * Printer bound to {@link HyperwalletHealthCheckStartupProvider}: identifies which
 * startup check's results this printer renders.
 */
@Component
public class HyperwalletHealthCheckStartupCheckPrinter implements StartupCheckPrinter {

	/**
	 * @return the provider class whose results this printer handles
	 */
	@Override
	public Class<? extends StartupCheckProvider> getAssociatedStartupCheck() {
		return HyperwalletHealthCheckStartupProvider.class;
	}

}
5,725
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/startup
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/startup/converters/HyperwalletHealthStartupCheckConverterImpl.java
package com.paypal.observability.hyperwalletapichecks.startup.converters; import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheck; import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheckStatus; import com.paypal.observability.startupchecks.model.StartupCheck; import com.paypal.observability.startupchecks.model.StartupCheckStatus; import org.springframework.stereotype.Component; import java.util.Optional; @Component public class HyperwalletHealthStartupCheckConverterImpl implements HyperwalletHealthStartupCheckConverter { @Override public StartupCheck from(final HyperwalletAPICheck hyperwalletAPICheck) { //@formatter:off return StartupCheck.builder() .status(isHealthy(hyperwalletAPICheck) ? StartupCheckStatus.READY : StartupCheckStatus.READY_WITH_WARNINGS) .statusMessage(isHealthy(hyperwalletAPICheck) ? Optional.of("Hyperwallet API is accessible") : Optional.of("Hyperwallet API is not accessible")) .detail("status", isHealthy(hyperwalletAPICheck) ? "UP" : "DOWN") .detail("location", hyperwalletAPICheck.getLocation()) .detail("error", hyperwalletAPICheck.getError()) .build(); //@formatter:on } private boolean isHealthy(final HyperwalletAPICheck hyperwalletAPICheck) { return hyperwalletAPICheck.getHyperwalletAPICheckStatus().equals(HyperwalletAPICheckStatus.UP); } }
5,726
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/startup
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/startup/converters/HyperwalletHealthStartupCheckConverter.java
package com.paypal.observability.hyperwalletapichecks.startup.converters;

import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheck;
import com.paypal.observability.startupchecks.model.StartupCheck;

/**
 * Converts a {@link HyperwalletAPICheck} result into a {@link StartupCheck}.
 */
public interface HyperwalletHealthStartupCheckConverter {

	/**
	 * Maps the given API check result to a startup check.
	 * @param hyperwalletAPICheck the check result to convert
	 * @return the equivalent {@link StartupCheck}
	 */
	StartupCheck from(HyperwalletAPICheck hyperwalletAPICheck);

}
5,727
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/services/HyperwalletHealthCheckServiceImpl.java
package com.paypal.observability.hyperwalletapichecks.services; import com.hyperwallet.clientsdk.model.HyperwalletProgram; import com.paypal.observability.hyperwalletapichecks.connectors.HyperwalletAPIHealthCheckConnector; import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheck; import com.paypal.observability.hyperwalletapichecks.services.converters.HyperwalletAPIHealthCheckConnectorConverter; import org.springframework.stereotype.Component; @Component public class HyperwalletHealthCheckServiceImpl implements HyperwalletHealthCheckService { private final HyperwalletAPIHealthCheckConnector hyperwalletAPIHealthCheckConnector; private final HyperwalletAPIHealthCheckConnectorConverter hyperwalletAPIHealthCheckConnectorConverter; public HyperwalletHealthCheckServiceImpl( final HyperwalletAPIHealthCheckConnector hyperwalletAPIHealthCheckConnector, final HyperwalletAPIHealthCheckConnectorConverter hyperwalletAPIHealthCheckConnectorConverter) { this.hyperwalletAPIHealthCheckConnector = hyperwalletAPIHealthCheckConnector; this.hyperwalletAPIHealthCheckConnectorConverter = hyperwalletAPIHealthCheckConnectorConverter; } @Override public HyperwalletAPICheck check() { try { final HyperwalletProgram hyperwalletProgram = hyperwalletAPIHealthCheckConnector.getProgram(); return hyperwalletAPIHealthCheckConnectorConverter.from(hyperwalletProgram); } catch (final Exception e) { return hyperwalletAPIHealthCheckConnectorConverter.from(e); } } }
5,728
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/services/HyperwalletHealthCheckService.java
package com.paypal.observability.hyperwalletapichecks.services;

import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheck;

/**
 * Service that checks whether the Hyperwallet API is reachable and healthy.
 */
public interface HyperwalletHealthCheckService {

	/**
	 * Probes the Hyperwallet API.
	 * @return the outcome of the probe
	 */
	HyperwalletAPICheck check();

}
5,729
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/services
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/services/converters/HyperwalletAPIHealthCheckConnectorConverterImpl.java
package com.paypal.observability.hyperwalletapichecks.services.converters; import com.hyperwallet.clientsdk.model.HyperwalletProgram; import com.paypal.infrastructure.hyperwallet.configuration.HyperwalletConnectionConfiguration; import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheck; import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheckStatus; import org.springframework.stereotype.Component; @Component public class HyperwalletAPIHealthCheckConnectorConverterImpl implements HyperwalletAPIHealthCheckConnectorConverter { private final String hyperwalletEnvironment; public HyperwalletAPIHealthCheckConnectorConverterImpl(final HyperwalletConnectionConfiguration config) { this.hyperwalletEnvironment = config.getServer(); } @Override public HyperwalletAPICheck from(final HyperwalletProgram hyperwalletProgram) { return HyperwalletAPICheck.builder() .hyperwalletAPICheckStatus( isHealthy(hyperwalletProgram) ? HyperwalletAPICheckStatus.UP : HyperwalletAPICheckStatus.DOWN) .location(hyperwalletEnvironment).error(getError(hyperwalletProgram)).build(); //@formatter:on } @Override public HyperwalletAPICheck from(final Exception e) { //@formatter:off return HyperwalletAPICheck.builder() .hyperwalletAPICheckStatus(HyperwalletAPICheckStatus.DOWN) .location(hyperwalletEnvironment) .error(e.getMessage()) .build(); //@formatter:on } private boolean isHealthy(final HyperwalletProgram hyperwalletProgram) { return hyperwalletProgram != null && hyperwalletProgram.getName() != null; } String getError(final HyperwalletProgram hyperwalletProgram) { return hyperwalletProgram == null || hyperwalletProgram.getName() == null ? "Hyperwallet Health Check end point didn't return program info" : null; } }
5,730
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/services
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/hyperwalletapichecks/services/converters/HyperwalletAPIHealthCheckConnectorConverter.java
package com.paypal.observability.hyperwalletapichecks.services.converters;

import com.hyperwallet.clientsdk.model.HyperwalletProgram;
import com.paypal.observability.hyperwalletapichecks.model.HyperwalletAPICheck;

/**
 * Converts raw Hyperwallet connector outcomes (a program response or an exception) into
 * {@link HyperwalletAPICheck} results.
 */
public interface HyperwalletAPIHealthCheckConnectorConverter {

	/**
	 * Maps a program response to a check result.
	 * @param hyperwalletProgram the program returned by the health endpoint
	 * @return the corresponding check result
	 */
	HyperwalletAPICheck from(HyperwalletProgram hyperwalletProgram);

	/**
	 * Maps an exception raised while probing to an unhealthy check result.
	 * @param e the raised exception
	 * @return the corresponding check result
	 */
	HyperwalletAPICheck from(Exception e);

}
5,731
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/connectors/MiraklFieldSchemaConnector.java
package com.paypal.observability.miraklfieldschecks.connectors;

import com.mirakl.client.mmp.domain.additionalfield.MiraklFrontOperatorAdditionalField;

import java.util.List;

/**
 * Connector that fetches the shop custom (additional) field definitions from Mirakl.
 */
public interface MiraklFieldSchemaConnector {

	/**
	 * Retrieves the custom field definitions configured for shops in Mirakl.
	 * @return the list of shop additional field definitions
	 */
	List<MiraklFrontOperatorAdditionalField> getShopCustomFields();

}
5,732
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/connectors/MiraklFieldSchemaConnectorImpl.java
package com.paypal.observability.miraklfieldschecks.connectors; import com.mirakl.client.mmp.domain.additionalfield.MiraklAdditionalFieldLinkedEntity; import com.mirakl.client.mmp.domain.additionalfield.MiraklFrontOperatorAdditionalField; import com.mirakl.client.mmp.operator.request.additionalfield.MiraklGetAdditionalFieldRequest; import com.paypal.infrastructure.mirakl.client.MiraklClient; import org.springframework.stereotype.Component; import java.util.List; @Component public class MiraklFieldSchemaConnectorImpl implements MiraklFieldSchemaConnector { private final MiraklClient miraklOperatorClient; public MiraklFieldSchemaConnectorImpl(final MiraklClient miraklOperatorClient) { this.miraklOperatorClient = miraklOperatorClient; } @Override public List<MiraklFrontOperatorAdditionalField> getShopCustomFields() { final MiraklGetAdditionalFieldRequest miraklGetAdditionalFieldRequest = new MiraklGetAdditionalFieldRequest( MiraklAdditionalFieldLinkedEntity.SHOP); return miraklOperatorClient.getAdditionalFields(miraklGetAdditionalFieldRequest); } }
5,733
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository/MiraklFieldSchemaRepositoryImpl.java
package com.paypal.observability.miraklfieldschecks.repository; import com.paypal.infrastructure.support.exceptions.HMCException; import com.paypal.observability.miraklfieldschecks.repository.model.MiraklFieldSchemaYaml; import com.paypal.observability.miraklfieldschecks.repository.model.MiraklSchemaGroupYaml; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Value; import org.springframework.core.io.Resource; import org.springframework.stereotype.Service; import org.yaml.snakeyaml.Yaml; import org.yaml.snakeyaml.constructor.Constructor; import java.io.InputStream; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @Slf4j @Service public class MiraklFieldSchemaRepositoryImpl implements MiraklFieldSchemaRepository { @Value("classpath:mirakl/customfield-schemas/*") private Resource[] resources; @Override public MiraklFieldSchemaYaml loadCustomFieldsSchema(final boolean includeKycRequiredFields) { return new MiraklFieldSchemaYaml(loadCustomFieldGroups(includeKycRequiredFields)); } private List<MiraklSchemaGroupYaml> loadCustomFieldGroups(final boolean includeKycRequiredFields) { return Arrays.stream(resources).map(this::loadYaml) .filter(g -> includeKycRequiredFields || Boolean.FALSE.equals(g.getMetadata().getRequiredForKyc())) .collect(Collectors.toList()); } private MiraklSchemaGroupYaml loadYaml(final Resource resource) { final Yaml yaml = new Yaml(new Constructor(MiraklSchemaGroupYaml.class)); try (final InputStream is = resource.getInputStream()) { return yaml.load(is); } catch (final Exception e) { throw new HMCException("Couldn't load custom field schema from file: %s".formatted(resource.getFilename()), e); } } }
5,734
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository/MiraklFieldSchemaRepository.java
package com.paypal.observability.miraklfieldschecks.repository;

import com.paypal.observability.miraklfieldschecks.repository.model.MiraklFieldSchemaYaml;

/**
 * Repository providing the expected Mirakl shop custom field schema.
 */
public interface MiraklFieldSchemaRepository {

	/**
	 * Loads the expected custom field schema.
	 * @param includeKycRequiredFields whether KYC-required field groups are included
	 * @return the expected schema
	 */
	MiraklFieldSchemaYaml loadCustomFieldsSchema(boolean includeKycRequiredFields);

}
5,735
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository/model/MiraklFieldSchemaGroupMetadataYaml.java
package com.paypal.observability.miraklfieldschecks.repository.model;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Metadata section of a custom field schema group YAML file: who owns the group, which
 * shop type it applies to, and whether it is required for KYC.
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class MiraklFieldSchemaGroupMetadataYaml {

	// Owning team/component of the field group.
	private String owner;

	// Shop type the group applies to.
	private String shopType;

	// Logical group identifier.
	private String group;

	// Whether the group's fields are required for KYC processing.
	private Boolean requiredForKyc;

}
5,736
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository/model/MiraklFieldTypeYaml.java
package com.paypal.observability.miraklfieldschecks.repository.model; public enum MiraklFieldTypeYaml { TEXT, DATE, NUMERIC, BOOLEAN, LINK, REGULAR_EXPRESSION, TEXT_AREA, SINGLE_VALUE_LIST, MULTIPLE_VALUES_LIST }
5,737
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository/model/MiraklSchemaGroupYaml.java
package com.paypal.observability.miraklfieldschecks.repository.model;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * A group of custom fields as declared in one schema YAML file: its metadata, a label,
 * a description and the field definitions it contains.
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class MiraklSchemaGroupYaml {

	// Group-level metadata (owner, shop type, KYC flag...).
	private MiraklFieldSchemaGroupMetadataYaml metadata;

	// Human-readable group label.
	private String label;

	// Human-readable group description.
	private String description;

	// Field definitions belonging to this group.
	private List<MiraklFieldYaml> fields;

}
5,738
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository/model/MiraklFieldPermissionsYaml.java
package com.paypal.observability.miraklfieldschecks.repository.model; public enum MiraklFieldPermissionsYaml { INVISIBLE, READ_ONLY, READ_WRITE }
5,739
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository/model/MiraklFieldSchemaYaml.java
package com.paypal.observability.miraklfieldschecks.repository.model;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * Root of the expected custom field schema: the collection of field groups loaded from
 * the schema YAML files.
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class MiraklFieldSchemaYaml {

	// All custom field groups making up the schema.
	private List<MiraklSchemaGroupYaml> customFieldGroups;

}
5,740
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/repository/model/MiraklFieldYaml.java
package com.paypal.observability.miraklfieldschecks.repository.model;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * A single custom field definition as declared in a schema YAML file.
 */
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class MiraklFieldYaml {

	// Human-readable field label.
	private String label;

	// Unique field code (identity of the field in Mirakl).
	private String code;

	// Human-readable description.
	private String description;

	// Field data type.
	private MiraklFieldTypeYaml type;

	// Shop-side permission level.
	private MiraklFieldPermissionsYaml permissions;

	// Whether the field is mandatory.
	private Boolean required;

	// Validation regexp, only meaningful for REGULAR_EXPRESSION fields.
	private String regexpPattern;

	// Allowed values, only meaningful for list-typed fields.
	private List<String> allowedValues;

}
5,741
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/model/MiraklField.java
package com.paypal.observability.miraklfieldschecks.model;

import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem;
import lombok.Builder;
import lombok.Value;

import java.util.ArrayList;
import java.util.List;

/**
 * Immutable domain representation of a Mirakl shop custom field, used by the schema
 * diff machinery. {@code required} defaults to {@code false} and {@code allowedValues}
 * to an empty list when not set through the builder.
 */
@Value
@Builder
public class MiraklField implements MiraklSchemaItem {

	// Human-readable field label.
	private String label;

	// Unique field code (identity of the field in Mirakl).
	private String code;

	// Human-readable description.
	private String description;

	// Field data type.
	private MiraklFieldType type;

	// Shop-side permission level.
	private MiraklFieldPermissions permissions;

	// Whether the field is mandatory; defaults to FALSE.
	@Builder.Default
	private Boolean required = Boolean.FALSE;

	// Validation regexp, only meaningful for REGULAR_EXPRESSION fields.
	private String regexpPattern;

	// Allowed values, only meaningful for list-typed fields; defaults to empty.
	@Builder.Default
	private List<String> allowedValues = new ArrayList<>();

}
5,742
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/model/MiraklFieldPermissions.java
package com.paypal.observability.miraklfieldschecks.model; public enum MiraklFieldPermissions { INVISIBLE, READ_ONLY, READ_WRITE }
5,743
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/model/MiraklFieldType.java
package com.paypal.observability.miraklfieldschecks.model; public enum MiraklFieldType { TEXT, DATE, NUMERIC, BOOLEAN, LINK, REGULAR_EXPRESSION, TEXT_AREA, SINGLE_VALUE_LIST, MULTIPLE_VALUES_LIST }
5,744
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/diffs/IncorrectRegexpDiffEvaluator.java
package com.paypal.observability.miraklfieldschecks.diffs; import com.paypal.observability.miraklfieldschecks.model.MiraklField; import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem; import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntry; import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntryIncorrectAttributeValue; import com.paypal.observability.miraklschemadiffs.model.diffevaluators.MiraklSchemaItemDiffEvaluator; import org.springframework.stereotype.Component; import java.util.Optional; @Component public class IncorrectRegexpDiffEvaluator implements MiraklSchemaItemDiffEvaluator { @Override public Optional<MiraklSchemaDiffEntry> check(final MiraklSchemaItem expected, final MiraklSchemaItem actual) { final MiraklField expectedField = (MiraklField) expected; final MiraklField actualField = (MiraklField) actual; final String expectedRegexpPattern = expectedField.getRegexpPattern() != null ? expectedField.getRegexpPattern() : ""; final String actualRegexpPattern = actualField.getRegexpPattern() != null ? actualField.getRegexpPattern() : ""; if (!expectedRegexpPattern.equals(actualRegexpPattern)) { return Optional.of(new MiraklSchemaDiffEntryIncorrectAttributeValue(expected, actual, "regexp")); } return Optional.empty(); } @Override public Class<? extends MiraklSchemaItem> targetClass() { return MiraklField.class; } }
5,745
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/diffs/IncorrectPermissionsDiffEvaluator.java
package com.paypal.observability.miraklfieldschecks.diffs; import com.paypal.observability.miraklfieldschecks.model.MiraklField; import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem; import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntry; import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntryIncorrectAttributeValue; import com.paypal.observability.miraklschemadiffs.model.diffevaluators.MiraklSchemaItemDiffEvaluator; import org.springframework.stereotype.Component; import java.util.Optional; @Component public class IncorrectPermissionsDiffEvaluator implements MiraklSchemaItemDiffEvaluator { @Override public Optional<MiraklSchemaDiffEntry> check(final MiraklSchemaItem expected, final MiraklSchemaItem actual) { if (!((MiraklField) expected).getPermissions().equals(((MiraklField) actual).getPermissions())) { return Optional.of(new MiraklSchemaDiffEntryIncorrectAttributeValue(expected, actual, "permissions")); } return Optional.empty(); } @Override public Class<? extends MiraklSchemaItem> targetClass() { return MiraklField.class; } }
5,746
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/diffs/IncorrectRequiredDiffEvaluator.java
package com.paypal.observability.miraklfieldschecks.diffs; import com.paypal.observability.miraklfieldschecks.model.MiraklField; import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem; import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntry; import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntryIncorrectAttributeValue; import com.paypal.observability.miraklschemadiffs.model.diffevaluators.MiraklSchemaItemDiffEvaluator; import org.springframework.stereotype.Component; import java.util.Optional; @Component public class IncorrectRequiredDiffEvaluator implements MiraklSchemaItemDiffEvaluator { @Override public Optional<MiraklSchemaDiffEntry> check(final MiraklSchemaItem expected, final MiraklSchemaItem actual) { final MiraklField expectedField = (MiraklField) expected; final MiraklField actualField = (MiraklField) actual; final Boolean expectedRequired = expectedField.getRequired() != null ? expectedField.getRequired() : Boolean.FALSE; if (!expectedRequired.equals(actualField.getRequired())) { return Optional.of(new MiraklSchemaDiffEntryIncorrectAttributeValue(expected, actual, "required")); } return Optional.empty(); } @Override public Class<? extends MiraklSchemaItem> targetClass() { return MiraklField.class; } }
5,747
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/diffs/IncorrectTypeDiffEvaluator.java
package com.paypal.observability.miraklfieldschecks.diffs; import com.paypal.observability.miraklfieldschecks.model.MiraklField; import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem; import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntry; import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntryIncorrectAttributeValue; import com.paypal.observability.miraklschemadiffs.model.diffevaluators.MiraklSchemaItemDiffEvaluator; import org.springframework.stereotype.Component; import java.util.Optional; @Component public class IncorrectTypeDiffEvaluator implements MiraklSchemaItemDiffEvaluator { @Override public Optional<MiraklSchemaDiffEntry> check(final MiraklSchemaItem expected, final MiraklSchemaItem actual) { if (!((MiraklField) expected).getType().equals(((MiraklField) actual).getType())) { return Optional.of(new MiraklSchemaDiffEntryIncorrectAttributeValue(expected, actual, "type")); } return Optional.empty(); } @Override public Class<? extends MiraklSchemaItem> targetClass() { return MiraklField.class; } }
5,748
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/diffs/IncorrectAllowedValuesDiffEvaluator.java
package com.paypal.observability.miraklfieldschecks.diffs; import com.paypal.observability.miraklfieldschecks.model.MiraklField; import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem; import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntry; import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntryIncorrectAttributeValue; import com.paypal.observability.miraklschemadiffs.model.diffevaluators.MiraklSchemaItemDiffEvaluator; import org.springframework.stereotype.Component; import java.util.Optional; @Component public class IncorrectAllowedValuesDiffEvaluator implements MiraklSchemaItemDiffEvaluator { @Override public Optional<MiraklSchemaDiffEntry> check(final MiraklSchemaItem expected, final MiraklSchemaItem actual) { if (!hasSameAllowedValues((MiraklField) expected, (MiraklField) actual)) { return Optional.of(new MiraklSchemaDiffEntryIncorrectAttributeValue(expected, actual, "allowedValues")); } return Optional.empty(); } @Override public Class<? extends MiraklSchemaItem> targetClass() { return MiraklField.class; } private boolean hasSameAllowedValues(final MiraklField expected, final MiraklField actual) { return expected.getAllowedValues().isEmpty() || checkAllowedValues(expected, actual); } private boolean checkAllowedValues(final MiraklField expected, final MiraklField actual) { return expected.getAllowedValues().containsAll(actual.getAllowedValues()) && actual.getAllowedValues().containsAll(expected.getAllowedValues()); } }
5,749
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/diffs/MiraklFieldSchemaDiffEntrySeverityAssigner.java
package com.paypal.observability.miraklfieldschecks.diffs;

import com.paypal.observability.miraklfieldschecks.model.MiraklField;
import com.paypal.observability.miraklfieldschecks.model.MiraklFieldPermissions;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem;
import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntry;
import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntryIncorrectAttributeValue;
import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiffEntryType;
import com.paypal.observability.miraklschemadiffs.model.report.MiraklSchemaDiffEntrySeverityAssigner;
import com.paypal.observability.miraklschemadiffs.model.report.MiraklSchemaDiffReportSeverity;
import org.springframework.stereotype.Component;

import java.util.Map;

/**
 * Assigns a report severity (FAIL/WARN) to each schema diff entry found for Mirakl
 * custom fields. Missing items are always FAIL; incorrect attribute values are mapped
 * per attribute name, with a special rule for the "permissions" attribute.
 */
@Component
public class MiraklFieldSchemaDiffEntrySeverityAssigner implements MiraklSchemaDiffEntrySeverityAssigner {

	//@formatter:off
	// Severity per mismatched attribute name. Attributes that break the connector's
	// behavior (type, permissions, regexp, allowedValues) are FAIL; cosmetic ones WARN.
	private final Map<String, MiraklSchemaDiffReportSeverity> fieldNameToSeverityMap = Map.of(
			"type", MiraklSchemaDiffReportSeverity.FAIL,
			"label", MiraklSchemaDiffReportSeverity.WARN,
			"permissions", MiraklSchemaDiffReportSeverity.FAIL,
			"regexp", MiraklSchemaDiffReportSeverity.FAIL,
			"allowedValues", MiraklSchemaDiffReportSeverity.FAIL,
			"description", MiraklSchemaDiffReportSeverity.WARN,
			"required", MiraklSchemaDiffReportSeverity.WARN
	);

	// We assign a numeric value to each permission representing the amount of rights
	// for each of them. The bigger the value the more rights it grants.
	// This is going to be used to compare the amount of rights granted by to different
	// permissions.
	private final Map<MiraklFieldPermissions, Integer> permissionsLevelMap = Map.of(
			MiraklFieldPermissions.INVISIBLE, 0,
			MiraklFieldPermissions.READ_ONLY, 1,
			MiraklFieldPermissions.READ_WRITE, 2
	);
	//@formatter:on

	@Override
	public MiraklSchemaDiffReportSeverity getSeverityFor(final MiraklSchemaDiffEntry entry) {
		MiraklSchemaDiffReportSeverity severity = MiraklSchemaDiffReportSeverity.WARN;
		if (MiraklSchemaDiffEntryType.ITEM_NOT_FOUND.equals(entry.getDiffType())) {
			// A field expected by the connector is completely absent from Mirakl.
			severity = MiraklSchemaDiffReportSeverity.FAIL;
		}
		else if (MiraklSchemaDiffEntryType.INCORRECT_ATTRIBUTE_VALUE.equals(entry.getDiffType())) {
			final MiraklSchemaDiffEntryIncorrectAttributeValue incorrectAttributeEntry = ((MiraklSchemaDiffEntryIncorrectAttributeValue) entry);
			final String fieldName = incorrectAttributeEntry.getAttributeName();
			severity = fieldName.equals("permissions") ? getPermissionsAttributeSeverity(incorrectAttributeEntry)
					: getGenericAttributeSeverity(incorrectAttributeEntry);
		}
		return severity;
	}

	// NOTE(review): a mismatch where the actual permission grants at least as many rights
	// as expected is flagged FAIL, while a more restrictive one is only WARN — confirm
	// this is the intended policy, as the inverse reading is also plausible.
	private MiraklSchemaDiffReportSeverity getPermissionsAttributeSeverity(
			final MiraklSchemaDiffEntryIncorrectAttributeValue diffEntry) {
		final MiraklFieldPermissions actual = ((MiraklField) diffEntry.getActual()).getPermissions();
		final MiraklFieldPermissions expected = ((MiraklField) diffEntry.getExpected()).getPermissions();
		return permissionsLevelMap.get(actual) >= permissionsLevelMap.get(expected)
				? MiraklSchemaDiffReportSeverity.FAIL : MiraklSchemaDiffReportSeverity.WARN;
	}

	// Unknown attribute names fall back to WARN instead of returning null (a plain
	// Map.get would make getSeverityFor return null and break severity comparisons).
	private MiraklSchemaDiffReportSeverity getGenericAttributeSeverity(
			final MiraklSchemaDiffEntryIncorrectAttributeValue diffEntry) {
		final String fieldName = diffEntry.getAttributeName();
		return fieldNameToSeverityMap.getOrDefault(fieldName, MiraklSchemaDiffReportSeverity.WARN);
	}

	@Override
	public Class<? extends MiraklSchemaItem> getTargetSchemaType() {
		return MiraklField.class;
	}

}
5,750
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/startup/MiraklFieldSchemaStartupCheckPrinter.java
package com.paypal.observability.miraklfieldschecks.startup;

import com.paypal.observability.miraklschemadiffs.startup.AbstractMiraklSchemaStartupCheckPrinter;
import com.paypal.observability.startupchecks.model.StartupCheckProvider;
import org.springframework.stereotype.Component;

/**
 * Printer for the Mirakl custom-fields schema startup check. All formatting logic lives
 * in {@link AbstractMiraklSchemaStartupCheckPrinter}; this subclass only binds the
 * printer to {@link MiraklFieldSchemaStartupCheckProvider}.
 */
@Component
public class MiraklFieldSchemaStartupCheckPrinter extends AbstractMiraklSchemaStartupCheckPrinter {

	@Override
	public Class<? extends StartupCheckProvider> getAssociatedStartupCheck() {
		return MiraklFieldSchemaStartupCheckProvider.class;
	}

}
5,751
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/startup/MiraklFieldSchemaStartupCheckProvider.java
package com.paypal.observability.miraklfieldschecks.startup;

import com.paypal.observability.miraklfieldschecks.services.MiraklFieldSchemaCheckerService;
import com.paypal.observability.miraklschemadiffs.model.report.MiraklSchemaDiffReport;
import com.paypal.observability.miraklschemadiffs.startup.converters.MiraklSchemaStartupCheckConverter;
import com.paypal.observability.startupchecks.model.StartupCheck;
import com.paypal.observability.startupchecks.model.StartupCheckProvider;
import org.springframework.stereotype.Component;

/**
 * Startup check that validates the Mirakl custom-fields schema against the expected
 * definition and converts the resulting diff report into a {@link StartupCheck}.
 */
@Component
public class MiraklFieldSchemaStartupCheckProvider implements StartupCheckProvider {

	// Key under which diff details are published in the startup-check details map.
	// Not referenced in this class; presumably read by consumers of the check — TODO confirm.
	public static final String STATUS_CHECK_DETAILS_DIFF_KEY = "diffs";

	private final MiraklFieldSchemaCheckerService miraklFieldSchemaCheckerService;

	private final MiraklSchemaStartupCheckConverter miraklSchemaStartupCheckConverter;

	public MiraklFieldSchemaStartupCheckProvider(final MiraklFieldSchemaCheckerService miraklFieldSchemaCheckerService,
			final MiraklSchemaStartupCheckConverter miraklSchemaStartupCheckConverter) {
		this.miraklFieldSchemaCheckerService = miraklFieldSchemaCheckerService;
		this.miraklSchemaStartupCheckConverter = miraklSchemaStartupCheckConverter;
	}

	/**
	 * Runs the custom-fields schema comparison and wraps the diff report as a startup check.
	 */
	@Override
	public StartupCheck check() {
		final MiraklSchemaDiffReport miraklFieldSchemaDiffReport = miraklFieldSchemaCheckerService.checkMiraklSchema();
		return miraklSchemaStartupCheckConverter.startupCheckFrom(miraklFieldSchemaDiffReport);
	}

	@Override
	public String getName() {
		return "miraklCustomFieldsSchemaCheck";
	}

}
5,752
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/services/MiraklFieldSchemaCheckerService.java
package com.paypal.observability.miraklfieldschecks.services;

import com.paypal.observability.miraklschemadiffs.model.report.MiraklSchemaDiffReport;

/**
 * Service that compares the Mirakl custom-fields schema expected by the connector with
 * the schema actually configured in the Mirakl instance.
 */
public interface MiraklFieldSchemaCheckerService {

	/**
	 * Runs the schema comparison.
	 * @return a diff report describing every mismatch found between the expected and
	 * actual custom-fields schemas
	 */
	MiraklSchemaDiffReport checkMiraklSchema();

}
5,753
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/services/MiraklFieldSchemaCheckerServiceImpl.java
package com.paypal.observability.miraklfieldschecks.services;

import com.mirakl.client.mmp.domain.additionalfield.MiraklFrontOperatorAdditionalField;
import com.paypal.observability.miraklfieldschecks.repository.MiraklFieldSchemaRepository;
import com.paypal.observability.miraklfieldschecks.services.converters.MiraklFieldSchemaConnectorConverter;
import com.paypal.observability.miraklfieldschecks.services.converters.MiraklFieldSchemaRepositoryConverter;
import com.paypal.observability.miraklfieldschecks.connectors.MiraklFieldSchemaConnector;
import com.paypal.observability.miraklfieldschecks.repository.model.MiraklFieldSchemaYaml;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchema;
import com.paypal.observability.miraklschemadiffs.model.diff.MiraklSchemaDiff;
import com.paypal.observability.miraklschemadiffs.model.report.MiraklSchemaDiffReport;
import com.paypal.observability.miraklschemadiffs.service.MiraklSchemaComparator;
import com.paypal.observability.miraklschemadiffs.service.MiraklSchemaDiffReportBuilder;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.util.List;

/**
 * Default {@link MiraklFieldSchemaCheckerService}: loads the expected custom-fields
 * schema from the local YAML repository, fetches the actual shop custom fields from
 * Mirakl, compares both schemas and builds a diff report.
 */
@Service
public class MiraklFieldSchemaCheckerServiceImpl implements MiraklFieldSchemaCheckerService {

	// Feature toggle deciding which expected-schema variant is loaded from the repository.
	@Value("${hmc.toggle-features.automated-kyc}")
	protected boolean isKycAutomated;

	private final MiraklSchemaComparator miraklFieldSchemaComparator;

	private final MiraklSchemaDiffReportBuilder miraklFieldSchemaDiffReportBuilder;

	private final MiraklFieldSchemaConnector miraklFieldSchemaConnector;

	private final MiraklFieldSchemaRepository miraklFieldSchemaRepository;

	private final MiraklFieldSchemaRepositoryConverter miraklFieldSchemaRepositoryConverter;

	private final MiraklFieldSchemaConnectorConverter miraklFieldSchemaConnectorConverter;

	public MiraklFieldSchemaCheckerServiceImpl(final MiraklSchemaComparator miraklFieldSchemaComparator,
			final MiraklSchemaDiffReportBuilder miraklFieldSchemaDiffReportBuilder,
			final MiraklFieldSchemaConnector miraklFieldSchemaConnector,
			final MiraklFieldSchemaRepository miraklFieldSchemaRepository,
			final MiraklFieldSchemaRepositoryConverter miraklFieldSchemaRepositoryConverter,
			final MiraklFieldSchemaConnectorConverter miraklFieldSchemaConnectorConverter) {
		this.miraklFieldSchemaComparator = miraklFieldSchemaComparator;
		this.miraklFieldSchemaDiffReportBuilder = miraklFieldSchemaDiffReportBuilder;
		this.miraklFieldSchemaConnector = miraklFieldSchemaConnector;
		this.miraklFieldSchemaRepository = miraklFieldSchemaRepository;
		this.miraklFieldSchemaRepositoryConverter = miraklFieldSchemaRepositoryConverter;
		this.miraklFieldSchemaConnectorConverter = miraklFieldSchemaConnectorConverter;
	}

	/**
	 * Compares the expected custom-fields schema (from the local YAML definition)
	 * against the actual one retrieved from Mirakl.
	 * @return the diff report produced by the report builder
	 */
	@Override
	public MiraklSchemaDiffReport checkMiraklSchema() {
		// Expected schema: the YAML definition shipped with the connector.
		final MiraklFieldSchemaYaml schemaDefinition = miraklFieldSchemaRepository
				.loadCustomFieldsSchema(isKycAutomated);
		final MiraklSchema expectedSchema = miraklFieldSchemaRepositoryConverter.from(schemaDefinition);

		// Actual schema: the shop custom fields currently configured in Mirakl.
		final List<MiraklFrontOperatorAdditionalField> shopCustomFields = miraklFieldSchemaConnector
				.getShopCustomFields();
		final MiraklSchema actualSchema = miraklFieldSchemaConnectorConverter.from(shopCustomFields);

		final MiraklSchemaDiff schemaDiff = miraklFieldSchemaComparator.compareSchemas(expectedSchema, actualSchema);
		return miraklFieldSchemaDiffReportBuilder.getSchemaReport(schemaDiff);
	}

}
5,754
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/services
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/services/converters/MiraklFieldSchemaRepositoryConverter.java
package com.paypal.observability.miraklfieldschecks.services.converters;

import com.paypal.observability.miraklfieldschecks.model.MiraklField;
import com.paypal.observability.miraklfieldschecks.model.MiraklFieldPermissions;
import com.paypal.observability.miraklfieldschecks.model.MiraklFieldType;
import com.paypal.observability.miraklfieldschecks.repository.model.MiraklFieldPermissionsYaml;
import com.paypal.observability.miraklfieldschecks.repository.model.MiraklFieldSchemaYaml;
import com.paypal.observability.miraklfieldschecks.repository.model.MiraklFieldTypeYaml;
import com.paypal.observability.miraklfieldschecks.repository.model.MiraklFieldYaml;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchema;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem;
import org.mapstruct.Mapper;

import java.util.List;
import java.util.stream.Collectors;

/**
 * MapStruct mapper that converts the YAML representation of the expected custom-fields
 * schema into the internal {@link MiraklSchema} model used by the diff machinery.
 */
@Mapper(componentModel = "spring")
public interface MiraklFieldSchemaRepositoryConverter {

	List<MiraklField> from(List<MiraklFieldYaml> miraklFieldYaml);

	MiraklField from(MiraklFieldYaml miraklFieldYaml);

	MiraklFieldType from(MiraklFieldTypeYaml miraklFieldTypeYaml);

	MiraklFieldPermissions from(MiraklFieldPermissionsYaml miraklFieldPermissionsYaml);

	//@formatter:off
	// Flattens every field of every custom-field group into a single MiraklSchema.
	default MiraklSchema from(final MiraklFieldSchemaYaml miraklFieldSchemaYaml) {
		return new MiraklSchema(miraklFieldSchemaYaml.getCustomFieldGroups()
				.stream()
				.flatMap(x -> x.getFields().stream())
				.map(this::from)
				.map(MiraklSchemaItem.class::cast)
				.collect(Collectors.toList()),
				MiraklField.class);
	}
	//@formatter:on

}
5,755
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/services
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/java/com/paypal/observability/miraklfieldschecks/services/converters/MiraklFieldSchemaConnectorConverter.java
package com.paypal.observability.miraklfieldschecks.services.converters;

import com.mirakl.client.mmp.domain.additionalfield.FieldPermission;
import com.mirakl.client.mmp.domain.additionalfield.MiraklAdditionalFieldType;
import com.mirakl.client.mmp.domain.additionalfield.MiraklFrontOperatorAdditionalField;
import com.paypal.observability.miraklfieldschecks.model.MiraklField;
import com.paypal.observability.miraklfieldschecks.model.MiraklFieldPermissions;
import com.paypal.observability.miraklfieldschecks.model.MiraklFieldType;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchema;
import com.paypal.observability.miraklschemadiffs.model.MiraklSchemaItem;
import org.mapstruct.Mapper;
import org.mapstruct.Mapping;
import org.mapstruct.ValueMapping;

import java.util.List;
import java.util.stream.Collectors;

/**
 * MapStruct mapper that converts the additional-field definitions returned by the
 * Mirakl SDK into the internal {@link MiraklSchema} model used by the diff machinery.
 */
@Mapper(componentModel = "spring")
public interface MiraklFieldSchemaConnectorConverter {

	List<MiraklField> miraklFieldListFrom(List<MiraklFrontOperatorAdditionalField> miraklAdditionalFields);

	// SDK attribute names differ from the internal model; map them explicitly.
	@Mapping(target = "allowedValues", source = "acceptedValues")
	@Mapping(target = "regexpPattern", source = "regex")
	@Mapping(target = "permissions", source = "shopPermission")
	MiraklField from(MiraklFrontOperatorAdditionalField miraklAdditionalField);

	// SDK enum constants use different spellings than the internal MiraklFieldType enum.
	@ValueMapping(target = "TEXT", source = "STRING")
	@ValueMapping(target = "TEXT_AREA", source = "TEXTAREA")
	@ValueMapping(target = "REGULAR_EXPRESSION", source = "REGEX")
	@ValueMapping(target = "SINGLE_VALUE_LIST", source = "LIST")
	MiraklFieldType from(MiraklAdditionalFieldType miraklAdditionalFieldType);

	MiraklFieldPermissions from(FieldPermission miraklFieldPermission);

	//@formatter:off
	// Wraps the full list of remote additional fields into a single MiraklSchema.
	default MiraklSchema from(final List<MiraklFrontOperatorAdditionalField> miraklAdditionalFields) {
		return new MiraklSchema(miraklAdditionalFields.stream()
				.map(this::from)
				.map(MiraklSchemaItem.class::cast)
				.collect(Collectors.toList()),
				MiraklField.class);
	}
	//@formatter:on

}
5,756
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/aspectj/com/paypal/observability/trafficauditor
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/aspectj/com/paypal/observability/trafficauditor/instrumentation/MiraklHttpClientConfigurationAspect.java
package com.paypal.observability.trafficauditor.instrumentation;

import com.paypal.observability.trafficauditor.interceptors.mirakl.ApacheHttpRequestInterceptor;
import com.paypal.observability.trafficauditor.interceptors.mirakl.ApacheHttpResponseInterceptor;
import org.apache.http.impl.client.HttpClientBuilder;
import org.aspectj.lang.annotation.AfterReturning;
import org.aspectj.lang.annotation.Aspect;
import org.slf4j.Logger;

import static org.slf4j.LoggerFactory.getLogger;

/**
 * AspectJ aspect that hooks into the Mirakl SDK's HTTP client configuration and
 * registers traffic-auditing request/response interceptors on the client builder.
 */
@Aspect
public class MiraklHttpClientConfigurationAspect {

	private static final Logger logger = getLogger(MiraklHttpClientConfigurationAspect.class);

	/**
	 * Runs after the Mirakl SDK finishes its default HTTP client configuration and
	 * appends the auditor interceptors. Failures are swallowed deliberately so that
	 * auditing problems never break the SDK's client construction.
	 * @param httpClientBuilder the builder the SDK is configuring
	 */
	@AfterReturning(
			pointcut = "execution(* com.mirakl.client.core.AbstractMiraklApiClient.setDefaultConfiguration(..)) && args(httpClientBuilder,..)",
			argNames = "httpClientBuilder")
	public void exit(final HttpClientBuilder httpClientBuilder) {
		try {
			httpClientBuilder.addInterceptorLast(ApacheHttpRequestInterceptor.get())
					.addInterceptorLast(ApacheHttpResponseInterceptor.get());
		}
		catch (final Exception e) {
			logger.trace("Error while intercepting Mirakl SDK instantiation", e);
		}
	}

}
5,757
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/aspectj/com/paypal/observability/trafficauditor
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/aspectj/com/paypal/observability/trafficauditor/instrumentation/HyperwalletRequestAspect.java
package com.paypal.observability.trafficauditor.instrumentation;

import cc.protea.util.http.Response;
import com.hyperwallet.clientsdk.util.Request;
import com.paypal.observability.trafficauditor.adapters.TrafficAuditorAdapter;
import com.paypal.observability.trafficauditor.adapters.hyperwallet.HyperwalletTrafficAuditorAdapter;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.slf4j.Logger;

import static org.slf4j.LoggerFactory.getLogger;

/**
 * AspectJ aspect that audits HTTP traffic generated by the Hyperwallet SDK by wrapping
 * its GET/POST/PUT resource calls and forwarding request/response data to the
 * traffic-auditor adapter. Auditing failures are logged and never propagated.
 */
@Aspect
public class HyperwalletRequestAspect {

	private static final Logger logger = getLogger(HyperwalletRequestAspect.class);

	@Around("execution(* com.hyperwallet.clientsdk.util.Request.getResource(..))")
	public Object auditGetResource(final ProceedingJoinPoint pjp) throws Throwable {
		return auditOperation(pjp);
	}

	@Around("execution(* com.hyperwallet.clientsdk.util.Request.postResource(..))")
	public Object auditPostResource(final ProceedingJoinPoint pjp) throws Throwable {
		return auditOperation(pjp);
	}

	@Around("execution(* com.hyperwallet.clientsdk.util.Request.putResource(..))")
	public Object auditPutResource(final ProceedingJoinPoint pjp) throws Throwable {
		return auditOperation(pjp);
	}

	/**
	 * Proceeds with the intercepted SDK call and reports the request/response pair (or
	 * the thrown error) to the traffic auditor without ever failing the original call.
	 *
	 * NOTE(review): startTraceCapture(self) is invoked only AFTER proceed() succeeds,
	 * and never on the error path (only endTraceCapture(e) is) — confirm this ordering
	 * is intentional; the natural reading would be to start the capture before proceed().
	 */
	@SuppressWarnings("java:S3252")
	private Response auditOperation(final ProceedingJoinPoint pjp) throws Throwable {
		final TrafficAuditorAdapter<Request, Response> trafficAuditorAdapter = HyperwalletTrafficAuditorAdapter.get();
		final Request self = (Request) pjp.getThis();
		try {
			final Response result = (Response) pjp.proceed();
			executeWithoutFailing(() -> trafficAuditorAdapter.startTraceCapture(self));
			executeWithoutFailing(() -> trafficAuditorAdapter.endTraceCapture(result));
			return result;
		}
		catch (final Throwable e) {
			executeWithoutFailing(() -> trafficAuditorAdapter.endTraceCapture(e));
			throw e;
		}
	}

	// Runs an auditing step, demoting any failure to a trace-level log entry so the
	// audited SDK call itself is never affected.
	private void executeWithoutFailing(final Runnable runnable) {
		try {
			runnable.run();
		}
		catch (final Exception e) {
			logger.trace("Error while intercepting Hyperwallet traffic", e);
		}
	}

}
5,758
0
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/aspectj/com/paypal/observability/trafficauditor
Create_ds/mirakl-hyperwallet-connector/hmc-observability/src/main/aspectj/com/paypal/observability/trafficauditor/instrumentation/MiraklPerformRequestAspect.java
package com.paypal.observability.trafficauditor.instrumentation;

import com.paypal.observability.trafficauditor.adapters.mirakl.MiraklTrafficAuditorAdapter;
import org.aspectj.lang.annotation.AfterThrowing;
import org.aspectj.lang.annotation.Aspect;
import org.slf4j.Logger;

import static org.slf4j.LoggerFactory.getLogger;

/**
 * AspectJ aspect that reports failures of Mirakl SDK HTTP requests to the traffic
 * auditor. Only the error path is intercepted here; successful traffic is presumably
 * captured elsewhere (e.g. by the interceptors registered in
 * MiraklHttpClientConfigurationAspect) — TODO confirm.
 */
@Aspect
public class MiraklPerformRequestAspect {

	private static final Logger logger = getLogger(MiraklPerformRequestAspect.class);

	/**
	 * Called when the Mirakl SDK's performRequest throws; closes the trace capture with
	 * the thrown error. Auditing failures are swallowed so the original exception still
	 * propagates to the caller unchanged.
	 * @param thrown the exception thrown by the SDK call
	 */
	@SuppressWarnings("java:S3252")
	@AfterThrowing(pointcut = "execution(* com.mirakl.client.core.AbstractMiraklApiClient.performRequest(..))",
			throwing = "thrown", argNames = "thrown")
	public void exit(final Throwable thrown) {
		try {
			MiraklTrafficAuditorAdapter.get().endTraceCapture(thrown);
		}
		catch (final Exception e) {
			logger.trace("Error while intercepting Mirakl HTTP traffic", e);
		}
	}

}
5,759
0
Create_ds/porting-advisor-for-graviton/sample-projects
Create_ds/porting-advisor-for-graviton/sample-projects/java-samples/main.java
/**
 * Sample program used by the porting-advisor test fixtures; prints a fixed greeting.
 */
class Main {

    private static final String GREETING = "Hello World";

    public static void main(String[] args) {
        System.out.println(GREETING);
    }

}
5,760
0
Create_ds/porting-advisor-for-graviton/sample-projects
Create_ds/porting-advisor-for-graviton/sample-projects/java-samples/submain.java
/**
 * Second sample program used by the porting-advisor test fixtures; prints a fixed greeting.
 */
class Submain {

    private static final String GREETING = "Hello World 2";

    public static void main(String[] args) {
        System.out.println(GREETING);
    }

}
5,761
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/test/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/test/java/com/amazonaws/labs/GravitonReadyAssessor/ConfigFileTest.java
package com.amazonaws.labs.GravitonReadyAssessor;

import com.fasterxml.jackson.core.JsonProcessingException;
import org.junit.Test;
import static org.junit.Assert.*;
import org.osgi.framework.Version;
import org.osgi.framework.VersionRange;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Date;

/**
 * Tests for loading and (de)serializing the Graviton-readiness {@code Config} model.
 */
public class ConfigFileTest {

    /** Loads the sample config file from disk; fails the test on any I/O error. */
    @Test
    public void shouldLoadConfigFile() {
        try {
            URL url = new URL("file:src/test/files/config.json");
            Config c = Config.fromURL(url);
            System.out.println(c);
        } catch (IOException e) {
            fail(e.toString());
        }
    }

    /** Serializes a generated config to JSON without error. */
    @Test
    public void shouldPrintJSON() {
        try {
            Config c = generateConfig();
            System.out.println(c.toJson());
        } catch (JsonProcessingException e) {
            fail(e.toString());
        }
    }

    /** Round-trips a config through JSON and checks equality. */
    @Test
    public void shouldSerializeDeserialize() {
        try {
            Config c1 = generateConfig();
            String json = c1.toJson();
            Config c2 = Config.fromJson(json);
            assertEquals(c1, c2);
        } catch (JsonProcessingException e) {
            fail(e.toString());
        }
    }

    @Test
    public void versionInRange() {
        Config config = generateConfig();
        // Use a JUnit assertion: a bare Java 'assert' is a no-op unless the JVM runs
        // with -ea, so the original check never actually executed under JUnit defaults.
        assertEquals(1, config.getClassInfos().size());
        ClassInfo info = config.getClassInfos().get(0);
        // TODO: assert that versions inside/outside the configured range of 'info'
        // match as expected.
    }

    /**
     * Builds a single-entry config with a [1.0.0, 2.0.0) version range for both the
     * implementation and the specification. Returns null (after failing the test) if
     * the hard-coded URL is malformed, which cannot happen in practice.
     */
    private Config generateConfig() {
        try {
            ClassInfo i = ClassInfo.builder()
                    .implementationTitle("ImplementationTitle")
                    .implementationVendor("ImplementationVendor")
                    .implementationVersionRange(
                            new VersionRange(
                                    VersionRange.LEFT_CLOSED,
                                    new Version(1, 0, 0),
                                    new Version(2, 0, 0),
                                    VersionRange.RIGHT_OPEN)
                    )
                    .specificationTitle("SpecificationTitle")
                    .specificationVendor("SpecificationVendor")
                    .specificationVersionRange(
                            new VersionRange(
                                    VersionRange.LEFT_CLOSED,
                                    new Version(1, 0, 0),
                                    new Version(2, 0, 0),
                                    VersionRange.RIGHT_OPEN)
                    )
                    .description("Description goes here")
                    .status("OK")
                    .url(new URL("http://example.com"))
                    .lastUpdated(new Date())
                    .build();
            return Config.builder()
                    .classInfo(i)
                    .build();
        } catch (MalformedURLException e) {
            fail(e.toString());
            return null;
        }
    }
}
5,762
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/test/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/test/java/com/amazonaws/labs/GravitonReadyAssessor/AppTest.java
package com.amazonaws.labs.GravitonReadyAssessor;

import static org.junit.Assert.assertTrue;

import org.junit.Test;

/**
 * Unit test for simple App.
 */
public class AppTest {

    /**
     * Rigorous Test :-)
     */
    // Placeholder smoke test (archetype boilerplate) — always passes; it only verifies
    // that the test harness itself compiles and runs.
    @Test
    public void shouldAnswerWithTrue() {
        assertTrue( true );
    }
}
5,763
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/NativeCodeManifest.java
package com.amazonaws.labs.GravitonReadyAssessor;

import lombok.Getter;
import lombok.NonNull;

import java.io.IOException;
import java.util.Arrays;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.List;
import java.util.stream.Collectors;

/**
 * <p>A native code bundle JAR manifest entry.</p>
 *
 * <p>JAR files have
 * <a href="https://docs.oracle.com/javase/tutorial/deployment/jar/manifestindex.html">manifests</a>
 * in them which contain various metadata in them. These metadata are known as
 * <a href="https://docs.oracle.com/javase/tutorial/deployment/jar/secman.html">attributes</a>. Some
 * JAR files have a <code>Bundle-NativeCode</code> attribute in them that indicates where native code
 * can be found. The format of this attribute's value is defined by the OSGI Framework and is
 * documented <a href="http://docs.osgi.org/specification/osgi.core/7.0.0/framework.module.html#framework.module-loading.native.code.libraries">here</a>.</p>
 */
public class NativeCodeManifest {

    // Name of the OSGi manifest attribute listing native-code entries.
    final static String BundleNativeCode = "Bundle-NativeCode";

    // Parsed records, one per comma-separated clause of the attribute value.
    @Getter
    private List<NativeCodeManifestRecord> records;

    /**
     * Constructs a NativeCodeManifest from a JarFile object.
     * @param jarFile the JarFile
     * @return the NativeCodeManifest, or null if the JAR has no Bundle-NativeCode attribute
     * @throws IOException if the JAR manifest cannot be read
     */
    public static NativeCodeManifest fromJarFile(@NonNull JarFile jarFile) throws IOException {
        Manifest manifest = jarFile.getManifest();
        Attributes attrs = manifest.getMainAttributes();
        String bundleNativeCode = attrs.getValue(BundleNativeCode);
        if (bundleNativeCode == null)
            return null;
        return fromString(bundleNativeCode);
    }

    /**
     * Constructs a NativeCodeManifest from the raw attribute value.
     * @param attributeValue the value of the Bundle-NativeCode Manifest attribute
     * @return the NativeCodeManifest
     */
    private static NativeCodeManifest fromString(@NonNull String attributeValue) {
        NativeCodeManifest manifest = new NativeCodeManifest();
        // Records are separated by `,`
        // NOTE(review): a plain split assumes no quoted commas appear inside a clause —
        // confirm against the OSGi header grammar if that case matters.
        manifest.records = Arrays.stream(attributeValue.split(","))
                .map(String::trim)
                .map(NativeCodeManifestRecord::fromString)
                .collect(Collectors.toUnmodifiableList());
        return manifest;
    }
}
5,764
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/JarNativeInfo.java
package com.amazonaws.labs.GravitonReadyAssessor;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.ZipException;

import static java.nio.file.StandardOpenOption.APPEND;

import lombok.*;

/**
 * JarNativeInfo contains information about native libraries inside a JAR file.
 */
public class JarNativeInfo {

    // Class-name prefixes that are never scanned for native methods.
    private final static String[] IGNORED_PREFIXES = { "jdk.internal" };

    /**
     * The actual on-disk path to the JAR file, which may be a temporary file if the JAR
     * was embedded in another JAR.
     */
    @Getter
    @NonNull
    private Path realJarPath;

    /**
     * Only used for embedded JARs. Points to the path inside the JAR.
     */
    @Getter
    private Path nominalJarPath;

    /**
     * Shared libraries found for this JAR (paths reported by the manifest/file scanners).
     */
    @Getter
    private final List<String> sharedLibs = new ArrayList<>();

    /**
     * Native methods declared by classes in this JAR.
     */
    @Getter
    private final List<Method> nativeMethods = new ArrayList<>();

    /**
     * Native information associated with embedded JARs
     */
    @Getter
    private final List<JarNativeInfo> children = new ArrayList<>();

    final Logger log = SimpleLogger.getLogger();

    // One URLClassLoader per JAR path, shared across instances so repeated class loads
    // from the same JAR reuse the loader.
    static ConcurrentHashMap<Path, ClassLoader> cache = new ConcurrentHashMap<>();

    /**
     * Builds a JarNativeInfo object
     * @param jarPath the path to the JAR file
     * @throws IOException if the JAR cannot be opened or an embedded JAR cannot be extracted
     */
    public JarNativeInfo(@NonNull Path jarPath) throws IOException {
        this(jarPath, null);
    }

    /**
     * Builds a JarNativeInfo object. Walks every entry of the JAR: embedded JARs are
     * extracted to temp files and processed recursively; class files are loaded and
     * scanned for native methods. Shared libraries are only looked up when at least one
     * native method was found.
     * @param realJarPath the path to the JAR file on disk
     * @param nominalPath for embedded JARs, the path in the enclosing JAR file where this JAR is located
     * @throws IOException if the JAR cannot be read
     */
    public JarNativeInfo(@NonNull Path realJarPath, Path nominalPath) throws IOException {
        this.realJarPath = realJarPath;
        this.nominalJarPath = nominalPath;
        if (nominalPath == null) {
            log.info("🛃 Checking JAR " + realJarPath);
        } else {
            log.info("🛃 Checking embedded JAR " + nominalPath.toString());
        }
        try {
            @Cleanup JarFile jarFile = new JarFile(realJarPath.toFile());
            final Enumeration<JarEntry> entries = jarFile.entries();
            while (entries.hasMoreElements()) {
                final JarEntry entry = entries.nextElement();
                final String entryName = entry.getName();
                if (entry.isDirectory()) continue;
                if (entryName.endsWith(".jar")) {
                    // Embedded JAR file
                    // Extract the JAR file to a temporary location
                    @Cleanup InputStream is = jarFile.getInputStream(entry);
                    Path tmpJarPath = Files.createTempFile(null, null);
                    tmpJarPath.toFile().deleteOnExit();
                    @Cleanup OutputStream os = Files.newOutputStream(tmpJarPath, APPEND);
                    is.transferTo(os);
                    // Process the embedded JAR recursively
                    JarNativeInfo nativeInfo = new JarNativeInfo(tmpJarPath, Path.of(entryName));
                    children.add(nativeInfo);
                } else if (entryName.endsWith(".class")) {
                    // Convert the entry path ("com/foo/Bar.class") to a binary class name.
                    String className = entryName
                            .substring(0, entry.getName().length() - ".class".length())
                            .replace('/', '.');
                    // Skip JDK internal classes
                    if (Arrays.stream(IGNORED_PREFIXES).anyMatch(className::startsWith)) continue;
                    // Load the class and find its native methods
                    Class<?> c = loadClass(className, realJarPath);
                    if (c != null) {
                        try {
                            nativeMethods.addAll(findNativeMethods(c));
                        } catch (NoClassDefFoundError ignored) {
                            // Class references a type not on the classpath — skip it.
                        }
                    }
                }
            }
            // No need to proceed if there aren't any native methods.
            if (nativeMethods.isEmpty()) return;
            JarChecker scanner;
            // First try to find the shared libraries by scanning the JAR manifest
            scanner = new JarManifestScanner(jarFile);
            sharedLibs.addAll(scanner.getSharedLibraryPaths());
            // Then try to find shared libraries by examining the JAR table of contents
            scanner = new JarFileScanner(jarFile);
            sharedLibs.addAll(scanner.getSharedLibraryPaths());
        } catch (ZipException e) {
            // Treat empty JAR files as though they have no methods at all.
            if (e.getMessage().equals("zip file is empty")) {
                return;
            }
            throw e;
        }
    }

    /** @return true if any native method was found in this JAR (embedded JARs excluded). */
    public boolean hasNativeMethods() {
        return !nativeMethods.isEmpty();
    }

    // Collects the methods of 'c' declared with the 'native' modifier.
    private List<Method> findNativeMethods(@NonNull Class<?> c) {
        log.fine("🧐 Getting native methods for class " + c.getName());
        return Stream.of(c.getDeclaredMethods())
                .peek(m -> log.finer("Checking method " + m.getName()))
                .filter(m -> Modifier.isNative(m.getModifiers()))
                .collect(Collectors.toList());
    }

    // Loads a class by name from the given JAR, reusing a cached per-JAR class loader.
    // Returns null when the class cannot be loaded (missing dependencies, access errors).
    private Class<?> loadClass(@NonNull String name, @NonNull Path jarPath) {
        ClassLoader cl;
        Class<?> cls = null;
        try {
            cl = cache.computeIfAbsent(jarPath, k -> {
                try {
                    URL[] urls = {new URL("jar:file:" + k + "!/")};
                    return new URLClassLoader(urls);
                } catch (MalformedURLException e) {
                    e.printStackTrace();
                    return null;
                }
            });
            assert cl != null;
            cls = cl.loadClass(name);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (NoClassDefFoundError|IllegalAccessError ignored) {
            // Partially-resolvable classes are treated as "not loadable" and skipped.
        }
        return cls;
    }
}
5,765
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/Command.java
package com.amazonaws.labs.GravitonReadyAssessor; import java.io.IOException; import java.nio.file.*; import java.nio.file.attribute.BasicFileAttributes; import java.util.*; import java.lang.reflect.Method; import java.util.logging.*; import java.util.stream.Collectors; import java.util.concurrent.Callable; import lombok.Getter; import lombok.NonNull; import picocli.CommandLine; import picocli.CommandLine.Option; import picocli.CommandLine.Parameters; @CommandLine.Command(name = "Arm64LinuxJarChecker", description = "Checks JAR/WAR files for compatibility with Arm64 CPU architecture on Linux", mixinStandardHelpOptions = true, exitCodeListHeading = "Exit Codes:%n", exitCodeList = { "0: Successful execution, no problems found", "3: Found native classes but no Arm64/Linux shared libraries in JARs" }) final public class Command implements Callable<Integer> { @Parameters(description = "Files or directories in which JARs are located (default: current working directory)") private final List<String> searchPaths = new ArrayList<>(); @Option(names = {"-v", "--verbose"}, description = "Run verbosely") private boolean verbose; @Override public Integer call() throws IOException { int exitCode = 0; Logger log = SimpleLogger.getLogger(); if (verbose) { SimpleLogger.setLevel(Level.ALL); } final class JarSearcher extends SimpleFileVisitor<Path> { private final PathMatcher jarFileMatcher = FileSystems.getDefault().getPathMatcher("regex:.*\\.(jar|war)$"); @Getter private final List<Path> nativeJarFiles = new ArrayList<>(); private final Map<Path, List<String>> nativeLibraryFiles = new HashMap<>(); private final Map<Path, List<Method>> nativeMethods = new HashMap<>(); private void processNativeInfo(@NonNull JarNativeInfo info) { if (info.hasNativeMethods()) { nativeJarFiles.add(info.getNominalJarPath()); nativeLibraryFiles.put(info.getNominalJarPath(), info.getSharedLibs()); nativeMethods.put(info.getNominalJarPath(), info.getNativeMethods()); } for (JarNativeInfo childInfo : 
info.getChildren()) { processNativeInfo(childInfo); } } @Override public FileVisitResult visitFile(@NonNull Path path, @NonNull BasicFileAttributes attrs) throws IOException { if (!jarFileMatcher.matches(path)) return FileVisitResult.CONTINUE; processNativeInfo(new JarNativeInfo(path)); return FileVisitResult.CONTINUE; } public List<String> getNativeLibraries(Path path) { return nativeLibraryFiles.get(path); } public List<Method> getNativeMethods(Path path) { return nativeMethods.get(path); } public boolean hasNativeJars() { return !nativeJarFiles.isEmpty(); } public boolean hasNativeLibraries(Path path) { return !nativeLibraryFiles.get(path).isEmpty(); } } log.info("🟢 Starting search for native classes in JAR files"); if (searchPaths.isEmpty()) { searchPaths.add(""); } // Search JARs and classes JarSearcher finder = new JarSearcher(); for (String searchPath : searchPaths) { Files.walkFileTree( Paths.get(searchPath), // start with current working directory finder); } final List<Path> nativeJars = finder.getNativeJarFiles(); if (!finder.hasNativeJars()) { log.info("🎉 No native methods found in scanned JAR files. 
These should work on any supported CPU architecture."); System.exit(0); } for (Path jarPath : nativeJars) { log.info("ℹ️ JAR with native methods: " + jarPath); log.fine("ℹ️ Methods: " + finder.getNativeMethods(jarPath) .stream() .map(m -> String.format("%s::%s", m.getDeclaringClass().getName(), m.getName())) .distinct() .collect(Collectors.joining(", "))); if (finder.hasNativeLibraries(jarPath)) { log.info("✅ Native libraries: " + finder.getNativeLibraries(jarPath) .stream() .distinct() .collect(Collectors.joining(", "))); } else { log.info("🚨 JAR " + jarPath + " has native methods but no libraries found for aarch64/Linux"); log.info("Native methods: " + finder.getNativeMethods(jarPath).stream().distinct().map(Method::toString).collect(Collectors.joining(", "))); exitCode = 3; } } if (exitCode == 0) { log.info(String.format("%n🎉 JAR files scanned and native libraries appear to be all there. You're good to go!")); } else { log.info(String.format("%n🚨 Found JAR files with native methods but no Linux/arm64 support.")); } return exitCode; } public static void main(String... args) { int exitCode = new CommandLine(new Command()).execute(args); System.exit(exitCode); } }
5,766
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/JarCheckerInterface.java
package com.amazonaws.labs.GravitonReadyAssessor; import java.io.IOException; import java.util.List; public interface JarCheckerInterface { List<String> getSharedLibraryPaths() throws IOException; }
5,767
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/JarChecker.java
package com.amazonaws.labs.GravitonReadyAssessor; public abstract class JarChecker implements JarCheckerInterface { }
5,768
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/JarFileScanner.java
package com.amazonaws.labs.GravitonReadyAssessor; import lombok.NonNull; import lombok.RequiredArgsConstructor; import java.io.IOException; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.logging.Logger; /** * A JAR file scanner that locates native code libraries via simple * path-matching. If the file ends in .so and has the strings "aarch64" * and "linux", it is considered a match. */ @RequiredArgsConstructor public class JarFileScanner extends JarChecker { Logger logger = SimpleLogger.getLogger(); @NonNull private JarFile jarFile; /** * Return a list of path names corresponding to shared library files * in the JAR file. * * @return list of shared library pathnames * @throws IOException */ public List<String> getSharedLibraryPaths() throws IOException { final List<String> sharedLibraryPaths = new ArrayList<>(); final Enumeration<JarEntry> entries = jarFile.entries(); while (entries.hasMoreElements()) { final JarEntry entry = entries.nextElement(); final String entryName = entry.getName(); if (!entry.isDirectory() && entryName.endsWith(".so") && entryName.toLowerCase().contains("aarch64") && entryName.toLowerCase().contains("linux")) sharedLibraryPaths.add(entryName); } return sharedLibraryPaths; } }
5,769
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/ClassInfo.java
package com.amazonaws.labs.GravitonReadyAssessor; import java.net.URL; import java.util.Date; import com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; import lombok.Builder; import lombok.Data; import lombok.extern.jackson.Jacksonized; import org.osgi.framework.VersionRange; @Data @Builder @Jacksonized public class ClassInfo { private String implementationTitle; private String implementationVendor; @JsonSerialize(using = ToStringSerializer.class) private VersionRange implementationVersionRange; private String specificationTitle; private String specificationVendor; @JsonSerialize(using = ToStringSerializer.class) private VersionRange specificationVersionRange; private String status; private String description; private URL url; @JsonFormat(shape = JsonFormat.Shape.STRING) private Date lastUpdated; }
5,770
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/Config.java
package com.amazonaws.labs.GravitonReadyAssessor; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; import lombok.Builder; import lombok.Data; import lombok.Singular; import lombok.extern.jackson.Jacksonized; import java.io.IOException; import java.net.URL; import java.util.List; @Data @Builder @Jacksonized public class Config { @JsonProperty("classes") @Singular public List<ClassInfo> classInfos; public static Config fromURL(URL url) throws IOException { ObjectMapper mapper = new ObjectMapper(); return mapper.readerFor(Config.class).readValue(url); } public static Config fromJson(String s) throws JsonProcessingException, JsonMappingException { ObjectMapper mapper = new ObjectMapper(); return mapper.readerFor(Config.class).readValue(s); } public String toJson() throws JsonProcessingException { ObjectMapper mapper = new ObjectMapper(); return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(this); } }
5,771
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/SimpleLogger.java
package com.amazonaws.labs.GravitonReadyAssessor; import lombok.NonNull; import java.io.OutputStream; import java.util.Properties; import java.util.logging.*; /** * A simple unbuffered logger that simply prints each log line as-is to standard output (System.out). */ public class SimpleLogger { private static Logger logger; private static Handler handler; static { Properties logProps = System.getProperties(); logProps.setProperty("java.util.logging.SimpleFormatter.format", "%5$s%n"); System.setProperties(logProps); } /** * Obtain the singleton Logger instance. * * @return The logger instance * @throws SecurityException */ public static Logger getLogger() throws SecurityException { if (logger != null) { return logger; } logger = Logger.getLogger(SimpleLogger.class.toString()); logger.setUseParentHandlers(false); handler = getAutoFlushingStreamHandler(System.out, new SimpleFormatter()); logger.addHandler(handler); return logger; } /** * Sets the lowest log level that this logger will emit. Logs with a level lower than * this will be omitted from the output. * * @param level The log level */ public static void setLevel(@NonNull Level level) { if (logger == null) getLogger(); handler.setLevel(level); logger.setLevel(level); } /** * Returns a StreamHandler that flushes after every publish() invocation. * @param o the OutputStream passed to the StreamHandler constructor * @param f the Formatter passed to the StreamHandler constructor * @return */ private static StreamHandler getAutoFlushingStreamHandler(@NonNull OutputStream o, @NonNull Formatter f) { return new StreamHandler(o, f) { @Override public synchronized void publish(@NonNull final LogRecord record) { super.publish(record); flush(); } }; } }
5,772
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/JarManifestScanner.java
package com.amazonaws.labs.GravitonReadyAssessor; import lombok.NonNull; import lombok.RequiredArgsConstructor; import java.io.IOException; import java.util.List; import java.util.jar.JarFile; import java.util.stream.Collectors; /** * A JAR file scanner that locates native code libraries by looking at * the JAR's manifest. It uses the OSGI <code>Bundle-NativeCode</code> * attribute for this purpose. */ @RequiredArgsConstructor public class JarManifestScanner extends JarChecker { @NonNull private JarFile jarFile; /** * Return a list of path names corresponding to shared library files * in the JAR file. * * @return list of shared library pathnames * @throws IOException */ public List<String> getSharedLibraryPaths() throws IOException { NativeCodeManifest manifest = NativeCodeManifest.fromJarFile(this.jarFile); // No native code manifest found if (manifest == null) return List.of(); return manifest.getRecords().stream() .filter(NativeCodeManifestRecord::isAarch64) .filter(NativeCodeManifestRecord::isLinux) .map(NativeCodeManifestRecord::getLibpath) .collect(Collectors.toList()); } }
5,773
0
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs
Create_ds/porting-advisor-for-graviton/src/advisor/tools/graviton-ready-java/src/main/java/com/amazonaws/labs/GravitonReadyAssessor/NativeCodeManifestRecord.java
package com.amazonaws.labs.GravitonReadyAssessor; import lombok.Getter; import lombok.NonNull; import lombok.Setter; import java.util.ArrayList; import java.util.List; /** * A record in a Bundle-NativeCode JAR manifest attribute. */ public class NativeCodeManifestRecord { @Getter @Setter private String libpath; private final List<String> osnames = new ArrayList<>(); private final List<String> arches = new ArrayList<>(); /** * Creates a NativeCodeManifestRecord from its string representation. * @param text The raw text * @return a NativeCodeManifestRecord */ public static NativeCodeManifestRecord fromString(@NonNull String text) { NativeCodeManifestRecord entry = new NativeCodeManifestRecord(); List<String> kvPairs = List.of(text.split(";")); entry.setLibpath(kvPairs.get(0)); // Record any processor architectures or OS names found within kvPairs.stream().skip(1).forEach(pair -> { String key = pair.split("=")[0]; String val = pair.split("=")[1]; if (key.equals("osname")) { entry.addOSName(val); } if (key.equals("processor")) { entry.addArch(val); } }); return entry; } public void addOSName(String osName) { osnames.add(osName); } public void addArch(String arch) { arches.add(arch); } public boolean isLinux() { return osnames.stream().anyMatch(name -> name.equalsIgnoreCase("linux")); } public boolean isAarch64() { return arches.stream().anyMatch(name -> name.equalsIgnoreCase("aarch64")); } @Override public String toString() { return "libpath: " + libpath + "; arches=" + this.arches + "; osnames=" + this.osnames; } }
5,774
0
Create_ds/flink-statefun/statefun-shaded/statefun-protobuf-shaded/src/main/resources/META-INF
Create_ds/flink-statefun/statefun-shaded/statefun-protobuf-shaded/src/main/resources/META-INF/licenses/LICENSE.protobuf-java
Copyright 2008 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Code generated by the Protocol Buffer compiler is owned by the owner of the input file used when generating it. This code is not standalone and requires a support library to be linked with it. This support library is itself covered by the above license.
5,775
0
Create_ds/flink-statefun/statefun-shaded/statefun-protobuf-shaded/src/main/java/org/apache/flink/statefun/sdk/shaded/com/google
Create_ds/flink-statefun/statefun-shaded/statefun-protobuf-shaded/src/main/java/org/apache/flink/statefun/sdk/shaded/com/google/protobuf/MoreByteStrings.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.shaded.com.google.protobuf; import java.nio.ByteBuffer; public class MoreByteStrings { public static ByteString wrap(byte[] bytes) { return ByteString.wrap(bytes); } public static ByteString wrap(byte[] bytes, int offset, int len) { return ByteString.wrap(bytes, offset, len); } public static ByteString wrap(ByteBuffer buffer) { return ByteString.wrap(buffer); } public static ByteString concat(ByteString first, ByteString second) { return first.concat(second); } }
5,776
0
Create_ds/flink-statefun/statefun-kinesis-io/src/test/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kinesis-io/src/test/java/org/apache/flink/statefun/sdk/kinesis/KinesisIngressBuilderTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kinesis; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import java.util.Collections; import org.apache.flink.statefun.sdk.io.IngressIdentifier; import org.apache.flink.statefun.sdk.kinesis.ingress.IngressRecord; import org.apache.flink.statefun.sdk.kinesis.ingress.KinesisIngressBuilder; import org.apache.flink.statefun.sdk.kinesis.ingress.KinesisIngressDeserializer; import org.apache.flink.statefun.sdk.kinesis.ingress.KinesisIngressSpec; import org.junit.Test; public class KinesisIngressBuilderTest { private static final IngressIdentifier<String> ID = new IngressIdentifier<>(String.class, "namespace", "name"); private static final String STREAM_NAME = "test-stream"; @Test public void exampleUsage() { final KinesisIngressSpec<String> kinesisIngressSpec = KinesisIngressBuilder.forIdentifier(ID) .withDeserializer(TestDeserializer.class) .withStream(STREAM_NAME) .build(); assertThat(kinesisIngressSpec.id(), is(ID)); assertThat(kinesisIngressSpec.streams(), is(Collections.singletonList(STREAM_NAME))); 
assertTrue(kinesisIngressSpec.awsRegion().get().isDefault()); assertTrue(kinesisIngressSpec.awsCredentials().get().isDefault()); assertThat(kinesisIngressSpec.deserializer(), instanceOf(TestDeserializer.class)); assertTrue(kinesisIngressSpec.startupPosition().isLatest()); assertTrue(kinesisIngressSpec.properties().isEmpty()); } private static final class TestDeserializer implements KinesisIngressDeserializer<String> { private static final long serialVersionUID = 1L; @Override public String deserialize(IngressRecord ingressRecord) { return null; } } }
5,777
0
Create_ds/flink-statefun/statefun-kinesis-io/src/test/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kinesis-io/src/test/java/org/apache/flink/statefun/sdk/kinesis/KinesisEgressBuilderTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kinesis; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import org.apache.flink.statefun.sdk.io.EgressIdentifier; import org.apache.flink.statefun.sdk.kinesis.egress.EgressRecord; import org.apache.flink.statefun.sdk.kinesis.egress.KinesisEgressBuilder; import org.apache.flink.statefun.sdk.kinesis.egress.KinesisEgressSerializer; import org.apache.flink.statefun.sdk.kinesis.egress.KinesisEgressSpec; import org.junit.Test; public class KinesisEgressBuilderTest { private static final EgressIdentifier<String> ID = new EgressIdentifier<>("namespace", "name", String.class); @Test public void exampleUsage() { final KinesisEgressSpec<String> kinesisEgressSpec = KinesisEgressBuilder.forIdentifier(ID).withSerializer(TestSerializer.class).build(); assertThat(kinesisEgressSpec.id(), is(ID)); assertTrue(kinesisEgressSpec.awsRegion().isDefault()); assertTrue(kinesisEgressSpec.awsCredentials().isDefault()); assertEquals(TestSerializer.class, kinesisEgressSpec.serializerClass()); 
assertTrue(kinesisEgressSpec.clientConfigurationProperties().isEmpty()); } private static final class TestSerializer implements KinesisEgressSerializer<String> { private static final long serialVersionUID = 1L; @Override public EgressRecord serialize(String value) { return null; } } }
5,778
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/KinesisIOTypes.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kinesis; import org.apache.flink.statefun.sdk.EgressType; import org.apache.flink.statefun.sdk.IngressType; public final class KinesisIOTypes { private KinesisIOTypes() {} public static final IngressType UNIVERSAL_INGRESS_TYPE = new IngressType("statefun.kinesis.io", "universal-ingress"); public static final EgressType UNIVERSAL_EGRESS_TYPE = new EgressType("statefun.kinesis.io", "universal-egress"); }
5,779
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/egress/EgressRecord.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.statefun.sdk.kinesis.egress;

import java.util.Objects;
import javax.annotation.Nullable;

/** A record to be written to AWS Kinesis. */
public final class EgressRecord {

  private final byte[] data;
  private final String stream;
  private final String partitionKey;

  // Optional: callers may omit the explicit hash key entirely.
  @Nullable private final String explicitHashKey;

  /** @return A builder for a {@link EgressRecord}. */
  public static Builder newBuilder() {
    return new Builder();
  }

  private EgressRecord(
      byte[] data, String stream, String partitionKey, @Nullable String explicitHashKey) {
    // data/stream/partitionKey are mandatory; only the hash key may be null.
    this.data = Objects.requireNonNull(data, "data bytes");
    this.stream = Objects.requireNonNull(stream, "target stream");
    this.partitionKey = Objects.requireNonNull(partitionKey, "partition key");
    this.explicitHashKey = explicitHashKey;
  }

  /** @return data bytes to write */
  // NOTE(review): returns the internal array without a defensive copy — callers
  // must not mutate it; confirm this is the intended contract.
  public byte[] getData() {
    return data;
  }

  /** @return target AWS Kinesis stream to write to. */
  public String getStream() {
    return stream;
  }

  /** @return partition key to use when writing the record to AWS Kinesis. */
  public String getPartitionKey() {
    return partitionKey;
  }

  /** @return explicit hash key to use when writing the record to AWS Kinesis, or {@code null}. */
  @Nullable
  public String getExplicitHashKey() {
    return explicitHashKey;
  }

  /** Builder for {@link EgressRecord}. */
  public static final class Builder {
    private byte[] payloadBytes;
    private String targetStream;
    private String recordPartitionKey;
    private String recordExplicitHashKey;

    private Builder() {}

    /** Sets the data bytes to write. */
    public Builder withData(byte[] data) {
      this.payloadBytes = data;
      return this;
    }

    /** Sets the target AWS Kinesis stream. */
    public Builder withStream(String stream) {
      this.targetStream = stream;
      return this;
    }

    /** Sets the partition key. */
    public Builder withPartitionKey(String partitionKey) {
      this.recordPartitionKey = partitionKey;
      return this;
    }

    /** Sets the optional explicit hash key. */
    public Builder withExplicitHashKey(String explicitHashKey) {
      this.recordExplicitHashKey = explicitHashKey;
      return this;
    }

    /** @return the assembled record; mandatory fields are null-checked by the constructor. */
    public EgressRecord build() {
      return new EgressRecord(payloadBytes, targetStream, recordPartitionKey, recordExplicitHashKey);
    }
  }
}
5,780
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/egress/KinesisEgressBuilder.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kinesis.egress; import java.util.Objects; import java.util.Properties; import org.apache.flink.statefun.sdk.io.EgressIdentifier; import org.apache.flink.statefun.sdk.io.EgressSpec; import org.apache.flink.statefun.sdk.kinesis.auth.AwsCredentials; import org.apache.flink.statefun.sdk.kinesis.auth.AwsRegion; /** * A builder for creating an {@link EgressSpec} for writing data to AWS Kinesis. * * @param <T> The type written to AWS Kinesis. */ public final class KinesisEgressBuilder<T> { private final EgressIdentifier<T> id; private Class<? extends KinesisEgressSerializer<T>> serializerClass; private int maxOutstandingRecords = 1000; private AwsRegion awsRegion = AwsRegion.fromDefaultProviderChain(); private AwsCredentials awsCredentials = AwsCredentials.fromDefaultProviderChain(); private final Properties properties = new Properties(); private KinesisEgressBuilder(EgressIdentifier<T> id) { this.id = Objects.requireNonNull(id); } /** * @param id A unique egress identifier. * @param <T> The type consumed from Kinesis. * @return A new {@link KinesisEgressBuilder}. 
*/ public static <T> KinesisEgressBuilder<T> forIdentifier(EgressIdentifier<T> id) { return new KinesisEgressBuilder<>(id); } /** * @param serializerClass The serializer used to convert from Java objects to Kinesis's byte * messages. */ public KinesisEgressBuilder<T> withSerializer( Class<? extends KinesisEgressSerializer<T>> serializerClass) { this.serializerClass = Objects.requireNonNull(serializerClass); return this; } /** * The AWS region to connect to. By default, AWS's default provider chain is consulted. * * @param awsRegion The AWS region to connect to. * @see <a * href="https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-region-selection.html#automatically-determine-the-aws-region-from-the-environment">Automatically * Determine the AWS Region from the Environment</a>. * @see AwsRegion */ public KinesisEgressBuilder<T> withAwsRegion(AwsRegion awsRegion) { this.awsRegion = Objects.requireNonNull(awsRegion); return this; } /** * The AWS region to connect to, specified by the AWS region's unique id. By default, AWS's * default provider chain is consulted. * * @param regionName The unique id of the AWS region to connect to. */ public KinesisEgressBuilder<T> withAwsRegion(String regionName) { this.awsRegion = AwsRegion.ofId(regionName); return this; } /** * The AWS credentials to use. By default, AWS's default provider chain is consulted. * * @param awsCredentials The AWS credentials to use. * @see <a * href="https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/credentials.html#credentials-default">Using * the Default Credential Provider Chain</a>. * @see AwsCredentials */ public KinesisEgressBuilder<T> withAwsCredentials(AwsCredentials awsCredentials) { this.awsCredentials = Objects.requireNonNull(awsCredentials); return this; } /** * The maximum number of buffered outstanding records, before backpressure is applied by the * egress. 
* * @param maxOutstandingRecords the maximum number of buffered outstanding records */ public KinesisEgressBuilder<T> withMaxOutstandingRecords(int maxOutstandingRecords) { if (maxOutstandingRecords <= 0) { throw new IllegalArgumentException("Max outstanding records must be larger than 0."); } this.maxOutstandingRecords = maxOutstandingRecords; return this; } /** * Sets a AWS client configuration to be used by the egress. * * <p>Supported values are properties of AWS's <a * href="https://javadoc.io/static/com.amazonaws/amazon-kinesis-producer/latest/com/amazonaws/services/kinesis/producer/KinesisProducerConfiguration.html">ccom.amazonaws.services.kinesis.producer.KinesisProducerConfiguration</a>. * Please see <a * href="https://github.com/awslabs/amazon-kinesis-producer/blob/master/java/amazon-kinesis-producer-sample/default_config.properties">Default * Configuration Properties</a> for a full list of the keys. * * @param key the property to set. * @param value the value for the property. * @see <a * href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/ClientConfiguration.html">com.aws.ClientConfiguration</a>. * @deprecated Please use {@link #withProperty(String, String)} instead. */ public KinesisEgressBuilder<T> withClientConfigurationProperty(String key, String value) { Objects.requireNonNull(key); Objects.requireNonNull(value); this.properties.setProperty(key, value); return this; } public KinesisEgressBuilder<T> withProperty(String key, String value) { Objects.requireNonNull(key); Objects.requireNonNull(value); this.properties.setProperty(key, value); return this; } public KinesisEgressBuilder<T> withProperties(Properties properties) { Objects.requireNonNull(properties); this.properties.putAll(properties); return this; } /** @return A new {@link KinesisEgressSpec}. */ public KinesisEgressSpec<T> build() { return new KinesisEgressSpec<>( id, serializerClass, maxOutstandingRecords, awsRegion, awsCredentials, properties); } }
5,781
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/egress/KinesisEgressSerializer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kinesis.egress; import java.io.Serializable; /** * Defines how to serialize values of type {@code T} into {@link EgressRecord}s to be written to AWS * Kinesis. * * @param <T> the type of values being written. */ public interface KinesisEgressSerializer<T> extends Serializable { /** * Serialize an output value into a {@link EgressRecord} to be written to AWS Kinesis. * * @param value the output value to write. * @return a {@link EgressRecord} to be written to AWS Kinesis. */ EgressRecord serialize(T value); }
5,782
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/egress/KinesisEgressSpec.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.statefun.sdk.kinesis.egress;

import java.util.Objects;
import java.util.Properties;
import org.apache.flink.statefun.sdk.EgressType;
import org.apache.flink.statefun.sdk.io.EgressIdentifier;
import org.apache.flink.statefun.sdk.io.EgressSpec;
import org.apache.flink.statefun.sdk.kinesis.KinesisIOTypes;
import org.apache.flink.statefun.sdk.kinesis.auth.AwsCredentials;
import org.apache.flink.statefun.sdk.kinesis.auth.AwsRegion;

/**
 * An immutable {@link EgressSpec} describing an AWS Kinesis egress: the target identifier, how
 * values are serialized, client backpressure limits, and AWS connection settings.
 *
 * <p>Instances are created via {@code KinesisEgressBuilder}; the constructor is intentionally
 * package-private.
 *
 * @param <T> the type of values written by the egress.
 */
public final class KinesisEgressSpec<T> implements EgressSpec<T> {
  private final EgressIdentifier<T> egressIdentifier;
  private final Class<? extends KinesisEgressSerializer<T>> serializerClass;
  private final int maxOutstandingRecords;
  private final AwsRegion awsRegion;
  private final AwsCredentials awsCredentials;
  private final Properties clientConfigurationProperties;

  KinesisEgressSpec(
      EgressIdentifier<T> egressIdentifier,
      Class<? extends KinesisEgressSerializer<T>> serializerClass,
      int maxOutstandingRecords,
      AwsRegion awsRegion,
      AwsCredentials awsCredentials,
      Properties clientConfigurationProperties) {
    this.egressIdentifier = Objects.requireNonNull(egressIdentifier);
    this.serializerClass = Objects.requireNonNull(serializerClass);
    // NOTE(review): not range-checked here; the builder rejects non-positive values — TODO confirm
    // all construction paths go through the builder.
    this.maxOutstandingRecords = maxOutstandingRecords;
    this.awsRegion = Objects.requireNonNull(awsRegion);
    this.awsCredentials = Objects.requireNonNull(awsCredentials);
    this.clientConfigurationProperties = Objects.requireNonNull(clientConfigurationProperties);
  }

  /** @return the unique identifier of this egress. */
  @Override
  public EgressIdentifier<T> id() {
    return egressIdentifier;
  }

  /** @return the egress type; always {@link KinesisIOTypes#UNIVERSAL_EGRESS_TYPE}. */
  @Override
  public EgressType type() {
    return KinesisIOTypes.UNIVERSAL_EGRESS_TYPE;
  }

  /** @return the {@link KinesisEgressSerializer} class used to serialize written values. */
  public Class<? extends KinesisEgressSerializer<T>> serializerClass() {
    return serializerClass;
  }

  /** @return the maximum number of buffered outstanding records before backpressure applies. */
  public int maxOutstandingRecords() {
    return maxOutstandingRecords;
  }

  /** @return the configured AWS region. */
  public AwsRegion awsRegion() {
    return awsRegion;
  }

  /** @return the configured AWS credentials. */
  public AwsCredentials awsCredentials() {
    return awsCredentials;
  }

  /** @return the AWS client configuration properties (shared, not defensively copied). */
  public Properties clientConfigurationProperties() {
    return clientConfigurationProperties;
  }
}
5,783
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/ingress/KinesisIngressSpec.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.statefun.sdk.kinesis.ingress;

import java.util.List;
import java.util.Objects;
import java.util.Properties;
import org.apache.flink.statefun.sdk.IngressType;
import org.apache.flink.statefun.sdk.core.OptionalProperty;
import org.apache.flink.statefun.sdk.io.IngressIdentifier;
import org.apache.flink.statefun.sdk.io.IngressSpec;
import org.apache.flink.statefun.sdk.kinesis.KinesisIOTypes;
import org.apache.flink.statefun.sdk.kinesis.auth.AwsCredentials;
import org.apache.flink.statefun.sdk.kinesis.auth.AwsRegion;

/**
 * An immutable {@link IngressSpec} describing an AWS Kinesis ingress: the source streams, how
 * records are deserialized, the startup position, and AWS connection settings.
 *
 * <p>Instances are created via {@code KinesisIngressBuilder}; the constructor is intentionally
 * package-private.
 *
 * @param <T> the type of values produced by the ingress.
 */
public final class KinesisIngressSpec<T> implements IngressSpec<T> {
  private final IngressIdentifier<T> ingressIdentifier;
  private final List<String> streams;
  private final KinesisIngressDeserializer<T> deserializer;
  private final KinesisIngressStartupPosition startupPosition;
  private final OptionalProperty<AwsRegion> awsRegion;
  private final OptionalProperty<AwsCredentials> awsCredentials;
  private final Properties properties;

  /**
   * @throws NullPointerException if any argument is {@code null}.
   * @throws IllegalArgumentException if {@code streams} is empty; at least one stream is required.
   */
  KinesisIngressSpec(
      IngressIdentifier<T> ingressIdentifier,
      List<String> streams,
      KinesisIngressDeserializer<T> deserializer,
      KinesisIngressStartupPosition startupPosition,
      OptionalProperty<AwsRegion> awsRegion,
      OptionalProperty<AwsCredentials> awsCredentials,
      Properties properties) {
    this.ingressIdentifier = Objects.requireNonNull(ingressIdentifier, "ingress identifier");
    this.deserializer = Objects.requireNonNull(deserializer, "deserializer");
    this.startupPosition = Objects.requireNonNull(startupPosition, "startup position");
    this.awsRegion = Objects.requireNonNull(awsRegion, "AWS region configuration");
    this.awsCredentials = Objects.requireNonNull(awsCredentials, "AWS credentials configuration");
    this.properties = Objects.requireNonNull(properties);

    this.streams = Objects.requireNonNull(streams, "AWS Kinesis stream names");
    if (streams.isEmpty()) {
      throw new IllegalArgumentException(
          "Must have at least one stream to consume from specified.");
    }
  }

  /** @return the unique identifier of this ingress. */
  @Override
  public IngressIdentifier<T> id() {
    return ingressIdentifier;
  }

  /** @return the ingress type; always {@link KinesisIOTypes#UNIVERSAL_INGRESS_TYPE}. */
  @Override
  public IngressType type() {
    return KinesisIOTypes.UNIVERSAL_INGRESS_TYPE;
  }

  /** @return the names of the Kinesis streams to consume; never empty. */
  public List<String> streams() {
    return streams;
  }

  /** @return the {@link KinesisIngressDeserializer} used to decode consumed records. */
  public KinesisIngressDeserializer<T> deserializer() {
    return deserializer;
  }

  /** @return the position the ingress starts consuming from on a fresh start. */
  public KinesisIngressStartupPosition startupPosition() {
    return startupPosition;
  }

  /** @return the AWS region configuration (may carry only a default value). */
  public OptionalProperty<AwsRegion> awsRegion() {
    return awsRegion;
  }

  /** @return the AWS credentials configuration (may carry only a default value). */
  public OptionalProperty<AwsCredentials> awsCredentials() {
    return awsCredentials;
  }

  /** @return client and connector properties (shared, not defensively copied). */
  public Properties properties() {
    return properties;
  }
}
5,784
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/ingress/KinesisIngressStartupPosition.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kinesis.ingress; import java.time.ZonedDateTime; /** Position for the ingress to start consuming AWS Kinesis shards. */ public abstract class KinesisIngressStartupPosition { private KinesisIngressStartupPosition() {} /** Start consuming from the earliest position possible. */ public static KinesisIngressStartupPosition fromEarliest() { return EarliestPosition.INSTANCE; } /** Start consuming from the latest position, i.e. head of the stream shards. */ public static KinesisIngressStartupPosition fromLatest() { return LatestPosition.INSTANCE; } /** * Start consuming from position with ingestion timestamps after or equal to a specified {@link * ZonedDateTime}. */ public static KinesisIngressStartupPosition fromDate(ZonedDateTime date) { return new DatePosition(date); } /** Checks whether this position is configured using the earliest position. */ public final boolean isEarliest() { return getClass() == EarliestPosition.class; } /** Checks whether this position is configured using the latest position. */ public final boolean isLatest() { return getClass() == LatestPosition.class; } /** Checks whether this position is configured using a date. 
*/ public final boolean isDate() { return getClass() == DatePosition.class; } /** Returns this position as a {@link DatePosition}. */ public final DatePosition asDate() { if (!isDate()) { throw new IllegalStateException("This is not a startup position configured using a date."); } return (DatePosition) this; } @SuppressWarnings("WeakerAccess") public static final class EarliestPosition extends KinesisIngressStartupPosition { private static final EarliestPosition INSTANCE = new EarliestPosition(); } @SuppressWarnings("WeakerAccess") public static final class LatestPosition extends KinesisIngressStartupPosition { private static final LatestPosition INSTANCE = new LatestPosition(); } public static final class DatePosition extends KinesisIngressStartupPosition { private final ZonedDateTime date; private DatePosition(ZonedDateTime date) { this.date = date; } public ZonedDateTime date() { return date; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (obj == this) { return true; } if (!(obj instanceof DatePosition)) { return false; } DatePosition that = (DatePosition) obj; return that.date.equals(date); } @Override public int hashCode() { return date.hashCode(); } } }
5,785
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/ingress/KinesisIngressDeserializer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kinesis.ingress; import java.io.Serializable; /** * Describes how to deserialize {@link IngressRecord}s consumed from AWS Kinesis into data types * that are processed by the system. * * @param <T> The type created by the ingress deserializer. */ public interface KinesisIngressDeserializer<T> extends Serializable { /** * Deserialize an input value from a {@link IngressRecord} consumed from AWS Kinesis. * * @param ingressRecord the {@link IngressRecord} consumed from AWS Kinesis. * @return the deserialized data object. */ T deserialize(IngressRecord ingressRecord); }
5,786
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/ingress/KinesisIngressBuilder.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kinesis.ingress; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Properties; import org.apache.flink.statefun.sdk.annotations.ForRuntime; import org.apache.flink.statefun.sdk.core.OptionalProperty; import org.apache.flink.statefun.sdk.io.IngressIdentifier; import org.apache.flink.statefun.sdk.io.IngressSpec; import org.apache.flink.statefun.sdk.kinesis.auth.AwsCredentials; import org.apache.flink.statefun.sdk.kinesis.auth.AwsRegion; /** * A builder for creating an {@link IngressSpec} for consuming data from AWS Kinesis. * * @param <T> The type consumed from AWS Kinesis. 
*/ public final class KinesisIngressBuilder<T> { private final IngressIdentifier<T> id; private final List<String> streams = new ArrayList<>(); private KinesisIngressDeserializer<T> deserializer; private KinesisIngressStartupPosition startupPosition = KinesisIngressStartupPosition.fromLatest(); private OptionalProperty<AwsRegion> awsRegion = OptionalProperty.withDefault(AwsRegion.fromDefaultProviderChain()); private OptionalProperty<AwsCredentials> awsCredentials = OptionalProperty.withDefault(AwsCredentials.fromDefaultProviderChain()); /** * Contains properties for both the underlying AWS client, as well as Flink-connector specific * properties. */ private final Properties properties = new Properties(); private KinesisIngressBuilder(IngressIdentifier<T> id) { this.id = Objects.requireNonNull(id); } /** * @param id A unique ingress identifier. * @param <T> The type consumed from Kinesis. * @return A new {@link KinesisIngressBuilder}. */ public static <T> KinesisIngressBuilder<T> forIdentifier(IngressIdentifier<T> id) { return new KinesisIngressBuilder<>(id); } /** @param stream The name of a stream that should be consumed. */ public KinesisIngressBuilder<T> withStream(String stream) { this.streams.add(stream); return this; } /** @param streams A list of streams that should be consumed. */ public KinesisIngressBuilder<T> withStreams(List<String> streams) { this.streams.addAll(streams); return this; } /** * @param deserializerClass The deserializer used to convert between Kinesis's byte messages and * Java objects. */ public KinesisIngressBuilder<T> withDeserializer( Class<? extends KinesisIngressDeserializer<T>> deserializerClass) { Objects.requireNonNull(deserializerClass); this.deserializer = instantiateDeserializer(deserializerClass); return this; } /** * Configures the position that the ingress should start consuming from. By default, the startup * position is {@link KinesisIngressStartupPosition#fromLatest()}. 
* * <p>Note that this configuration only affects the position when starting the application from a * fresh start. When restoring the application from a savepoint, the ingress will always start * consuming from the position persisted in the savepoint. * * @param startupPosition the position that the Kafka ingress should start consuming from. * @see KinesisIngressStartupPosition */ public KinesisIngressBuilder<T> withStartupPosition( KinesisIngressStartupPosition startupPosition) { this.startupPosition = Objects.requireNonNull(startupPosition); return this; } /** * The AWS region to connect to. By default, AWS's default provider chain is consulted. * * @param awsRegion The AWS region to connect to. * @see <a * href="https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-region-selection.html#automatically-determine-the-aws-region-from-the-environment">Automatically * Determine the AWS Region from the Environment</a>. * @see AwsRegion */ public KinesisIngressBuilder<T> withAwsRegion(AwsRegion awsRegion) { this.awsRegion.set(Objects.requireNonNull(awsRegion)); return this; } /** * The AWS region to connect to, specified by the AWS region's unique id. By default, AWS's * default provider chain is consulted. * * @param regionName The unique id of the AWS region to connect to. */ public KinesisIngressBuilder<T> withAwsRegion(String regionName) { this.awsRegion.set(AwsRegion.ofId(regionName)); return this; } /** * The AWS credentials to use. By default, AWS's default provider chain is consulted. * * @param awsCredentials The AWS credentials to use. * @see <a * href="https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/credentials.html#credentials-default">Using * the Default Credential Provider Chain</a>. 
* @see AwsCredentials */ public KinesisIngressBuilder<T> withAwsCredentials(AwsCredentials awsCredentials) { this.awsCredentials.set(Objects.requireNonNull(awsCredentials)); return this; } /** * Sets a AWS client configuration to be used by the ingress. * * <p>Supported values are properties of AWS's <a * href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/ClientConfiguration.html">com.aws.ClientConfiguration</a>. * For example, to set a value for {@code SOCKET_TIMEOUT}, the property key would be {@code * SocketTimeout}. * * @param key the property to set. * @param value the value for the property. * @see <a * href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/ClientConfiguration.html">com.aws.ClientConfiguration</a>. * @deprecated Please use {@link #withProperty(String, String)} instead. */ @Deprecated public KinesisIngressBuilder<T> withClientConfigurationProperty(String key, String value) { Objects.requireNonNull(key); Objects.requireNonNull(value); this.properties.setProperty(key, value); return this; } public KinesisIngressBuilder<T> withProperty(String key, String value) { Objects.requireNonNull(key); Objects.requireNonNull(value); this.properties.setProperty(key, value); return this; } public KinesisIngressBuilder<T> withProperties(Properties properties) { Objects.requireNonNull(properties); this.properties.putAll(properties); return this; } /** @return A new {@link KinesisIngressSpec}. 
*/ public KinesisIngressSpec<T> build() { return new KinesisIngressSpec<>( id, streams, deserializer, startupPosition, awsRegion, awsCredentials, properties); } // ======================================================================================== // Methods for runtime usage // ======================================================================================== @ForRuntime KinesisIngressBuilder<T> withDeserializer(KinesisIngressDeserializer<T> deserializer) { this.deserializer = Objects.requireNonNull(deserializer); return this; } // ======================================================================================== // Utility methods // ======================================================================================== private static <T extends KinesisIngressDeserializer<?>> T instantiateDeserializer( Class<T> deserializerClass) { try { Constructor<T> defaultConstructor = deserializerClass.getDeclaredConstructor(); defaultConstructor.setAccessible(true); return defaultConstructor.newInstance(); } catch (NoSuchMethodException e) { throw new IllegalStateException( "Unable to create an instance of deserializer " + deserializerClass.getName() + "; has no default constructor", e); } catch (IllegalAccessException | InstantiationException | InvocationTargetException e) { throw new IllegalStateException( "Unable to create an instance of deserializer " + deserializerClass.getName(), e); } } }
5,787
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/ingress/IngressRecord.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kinesis.ingress; import java.util.Objects; /** A record consumed from AWS Kinesis. */ public final class IngressRecord { private final byte[] data; private final String stream; private final String shardId; private final String partitionKey; private final String sequenceNumber; private final long approximateArrivalTimestamp; /** @return A builder for a {@link IngressRecord}. 
*/ public static Builder newBuilder() { return new Builder(); } private IngressRecord( byte[] data, String stream, String shardId, String partitionKey, String sequenceNumber, long approximateArrivalTimestamp) { this.data = Objects.requireNonNull(data, "data bytes"); this.stream = Objects.requireNonNull(stream, "source stream"); this.shardId = Objects.requireNonNull(shardId, "source shard id"); this.partitionKey = Objects.requireNonNull(partitionKey, "partition key"); this.sequenceNumber = Objects.requireNonNull(sequenceNumber, "sequence number"); this.approximateArrivalTimestamp = approximateArrivalTimestamp; } /** @return consumed data bytes */ public byte[] getData() { return data; } /** @return source AWS Kinesis stream */ public String getStream() { return stream; } /** @return source AWS Kinesis stream shard */ public String getShardId() { return shardId; } /** @return attached partition key */ public String getPartitionKey() { return partitionKey; } /** @return sequence number of the consumed record */ public String getSequenceNumber() { return sequenceNumber; } /** * @return approximate arrival timestamp (ingestion time at AWS Kinesis) of the consumed record */ public long getApproximateArrivalTimestamp() { return approximateArrivalTimestamp; } /** Builder for {@link IngressRecord}. 
*/ public static final class Builder { private byte[] data; private String stream; private String shardId; private String partitionKey; private String sequenceNumber; long approximateArrivalTimestamp; private Builder() {} public Builder withData(byte[] data) { this.data = data; return this; } public Builder withStream(String stream) { this.stream = stream; return this; } public Builder withShardId(String shardId) { this.shardId = shardId; return this; } public Builder withPartitionKey(String partitionKey) { this.partitionKey = partitionKey; return this; } public Builder withSequenceNumber(String sequenceNumber) { this.sequenceNumber = sequenceNumber; return this; } public Builder withApproximateArrivalTimestamp(long approximateArrivalTimestamp) { this.approximateArrivalTimestamp = approximateArrivalTimestamp; return this; } public IngressRecord build() { return new IngressRecord( data, stream, shardId, partitionKey, sequenceNumber, approximateArrivalTimestamp); } } }
5,788
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/auth/AwsRegion.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kinesis.auth; import java.net.URI; import java.util.Objects; /** AWS region to use for connecting to AWS Kinesis. */ public abstract class AwsRegion { private AwsRegion() {} /** Consults AWS's default provider chain to determine the AWS region. */ public static AwsRegion fromDefaultProviderChain() { return DefaultAwsRegion.INSTANCE; } /** Specifies an AWS region using the region's unique id. */ public static AwsRegion ofId(String id) { return new SpecificIdAwsRegion(id); } /** * Connects to an AWS region through a non-standard AWS service endpoint. This is typically used * only for development and testing purposes. */ public static AwsRegion ofCustomEndpoint(String serviceEndpoint, String regionId) { return new CustomEndpointAwsRegion(serviceEndpoint, regionId); } /** Checks whether the region is configured to be obtained from AWS's default provider chain. */ public boolean isDefault() { return getClass() == DefaultAwsRegion.class; } /** Checks whether the region is specified with the region's unique id. 
*/ public boolean isId() { return getClass() == SpecificIdAwsRegion.class; } /** Checks whether the region is specified with a custom non-standard AWS service endpoint. */ public boolean isCustomEndpoint() { return getClass() == CustomEndpointAwsRegion.class; } /** Returns this region as a {@link SpecificIdAwsRegion}. */ public SpecificIdAwsRegion asId() { if (!isId()) { throw new IllegalStateException( "This is not an AWS region specified with using the region's unique id."); } return (SpecificIdAwsRegion) this; } /** Returns this region as a {@link CustomEndpointAwsRegion}. */ public CustomEndpointAwsRegion asCustomEndpoint() { if (!isCustomEndpoint()) { throw new IllegalStateException( "This is not an AWS region specified with a custom endpoint."); } return (CustomEndpointAwsRegion) this; } public static final class DefaultAwsRegion extends AwsRegion { private static final DefaultAwsRegion INSTANCE = new DefaultAwsRegion(); } public static final class SpecificIdAwsRegion extends AwsRegion { private final String regionId; SpecificIdAwsRegion(String regionId) { this.regionId = Objects.requireNonNull(regionId); } public String id() { return regionId; } } public static final class CustomEndpointAwsRegion extends AwsRegion { private final String serviceEndpoint; private final String regionId; CustomEndpointAwsRegion(String serviceEndpoint, String regionId) { this.serviceEndpoint = requireValidEndpoint(serviceEndpoint); this.regionId = Objects.requireNonNull(regionId); } public String serviceEndpoint() { return serviceEndpoint; } public String regionId() { return regionId; } private static String requireValidEndpoint(String serviceEndpoint) { Objects.requireNonNull(serviceEndpoint); final URI uri = URI.create(serviceEndpoint); if (!uri.getScheme().equalsIgnoreCase("https")) { throw new IllegalArgumentException( "Invalid service endpoint url: " + serviceEndpoint + "; Only custom service endpoints using HTTPS are supported"); } return serviceEndpoint; } } }
5,789
0
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis
Create_ds/flink-statefun/statefun-kinesis-io/src/main/java/org/apache/flink/statefun/sdk/kinesis/auth/AwsCredentials.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.statefun.sdk.kinesis.auth;

import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nullable;

/**
 * AWS credentials to use for connecting to AWS Kinesis.
 *
 * <p>A closed set of variants: default provider chain, basic key/secret, or a named configuration
 * profile. Use the {@code isXxx()} checks to discriminate and the {@code asXxx()} accessors to
 * downcast.
 */
public abstract class AwsCredentials {

  // Closed hierarchy: only the nested subclasses below may extend this class.
  private AwsCredentials() {}

  /** Consults AWS's default provider chain to determine the AWS credentials. */
  public static AwsCredentials fromDefaultProviderChain() {
    return DefaultAwsCredentials.INSTANCE;
  }

  /**
   * Specifies the AWS credentials directly with provided access key ID and secret access key
   * strings.
   */
  public static AwsCredentials basic(String accessKeyId, String secretAccessKey) {
    return new BasicAwsCredentials(accessKeyId, secretAccessKey);
  }

  /** Specifies the AWS credentials using an AWS configuration profile. */
  public static AwsCredentials profile(String profileName) {
    return new ProfileAwsCredentials(profileName, null);
  }

  /**
   * Specifies the AWS credentials using an AWS configuration profile, along with the profile's
   * configuration path.
   */
  public static AwsCredentials profile(String profileName, String profilePath) {
    return new ProfileAwsCredentials(profileName, profilePath);
  }

  /**
   * Checks whether the credentials is configured to be obtained from AWS's default provider chain.
   */
  public boolean isDefault() {
    return getClass() == DefaultAwsCredentials.class;
  }

  /**
   * Checks whether the credentials is specified using directly provided access key ID and secret
   * access key strings.
   */
  public boolean isBasic() {
    return getClass() == BasicAwsCredentials.class;
  }

  /** Checks whether the credentials is configured using AWS configuration profiles. */
  public boolean isProfile() {
    return getClass() == ProfileAwsCredentials.class;
  }

  /**
   * Returns this as a {@link BasicAwsCredentials}.
   *
   * @throws IllegalStateException if this is not a {@link BasicAwsCredentials}.
   */
  public BasicAwsCredentials asBasic() {
    if (!isBasic()) {
      throw new IllegalStateException(
          "This AWS credential is not defined with basic access key id and secret key.");
    }
    return (BasicAwsCredentials) this;
  }

  /**
   * Returns this as a {@link ProfileAwsCredentials}.
   *
   * @throws IllegalStateException if this is not a {@link ProfileAwsCredentials}.
   */
  public ProfileAwsCredentials asProfile() {
    if (!isProfile()) {
      throw new IllegalStateException(
          "This AWS credential is not defined with a AWS configuration profile");
    }
    return (ProfileAwsCredentials) this;
  }

  /** Singleton variant for {@link #fromDefaultProviderChain()}. */
  public static final class DefaultAwsCredentials extends AwsCredentials {
    private static final DefaultAwsCredentials INSTANCE = new DefaultAwsCredentials();
  }

  /** Variant carrying an explicit access key id and secret access key. */
  public static final class BasicAwsCredentials extends AwsCredentials {

    private final String accessKeyId;
    private final String secretAccessKey;

    BasicAwsCredentials(String accessKeyId, String secretAccessKey) {
      this.accessKeyId = Objects.requireNonNull(accessKeyId);
      this.secretAccessKey = Objects.requireNonNull(secretAccessKey);
    }

    /** @return the AWS access key id. */
    public String accessKeyId() {
      return accessKeyId;
    }

    /** @return the AWS secret access key. */
    public String secretAccessKey() {
      return secretAccessKey;
    }
  }

  /** Variant referencing a named AWS configuration profile, with an optional profile path. */
  public static final class ProfileAwsCredentials extends AwsCredentials {

    private final String profileName;
    @Nullable private final String profilePath;

    ProfileAwsCredentials(String profileName, @Nullable String profilePath) {
      this.profileName = Objects.requireNonNull(profileName);
      this.profilePath = profilePath;
    }

    /** @return the name of the AWS configuration profile. */
    public String name() {
      return profileName;
    }

    /** @return the profile's configuration path, if one was provided. */
    public Optional<String> path() {
      return Optional.ofNullable(profilePath);
    }
  }
}
5,790
0
Create_ds/flink-statefun/statefun-kafka-io/src/test/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kafka-io/src/test/java/org/apache/flink/statefun/sdk/kafka/KafkaIngressBuilderTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kafka; import static org.apache.flink.statefun.sdk.kafka.testutils.Matchers.hasProperty; import static org.apache.flink.statefun.sdk.kafka.testutils.Matchers.isMapOfSize; import static org.hamcrest.CoreMatchers.allOf; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.contains; import static org.junit.Assert.assertThat; import java.util.Properties; import org.apache.flink.statefun.sdk.io.IngressIdentifier; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.junit.Test; public class KafkaIngressBuilderTest { private static final IngressIdentifier<String> DUMMY_ID = new IngressIdentifier<>(String.class, "ns", "name"); @Test public void idIsCorrect() { KafkaIngressBuilder<String> builder = KafkaIngressBuilder.forIdentifier(DUMMY_ID) .withKafkaAddress("localhost:8082") .withTopic("topic") .withConsumerGroupId("test-group") .withDeserializer(NoOpDeserializer.class); KafkaIngressSpec<String> spec = builder.build(); assertThat(spec.id(), is(DUMMY_ID)); } @Test public void ingressTypeIsCorrect() { 
KafkaIngressBuilder<String> builder = KafkaIngressBuilder.forIdentifier(DUMMY_ID) .withKafkaAddress("localhost:8082") .withTopic("topic") .withConsumerGroupId("test-group") .withDeserializer(NoOpDeserializer.class); KafkaIngressSpec<String> spec = builder.build(); assertThat(spec.type(), is(Constants.KAFKA_INGRESS_TYPE)); } @Test public void topicsIsCorrect() { KafkaIngressBuilder<String> builder = KafkaIngressBuilder.forIdentifier(DUMMY_ID) .withKafkaAddress("localhost:8082") .withTopic("topic") .withConsumerGroupId("test-group") .withDeserializer(NoOpDeserializer.class); KafkaIngressSpec<String> spec = builder.build(); assertThat(spec.topics(), contains("topic")); } @Test public void deserializerIsCorrect() { KafkaIngressBuilder<String> builder = KafkaIngressBuilder.forIdentifier(DUMMY_ID) .withKafkaAddress("localhost:8082") .withTopic("topic") .withConsumerGroupId("test-group") .withDeserializer(NoOpDeserializer.class); KafkaIngressSpec<String> spec = builder.build(); assertThat(spec.deserializer(), instanceOf(NoOpDeserializer.class)); } @Test public void startupPositionIsCorrect() { KafkaIngressBuilder<String> builder = KafkaIngressBuilder.forIdentifier(DUMMY_ID) .withKafkaAddress("localhost:8082") .withTopic("topic") .withConsumerGroupId("test-group") .withDeserializer(NoOpDeserializer.class); KafkaIngressSpec<String> spec = builder.build(); assertThat(spec.startupPosition(), is(KafkaIngressStartupPosition.fromLatest())); } @Test public void propertiesIsCorrect() { KafkaIngressBuilder<String> builder = KafkaIngressBuilder.forIdentifier(DUMMY_ID) .withKafkaAddress("localhost:8082") .withTopic("topic") .withConsumerGroupId("test-group") .withDeserializer(NoOpDeserializer.class); KafkaIngressSpec<String> spec = builder.build(); assertThat( spec.properties(), allOf( isMapOfSize(3), hasProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8082"), hasProperty(ConsumerConfig.GROUP_ID_CONFIG, "test-group"), hasProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, 
"latest"))); } @Test public void namedMethodConfigValuesOverwriteProperties() { Properties properties = new Properties(); properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "should-be-overwritten"); KafkaIngressBuilder<String> builder = KafkaIngressBuilder.forIdentifier(DUMMY_ID) .withKafkaAddress("localhost:8082") .withTopic("topic") .withConsumerGroupId("test-group") .withDeserializer(NoOpDeserializer.class) .withProperties(properties); KafkaIngressSpec<String> spec = builder.build(); assertThat( spec.properties(), hasProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:8082")); } @Test public void defaultNamedMethodConfigValuesShouldNotOverwriteProperties() { Properties properties = new Properties(); properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); KafkaIngressBuilder<String> builder = KafkaIngressBuilder.forIdentifier(DUMMY_ID) .withKafkaAddress("localhost:8082") .withTopic("topic") .withConsumerGroupId("test-group") .withDeserializer(NoOpDeserializer.class) .withProperties(properties); KafkaIngressSpec<String> spec = builder.build(); assertThat(spec.properties(), hasProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")); } private static class NoOpDeserializer implements KafkaIngressDeserializer<String> { @Override public String deserialize(ConsumerRecord<byte[], byte[]> input) { return null; } } }
5,791
0
Create_ds/flink-statefun/statefun-kafka-io/src/test/java/org/apache/flink/statefun/sdk/kafka
Create_ds/flink-statefun/statefun-kafka-io/src/test/java/org/apache/flink/statefun/sdk/kafka/testutils/Matchers.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kafka.testutils; import java.util.Map; import java.util.Objects; import java.util.Properties; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; public final class Matchers { private Matchers() {} public static <K, V> Matcher<Map<K, V>> isMapOfSize(int size) { return new TypeSafeMatcher<Map<K, V>>() { @Override protected boolean matchesSafely(Map<K, V> map) { return map.size() == size; } @Override public void describeTo(Description description) {} }; } public static Matcher<Properties> hasProperty(String key, String value) { return new TypeSafeMatcher<Properties>() { @Override protected boolean matchesSafely(Properties properties) { return Objects.equals(properties.getProperty(key), value); } @Override public void describeTo(Description description) { description.appendText("<" + key + "=" + value + ">"); } }; } }
5,792
0
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk/kafka/KafkaTopicPartition.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kafka; import java.util.Objects; /** Representation of a Kafka partition. */ public final class KafkaTopicPartition { private final String topic; private final int partition; public static KafkaTopicPartition fromString(String topicAndPartition) { Objects.requireNonNull(topicAndPartition); final int pos = topicAndPartition.lastIndexOf("/"); if (pos <= 0 || pos == topicAndPartition.length() - 1) { throw new IllegalArgumentException( topicAndPartition + " does not conform to the <topic>/<partition_id> format"); } String topic = topicAndPartition.substring(0, pos); Integer partitionId; try { partitionId = Integer.valueOf(topicAndPartition.substring(pos + 1)); } catch (NumberFormatException e) { throw new IllegalArgumentException( "Invalid topic partition definition: " + topicAndPartition + "; partition id is expected to be an integer with value between 0 and " + Integer.MAX_VALUE, e); } if (partitionId < 0) { throw new IllegalArgumentException( "Invalid topic partition definition: " + topicAndPartition + "; partition id is expected to be an integer with value between 0 and " + Integer.MAX_VALUE); } return new KafkaTopicPartition(topic, partitionId); } public 
KafkaTopicPartition(String topic, int partition) { this.topic = Objects.requireNonNull(topic); if (partition < 0) { throw new IllegalArgumentException( "Invalid partition id: " + partition + "; value must be larger or equal to 0."); } this.partition = partition; } public String topic() { return topic; } public int partition() { return partition; } @Override public String toString() { return "KafkaTopicPartition{" + "topic='" + topic + '\'' + ", partition=" + partition + '}'; } @Override public int hashCode() { return 31 * topic.hashCode() + partition; } @Override public boolean equals(Object o) { if (o == null) { return false; } if (o == this) { return true; } if (!(o instanceof KafkaTopicPartition)) { return false; } KafkaTopicPartition that = (KafkaTopicPartition) o; return this.partition == that.partition && this.topic.equals(that.topic); } }
5,793
0
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk/kafka/KafkaIngressBuilder.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kafka; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.Properties; import org.apache.flink.statefun.sdk.annotations.ForRuntime; import org.apache.flink.statefun.sdk.core.OptionalProperty; import org.apache.flink.statefun.sdk.io.IngressIdentifier; import org.apache.flink.statefun.sdk.io.IngressSpec; import org.apache.kafka.clients.consumer.ConsumerConfig; /** * A builder for creating an {@link IngressSpec} for consuming data from Apache Kafka. * * @param <T> The type consumed from Kafka. 
*/ public final class KafkaIngressBuilder<T> { private final IngressIdentifier<T> id; private final List<String> topics = new ArrayList<>(); private final Properties properties = new Properties(); private OptionalProperty<String> consumerGroupId = OptionalProperty.withoutDefault(); private OptionalProperty<KafkaIngressDeserializer<T>> deserializer = OptionalProperty.withoutDefault(); private OptionalProperty<String> kafkaAddress = OptionalProperty.withoutDefault(); private OptionalProperty<KafkaIngressAutoResetPosition> autoResetPosition = OptionalProperty.withDefault(KafkaIngressAutoResetPosition.LATEST); private OptionalProperty<KafkaIngressStartupPosition> startupPosition = OptionalProperty.withDefault(KafkaIngressStartupPosition.fromLatest()); private KafkaIngressBuilder(IngressIdentifier<T> id) { this.id = Objects.requireNonNull(id); } /** * @param id A unique ingress identifier. * @param <T> The type consumed from Kafka. * @return A new {@link KafkaIngressBuilder}. */ public static <T> KafkaIngressBuilder<T> forIdentifier(IngressIdentifier<T> id) { return new KafkaIngressBuilder<>(id); } /** @param consumerGroupId the consumer group id to use. */ public KafkaIngressBuilder<T> withConsumerGroupId(String consumerGroupId) { this.consumerGroupId.set(consumerGroupId); return this; } /** @param kafkaAddress Comma separated addresses of the brokers. */ public KafkaIngressBuilder<T> withKafkaAddress(String kafkaAddress) { this.kafkaAddress.set(kafkaAddress); return this; } /** @param topic The name of the topic that should be consumed. */ public KafkaIngressBuilder<T> withTopic(String topic) { topics.add(topic); return this; } /** @param topics A list of topics that should be consumed. */ public KafkaIngressBuilder<T> addTopics(List<String> topics) { this.topics.addAll(topics); return this; } /** A configuration property for the KafkaConsumer. 
*/ public KafkaIngressBuilder<T> withProperties(Properties properties) { this.properties.putAll(properties); return this; } /** A configuration property for the KafkaProducer. */ public KafkaIngressBuilder<T> withProperty(String name, String value) { Objects.requireNonNull(name); Objects.requireNonNull(value); this.properties.setProperty(name, value); return this; } /** * @param deserializerClass The deserializer used to convert between Kafka's byte messages and * java objects. */ public KafkaIngressBuilder<T> withDeserializer( Class<? extends KafkaIngressDeserializer<T>> deserializerClass) { Objects.requireNonNull(deserializerClass); this.deserializer.set(instantiateDeserializer(deserializerClass)); return this; } /** * @param autoResetPosition the auto offset reset position to use, in case consumed offsets are * invalid. */ public KafkaIngressBuilder<T> withAutoResetPosition( KafkaIngressAutoResetPosition autoResetPosition) { this.autoResetPosition.set(autoResetPosition); return this; } /** * Configures the position that the ingress should start consuming from. By default, the startup * position is {@link KafkaIngressStartupPosition#fromLatest()}. * * <p>Note that this configuration only affects the position when starting the application from a * fresh start. When restoring the application from a savepoint, the ingress will always start * consuming from the offsets persisted in the savepoint. * * @param startupPosition the position that the Kafka ingress should start consuming from. * @see KafkaIngressStartupPosition */ public KafkaIngressBuilder<T> withStartupPosition(KafkaIngressStartupPosition startupPosition) { this.startupPosition.set(startupPosition); return this; } /** @return A new {@link KafkaIngressSpec}. 
*/ public KafkaIngressSpec<T> build() { Properties properties = resolveKafkaProperties(); return new KafkaIngressSpec<>( id, properties, topics, deserializer.get(), startupPosition.get()); } private Properties resolveKafkaProperties() { Properties resultProps = new Properties(); resultProps.putAll(properties); // for all configuration passed using named methods, overwrite corresponding properties kafkaAddress.overwritePropertiesIfPresent(resultProps, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG); autoResetPosition.overwritePropertiesIfPresent( resultProps, ConsumerConfig.AUTO_OFFSET_RESET_CONFIG); consumerGroupId.overwritePropertiesIfPresent(resultProps, ConsumerConfig.GROUP_ID_CONFIG); return resultProps; } private static <T extends KafkaIngressDeserializer<?>> T instantiateDeserializer( Class<T> deserializerClass) { try { Constructor<T> defaultConstructor = deserializerClass.getDeclaredConstructor(); defaultConstructor.setAccessible(true); return defaultConstructor.newInstance(); } catch (NoSuchMethodException e) { throw new IllegalStateException( "Unable to create an instance of deserializer " + deserializerClass.getName() + "; has no default constructor", e); } catch (IllegalAccessException | InstantiationException | InvocationTargetException e) { throw new IllegalStateException( "Unable to create an instance of deserializer " + deserializerClass.getName(), e); } } // ======================================================================================== // Methods for runtime usage // ======================================================================================== @ForRuntime KafkaIngressBuilder<T> withDeserializer(KafkaIngressDeserializer<T> deserializer) { this.deserializer.set(deserializer); return this; } }
5,794
0
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk/kafka/KafkaIngressDeserializer.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.statefun.sdk.kafka;

import java.io.Serializable;
import org.apache.kafka.clients.consumer.ConsumerRecord;

/**
 * The deserialization schema describes how to turn the Kafka ConsumerRecords into data types that
 * are processed by the system.
 *
 * <p>This is a functional interface; implementations must also be {@link Serializable} since they
 * are shipped to the runtime.
 *
 * @param <T> The type created by the keyed deserialization schema.
 */
@FunctionalInterface
public interface KafkaIngressDeserializer<T> extends Serializable {

  /**
   * Deserializes the Kafka record.
   *
   * @param input Kafka record to be deserialized.
   * @return The deserialized message as an object (null if the message cannot be deserialized).
   */
  T deserialize(ConsumerRecord<byte[], byte[]> input);
}
5,795
0
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk/kafka/KafkaEgressSerializer.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.statefun.sdk.kafka;

import java.io.Serializable;
import org.apache.kafka.clients.producer.ProducerRecord;

/**
 * A {@link KafkaEgressSerializer} defines how to serialize values of type {@code T} into {@link
 * ProducerRecord ProducerRecords}.
 *
 * <p>This is a functional interface; implementations must also be {@link Serializable} since they
 * are shipped to the runtime.
 *
 * @param <OutT> the type of values being serialized
 */
@FunctionalInterface
public interface KafkaEgressSerializer<OutT> extends Serializable {

  /**
   * Serializes given element and returns it as a {@link ProducerRecord}.
   *
   * @param t element to be serialized
   * @return Kafka {@link ProducerRecord}
   */
  ProducerRecord<byte[], byte[]> serialize(OutT t);
}
5,796
0
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk/kafka/KafkaEgressBuilder.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.statefun.sdk.kafka;

import java.time.Duration;
import java.util.Objects;
import java.util.Properties;
import org.apache.flink.statefun.sdk.io.EgressIdentifier;
import org.apache.flink.statefun.sdk.io.EgressSpec;

/**
 * A builder class for creating an {@link EgressSpec} that writes data out to a Kafka cluster. By
 * default the egress will use {@link #withAtLeastOnceProducerSemantics()}.
 *
 * @param <OutT> The type written out to the cluster by the Egress.
 */
public final class KafkaEgressBuilder<OutT> {

  private final EgressIdentifier<OutT> id;
  // Mutable collector for free-form producer configuration; final since it is never reassigned.
  private final Properties properties = new Properties();

  private Class<? extends KafkaEgressSerializer<OutT>> serializer;
  private String kafkaAddress;
  private int kafkaProducerPoolSize = 5;
  private KafkaProducerSemantic semantic = KafkaProducerSemantic.atLeastOnce();

  private KafkaEgressBuilder(EgressIdentifier<OutT> id) {
    this.id = Objects.requireNonNull(id);
  }

  /**
   * @param egressIdentifier A unique egress identifier.
   * @param <OutT> The type the egress will output.
   * @return A {@link KafkaEgressBuilder}.
   */
  public static <OutT> KafkaEgressBuilder<OutT> forIdentifier(
      EgressIdentifier<OutT> egressIdentifier) {
    return new KafkaEgressBuilder<>(egressIdentifier);
  }

  /** @param kafkaAddress Comma separated addresses of the brokers. */
  public KafkaEgressBuilder<OutT> withKafkaAddress(String kafkaAddress) {
    this.kafkaAddress = Objects.requireNonNull(kafkaAddress);
    return this;
  }

  /** A configuration property for the KafkaProducer. */
  public KafkaEgressBuilder<OutT> withProperty(String key, String value) {
    Objects.requireNonNull(key);
    Objects.requireNonNull(value);
    properties.setProperty(key, value);
    return this;
  }

  /** Configuration properties for the KafkaProducer. */
  public KafkaEgressBuilder<OutT> withProperties(Properties properties) {
    Objects.requireNonNull(properties);
    this.properties.putAll(properties);
    return this;
  }

  /**
   * @param serializer A serializer schema for turning user objects into a kafka-consumable byte[]
   *     supporting key/value messages.
   */
  public KafkaEgressBuilder<OutT> withSerializer(
      Class<? extends KafkaEgressSerializer<OutT>> serializer) {
    this.serializer = Objects.requireNonNull(serializer);
    return this;
  }

  /**
   * @param poolSize Overwrite default KafkaProducers pool size. The default is 5.
   * @throws IllegalArgumentException if {@code poolSize} is not positive.
   */
  public KafkaEgressBuilder<OutT> withKafkaProducerPoolSize(int poolSize) {
    if (poolSize <= 0) {
      throw new IllegalArgumentException(
          "Invalid producer pool size: " + poolSize + "; value must be larger than 0.");
    }
    this.kafkaProducerPoolSize = poolSize;
    return this;
  }

  /**
   * KafkaProducerSemantic.EXACTLY_ONCE the egress will write all messages in a Kafka transaction
   * that will be committed to Kafka on a checkpoint.
   *
   * <p>With exactly-once producer semantics, users must also specify the transaction timeout. Note
   * that this value must not be larger than the {@code transaction.max.timeout.ms} value configured
   * on Kafka brokers (by default, this is 15 minutes).
   *
   * @param transactionTimeoutDuration the transaction timeout.
   */
  public KafkaEgressBuilder<OutT> withExactlyOnceProducerSemantics(
      Duration transactionTimeoutDuration) {
    this.semantic = KafkaProducerSemantic.exactlyOnce(transactionTimeoutDuration);
    return this;
  }

  /**
   * KafkaProducerSemantic.AT_LEAST_ONCE the egress will wait for all outstanding messages in the
   * Kafka buffers to be acknowledged by the Kafka producer on a checkpoint.
   */
  public KafkaEgressBuilder<OutT> withAtLeastOnceProducerSemantics() {
    this.semantic = KafkaProducerSemantic.atLeastOnce();
    return this;
  }

  /**
   * KafkaProducerSemantic.NONE means that nothing will be guaranteed. Messages can be lost and/or
   * duplicated in case of failure.
   */
  public KafkaEgressBuilder<OutT> withNoProducerSemantics() {
    this.semantic = KafkaProducerSemantic.none();
    return this;
  }

  /** @param producerSemantic the producer semantic to use, set directly. */
  public KafkaEgressBuilder<OutT> withProducerSemantic(KafkaProducerSemantic producerSemantic) {
    this.semantic = Objects.requireNonNull(producerSemantic);
    return this;
  }

  /** @return An {@link EgressSpec} that can be used in a {@code StatefulFunctionModule}. */
  public KafkaEgressSpec<OutT> build() {
    return new KafkaEgressSpec<>(
        id, serializer, kafkaAddress, properties, kafkaProducerPoolSize, semantic);
  }
}
5,797
0
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk/kafka/KafkaProducerSemantic.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kafka; import java.time.Duration; import java.util.Objects; public abstract class KafkaProducerSemantic { public static KafkaProducerSemantic exactlyOnce(Duration transactionTimeout) { return new ExactlyOnce(transactionTimeout); } public static KafkaProducerSemantic atLeastOnce() { return new AtLeastOnce(); } public static KafkaProducerSemantic none() { return new NoSemantics(); } public boolean isExactlyOnceSemantic() { return getClass() == ExactlyOnce.class; } public ExactlyOnce asExactlyOnceSemantic() { return (ExactlyOnce) this; } public boolean isAtLeastOnceSemantic() { return getClass() == AtLeastOnce.class; } public AtLeastOnce asAtLeastOnceSemantic() { return (AtLeastOnce) this; } public boolean isNoSemantic() { return getClass() == NoSemantics.class; } public NoSemantics asNoSemantic() { return (NoSemantics) this; } public static class ExactlyOnce extends KafkaProducerSemantic { private final Duration transactionTimeout; private ExactlyOnce(Duration transactionTimeout) { if (transactionTimeout == Duration.ZERO) { throw new IllegalArgumentException( "Transaction timeout durations must be larger than 0 when using exactly-once producer semantics."); } 
this.transactionTimeout = Objects.requireNonNull(transactionTimeout); } public Duration transactionTimeout() { return transactionTimeout; } } public static class AtLeastOnce extends KafkaProducerSemantic { private AtLeastOnce() {} } public static class NoSemantics extends KafkaProducerSemantic { private NoSemantics() {} } }
5,798
0
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk
Create_ds/flink-statefun/statefun-kafka-io/src/main/java/org/apache/flink/statefun/sdk/kafka/KafkaEgressSpec.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.statefun.sdk.kafka; import java.util.Objects; import java.util.Properties; import org.apache.flink.statefun.sdk.EgressType; import org.apache.flink.statefun.sdk.io.EgressIdentifier; import org.apache.flink.statefun.sdk.io.EgressSpec; public final class KafkaEgressSpec<OutT> implements EgressSpec<OutT> { private final Class<? extends KafkaEgressSerializer<OutT>> serializer; private final String kafkaAddress; private final Properties properties; private final EgressIdentifier<OutT> id; private final int kafkaProducerPoolSize; private final KafkaProducerSemantic semantic; KafkaEgressSpec( EgressIdentifier<OutT> id, Class<? 
extends KafkaEgressSerializer<OutT>> serializer, String kafkaAddress, Properties properties, int kafkaProducerPoolSize, KafkaProducerSemantic semantic) { this.serializer = Objects.requireNonNull(serializer); this.kafkaAddress = Objects.requireNonNull(kafkaAddress); this.properties = Objects.requireNonNull(properties); this.id = Objects.requireNonNull(id); this.kafkaProducerPoolSize = kafkaProducerPoolSize; this.semantic = Objects.requireNonNull(semantic); } @Override public EgressIdentifier<OutT> id() { return id; } @Override public EgressType type() { return Constants.KAFKA_EGRESS_TYPE; } public Class<? extends KafkaEgressSerializer<OutT>> serializerClass() { return serializer; } public String kafkaAddress() { return kafkaAddress; } public Properties properties() { return properties; } public int kafkaProducerPoolSize() { return kafkaProducerPoolSize; } public KafkaProducerSemantic semantic() { return semantic; } }
5,799