index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer/impl/FirehoseProducer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.producer.impl;
import com.amazonaws.services.kinesisanalytics.flink.connectors.exception.FlinkKinesisFirehoseException;
import com.amazonaws.services.kinesisanalytics.flink.connectors.exception.RecordCouldNotBeSentException;
import com.amazonaws.services.kinesisanalytics.flink.connectors.exception.TimeoutExpiredException;
import com.amazonaws.services.kinesisanalytics.flink.connectors.producer.IProducer;
import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehose;
import com.amazonaws.services.kinesisfirehose.model.AmazonKinesisFirehoseException;
import com.amazonaws.services.kinesisfirehose.model.PutRecordBatchRequest;
import com.amazonaws.services.kinesisfirehose.model.PutRecordBatchResponseEntry;
import com.amazonaws.services.kinesisfirehose.model.PutRecordBatchResult;
import com.amazonaws.services.kinesisfirehose.model.Record;
import com.amazonaws.services.kinesisfirehose.model.ServiceUnavailableException;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import java.util.ArrayDeque;
import java.util.Properties;
import java.util.Queue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.producer.impl.FirehoseProducer.UserRecordResult;
@ThreadSafe
public class FirehoseProducer<O extends UserRecordResult, R extends Record> implements IProducer<O, R> {

    private static final Logger LOGGER = LoggerFactory.getLogger(FirehoseProducer.class);

    /** A configuration object holding validated properties for the producer */
    private final FirehoseProducerConfiguration configuration;

    /** AWS Kinesis Firehose client */
    private final AmazonKinesisFirehose firehoseClient;

    /** Firehose delivery stream */
    private final String deliveryStream;

    /** Scheduler service responsible for flushing the producer Buffer pool.
     * Single-threaded: the only agent ever draining {@code flusherBuffer} is this one thread. */
    private final ExecutorService flusher;

    /** Object lock responsible for guarding the producer Buffer pool.
     * All wait/notify coordination between the caller thread and the flusher thread happens on this monitor. */
    @GuardedBy("this")
    private final Object producerBufferLock = new Object();

    /** Producer Buffer pool: records appended by callers of {@link #addUserRecord}.
     * The reference itself is swapped/nulled under the lock, hence volatile. */
    private volatile Queue<Record> producerBuffer;

    /** Flusher Buffer pool: the batch currently owned by the flusher thread.
     * Emptiness of this queue doubles as the "flusher is idle" signal. */
    private volatile Queue<Record> flusherBuffer;

    /** A timestamp responsible to store the last timestamp after the flusher thread has been performed */
    private volatile long lastSucceededFlushTimestamp;

    /** Reports if the Firehose Producer was destroyed, shutting down the flusher thread. */
    private volatile boolean isDestroyed;

    /** A sentinel flag to notify the flusher thread to flush the buffer immediately.
     * This flag should be used only to request a flush from the caller thread through the {@link #flush()} method. */
    private volatile boolean syncFlush;

    /** A flag representing if the Flusher thread has failed. Once set it is never cleared. */
    private volatile boolean isFlusherFailed;

    /**
     * Creates a producer whose configuration is parsed from raw {@link Properties}.
     *
     * @param deliveryStream the Kinesis Firehose delivery stream name; must be non-blank
     * @param firehoseClient the AWS Firehose client used to send batches; must be non-null
     * @param config raw properties, validated by {@link FirehoseProducerConfiguration}
     */
    public FirehoseProducer(@Nonnull final String deliveryStream,
                            @Nonnull final AmazonKinesisFirehose firehoseClient,
                            @Nonnull final Properties config) {
        this(deliveryStream, firehoseClient, FirehoseProducerConfiguration.builder(config).build());
    }

    /**
     * Creates a producer with an already-built configuration and starts the background flusher thread.
     *
     * <p>NOTE(review): {@code configuration} is annotated {@code @Nonnull} but is not null-checked,
     * unlike the other two arguments — a null configuration only fails later. Also, submitting
     * {@code this::flushBuffer} from the constructor lets a reference to the not-yet-fully-constructed
     * instance escape to another thread; confirm this is acceptable here.</p>
     */
    public FirehoseProducer(@Nonnull final String deliveryStream,
                            @Nonnull final AmazonKinesisFirehose firehoseClient,
                            @Nonnull final FirehoseProducerConfiguration configuration) {
        this.firehoseClient = Validate.notNull(firehoseClient, "Kinesis Firehose client cannot be null");
        this.deliveryStream = Validate.notBlank(deliveryStream, "Kinesis Firehose delivery stream cannot be null or empty.");
        this.configuration = configuration;
        this.producerBuffer = new ArrayDeque<>(configuration.getMaxBufferSize());
        this.flusherBuffer = new ArrayDeque<>(configuration.getMaxBufferSize());
        flusher = Executors.newSingleThreadExecutor(new FirehoseThreadFactory());
        flusher.submit(this::flushBuffer);
    }

    /**
     * Buffers a record using the configured default operation timeout.
     *
     * @param record the record to buffer; must be non-null
     * @return a completed future (see {@link #addUserRecord(Record, long)} for details)
     * @throws Exception propagated from {@link #addUserRecord(Record, long)}
     */
    @Override
    public CompletableFuture<O> addUserRecord(final R record) throws Exception {
        return addUserRecord(record, configuration.getMaxOperationTimeoutInMillis());
    }

    /**
     * This method is responsible for taking a lock adding a {@code Record} into the producerBuffer, in case the producerBuffer is full
     * waits releasing the lock for the given {@code bufferFullWaitTimeoutInMillis}.
     * There are cases where the producerBuffer cannot be flushed then this method keeps waiting until the given operation timeout
     * passed as {@code timeoutInMillis}
     * @param record the type of data to be buffered
     * @param timeoutInMillis the operation timeout in case the record cannot be added into the producerBuffer.
     * @return an already-completed future whose result is marked successful. NOTE(review): the future
     *         completes as soon as the record is buffered, not when it is delivered to Firehose; the
     *         unchecked cast {@code (O) recordResult} relies on callers parameterizing {@code O} as
     *         {@link UserRecordResult} (or a supertype) — confirm this is intended.
     * @throws TimeoutExpiredException if the operation got stuck and is not able to proceed.
     * @throws InterruptedException if any thread interrupted the current thread before or while the current thread
     * was waiting for a notification. The <i>interrupted status</i> of the current thread is cleared when
     * this exception is thrown.
     */
    @Override
    public CompletableFuture<O> addUserRecord(final R record, final long timeoutInMillis)
            throws TimeoutExpiredException, InterruptedException {
        Validate.notNull(record, "Record cannot be null.");
        Validate.isTrue(timeoutInMillis > 0, "Operation timeout should be > 0.");
        long operationTimeoutInNanos = TimeUnit.MILLISECONDS.toNanos(timeoutInMillis);
        synchronized (producerBufferLock) {
            /* This happens whenever the current thread is trying to write, however, the Producer Buffer is full.
             * This guarantees if the writer thread is already running, should wait.
             * In addition, implements a kind of back pressure mechanism with a bail out condition, so we don't incur
             * in cases where the current thread waits forever.
             */
            long lastTimestamp = System.nanoTime();
            while (producerBuffer.size() >= configuration.getMaxBufferSize()) {
                if ((System.nanoTime() - lastTimestamp) >= operationTimeoutInNanos) {
                    throw new TimeoutExpiredException("Timeout has expired for the given operation");
                }
                /* If the buffer is filled and the flusher isn't running yet we notify to wake up the flusher */
                if (flusherBuffer.isEmpty()) {
                    producerBufferLock.notify();
                }
                // Timed wait so the bail-out condition above is re-checked periodically.
                producerBufferLock.wait(configuration.getBufferFullWaitTimeoutInMillis());
            }
            producerBuffer.offer(record);
            /* If the buffer was filled up right after the last insertion we would like to wake up the flusher thread
             * and send the buffered data to Kinesis Firehose as soon as possible */
            if (producerBuffer.size() >= configuration.getMaxBufferSize() && flusherBuffer.isEmpty()) {
                producerBufferLock.notify();
            }
        }
        UserRecordResult recordResult = new UserRecordResult().setSuccessful(true);
        return CompletableFuture.completedFuture((O) recordResult);
    }

    /**
     * This method runs in a background thread responsible for flushing the Producer Buffer in case the buffer is full,
     * not enough records into the buffer and timeout has expired or flusher timeout has expired.
     * If an unhandled exception is thrown the flusher thread should fail, logging the failure.
     * However, this behavior will block the producer to move on until hit the given timeout and throw {@code {@link TimeoutExpiredException}}
     */
    private void flushBuffer() {
        lastSucceededFlushTimestamp = System.nanoTime();
        long bufferTimeoutInNanos = TimeUnit.MILLISECONDS.toNanos(configuration.getBufferTimeoutInMillis());
        boolean timeoutFlush;
        while (true) {
            // True when the buffer has been sitting unflushed longer than the configured buffer timeout.
            timeoutFlush = (System.nanoTime() - lastSucceededFlushTimestamp) >= bufferTimeoutInNanos;
            synchronized (producerBufferLock) {
                /* If the flusher buffer is not empty at this point we should fail, otherwise we would end up looping
                 * forever since we are swapping references */
                Validate.validState(flusherBuffer.isEmpty());
                if (isDestroyed) {
                    return;
                } else if (syncFlush || (producerBuffer.size() >= configuration.getMaxBufferSize() ||
                        (timeoutFlush && producerBuffer.size() > 0))) {
                    // Move up to maxPutRecordBatchBytes worth of records into the flusher-owned queue,
                    // then wake any producer thread blocked on a full buffer.
                    prepareRecordsToSubmit(producerBuffer, flusherBuffer);
                    producerBufferLock.notify();
                } else {
                    try {
                        // Nothing to do yet: sleep (releasing the lock) until the next flush interval or a notify.
                        producerBufferLock.wait(configuration.getBufferTimeoutBetweenFlushes());
                    } catch (InterruptedException e) {
                        LOGGER.info("An interrupted exception has been thrown, while trying to sleep and release the lock during a flush.", e);
                    }
                    continue;
                }
            }
            /* It's OK calling {@code submitBatchWithRetry} outside the critical section because this method does not make
             * any changes to the object and the producer thread does not make any modifications to the flusherBuffer.
             * The only agent making changes to flusherBuffer is the flusher thread. */
            try {
                submitBatchWithRetry(flusherBuffer);
                Queue<Record> emptyFlushBuffer = new ArrayDeque<>(configuration.getMaxBufferSize());
                synchronized (producerBufferLock) {
                    /* We perform a swap at this point because {@code ArrayDeque<>.clear()} iterates over the items nullifying
                     * the items, and we would like to avoid such iteration just swapping references. */
                    Validate.validState(!flusherBuffer.isEmpty());
                    flusherBuffer = emptyFlushBuffer;
                    if (syncFlush) {
                        // A caller requested a flush via flush()/flushSync(); acknowledge it and wake the waiter.
                        syncFlush = false;
                        producerBufferLock.notify();
                    }
                }
            } catch (Exception ex) {
                String errorMsg = "An error has occurred while trying to send data to Kinesis Firehose.";
                // HTTP 413: payload too large — the batch exceeded the Firehose request size limit.
                if (ex instanceof AmazonKinesisFirehoseException &&
                        ((AmazonKinesisFirehoseException) ex).getStatusCode() == 413) {
                    LOGGER.error(errorMsg +
                            "Batch of records too large. Please try to reduce your batch size by passing " +
                            "FIREHOSE_PRODUCER_BUFFER_MAX_SIZE into your configuration.", ex);
                } else {
                    LOGGER.error(errorMsg, ex);
                }
                synchronized (producerBufferLock) {
                    isFlusherFailed = true;
                }
                // Rethrowing terminates the flusher task permanently; isFlushFailed() reports the failure.
                throw ex;
            }
        }
    }

    /**
     * Populates the target queue with messages from the source queue.
     * Up to the maximum capacity defined by {@code maxPutRecordBatchBytes}.
     * Must be called while holding {@code producerBufferLock}.
     * NOTE(review): sizes the batch by {@code ByteBuffer.capacity()} of each record's data; if a record's
     * buffer has spare capacity beyond its limit this over-counts — confirm records are created with
     * exact-size buffers.
     */
    private void prepareRecordsToSubmit(@Nonnull final Queue<Record> sourceQueue, @Nonnull final Queue<Record> targetQueue) {
        int total = 0;
        while (!sourceQueue.isEmpty() && (total + sourceQueue.peek().getData().capacity()) <= configuration.getMaxPutRecordBatchBytes()) {
            total += sourceQueue.peek().getData().capacity();
            targetQueue.add(sourceQueue.poll());
        }
    }

    /**
     * Submits the batch to Firehose, retrying up to the configured number of attempts with a jittered backoff
     * when some records fail, and treating {@link ServiceUnavailableException} as recoverable.
     *
     * <p>NOTE(review): despite the "Full Jitter" reference below, the backoff bound grows linearly
     * ({@code base * 2 * attempts}) rather than exponentially, and evaluates to 0 on the first attempt —
     * confirm whether {@code base * 2^attempts} was intended.</p>
     * <p>NOTE(review): the {@code InterruptedException} handler neither re-interrupts the thread
     * ({@code Thread.currentThread().interrupt()}) nor aborts the retry loop.</p>
     *
     * @param records the batch owned by the flusher thread; not modified here
     * @throws AmazonKinesisFirehoseException on a non-recoverable service error
     * @throws RecordCouldNotBeSentException when all retry attempts are exhausted
     */
    private void submitBatchWithRetry(final Queue<Record> records) throws AmazonKinesisFirehoseException,
            RecordCouldNotBeSentException {
        PutRecordBatchResult lastResult;
        String warnMessage = null;
        for (int attempts = 0; attempts < configuration.getNumberOfRetries(); attempts++) {
            try {
                LOGGER.debug("Trying to flush Buffer of size: {} on attempt: {}", records.size(), attempts);
                lastResult = submitBatch(records);
                if (lastResult.getFailedPutCount() == null || lastResult.getFailedPutCount() == 0) {
                    // Whole batch accepted: record the time so the timeout-based flush trigger resets.
                    lastSucceededFlushTimestamp = System.nanoTime();
                    LOGGER.debug("Firehose Buffer has been flushed with size: {} on attempt: {}",
                            records.size(), attempts);
                    return;
                }
                // A null record id marks an entry that Firehose rejected; surface its error detail if present.
                PutRecordBatchResponseEntry failedRecord = lastResult.getRequestResponses()
                        .stream()
                        .filter(r -> r.getRecordId() == null)
                        .findFirst()
                        .orElse(null);
                warnMessage = String.format("Number of failed records: %s.", lastResult.getFailedPutCount());
                if (failedRecord != null) {
                    warnMessage = String.format("Last Kinesis Firehose putRecordBatch encountered an error and failed " +
                                    "trying to put: %s records with error: %s - %s.",
                            lastResult.getFailedPutCount(), failedRecord.getErrorCode(), failedRecord.getErrorMessage());
                }
                LOGGER.warn(warnMessage);
                //Full Jitter: https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
                long timeToSleep = RandomUtils.nextLong(0,
                        Math.min(configuration.getMaxBackOffInMillis(), (configuration.getBaseBackOffInMillis() * 2 * attempts)));
                LOGGER.info("Sleeping for: {}ms on attempt: {}", timeToSleep, attempts);
                Thread.sleep(timeToSleep);
            } catch (ServiceUnavailableException ex) {
                // Recoverable per Firehose semantics: fall through and retry on the next loop iteration.
                LOGGER.info("Kinesis Firehose has thrown a recoverable exception.", ex);
            } catch (InterruptedException e) {
                LOGGER.info("An interrupted exception has been thrown between retry attempts.", e);
            } catch (AmazonKinesisFirehoseException ex) {
                throw ex;
            }
        }
        throw new RecordCouldNotBeSentException("Exceeded number of attempts! " + warnMessage);
    }

    /**
     * Sends the actual batch of records to Kinesis Firehose
     * @param records a Collection of records
     * @return {@code PutRecordBatchResult}
     */
    private PutRecordBatchResult submitBatch(final Queue<Record> records) throws AmazonKinesisFirehoseException {
        LOGGER.debug("Sending {} records to Kinesis Firehose on stream: {}", records.size(), deliveryStream);
        PutRecordBatchResult result;
        // NOTE(review): this try/catch only rethrows the same exception type — it is a no-op wrapper.
        try {
            result = firehoseClient.putRecordBatch(new PutRecordBatchRequest()
                    .withDeliveryStreamName(deliveryStream)
                    .withRecords(records));
        } catch (AmazonKinesisFirehoseException e) {
            throw e;
        }
        return result;
    }

    /**
     * Make sure that any pending scheduled thread terminates before closing as well as cleans the producerBuffer pool,
     * allowing GC to collect.
     * NOTE(review): this nulls {@code producerBuffer}, so a later call to {@link #getOutstandingRecordsCount()}
     * on a destroyed producer would throw a NullPointerException — confirm callers never do that. Any records
     * still buffered at this point are dropped, not flushed.
     */
    @Override
    public void destroy() throws Exception {
        synchronized (producerBufferLock) {
            isDestroyed = true;
            producerBuffer = null;
            // Wake the flusher so it observes isDestroyed and returns.
            producerBufferLock.notify();
        }
        if (!flusher.isShutdown() && !flusher.isTerminated()) {
            LOGGER.info("Shutting down scheduled service.");
            flusher.shutdown();
            try {
                LOGGER.info("Awaiting executor service termination...");
                flusher.awaitTermination(1L, TimeUnit.MINUTES);
            } catch (InterruptedException e) {
                final String errorMsg = "Error waiting executor writer termination.";
                LOGGER.error(errorMsg, e);
                throw new FlinkKinesisFirehoseException(errorMsg, e);
            }
        }
    }

    /** @return whether {@link #destroy()} has been invoked. */
    @Override
    public boolean isDestroyed() {
        synchronized (producerBufferLock) {
            return isDestroyed;
        }
    }

    /** @return the number of records buffered but not yet confirmed sent (producer + in-flight flusher queues). */
    @Override
    public int getOutstandingRecordsCount() {
        synchronized (producerBufferLock) {
            return producerBuffer.size() + flusherBuffer.size();
        }
    }

    /** @return whether the background flusher thread has failed permanently. */
    @Override
    public boolean isFlushFailed() {
        synchronized (producerBufferLock) {
            return isFlusherFailed;
        }
    }

    /**
     * This method instructs the flusher thread to perform a flush on the buffer without waiting for completion.
     * <p>
     * This implementation does not guarantee the whole buffer is flushed or if the flusher thread
     * has completed the flush or not.
     * In order to flush all records and wait until completion, use {@code {@link #flushSync()}}
     * </p>
     */
    @Override
    public void flush() {
        synchronized (producerBufferLock) {
            syncFlush = true;
            producerBufferLock.notify();
        }
    }

    /**
     * This method instructs the flusher thread to perform the flush on the buffer and wait for the completion.
     * <p>
     * This implementation is useful once there is a need to guarantee the buffer is flushed before making further progress.
     * i.e. Shutting down the producer.
     * i.e. Taking synchronous snapshots.
     * </p>
     * The caller needs to make sure to assert the status of {@link #isFlushFailed()} in order guarantee whether
     * the flush has successfully completed or not.
     */
    @Override
    public void flushSync() {
        // Poll-based wait: re-request a flush every 500ms until everything drained or the flusher failed.
        while (getOutstandingRecordsCount() > 0 && !isFlushFailed()) {
            flush();
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                LOGGER.warn("An interruption has happened while trying to flush the buffer synchronously.");
                // Preserve the interrupt status for callers higher up the stack.
                Thread.currentThread().interrupt();
            }
        }
        if (isFlushFailed()) {
            LOGGER.warn("The flusher thread has failed trying to synchronously flush the buffer.");
        }
    }

    /** Mutable result holder returned (already completed) from {@link #addUserRecord}. */
    public static class UserRecordResult {
        // Populated only on failure paths; null when the record was buffered successfully.
        private Throwable exception;
        // True when the record was accepted into the producer buffer.
        private boolean successful;

        public Throwable getException() {
            return exception;
        }

        public UserRecordResult setException(Throwable exception) {
            this.exception = exception;
            return this;
        }

        public boolean isSuccessful() {
            return successful;
        }

        public UserRecordResult setSuccessful(boolean successful) {
            this.successful = successful;
            return this;
        }
    }

    /** Thread factory giving flusher threads a recognizable name; threads are non-daemon. */
    static class FirehoseThreadFactory implements ThreadFactory {
        /** Static threadsafe counter use to generate thread name suffix. */
        private static final AtomicLong count = new AtomicLong(0);

        @Override
        public Thread newThread(@Nonnull final Runnable runnable) {
            Thread thread = Executors.defaultThreadFactory().newThread(runnable);
            thread.setName("kda-writer-thread-" + count.getAndIncrement());
            // Non-daemon so the JVM waits for in-flight flushes unless destroy() is called.
            thread.setDaemon(false);
            return thread;
        }
    }
}
| 4,000 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/producer/impl/FirehoseProducerConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.producer.impl;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants;
import com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil;
import org.apache.commons.lang3.Validate;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Properties;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_MAXIMUM_BATCH_BYTES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_MAX_BUFFER_SIZE;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_BASE_BACKOFF_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_FLUSH_MAX_NUMBER_OF_RETRIES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_FLUSH_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_FULL_WAIT_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_MAX_BACKOFF_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_MAX_BATCH_BYTES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_MAX_SIZE;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_BUFFER_MAX_TIMEOUT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.FIREHOSE_PRODUCER_MAX_OPERATION_TIMEOUT;
import static java.util.Optional.ofNullable;
/** An immutable configuration class for {@link FirehoseProducer}. */
public class FirehoseProducerConfiguration {

    /** The default MAX producerBuffer size. Users should be able to specify a smaller producerBuffer if needed.
     * However, this value should be exercised with caution, since Kinesis Firehose limits PutRecordBatch at 500 records or 4MiB per call.
     * Please refer to https://docs.aws.amazon.com/firehose/latest/dev/limits.html for further reference.
     * */
    private final int maxBufferSize;

    /** The maximum number of bytes that can be sent in a single PutRecordBatch operation */
    private final int maxPutRecordBatchBytes;

    /** The specified amount timeout the producerBuffer must be flushed if haven't met any other conditions previously */
    private final long bufferTimeoutInMillis;

    /** The wait time in milliseconds in case a producerBuffer is full */
    private final long bufferFullWaitTimeoutInMillis;

    /** The interval between producerBuffer flushes */
    private final long bufferTimeoutBetweenFlushes;

    /** The MAX number of retries in case of recoverable failures */
    private final int numberOfRetries;

    /** The default MAX backoff timeout
     * https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
     */
    private final long maxBackOffInMillis;

    /** The default BASE timeout to be used on Jitter backoff
     * https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
     */
    private final long baseBackOffInMillis;

    /** The MAX timeout for a given addUserRecord operation */
    private final long maxOperationTimeoutInMillis;

    /** Copies the (already validated) values out of the builder. */
    private FirehoseProducerConfiguration(@Nonnull final Builder builder) {
        this.maxBufferSize = builder.maxBufferSize;
        this.maxPutRecordBatchBytes = builder.maxPutRecordBatchBytes;
        this.bufferTimeoutInMillis = builder.bufferTimeoutInMillis;
        this.bufferFullWaitTimeoutInMillis = builder.bufferFullWaitTimeoutInMillis;
        this.bufferTimeoutBetweenFlushes = builder.bufferTimeoutBetweenFlushes;
        this.numberOfRetries = builder.numberOfRetries;
        this.maxBackOffInMillis = builder.maxBackOffInMillis;
        this.baseBackOffInMillis = builder.baseBackOffInMillis;
        this.maxOperationTimeoutInMillis = builder.maxOperationTimeoutInMillis;
    }

    /**
     * The max producer buffer size; the maximum number of records that will be sent in a PutRecordBatch request.
     * @return the max producer buffer size.
     */
    public int getMaxBufferSize() {
        return maxBufferSize;
    }

    /**
     * The maximum number of bytes that will be sent in a single PutRecordBatch operation.
     * @return the maximum number of PutRecordBatch bytes
     */
    public int getMaxPutRecordBatchBytes() {
        return maxPutRecordBatchBytes;
    }

    /**
     * The specified amount timeout the producerBuffer must be flushed if haven't met any other conditions previously.
     * @return the specified amount timeout the producerBuffer must be flushed
     */
    public long getBufferTimeoutInMillis() {
        return bufferTimeoutInMillis;
    }

    /**
     * The wait time in milliseconds in case a producerBuffer is full.
     * @return The wait time in milliseconds
     */
    public long getBufferFullWaitTimeoutInMillis() {
        return bufferFullWaitTimeoutInMillis;
    }

    /**
     * The interval between producerBuffer flushes.
     * @return The interval between producerBuffer flushes
     */
    public long getBufferTimeoutBetweenFlushes() {
        return bufferTimeoutBetweenFlushes;
    }

    /**
     * The max number of retries in case of recoverable failures.
     * @return the max number of retries in case of recoverable failures
     */
    public int getNumberOfRetries() {
        return numberOfRetries;
    }

    /**
     * The max backoff timeout (https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/)
     * @return The max backoff timeout
     */
    public long getMaxBackOffInMillis() {
        return maxBackOffInMillis;
    }

    /**
     * The base backoff timeout (https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/)
     * @return The base backoff timeout
     */
    public long getBaseBackOffInMillis() {
        return baseBackOffInMillis;
    }

    /**
     * The max timeout for a given addUserRecord operation.
     * @return the max timeout for a given addUserRecord operation
     */
    public long getMaxOperationTimeoutInMillis() {
        return maxOperationTimeoutInMillis;
    }

    /**
     * Creates a builder seeded from properties; the AWS region (used to pick the default
     * PutRecordBatch byte limit) is read from {@link AWSConfigConstants#AWS_REGION}.
     * @param config the configuration properties
     * @return a builder populated from {@code config}
     */
    @Nonnull
    public static Builder builder(@Nonnull final Properties config) {
        final String region = config.getProperty(AWSConfigConstants.AWS_REGION);
        return builder(region).withProperties(config);
    }

    /**
     * Creates a builder with all defaults for the given region (may be null).
     * @param region the AWS region, or null to use the region-independent default batch-byte limit
     * @return a fresh builder
     */
    @Nonnull
    public static Builder builder(@Nullable final String region) {
        return new Builder(region);
    }

    /** Builder for {@link FirehoseProducerConfiguration}; every setter validates its argument. */
    public static class Builder {
        private int maxBufferSize = DEFAULT_MAX_BUFFER_SIZE;
        private int maxPutRecordBatchBytes;
        private int numberOfRetries = ProducerConfigConstants.DEFAULT_NUMBER_OF_RETRIES;
        private long bufferTimeoutInMillis = ProducerConfigConstants.DEFAULT_MAX_BUFFER_TIMEOUT;
        private long maxOperationTimeoutInMillis = ProducerConfigConstants.DEFAULT_MAX_OPERATION_TIMEOUT;
        private long bufferFullWaitTimeoutInMillis = ProducerConfigConstants.DEFAULT_WAIT_TIME_FOR_BUFFER_FULL;
        private long bufferTimeoutBetweenFlushes = ProducerConfigConstants.DEFAULT_INTERVAL_BETWEEN_FLUSHES;
        private long maxBackOffInMillis = ProducerConfigConstants.DEFAULT_MAX_BACKOFF;
        private long baseBackOffInMillis = ProducerConfigConstants.DEFAULT_BASE_BACKOFF;

        public Builder(@Nullable final String region) {
            // The per-request byte limit depends on the region's Firehose throughput quota.
            this.maxPutRecordBatchBytes = AWSUtil.getDefaultMaxPutRecordBatchBytes(region);
        }

        /** @return an immutable configuration snapshot of this builder's current values */
        @Nonnull
        public FirehoseProducerConfiguration build() {
            return new FirehoseProducerConfiguration(this);
        }

        /**
         * The max producer buffer size; the maximum number of records that will be sent in a PutRecordBatch request.
         * @param maxBufferSize the max producer buffer size
         * @return this builder
         * @throws IllegalArgumentException if outside [1, DEFAULT_MAX_BUFFER_SIZE]
         */
        @Nonnull
        public Builder withMaxBufferSize(final int maxBufferSize) {
            Validate.isTrue(maxBufferSize > 0 && maxBufferSize <= DEFAULT_MAX_BUFFER_SIZE,
                    String.format("Buffer size must be between 1 and %d", DEFAULT_MAX_BUFFER_SIZE));
            this.maxBufferSize = maxBufferSize;
            return this;
        }

        /**
         * The maximum number of bytes that will be sent in a single PutRecordBatch operation.
         * @param maxPutRecordBatchBytes the maximum number of PutRecordBatch bytes
         * @return this builder
         * @throws IllegalArgumentException if outside [1, DEFAULT_MAXIMUM_BATCH_BYTES]
         */
        @Nonnull
        public Builder withMaxPutRecordBatchBytes(final int maxPutRecordBatchBytes) {
            Validate.isTrue(maxPutRecordBatchBytes > 0 && maxPutRecordBatchBytes <= DEFAULT_MAXIMUM_BATCH_BYTES,
                    String.format("Maximum batch size in bytes must be between 1 and %d", DEFAULT_MAXIMUM_BATCH_BYTES));
            this.maxPutRecordBatchBytes = maxPutRecordBatchBytes;
            return this;
        }

        /**
         * The max number of retries in case of recoverable failures.
         * @param numberOfRetries the max number of retries in case of recoverable failures.
         * @return this builder
         * @throws IllegalArgumentException if negative
         */
        @Nonnull
        public Builder withNumberOfRetries(final int numberOfRetries) {
            Validate.isTrue(numberOfRetries >= 0, "Number of retries cannot be negative.");
            this.numberOfRetries = numberOfRetries;
            return this;
        }

        /**
         * The specified amount timeout the producerBuffer must be flushed if haven't met any other conditions previously.
         * @param bufferTimeoutInMillis the specified amount timeout the producerBuffer must be flushed
         * @return this builder
         * @throws IllegalArgumentException if negative
         */
        @Nonnull
        public Builder withBufferTimeoutInMillis(final long bufferTimeoutInMillis) {
            // Zero is accepted (check is >= 0); the message now matches the actual check.
            Validate.isTrue(bufferTimeoutInMillis >= 0, "Flush timeout cannot be negative.");
            this.bufferTimeoutInMillis = bufferTimeoutInMillis;
            return this;
        }

        /**
         * The max timeout for a given addUserRecord operation.
         * @param maxOperationTimeoutInMillis The max timeout for a given addUserRecord operation
         * @return this builder
         * @throws IllegalArgumentException if negative
         */
        @Nonnull
        public Builder withMaxOperationTimeoutInMillis(final long maxOperationTimeoutInMillis) {
            Validate.isTrue(maxOperationTimeoutInMillis >= 0, "Max operation timeout cannot be negative.");
            this.maxOperationTimeoutInMillis = maxOperationTimeoutInMillis;
            return this;
        }

        /**
         * The wait time in milliseconds in case a producerBuffer is full.
         * @param bufferFullWaitTimeoutInMillis the wait time in milliseconds in case a producerBuffer is full
         * @return this builder
         * @throws IllegalArgumentException if negative
         */
        @Nonnull
        public Builder withBufferFullWaitTimeoutInMillis(final long bufferFullWaitTimeoutInMillis) {
            Validate.isTrue(bufferFullWaitTimeoutInMillis >= 0, "Buffer full waiting timeout cannot be negative.");
            this.bufferFullWaitTimeoutInMillis = bufferFullWaitTimeoutInMillis;
            return this;
        }

        /**
         * The interval between producerBuffer flushes.
         * @param bufferTimeoutBetweenFlushes the interval between producerBuffer flushes
         * @return this builder
         * @throws IllegalArgumentException if negative
         */
        @Nonnull
        public Builder withBufferTimeoutBetweenFlushes(final long bufferTimeoutBetweenFlushes) {
            Validate.isTrue(bufferTimeoutBetweenFlushes >= 0, "Interval between flushes cannot be negative.");
            this.bufferTimeoutBetweenFlushes = bufferTimeoutBetweenFlushes;
            return this;
        }

        /**
         * The max backoff timeout (https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/)
         * @param maxBackOffInMillis the max backoff timeout
         * @return this builder
         * @throws IllegalArgumentException if negative
         */
        @Nonnull
        public Builder withMaxBackOffInMillis(final long maxBackOffInMillis) {
            Validate.isTrue(maxBackOffInMillis >= 0, "Max backoff timeout cannot be negative.");
            this.maxBackOffInMillis = maxBackOffInMillis;
            return this;
        }

        /**
         * The base backoff timeout (https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/)
         * @param baseBackOffInMillis The base backoff timeout
         * @return this builder
         * @throws IllegalArgumentException if negative
         */
        @Nonnull
        public Builder withBaseBackOffInMillis(final long baseBackOffInMillis) {
            Validate.isTrue(baseBackOffInMillis >= 0, "Base backoff timeout cannot be negative.");
            this.baseBackOffInMillis = baseBackOffInMillis;
            return this;
        }

        /**
         * Creates a Builder populated with values from the Properties.
         * Absent keys leave the corresponding defaults untouched; present values are validated
         * by the individual {@code with*} setters.
         * @param config the configuration properties
         * @return this builder
         * @throws NumberFormatException if a present property value is not a valid number
         */
        @Nonnull
        public Builder withProperties(@Nonnull final Properties config) {
            ofNullable(config.getProperty(FIREHOSE_PRODUCER_BUFFER_MAX_SIZE))
                    .map(Integer::parseInt)
                    .ifPresent(this::withMaxBufferSize);
            ofNullable(config.getProperty(FIREHOSE_PRODUCER_BUFFER_MAX_BATCH_BYTES))
                    .map(Integer::parseInt)
                    .ifPresent(this::withMaxPutRecordBatchBytes);
            ofNullable(config.getProperty(FIREHOSE_PRODUCER_BUFFER_FLUSH_MAX_NUMBER_OF_RETRIES))
                    .map(Integer::parseInt)
                    .ifPresent(this::withNumberOfRetries);
            ofNullable(config.getProperty(FIREHOSE_PRODUCER_BUFFER_MAX_TIMEOUT))
                    .map(Long::parseLong)
                    .ifPresent(this::withBufferTimeoutInMillis);
            ofNullable(config.getProperty(FIREHOSE_PRODUCER_BUFFER_FULL_WAIT_TIMEOUT))
                    .map(Long::parseLong)
                    .ifPresent(this::withBufferFullWaitTimeoutInMillis);
            ofNullable(config.getProperty(FIREHOSE_PRODUCER_BUFFER_FLUSH_TIMEOUT))
                    .map(Long::parseLong)
                    .ifPresent(this::withBufferTimeoutBetweenFlushes);
            ofNullable(config.getProperty(FIREHOSE_PRODUCER_BUFFER_MAX_BACKOFF_TIMEOUT))
                    .map(Long::parseLong)
                    .ifPresent(this::withMaxBackOffInMillis);
            ofNullable(config.getProperty(FIREHOSE_PRODUCER_BUFFER_BASE_BACKOFF_TIMEOUT))
                    .map(Long::parseLong)
                    .ifPresent(this::withBaseBackOffInMillis);
            ofNullable(config.getProperty(FIREHOSE_PRODUCER_MAX_OPERATION_TIMEOUT))
                    .map(Long::parseLong)
                    .ifPresent(this::withMaxOperationTimeoutInMillis);
            return this;
        }
    }
}
| 4,001 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/util/AWSUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.util;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.CredentialProvider;
import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehose;
import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehoseClientBuilder;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Properties;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_KINESIS_FIREHOSE_ENDPOINT;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_KINESIS_FIREHOSE_ENDPOINT_SIGNING_REGION;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_REGION;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.accessKeyId;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.profileName;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.roleArn;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.roleSessionName;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.secretKey;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.DEFAULT_MAXIMUM_BATCH_BYTES;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants.REDUCED_QUOTA_MAXIMUM_THROUGHPUT;
public final class AWSUtil {
private AWSUtil() {
}
public static AmazonKinesisFirehose createKinesisFirehoseClientFromConfiguration(@Nonnull final Properties configProps,
@Nonnull final CredentialProvider credentialsProvider) {
validateConfiguration(configProps);
Validate.notNull(credentialsProvider, "Credential Provider cannot be null.");
AmazonKinesisFirehoseClientBuilder firehoseClientBuilder = AmazonKinesisFirehoseClientBuilder
.standard()
.withCredentials(credentialsProvider.getAwsCredentialsProvider());
final String region = configProps.getProperty(AWS_REGION, null);
final String firehoseEndpoint = configProps.getProperty(
AWS_KINESIS_FIREHOSE_ENDPOINT, null);
final String firehoseEndpointSigningRegion = configProps.getProperty(
AWS_KINESIS_FIREHOSE_ENDPOINT_SIGNING_REGION, null);
firehoseClientBuilder = (region != null) ? firehoseClientBuilder.withRegion(region)
: firehoseClientBuilder.withEndpointConfiguration(
new AwsClientBuilder.EndpointConfiguration(firehoseEndpoint, firehoseEndpointSigningRegion));
return firehoseClientBuilder.build();
}
public static Properties validateConfiguration(final Properties configProps) {
Validate.notNull(configProps, "Configuration properties cannot be null.");
if (!configProps.containsKey(AWS_REGION) ^ (configProps.containsKey(AWS_KINESIS_FIREHOSE_ENDPOINT) &&
configProps.containsKey(AWS_KINESIS_FIREHOSE_ENDPOINT_SIGNING_REGION))) {
throw new IllegalArgumentException(
"Either AWS region should be specified or AWS Firehose endpoint and endpoint signing region.");
}
return configProps;
}
public static Properties validateBasicProviderConfiguration(final Properties configProps, final String providerKey) {
validateConfiguration(configProps);
Validate.isTrue(configProps.containsKey(accessKeyId(providerKey)),
"AWS access key must be specified with credential provider BASIC.");
Validate.isTrue(configProps.containsKey(secretKey(providerKey)),
"AWS secret key must be specified with credential provider BASIC.");
return configProps;
}
public static Properties validateBasicProviderConfiguration(final Properties configProps) {
return validateBasicProviderConfiguration(configProps, null);
}
public static boolean containsBasicProperties(final Properties configProps, final String providerKey) {
Validate.notNull(configProps);
return configProps.containsKey(accessKeyId(providerKey)) && configProps.containsKey(secretKey(providerKey));
}
public static AWSConfigConstants.CredentialProviderType getCredentialProviderType(final Properties configProps,
final String providerKey) {
if (providerKey == null || !configProps.containsKey(providerKey)) {
return containsBasicProperties(configProps, providerKey) ?
AWSConfigConstants.CredentialProviderType.BASIC : AWSConfigConstants.CredentialProviderType.AUTO;
}
final String providerTypeString = configProps.getProperty(providerKey);
if (StringUtils.isEmpty(providerTypeString)) {
return AWSConfigConstants.CredentialProviderType.AUTO;
}
try {
return AWSConfigConstants.CredentialProviderType.valueOf(providerTypeString);
} catch (IllegalArgumentException e) {
return AWSConfigConstants.CredentialProviderType.AUTO;
}
}
public static Properties validateProfileProviderConfiguration(final Properties configProps, final String providerKey) {
validateConfiguration(configProps);
Validate.notBlank(providerKey);
Validate.isTrue(configProps.containsKey(profileName(providerKey)),
"AWS profile name should be specified with credential provider PROFILE.");
return configProps;
}
public static Properties validateProfileProviderConfiguration(final Properties configProps) {
return validateProfileProviderConfiguration(configProps, AWS_CREDENTIALS_PROVIDER);
}
public static Properties validateAssumeRoleCredentialsProvider(final Properties configProps, final String providerKey) {
validateConfiguration(configProps);
Validate.isTrue(configProps.containsKey(roleArn(providerKey)),
"AWS role arn to be assumed must be provided with credential provider type ASSUME_ROLE");
Validate.isTrue(configProps.containsKey(roleSessionName(providerKey)),
"AWS role session name must be provided with credential provider type ASSUME_ROLE");
return configProps;
}
public static Properties validateAssumeRoleCredentialsProvider(final Properties configProps) {
return validateAssumeRoleCredentialsProvider(configProps, AWS_CREDENTIALS_PROVIDER);
}
/**
* Computes a sensible maximum put record batch size based on region.
* There is a maximum batch size of 4 MiB per call, this will exceed the 1 MiB/second quota in some regions.
* https://docs.aws.amazon.com/firehose/latest/dev/limits.html
*
* If the region is null, it falls back to the lower batch size.
* Customer can override this value in producer properties.
*
* @param region the region the producer is running in
* @return a sensible maximum batch size
*/
public static int getDefaultMaxPutRecordBatchBytes(@Nullable final String region) {
if (region != null) {
switch (region) {
case "us-east-1":
case "us-west-2":
case "eu-west-1":
return DEFAULT_MAXIMUM_BATCH_BYTES;
}
}
return REDUCED_QUOTA_MAXIMUM_THROUGHPUT;
}
}
| 4,002 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/config/ProducerConfigConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.config;
import java.util.concurrent.TimeUnit;
public class ProducerConfigConstants {

    // ------------------------------------------------------------------
    // Property keys recognized by the Firehose producer configuration.
    // ------------------------------------------------------------------

    /** Maximum number of records buffered per batch. */
    public static final String FIREHOSE_PRODUCER_BUFFER_MAX_SIZE = "firehose.producer.batch.size";

    /** Maximum number of bytes per batch. */
    public static final String FIREHOSE_PRODUCER_BUFFER_MAX_BATCH_BYTES = "firehose.producer.batch.bytes";

    /** Maximum time a buffer may wait before being flushed. */
    public static final String FIREHOSE_PRODUCER_BUFFER_MAX_TIMEOUT = "firehose.producer.buffer.timeout";

    /** Wait time applied when the buffer is full. */
    public static final String FIREHOSE_PRODUCER_BUFFER_FULL_WAIT_TIMEOUT = "firehose.producer.buffer.full.wait.timeout";

    /** Interval between consecutive buffer flushes. */
    public static final String FIREHOSE_PRODUCER_BUFFER_FLUSH_TIMEOUT = "firehose.producer.buffer.flush.timeout";

    /** Maximum number of flush retries on recoverable failures. */
    public static final String FIREHOSE_PRODUCER_BUFFER_FLUSH_MAX_NUMBER_OF_RETRIES = "firehose.producer.buffer.flush.retries";

    /** Upper bound for the jittered backoff delay. */
    public static final String FIREHOSE_PRODUCER_BUFFER_MAX_BACKOFF_TIMEOUT = "firehose.producer.buffer.max.backoff";

    /** Base delay for the jittered backoff. */
    public static final String FIREHOSE_PRODUCER_BUFFER_BASE_BACKOFF_TIMEOUT = "firehose.producer.buffer.base.backoff";

    /** Overall timeout for a single addUserRecord operation. */
    public static final String FIREHOSE_PRODUCER_MAX_OPERATION_TIMEOUT = "firehose.producer.operation.timeout";

    // ------------------------------------------------------------------
    // Default values used when the properties above are not supplied.
    // ------------------------------------------------------------------

    /**
     * Default MAX buffer size. Users may configure a larger buffer since it is not bounded
     * here, but larger values should be used with caution: Kinesis Firehose limits
     * PutRecordBatch to 500 records or 4 MiB per call.
     * See https://docs.aws.amazon.com/firehose/latest/dev/limits.html for reference.
     */
    public static final int DEFAULT_MAX_BUFFER_SIZE = 500;

    /** Maximum number of bytes that can be sent in a single PutRecordBatch operation. */
    public static final int DEFAULT_MAXIMUM_BATCH_BYTES = 4 * 1_024 * 1_024;

    /** Default MAX timeout before a buffer must be flushed. */
    public static final long DEFAULT_MAX_BUFFER_TIMEOUT = TimeUnit.MINUTES.toMillis(5);

    /** Default MAX timeout for a single addUserRecord operation. */
    public static final long DEFAULT_MAX_OPERATION_TIMEOUT = TimeUnit.MINUTES.toMillis(5);

    /** Default wait time (ms) applied when the buffer is full. */
    public static final long DEFAULT_WAIT_TIME_FOR_BUFFER_FULL = 100L;

    /** Default interval (ms) between buffer flushes. */
    public static final long DEFAULT_INTERVAL_BETWEEN_FLUSHES = 50L;

    /** Default MAX number of retries for recoverable failures. */
    public static final int DEFAULT_NUMBER_OF_RETRIES = 10;

    /**
     * Default MAX backoff delay (ms) for the jittered exponential backoff.
     * https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
     */
    public static final long DEFAULT_MAX_BACKOFF = 100L;

    /**
     * Default BASE delay (ms) for the jittered exponential backoff.
     * https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
     */
    public static final long DEFAULT_BASE_BACKOFF = 10L;

    /**
     * Reduced maximum throughput quota. Some regions have lower throughput quotas
     * than others; see https://docs.aws.amazon.com/firehose/latest/dev/limits.html.
     */
    public static final int REDUCED_QUOTA_MAXIMUM_THROUGHPUT = 1_024 * 1_024;

    private ProducerConfigConstants() {
        // Prevent instantiation
    }
}
| 4,003 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/config/AWSConfigConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.config;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* AWS Kinesis Firehose configuration constants
*/
public class AWSConfigConstants {

    /** Strategies for obtaining AWS credentials. */
    public enum CredentialProviderType {
        /** Look for AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY in the passed configuration. */
        BASIC,

        /** Look for the environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY. */
        ENV_VARIABLES,

        /** Look for the Java system properties aws.accessKeyId and aws.secretKey. */
        SYS_PROPERTIES,

        /** Use an AWS credentials profile file. */
        PROFILE,

        /** Assume an IAM role; the credentials for assuming the role must be supplied. */
        ASSUME_ROLE,

        /**
         * Credentials provider chain searching in order:
         * ENV_VARIABLES, SYS_PROPERTIES, PROFILE in the AWS instance metadata.
         */
        AUTO
    }

    /** AWS access key for provider type BASIC. */
    public static final String AWS_ACCESS_KEY_ID = "aws_access_key_id";

    /** AWS secret key for provider type BASIC. */
    public static final String AWS_SECRET_ACCESS_KEY = "aws_secret_access_key";

    /** AWS Kinesis Firehose region; if not specified defaults to us-east-1. */
    public static final String AWS_REGION = "aws.region";

    /**
     * Credential provider type to use when AWS credentials are required
     * (AUTO when unset, unless both access key id and secret key are set, then BASIC).
     */
    public static final String AWS_CREDENTIALS_PROVIDER = "aws.credentials.provider";

    /** Kinesis Firehose endpoint. */
    public static final String AWS_KINESIS_FIREHOSE_ENDPOINT = "aws.kinesis.firehose.endpoint";

    /** Signing region paired with {@link #AWS_KINESIS_FIREHOSE_ENDPOINT}. */
    public static final String AWS_KINESIS_FIREHOSE_ENDPOINT_SIGNING_REGION = "aws.kinesis.firehose.endpoint.signing.region";

    /** Optional: profile name when the provider is AwsProfileCredentialProvider. */
    public static final String AWS_PROFILE_NAME = profileName(AWS_CREDENTIALS_PROVIDER);

    /** Optional: profile file path when the provider is AwsProfileCredentialProvider. */
    public static final String AWS_PROFILE_PATH = profilePath(AWS_CREDENTIALS_PROVIDER);

    /** Role ARN used when the credential provider type is ASSUME_ROLE. */
    public static final String AWS_ROLE_ARN = roleArn(AWS_CREDENTIALS_PROVIDER);

    /** Role session name used when the credential provider type is ASSUME_ROLE. */
    public static final String AWS_ROLE_SESSION_NAME = roleSessionName(AWS_CREDENTIALS_PROVIDER);

    /** External ID used when the credential provider type is ASSUME_ROLE. */
    public static final String AWS_ROLE_EXTERNAL_ID = externalId(AWS_CREDENTIALS_PROVIDER);

    /**
     * Credentials provider supplying the credentials for assuming the role when the
     * provider type is ASSUME_ROLE. Roles can be nested, so this can itself be ASSUME_ROLE.
     */
    public static final String AWS_ROLE_CREDENTIALS_PROVIDER = roleCredentialsProvider(AWS_CREDENTIALS_PROVIDER);

    private AWSConfigConstants() {
        // Prevent instantiation
    }

    /** Derives the access-key property name, optionally scoped under {@code prefix + ".basic."}. */
    @Nonnull
    public static String accessKeyId(@Nullable String prefix) {
        if (StringUtils.isEmpty(prefix)) {
            return AWS_ACCESS_KEY_ID;
        }
        return prefix + ".basic." + AWS_ACCESS_KEY_ID;
    }

    /** Unscoped access-key property name. */
    @Nonnull
    public static String accessKeyId() {
        return accessKeyId(null);
    }

    /** Derives the secret-key property name, optionally scoped under {@code prefix + ".basic."}. */
    @Nonnull
    public static String secretKey(@Nullable String prefix) {
        if (StringUtils.isEmpty(prefix)) {
            return AWS_SECRET_ACCESS_KEY;
        }
        return prefix + ".basic." + AWS_SECRET_ACCESS_KEY;
    }

    /** Unscoped secret-key property name. */
    @Nonnull
    public static String secretKey() {
        return secretKey(null);
    }

    /** Profile-file path property name under the given (non-blank) prefix. */
    @Nonnull
    public static String profilePath(@Nonnull String prefix) {
        // Validate.notBlank returns its argument, so validation and concatenation fuse.
        return Validate.notBlank(prefix) + ".profile.path";
    }

    /** Profile-name property name under the given (non-blank) prefix. */
    @Nonnull
    public static String profileName(@Nonnull String prefix) {
        return Validate.notBlank(prefix) + ".profile.name";
    }

    /** Role-ARN property name under the given (non-blank) prefix. */
    @Nonnull
    public static String roleArn(@Nonnull String prefix) {
        return Validate.notBlank(prefix) + ".role.arn";
    }

    /** Role-session-name property name under the given (non-blank) prefix. */
    @Nonnull
    public static String roleSessionName(@Nonnull String prefix) {
        return Validate.notBlank(prefix) + ".role.sessionName";
    }

    /** External-ID property name under the given (non-blank) prefix. */
    @Nonnull
    public static String externalId(@Nonnull String prefix) {
        return Validate.notBlank(prefix) + ".role.externalId";
    }

    /** Nested credentials-provider property name under the given (non-blank) prefix. */
    @Nonnull
    public static String roleCredentialsProvider(@Nonnull String prefix) {
        return Validate.notBlank(prefix) + ".role.provider";
    }
}
| 4,004 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/serialization/JsonSerializationSchema.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.serialization;
import com.amazonaws.services.kinesisanalytics.flink.connectors.exception.SerializationException;
import org.apache.commons.lang3.Validate;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
public class JsonSerializationSchema<T> implements SerializationSchema<T> {

    // Jackson's ObjectMapper is thread-safe once configured, so a single shared
    // instance is reused across all serialize() calls.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    /**
     * Serializes the incoming element to its JSON byte representation.
     *
     * @param element the element to serialize; must not be {@code null}
     * @return the JSON-encoded bytes
     * @throws SerializationException if Jackson cannot encode the element
     */
    @Override
    public byte[] serialize(T element) {
        Validate.notNull(element);
        try {
            return MAPPER.writeValueAsBytes(element);
        } catch (JsonProcessingException e) {
            throw new SerializationException("Failed trying to serialize", e);
        }
    }
}
| 4,005 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/serialization/KinesisFirehoseSerializationSchema.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.serialization;
import java.io.Serializable;
import java.nio.ByteBuffer;
public interface KinesisFirehoseSerializationSchema<T> extends Serializable {
ByteBuffer serialize(T element);
}
| 4,006 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/DefaultCredentialProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import java.util.Properties;
/**
 * Credential provider backed by the SDK's {@link DefaultAWSCredentialsProviderChain},
 * which searches environment variables, system properties, the profile file, and
 * instance metadata in turn.
 */
public class DefaultCredentialProvider extends CredentialProvider {

    public DefaultCredentialProvider(final Properties properties, final String providerKey) {
        super(properties, providerKey);
    }

    public DefaultCredentialProvider(final Properties properties) {
        // No provider key: the default chain ignores the configuration prefix.
        super(properties, null);
    }

    @Override
    public AWSCredentialsProvider getAwsCredentialsProvider() {
        return new DefaultAWSCredentialsProviderChain();
    }
}
| 4,007 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/ProfileCredentialProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import org.apache.commons.lang3.StringUtils;
import java.util.Properties;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.validateProfileProviderConfiguration;
/**
 * Credential provider that reads credentials from an AWS profile file. The profile
 * name (required) and profile path (optional) are resolved from the configuration
 * under the given provider key prefix.
 */
public class ProfileCredentialProvider extends CredentialProvider {

    public ProfileCredentialProvider(final Properties properties, final String providerKey) {
        super(validateProfileProviderConfiguration(properties, providerKey), providerKey);
    }

    public ProfileCredentialProvider(final Properties properties) {
        this(properties, AWS_CREDENTIALS_PROVIDER);
    }

    @Override
    public AWSCredentialsProvider getAwsCredentialsProvider() {
        final String name = properties.getProperty(AWSConfigConstants.profileName(providerKey));
        final String path = properties.getProperty(AWSConfigConstants.profilePath(providerKey));
        if (StringUtils.isEmpty(path)) {
            // No explicit path: the SDK falls back to the default profile file location.
            return new ProfileCredentialsProvider(name);
        }
        return new ProfileCredentialsProvider(path, name);
    }
}
| 4,008 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/CredentialProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil;
import java.util.Properties;
/**
 * Base class for the connector's credential providers. Holds the validated
 * configuration and the (never-null) provider key prefix used to resolve
 * provider-specific properties.
 */
public abstract class CredentialProvider {

    final Properties properties;
    final String providerKey;

    CredentialProvider(final Properties properties, final String providerKey) {
        // validateConfiguration rejects null properties and inconsistent addressing config.
        this.properties = AWSUtil.validateConfiguration(properties);
        this.providerKey = (providerKey != null) ? providerKey : "";
    }

    public CredentialProvider(final Properties properties) {
        this(properties, null);
    }

    /** Produces the SDK-level credentials provider backing this configuration. */
    public abstract AWSCredentialsProvider getAwsCredentialsProvider();

    protected Properties getProperties() {
        return properties;
    }

    protected String getProviderKey() {
        return providerKey;
    }
}
| 4,009 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/SystemCredentialProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.SystemPropertiesCredentialsProvider;
import java.util.Properties;
/**
 * Credential provider that reads credentials from the JVM system properties
 * {@code aws.accessKeyId} and {@code aws.secretKey}.
 */
public class SystemCredentialProvider extends CredentialProvider {

    public SystemCredentialProvider(final Properties properties, final String providerKey) {
        super(properties, providerKey);
    }

    public SystemCredentialProvider(final Properties properties) {
        // No provider key: system-property lookup ignores the configuration prefix.
        super(properties, null);
    }

    @Override
    public AWSCredentialsProvider getAwsCredentialsProvider() {
        return new SystemPropertiesCredentialsProvider();
    }
}
| 4,010 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/EnvironmentCredentialProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.EnvironmentVariableCredentialsProvider;
import java.util.Properties;
/**
 * Credential provider that reads credentials from the environment variables
 * {@code AWS_ACCESS_KEY_ID} and {@code AWS_SECRET_ACCESS_KEY}.
 */
public class EnvironmentCredentialProvider extends CredentialProvider {

    public EnvironmentCredentialProvider(final Properties properties, final String providerKey) {
        super(properties, providerKey);
    }

    public EnvironmentCredentialProvider(final Properties properties) {
        // No provider key: environment lookup ignores the configuration prefix.
        super(properties, null);
    }

    @Override
    public AWSCredentialsProvider getAwsCredentialsProvider() {
        return new EnvironmentVariableCredentialsProvider();
    }
}
| 4,011 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/BasicCredentialProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import java.util.Properties;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.accessKeyId;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.secretKey;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.validateBasicProviderConfiguration;
/**
 * Credential provider that serves a static access key / secret key pair taken
 * directly from the configuration (BASIC provider type). The constructor rejects
 * configurations missing either key.
 */
public class BasicCredentialProvider extends CredentialProvider {

    public BasicCredentialProvider(final Properties properties, final String providerKey) {
        super(validateBasicProviderConfiguration(properties, providerKey), providerKey);
    }

    public BasicCredentialProvider(Properties properties) {
        this(properties, null);
    }

    @Override
    public AWSCredentialsProvider getAwsCredentialsProvider() {
        // Property names depend only on the immutable providerKey, so resolve them once.
        final String accessKeyProp = accessKeyId(providerKey);
        final String secretKeyProp = secretKey(providerKey);

        return new AWSCredentialsProvider() {
            @Override
            public AWSCredentials getCredentials() {
                return new BasicAWSCredentials(
                        properties.getProperty(accessKeyProp),
                        properties.getProperty(secretKeyProp));
            }

            @Override
            public void refresh() {
                // Static credentials: nothing to refresh.
            }
        };
    }
}
| 4,012 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/AssumeRoleCredentialsProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.factory.CredentialProviderFactory;
import com.amazonaws.services.securitytoken.AWSSecurityTokenService;
import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClientBuilder;
import javax.annotation.Nonnull;
import java.util.Properties;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.getCredentialProviderType;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.util.AWSUtil.validateAssumeRoleCredentialsProvider;
/**
 * Credential provider that assumes an IAM role through AWS STS.
 *
 * <p>The "base" credentials used to call STS are themselves resolved through a
 * nested {@link CredentialProvider}, whose type is read from the configuration
 * under the nested {@code roleCredentialsProvider} key.
 */
public class AssumeRoleCredentialsProvider extends CredentialProvider {
/**
 * @param properties  AWS configuration properties (role ARN, session name,
 *                    external id, region, and nested base-provider settings)
 * @param providerKey configuration prefix under which the role settings live
 */
public AssumeRoleCredentialsProvider(final Properties properties, final String providerKey) {
super(validateAssumeRoleCredentialsProvider(properties, providerKey), providerKey);
}
/** Convenience constructor using the default top-level provider key. */
public AssumeRoleCredentialsProvider(final Properties properties) {
this(properties, AWS_CREDENTIALS_PROVIDER);
}
@Override
public AWSCredentialsProvider getAwsCredentialsProvider() {
// Resolve the nested provider that supplies credentials for the STS call itself.
final String baseCredentialsProviderKey = AWSConfigConstants.roleCredentialsProvider(providerKey);
final AWSConfigConstants.CredentialProviderType baseCredentialsProviderType = getCredentialProviderType(properties, baseCredentialsProviderKey);
final CredentialProvider baseCredentialsProvider =
CredentialProviderFactory.newCredentialProvider(baseCredentialsProviderType, properties, baseCredentialsProviderKey);
// Build an STS client in the configured region using those base credentials.
final AWSSecurityTokenService baseCredentials = AWSSecurityTokenServiceClientBuilder.standard()
.withCredentials(baseCredentialsProvider.getAwsCredentialsProvider())
.withRegion(properties.getProperty(AWSConfigConstants.AWS_REGION))
.build();
return createAwsCredentialsProvider(
properties.getProperty(AWSConfigConstants.roleArn(providerKey)),
properties.getProperty(AWSConfigConstants.roleSessionName(providerKey)),
properties.getProperty(AWSConfigConstants.externalId(providerKey)),
baseCredentials);
}
// Package-private seam so tests can inject a mock AWSSecurityTokenService.
AWSCredentialsProvider createAwsCredentialsProvider(@Nonnull String roleArn,
@Nonnull String roleSessionName,
@Nonnull String externalId,
@Nonnull AWSSecurityTokenService securityTokenService) {
return new STSAssumeRoleSessionCredentialsProvider.Builder(roleArn, roleSessionName)
.withExternalId(externalId)
.withStsClient(securityTokenService)
.build();
}
}
| 4,013 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/provider/credential/factory/CredentialProviderFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.factory;
import com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.CredentialProviderType;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.AssumeRoleCredentialsProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.BasicCredentialProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.CredentialProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.DefaultCredentialProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.EnvironmentCredentialProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.ProfileCredentialProvider;
import com.amazonaws.services.kinesisanalytics.flink.connectors.provider.credential.SystemCredentialProvider;
import org.apache.commons.lang3.Validate;
import java.util.Properties;
import static com.amazonaws.services.kinesisanalytics.flink.connectors.config.AWSConfigConstants.AWS_CREDENTIALS_PROVIDER;
/**
 * Static factory that maps a {@link CredentialProviderType} to the matching
 * {@link CredentialProvider} implementation.
 */
public final class CredentialProviderFactory {

    private CredentialProviderFactory() {
        // Static factory only; never instantiated.
    }

    /**
     * Builds the credential provider for {@code credentialProviderType}.
     * A {@code null} type falls back to {@link DefaultCredentialProvider}.
     *
     * @param credentialProviderType requested provider type, possibly {@code null}
     * @param awsConfigProps         AWS configuration properties, never {@code null}
     * @param awsConfigCredentialProviderKey configuration key prefix for the provider
     */
    public static CredentialProvider newCredentialProvider(final CredentialProviderType credentialProviderType,
                                                           final Properties awsConfigProps,
                                                           final String awsConfigCredentialProviderKey) {
        Validate.notNull(awsConfigProps, "AWS configuration properties cannot be null");

        if (credentialProviderType == null) {
            return new DefaultCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
        }

        switch (credentialProviderType) {
            case BASIC:
                return newBasicCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
            case PROFILE:
                return new ProfileCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
            case ENV_VARIABLES:
                return new EnvironmentCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
            case SYS_PROPERTIES:
                return new SystemCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
            case ASSUME_ROLE:
                return new AssumeRoleCredentialsProvider(awsConfigProps, awsConfigCredentialProviderKey);
            case AUTO:
            default:
                return new DefaultCredentialProvider(awsConfigProps, awsConfigCredentialProviderKey);
        }
    }

    /** Convenience overload using the default top-level provider key. */
    public static CredentialProvider newCredentialProvider(final CredentialProviderType credentialProviderType,
                                                           final Properties awsConfigProps) {
        return newCredentialProvider(credentialProviderType, awsConfigProps, AWS_CREDENTIALS_PROVIDER);
    }

    /**
     * Basic credentials allow the top-level provider key to be absent from the
     * configuration; in that case the provider reads the un-prefixed keys.
     */
    private static CredentialProvider newBasicCredentialProvider(final Properties awsConfigProps,
                                                                 final String providerKey) {
        final boolean topLevelKeyMissing = AWS_CREDENTIALS_PROVIDER.equals(providerKey)
                && !awsConfigProps.containsKey(AWS_CREDENTIALS_PROVIDER);
        return new BasicCredentialProvider(awsConfigProps, topLevelKeyMissing ? null : providerKey);
    }
}
| 4,014 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/exception/RecordCouldNotBeSentException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.exception;
/**
 * Unchecked exception signalling that a record could not be delivered to the
 * downstream sink.
 */
public class RecordCouldNotBeSentException extends RuntimeException {

    // Exceptions are Serializable; pin the version id so the serialized form
    // stays stable across refactorings.
    private static final long serialVersionUID = 1L;

    public RecordCouldNotBeSentException(final String msg, final Throwable ex) {
        super(msg, ex);
    }

    public RecordCouldNotBeSentException(final String msg) {
        super(msg);
    }
}
| 4,015 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/exception/TimeoutExpiredException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.exception;
/**
 * Thrown when a timed operation does not complete within its allowed time.
 */
public class TimeoutExpiredException extends FlinkKinesisFirehoseException {

    // Exceptions are Serializable; pin the version id so the serialized form
    // stays stable across refactorings.
    private static final long serialVersionUID = 1L;

    public TimeoutExpiredException(String msg, Throwable ex) {
        super(msg, ex);
    }

    public TimeoutExpiredException(String msg) {
        super(msg);
    }
}
| 4,016 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/exception/FlinkKinesisFirehoseException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.exception;
/**
 * Base checked exception for failures raised by the Kinesis Firehose Flink
 * connector.
 */
public class FlinkKinesisFirehoseException extends Exception {

    // Exceptions are Serializable; pin the version id so the serialized form
    // stays stable across refactorings.
    private static final long serialVersionUID = 1L;

    public FlinkKinesisFirehoseException(final String msg, final Throwable ex) {
        super(msg, ex);
    }

    public FlinkKinesisFirehoseException(final String msg) {
        super(msg);
    }
}
| 4,017 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/exception/SerializationException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.exception;
/**
 * Unchecked exception signalling that a value could not be serialized.
 */
public class SerializationException extends RuntimeException {

    // Exceptions are Serializable; pin the version id so the serialized form
    // stays stable across refactorings.
    private static final long serialVersionUID = 1L;

    public SerializationException(final String msg, final Throwable t) {
        super(msg, t);
    }

    // Was declared private, which made this constructor unreachable dead code
    // (nothing in the class called it). Widened to public to match the sibling
    // exception types in this package.
    public SerializationException(final String msg) {
        super(msg);
    }
}
| 4,018 |
0 | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors | Create_ds/aws-kinesisanalytics-flink-connectors/src/main/java/com/amazonaws/services/kinesisanalytics/flink/connectors/exception/RecordCouldNotBeBuffered.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.flink.connectors.exception;
/**
 * Thrown when a record could not be added to the producer's internal buffer.
 */
public class RecordCouldNotBeBuffered extends FlinkKinesisFirehoseException {

    // Exceptions are Serializable; pin the version id so the serialized form
    // stays stable across refactorings.
    private static final long serialVersionUID = 1L;

    public RecordCouldNotBeBuffered(String msg, Throwable ex) {
        super(msg, ex);
    }

    public RecordCouldNotBeBuffered(String msg) {
        super(msg);
    }
}
| 4,019 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/util/JsonUtilTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNull;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.griffin.core.job.entity.JobHealth;
import org.junit.Test;
/**
 * Unit tests for the JsonUtil serialization helpers.
 */
public class JsonUtilTest {

    // Compact JSON form of a JobHealth(5, 10).
    public static final String JOB_HEALTH_JSON = "{\"healthyJobCount\":5,\"jobCount\":10}";

    @Test
    public void testToJson() throws JsonProcessingException {
        JobHealth jobHealth = new JobHealth(5, 10);
        String jobHealthStr = JsonUtil.toJson(jobHealth);
        // JUnit convention: expected value first, actual second, so failure
        // messages report the values the right way round.
        assertEquals(JOB_HEALTH_JSON, jobHealthStr);
    }

    @Test
    public void testToJsonWithFormat() throws JsonProcessingException {
        JobHealth jobHealth = new JobHealth(5, 10);
        String jobHealthStr = JsonUtil.toJsonWithFormat(jobHealth);
        // Pretty-printed output must differ from the compact form.
        assertNotEquals(JOB_HEALTH_JSON, jobHealthStr);
    }

    @Test
    public void testToEntityWithParamClass() throws IOException {
        JobHealth jobHealth = JsonUtil.toEntity(JOB_HEALTH_JSON,
                JobHealth.class);
        assertEquals(10, jobHealth.getJobCount());
        assertEquals(5, jobHealth.getHealthyJobCount());
    }

    @Test
    public void testToEntityWithNullParamClass() throws IOException {
        // A null input string deserializes to null rather than throwing.
        String str = null;
        JobHealth jobHealth = JsonUtil.toEntity(str, JobHealth.class);
        assertNull(jobHealth);
    }

    @Test
    public void testToEntityWithParamTypeReference() throws IOException {
        TypeReference<HashMap<String, Integer>> type =
                new TypeReference<HashMap<String, Integer>>() {
                };
        Map map = JsonUtil.toEntity(JOB_HEALTH_JSON, type);
        assertEquals(10, map.get("jobCount"));
    }

    @Test
    public void testToEntityWithNullParamTypeReference() throws IOException {
        String str = null;
        TypeReference<HashMap<String, Integer>> type =
                new TypeReference<HashMap<String, Integer>>() {
                };
        Map map = JsonUtil.toEntity(str, type);
        assertNull(map);
    }
}
| 4,020 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/util/PredicatorMock.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import org.apache.griffin.core.job.Predicator;
import org.apache.griffin.core.job.entity.SegmentPredicate;
import java.io.IOException;
/**
 * Test double for the {@link Predicator} interface that always reports the
 * predicate as satisfied.
 *
 * <p>Instantiated reflectively by class name (it is referenced via the
 * {@code "class"} entry of a "custom" SegmentPredicate configuration in
 * EntityMocksHelper), hence the required single-argument constructor.
 */
public class PredicatorMock implements Predicator {
// The predicate definition is intentionally ignored; this mock never inspects it.
public PredicatorMock(SegmentPredicate segmentPredicate) {
}
/**
 * @return always {@code true}
 */
@Override
public boolean predicate() throws IOException {
return true;
}
}
| 4,021 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/util/URLHelper.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
/**
 * URL path constants shared by the service API tests.
 */
public class URLHelper {

    /** Base path prefix for version 1 of the service REST API. */
    public static final String API_VERSION_PATH = "/api/v1";

    /** Path of the Kafka metadata endpoints, relative to the API root. */
    public static final String KAFKA_API_PATH = "/metadata/kafka";

    // Constants holder: suppress the implicit public constructor so the
    // class cannot be instantiated.
    private URLHelper() {
    }
}
| 4,022 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/util/PropertiesUtilTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import static org.apache.griffin.core.util.PropertiesUtil.getConf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.FileNotFoundException;
import java.util.Properties;
import org.junit.Test;
import org.springframework.core.io.ClassPathResource;
/**
 * Unit tests for the PropertiesUtil configuration-loading helpers.
 */
public class PropertiesUtilTest {

    @Test
    public void testGetPropertiesForSuccess() {
        String path = "/quartz.properties";
        Properties properties = PropertiesUtil.getProperties(path,
                new ClassPathResource(path));
        // JUnit convention: expected value first, actual second.
        assertEquals("true", properties.get("org.quartz.jobStore.isClustered"));
    }

    @Test
    public void testGetPropertiesForFailureWithWrongPath() {
        String path = ".././quartz.properties";
        Properties properties = PropertiesUtil.getProperties(path,
                new ClassPathResource(path));
        // Loading from an invalid path yields null rather than throwing.
        assertEquals(null, properties);
    }

    @Test
    public void testGetConfWithLocation() throws FileNotFoundException {
        String name = "sparkJob.properties";
        String defaultPath = "/" + name;
        String location = "src/test/resources";
        Properties properties = getConf(name, defaultPath, location);
        assertNotNull(properties);
    }

    @Test
    public void testGetConfWithLocationEmpty() throws FileNotFoundException {
        // A location that does not contain the file falls back to the default path.
        String name = "sparkJob.properties";
        String defaultPath = "/" + name;
        String location = "src/main";
        Properties properties = getConf(name, defaultPath, location);
        assertNotNull(properties);
    }

    @Test
    public void testGetConfWithNoLocation() throws FileNotFoundException {
        String name = "sparkJob.properties";
        String defaultPath = "/" + name;
        Properties properties = getConf(name, defaultPath, null);
        assertNotNull(properties);
    }
}
| 4,023 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/util/EntityMocksHelper.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import static org.apache.griffin.core.job.JobInstance.MEASURE_KEY;
import static org.apache.griffin.core.job.JobInstance.PREDICATES_KEY;
import static org.apache.griffin.core.job.JobInstance.PREDICATE_JOB_NAME;
import static org.apache.griffin.core.job.JobServiceImpl.GRIFFIN_JOB_ID;
import static org.apache.hadoop.mapreduce.MRJobConfig.JOB_NAME;
import com.fasterxml.jackson.core.JsonProcessingException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.BatchJob;
import org.apache.griffin.core.job.entity.JobDataSegment;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.LivySessionStates;
import org.apache.griffin.core.job.entity.SegmentPredicate;
import org.apache.griffin.core.job.entity.SegmentRange;
import org.apache.griffin.core.job.entity.VirtualJob;
import org.apache.griffin.core.measure.entity.*;
import org.quartz.JobDataMap;
import org.quartz.JobKey;
import org.quartz.SimpleTrigger;
import org.quartz.impl.JobDetailImpl;
import org.quartz.impl.triggers.SimpleTriggerImpl;
public class EntityMocksHelper {
public static final String CRON_EXPRESSION = "0 0/4 * * * ?";
public static final String TIME_ZONE = "GMT+8:00";
public static GriffinMeasure createGriffinMeasure(String name)
throws Exception {
DataConnector dcSource = createDataConnector("source_name", "default",
"test_data_src", "dt=#YYYYMMdd# AND hour=#HH#");
DataConnector dcTarget = createDataConnector("target_name", "default",
"test_data_tgt", "dt=#YYYYMMdd# AND hour=#HH#");
return createGriffinMeasure(name, dcSource, dcTarget);
}
public static GriffinMeasure createGriffinMeasure(
String name,
SegmentPredicate srcPredicate,
SegmentPredicate tgtPredicate)
throws Exception {
DataConnector dcSource = createDataConnector("source_name", "default",
"test_data_src", "dt=#YYYYMMdd# AND hour=#HH#", srcPredicate);
DataConnector dcTarget = createDataConnector("target_name", "default",
"test_data_tgt", "dt=#YYYYMMdd# AND hour=#HH#", tgtPredicate);
return createGriffinMeasure(name, dcSource, dcTarget);
}
public static GriffinMeasure createGriffinMeasure(
String name,
DataConnector dcSource,
DataConnector dcTarget)
throws Exception {
DataSource dataSource = new DataSource(
"source", true, createCheckpointMap(), dcSource);
DataSource targetSource = new DataSource(
"target", false, createCheckpointMap(), dcTarget);
List<DataSource> dataSources = new ArrayList<>();
dataSources.add(dataSource);
dataSources.add(targetSource);
Rule rule = createRule();
EvaluateRule evaluateRule = new EvaluateRule(Arrays.asList(rule));
return new GriffinMeasure(
name, "test", dataSources,
evaluateRule, Arrays.asList("ELASTICSEARCH", "HDFS"));
}
private static Rule createRule() throws JsonProcessingException {
Map<String, Object> map = new HashMap<>();
map.put("detail", "detail");
String rule = "source.id=target.id " +
"AND source.name=target.name AND source.age=target.age";
Map<String, Object> metricMap = new HashMap<>();
Map<String, Object> recordMap = new HashMap<>();
metricMap.put("type", "metric");
metricMap.put("name", "accu");
recordMap.put("type", "record");
recordMap.put("name", "missRecords");
List<Map<String, Object>> outList = Arrays.asList(metricMap, recordMap);
return new Rule(
"griffin-dsl", DqType.ACCURACY, rule,
"in", "out", map, outList);
}
private static Map<String, Object> createCheckpointMap() {
Map<String, Object> map = new HashMap<>();
map.put("info.path", "source");
return map;
}
public static DataConnector createDataConnector(
String name,
String database,
String table,
String where)
throws IOException {
HashMap<String, String> config = new HashMap<>();
config.put("database", database);
config.put("table.name", table);
config.put("where", where);
return new DataConnector(
name, DataConnector.DataType.HIVE, "1.2",
JsonUtil.toJson(config), "kafka");
}
public static DataConnector createDataConnector(
String name,
String database,
String table,
String where,
SegmentPredicate predicate) {
HashMap<String, String> config = new HashMap<>();
config.put("database", database);
config.put("table.name", table);
config.put("where", where);
return new DataConnector(name, "1h", config, Arrays.asList(predicate));
}
public static ExternalMeasure createExternalMeasure(String name) {
return new ExternalMeasure(name, "description", "org",
"test", "metricName", new VirtualJob());
}
public static AbstractJob createJob(String jobName) {
JobDataSegment segment1 = createJobDataSegment("source_name", true);
JobDataSegment segment2 = createJobDataSegment("target_name", false);
List<JobDataSegment> segments = new ArrayList<>();
segments.add(segment1);
segments.add(segment2);
return new BatchJob(1L, jobName,
CRON_EXPRESSION, TIME_ZONE, segments, false);
}
public static AbstractJob createJob(String jobName, SegmentRange range) {
BatchJob job = new BatchJob();
JobDataSegment segment1 = createJobDataSegment(
"source_name", true, range);
JobDataSegment segment2 = createJobDataSegment(
"target_name", false, range);
List<JobDataSegment> segments = new ArrayList<>();
segments.add(segment1);
segments.add(segment2);
return new BatchJob(1L, jobName,
CRON_EXPRESSION, TIME_ZONE, segments, false);
}
public static AbstractJob createJob(
String jobName,
JobDataSegment source,
JobDataSegment target) {
List<JobDataSegment> segments = new ArrayList<>();
segments.add(source);
segments.add(target);
return new BatchJob(1L, jobName,
CRON_EXPRESSION, TIME_ZONE, segments, false);
}
public static JobDataSegment createJobDataSegment(
String dataConnectorName,
Boolean baseline,
SegmentRange range) {
return new JobDataSegment(dataConnectorName, baseline, range);
}
public static JobDataSegment createJobDataSegment(
String dataConnectorName,
Boolean baseline) {
return new JobDataSegment(dataConnectorName, baseline);
}
public static JobInstanceBean createJobInstance() {
JobInstanceBean jobBean = new JobInstanceBean();
jobBean.setSessionId(1L);
jobBean.setState(LivySessionStates.State.STARTING);
jobBean.setAppId("app_id");
jobBean.setTms(System.currentTimeMillis());
return jobBean;
}
public static JobDetailImpl createJobDetail(
String measureJson,
String predicatesJson) {
JobDetailImpl jobDetail = new JobDetailImpl();
JobKey jobKey = new JobKey("name", "group");
jobDetail.setKey(jobKey);
JobDataMap jobDataMap = new JobDataMap();
jobDataMap.put(MEASURE_KEY, measureJson);
jobDataMap.put(PREDICATES_KEY, predicatesJson);
jobDataMap.put(JOB_NAME, "jobName");
jobDataMap.put("jobName", "jobName");
jobDataMap.put(PREDICATE_JOB_NAME, "predicateJobName");
jobDataMap.put(GRIFFIN_JOB_ID, 1L);
jobDetail.setJobDataMap(jobDataMap);
return jobDetail;
}
public static SegmentPredicate createFileExistPredicate()
throws IOException {
Map<String, String> config = new HashMap<>();
config.put("root.path", "hdfs:///griffin/demo_src");
config.put("path", "/dt=#YYYYMMdd#/hour=#HH#/_DONE");
SegmentPredicate segmentPredicate = new SegmentPredicate("file.exist", config);
segmentPredicate.setId(1L);
segmentPredicate.load();
return segmentPredicate;
}
public static SegmentPredicate createMockPredicate()
throws IOException {
Map<String, String> config = new HashMap<>();
config.put("class", "org.apache.griffin.core.util.PredicatorMock");
SegmentPredicate segmentPredicate = new SegmentPredicate("custom", config);
segmentPredicate.setId(1L);
segmentPredicate.load();
return segmentPredicate;
}
public static Map<String, Object> createJobDetailMap() {
Map<String, Object> detail = new HashMap<>();
detail.put("jobId", 1L);
detail.put("jobName", "jobName");
detail.put("measureId", 1L);
detail.put("cronExpression", CRON_EXPRESSION);
return detail;
}
public static SimpleTrigger createSimpleTrigger(
int repeatCount,
int triggerCount) {
SimpleTriggerImpl trigger = new SimpleTriggerImpl();
trigger.setRepeatCount(repeatCount);
trigger.setTimesTriggered(triggerCount);
trigger.setPreviousFireTime(new Date());
return trigger;
}
/**
 * Creates a {@link BatchJob} fixture with fixed ids, job name and quartz
 * name/group, and a {@code false} final flag.
 */
public static BatchJob createGriffinJob() {
    BatchJob batchJob = new BatchJob(1L, 1L, "jobName",
            "quartzJobName", "quartzGroupName", false);
    return batchJob;
}
}
| 4,024 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/util/TimeUtilTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import static org.junit.Assert.assertEquals;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.junit4.SpringRunner;
/**
 * Unit tests for {@code TimeUtil}: duration-string parsing into
 * milliseconds, timestamp formatting, and time-zone resolution.
 *
 * <p>All assertions pass the expected value first, per the JUnit
 * {@code assertEquals(expected, actual)} convention, so failure messages
 * report expected/actual the right way around.
 */
@RunWith(SpringRunner.class)
public class TimeUtilTest {

    @Test
    public void testStr2LongWithPositive() {
        // 2h + 3m + 4s = 7200000 + 180000 + 4000 ms
        String time = "2hr3m4s";
        assertEquals("7384000", String.valueOf(TimeUtil.str2Long(time)));
    }

    @Test
    public void testStr2LongWithNegative() {
        String time = "-2hr3min4s";
        assertEquals("-7384000", String.valueOf(TimeUtil.str2Long(time)));
    }

    @Test
    public void testStr2LongWithNull() {
        // A null duration string parses to zero instead of failing.
        String time = null;
        assertEquals("0", String.valueOf(TimeUtil.str2Long(time)));
    }

    @Test
    public void testStr2LongWithDay() {
        String time = "1DAY";
        assertEquals("86400000", String.valueOf(TimeUtil.str2Long(time)));
    }

    @Test
    public void testStr2LongWithHour() {
        String time = "1h";
        assertEquals("3600000", String.valueOf(TimeUtil.str2Long(time)));
    }

    @Test
    public void testStr2LongWithMinute() {
        String time = "1m";
        assertEquals("60000", String.valueOf(TimeUtil.str2Long(time)));
    }

    @Test
    public void testStr2LongWithSecond() {
        String time = "1s";
        assertEquals("1000", String.valueOf(TimeUtil.str2Long(time)));
    }

    @Test
    public void testStr2LongWithMillisecond() {
        String time = "1ms";
        assertEquals("1", String.valueOf(TimeUtil.str2Long(time)));
    }

    @Test
    public void testStr2LongWithIllegalFormat() {
        // The unrecognized "y" unit appears to be skipped; only "2m3s"
        // contributes (2 * 60000 + 3 * 1000 = 123000 ms).
        String time = "1y2m3s";
        assertEquals("123000", String.valueOf(TimeUtil.str2Long(time)));
    }

    @Test
    public void testFormat() {
        String format = "dt=#YYYYMMdd#";
        Long time = 1516186620155L;
        String timeZone = "GMT+8:00";
        assertEquals("dt=20180117",
                TimeUtil.format(format, time, TimeZone.getTimeZone(timeZone)));
    }

    @Test
    public void testFormatWithDiff() {
        String format = "dt=#YYYYMMdd#/hour=#HH#";
        Long time = 1516186620155L;
        String timeZone = "GMT+8:00";
        assertEquals("dt=20180117/hour=18",
                TimeUtil.format(format, time, TimeZone.getTimeZone(timeZone)));
    }

    @Test(expected = IllegalArgumentException.class)
    public void testFormatWithIllegalException() {
        // Escaped '#' delimiters yield no parsable #...# section.
        String format = "\\#YYYYMMdd\\#";
        Long time = 1516186620155L;
        String timeZone = "GMT+8:00";
        TimeUtil.format(format, time, TimeZone.getTimeZone(timeZone));
    }

    @Test
    public void testGetTimeZone() {
        Map<String, String> tests = new HashMap<>();
        tests.put("", TimeZone.getDefault().getID());
        // standard cases
        tests.put("GMT", "GMT");
        tests.put("GMT+1", "GMT+01:00");
        tests.put("GMT+1:00", "GMT+01:00");
        tests.put("GMT+01:00", "GMT+01:00");
        tests.put("GMT-1", "GMT-01:00");
        tests.put("GMT-1:00", "GMT-01:00");
        tests.put("GMT-01:00", "GMT-01:00");
        // values pushed by UI for jobs
        tests.put("GMT1", "GMT");
        tests.put("GMT1:00", "GMT");
        tests.put("GMT01:00", "GMT");
        // values generated by UI for datasets in a past
        tests.put("UTC1", "GMT");
        tests.put("UTC1:00", "GMT");
        tests.put("UTC01:00", "GMT");
        tests.put("UTC-1", "GMT");
        tests.put("UTC-1:00", "GMT");
        tests.put("UTC-01:00", "GMT");
        // "named" time zones support
        tests.put("CST", "CST"); // supported
        tests.put("CDT", "GMT"); // not supported
        tests.put("America/Los_Angeles", "America/Los_Angeles"); // supported
        tests.forEach((input, expected) -> {
            String actual = TimeUtil.getTimeZone(input).getID();
            assertEquals(String.format("For input: %s", input), expected, actual);
        });
    }
}
| 4,025 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/config/PropertiesConfigTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.config;
import static org.junit.Assert.assertEquals;
import java.util.Properties;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;
/**
 * Tests for {@link PropertiesConfig}: Livy and Quartz configuration loading
 * from an explicit directory, from the default classpath fallback (null
 * location), and from a location that does not exist.
 *
 * <p>All assertions pass the expected value first, per the JUnit
 * {@code assertEquals(expected, actual)} convention (the quartz assertions
 * previously had the arguments swapped, which inverts failure messages).
 */
@RunWith(SpringRunner.class)
public class PropertiesConfigTest {

    @TestConfiguration
    public static class PropertiesConf {

        /** Livy config falling back to the classpath default. */
        @Bean(name = "noLivyConf")
        public PropertiesConfig noSparkConf() {
            return new PropertiesConfig(null, null);
        }

        /** Livy config loaded from an explicit directory. */
        @Bean(name = "livyConf")
        public PropertiesConfig sparkConf() {
            return new PropertiesConfig("src/test/resources", null);
        }

        /** Livy config pointing at a directory that does not exist. */
        @Bean(name = "livyNotFoundConfig")
        public PropertiesConfig sparkNotFoundConfig() {
            return new PropertiesConfig("test", null);
        }

        /** Quartz config falling back to the classpath default. */
        @Bean(name = "noQuartzConf")
        public PropertiesConfig noQuartzConf() {
            return new PropertiesConfig(null, null);
        }

        /** Quartz config loaded from an explicit directory. */
        @Bean(name = "quartzConf")
        public PropertiesConfig quartzConf() {
            return new PropertiesConfig("src/test/resources", null);
        }

        /** Quartz config pointing at a directory that does not exist. */
        @Bean(name = "quartzNotFoundConfig")
        public PropertiesConfig quartzNotFoundConfig() {
            return new PropertiesConfig("test", null);
        }
    }

    @Autowired
    @Qualifier(value = "noLivyConf")
    private PropertiesConfig noLivyConf;

    @Autowired
    @Qualifier(value = "livyConf")
    private PropertiesConfig livyConf;

    @Autowired
    @Qualifier(value = "livyNotFoundConfig")
    private PropertiesConfig livyNotFoundConfig;

    @Autowired
    @Qualifier(value = "noQuartzConf")
    private PropertiesConfig noQuartzConf;

    @Autowired
    @Qualifier(value = "quartzConf")
    private PropertiesConfig quartzConf;

    @Autowired
    @Qualifier(value = "quartzNotFoundConfig")
    private PropertiesConfig quartzNotFoundConfig;

    @Test
    public void appConf() throws Exception {
        String conf = (String) noLivyConf.livyConfMap.get("name");
        assertEquals("test", conf);
    }

    @Test
    public void livyConfWithLocationNotNull() throws Exception {
        String conf = (String) livyConf.livyConfMap.get("sparkJob.name");
        assertEquals("testJob", conf);
    }

    @Test
    public void livyConfWithLocationNull() throws Exception {
        String conf = (String) noLivyConf.livyConfMap.get("sparkJob.name");
        assertEquals("testJob", conf);
    }

    @Test
    public void quartzConfWithLocationNotNull() throws Exception {
        Properties conf = quartzConf.quartzConf();
        assertEquals("spring-boot-quartz-test",
                conf.get("org.quartz.scheduler.instanceName"));
    }

    @Test
    public void quartzConfWithLocationNull() throws Exception {
        Properties conf = noQuartzConf.quartzConf();
        assertEquals("spring-boot-quartz-test",
                conf.get("org.quartz.scheduler.instanceName"));
    }
}
| 4,026 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/config/EclipseLinkJpaConfigForTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.config;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.eclipse.persistence.config.PersistenceUnitProperties;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.orm.jpa.JpaBaseConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.JpaProperties;
import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.orm.jpa.vendor.AbstractJpaVendorAdapter;
import org.springframework.orm.jpa.vendor.EclipseLinkJpaVendorAdapter;
import org.springframework.transaction.jta.JtaTransactionManager;
/**
 * Test JPA configuration that wires EclipseLink as the JPA vendor, with
 * weaving disabled and create-or-extend DDL generation so the in-memory
 * schema can evolve freely between test runs.
 */
@TestConfiguration
@ComponentScan("org.apache.griffin.core")
public class EclipseLinkJpaConfigForTest extends JpaBaseConfiguration {

    protected EclipseLinkJpaConfigForTest(
            DataSource dataSource,
            JpaProperties jpaProperties,
            ObjectProvider<JtaTransactionManager> jtaTransactionManager,
            ObjectProvider<TransactionManagerCustomizers> customizers) {
        super(dataSource, jpaProperties, jtaTransactionManager, customizers);
    }

    @Override
    protected AbstractJpaVendorAdapter createJpaVendorAdapter() {
        return new EclipseLinkJpaVendorAdapter();
    }

    @Override
    protected Map<String, Object> getVendorProperties() {
        Map<String, Object> vendorProperties = new HashMap<>();
        // Disable load-time weaving; not available in the test harness.
        vendorProperties.put(PersistenceUnitProperties.WEAVING, "false");
        vendorProperties.put(PersistenceUnitProperties.DDL_GENERATION,
                "create-or-extend-tables");
        return vendorProperties;
    }
}
| 4,027 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/info/GriffinInfoControllerTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.info;
import static org.hamcrest.CoreMatchers.is;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.griffin.core.util.URLHelper;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
/**
 * Web-layer test for {@link GriffinInfoController}: the version endpoint
 * returns HTTP 200 with the current service version string.
 */
@RunWith(SpringRunner.class)
@WebMvcTest(value = GriffinInfoController.class, secure = false)
public class GriffinInfoControllerTest {

    @Autowired
    private MockMvc mockMvc;

    @Test
    public void testGreeting() throws Exception {
        String versionUrl = URLHelper.API_VERSION_PATH + "/version";
        mockMvc.perform(get(versionUrl))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", is("0.5.0")));
    }
}
| 4,028 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/job/FileExistPredicatorTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.griffin.core.job.entity.SegmentPredicate;
import org.apache.tools.ant.util.FileUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Tests for {@link FileExistPredicator}: the predicate holds only when the
 * configured marker file exists under the configured root path.
 */
public class FileExistPredicatorTest {

    static String fileName = "_SUCCESS";
    static String rootPath = "/tmp/";

    @BeforeClass
    public static void mkFile() throws IOException {
        // Create the parent directory before the marker file, otherwise
        // createNewFile can fail (createFileExclusively exception).
        File dir = new File(rootPath);
        if (!dir.exists()) {
            dir.mkdir();
        }
        File marker = new File(rootPath + fileName);
        if (!marker.exists()) {
            marker.createNewFile();
        }
    }

    @AfterClass
    public static void deleteFile() {
        File marker = new File(rootPath + fileName);
        if (marker.exists()) {
            FileUtils.delete(marker);
        }
    }

    @Test(expected = NullPointerException.class)
    public void test_predicate_null() throws IOException {
        // With no "path"/"root.path" entries, predicate() is expected to
        // throw a NullPointerException; the assert is never reached.
        SegmentPredicate predicate = new SegmentPredicate();
        predicate.setConfig("test config");
        predicate.setConfigMap(new HashMap<>());
        FileExistPredicator predicator = new FileExistPredicator(predicate);
        assertTrue(predicator.predicate());
    }

    @Test
    public void test_predicate() throws IOException {
        SegmentPredicate predicate = new SegmentPredicate();
        predicate.setConfig("test config");
        Map<String, Object> conf = new HashMap<>();
        conf.put("path", fileName);
        conf.put("root.path", rootPath);
        predicate.setConfigMap(conf);
        FileExistPredicator predicator = new FileExistPredicator(predicate);
        // The marker file created in @BeforeClass exists.
        assertTrue(predicator.predicate());
        // A non-existent file name makes the predicate fail.
        conf.put("path", "fileName");
        predicate.setConfigMap(conf);
        assertFalse(predicator.predicate());
    }
}
| 4,029 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/job/JobControllerTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.INSTANCE_ID_DOES_NOT_EXIST;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_ID_DOES_NOT_EXIST;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_NAME_DOES_NOT_EXIST;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinJob;
import static org.apache.griffin.core.util.EntityMocksHelper.createJobInstance;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.Arrays;
import java.util.Collections;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.exception.GriffinExceptionHandler;
import org.apache.griffin.core.exception.GriffinExceptionMessage;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.JobHealth;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.LivySessionStates;
import org.apache.griffin.core.util.URLHelper;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
/**
 * MVC-layer tests for {@link JobController}, with {@link JobServiceImpl}
 * mocked and Griffin's exception handler installed as controller advice so
 * that service exceptions map to the expected HTTP statuses.
 */
@RunWith(SpringRunner.class)
public class JobControllerTest {

    private MockMvc mockMvc;

    @Mock
    private JobServiceImpl jobService;

    @InjectMocks
    private JobController jobController;

    @Before
    public void setup() {
        mockMvc = MockMvcBuilders
                .standaloneSetup(jobController)
                .setControllerAdvice(new GriffinExceptionHandler())
                .build();
    }

    @Test
    public void testGetJobs() throws Exception {
        AbstractJob aliveJob = createGriffinJob();
        aliveJob.setJobName("job_name");
        given(jobService.getAliveJobs(""))
                .willReturn(Collections.singletonList(aliveJob));

        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs")
                .contentType(MediaType.APPLICATION_JSON))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$[0]['job.name']", is("job_name")));
    }

    @Test
    public void testDeleteJobByIdForSuccess() throws Exception {
        doNothing().when(jobService).deleteJob(1L);

        mockMvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs/1"))
                .andExpect(status().isNoContent());
    }

    @Test
    public void testDeleteJobByIdForFailureWithNotFound() throws Exception {
        doThrow(new GriffinException.NotFoundException(JOB_ID_DOES_NOT_EXIST))
                .when(jobService).deleteJob(1L);

        mockMvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs/1"))
                .andExpect(status().isNotFound());
    }

    @Test
    public void testDeleteJobByIdForFailureWithException() throws Exception {
        doThrow(new GriffinException.ServiceException(
                "Failed to delete job", new Exception()))
                .when(jobService).deleteJob(1L);

        mockMvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs/1"))
                .andExpect(status().isInternalServerError());
    }

    @Test
    public void testDeleteJobByNameForSuccess() throws Exception {
        String name = "jobName";
        doNothing().when(jobService).deleteJob(name);

        mockMvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs")
                .param("jobName", name))
                .andExpect(status().isNoContent());
    }

    @Test
    public void testDeleteJobByNameForFailureWithNotFound() throws Exception {
        String name = "jobName";
        doThrow(new GriffinException.NotFoundException(JOB_NAME_DOES_NOT_EXIST))
                .when(jobService).deleteJob(name);

        mockMvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs")
                .param("jobName", name))
                .andExpect(status().isNotFound());
    }

    @Test
    public void testDeleteJobByNameForFailureWithException() throws Exception {
        String name = "jobName";
        doThrow(new GriffinException.ServiceException(
                "Failed to delete job", new Exception()))
                .when(jobService).deleteJob(name);

        mockMvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs")
                .param("jobName", name))
                .andExpect(status().isInternalServerError());
    }

    @Test
    public void testFindInstancesOfJob() throws Exception {
        int page = 0;
        int size = 2;
        JobInstanceBean runningInstance = new JobInstanceBean(1L,
                LivySessionStates.State.RUNNING, "", "", null, null);
        given(jobService.findInstancesOfJob(1L, page, size))
                .willReturn(Collections.singletonList(runningInstance));

        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs/instances")
                .param("jobId", String.valueOf(1L))
                .param("page", String.valueOf(page))
                .param("size", String.valueOf(size)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.[0].state", is("RUNNING")));
    }

    @Test
    public void testFindInstance() throws Exception {
        JobInstanceBean runningInstance = new JobInstanceBean(1L,
                LivySessionStates.State.RUNNING, "", "", null, null);
        given(jobService.findInstance(1L)).willReturn(runningInstance);

        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs/instances/1"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.state", is("RUNNING")));
    }

    @Test
    public void testFindInstanceForFailureWithNotFound() throws Exception {
        doThrow(new GriffinException.NotFoundException(
                INSTANCE_ID_DOES_NOT_EXIST))
                .when(jobService).findInstance(1L);

        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs/instances/1"))
                .andExpect(status().isNotFound());
    }

    @Test
    public void testJobInstanceWithGivenIdNotFound() throws Exception {
        doThrow(new GriffinException.NotFoundException(
                GriffinExceptionMessage.JOB_INSTANCE_NOT_FOUND))
                .when(jobService).findInstance(2L);

        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs/instances/2"))
                .andExpect(status().isNotFound());
    }

    @Test
    public void testGetHealthInfo() throws Exception {
        JobHealth health = new JobHealth(1, 3);
        given(jobService.getHealthInfo()).willReturn(health);

        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs/health"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.healthyJobCount", is(1)));
    }

    @Test
    public void testTriggerJobForSuccess() throws Exception {
        given(jobService.triggerJobById(1L)).willReturn(null);

        mockMvc.perform(post(URLHelper.API_VERSION_PATH + "/jobs/trigger/1"))
                .andExpect(status().isOk());
    }

    @Test
    public void testTriggerJobForFailureWithException() throws Exception {
        doThrow(new GriffinException.ServiceException(
                "Failed to trigger job", new Exception()))
                .when(jobService).triggerJobById(1L);

        mockMvc.perform(post(URLHelper.API_VERSION_PATH + "/jobs/trigger/1"))
                .andExpect(status().isInternalServerError());
    }
}
| 4,030 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/job/JobInstanceBeanRepoTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.BUSY;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.IDLE;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_STARTED;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.RECOVERING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.RUNNING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.STARTING;
import static org.junit.Assert.assertEquals;
import java.util.List;
import org.apache.griffin.core.config.EclipseLinkJpaConfigForTest;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.LivySessionStates;
import org.apache.griffin.core.job.entity.VirtualJob;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.PropertySource;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@PropertySource("classpath:application.properties")
@ContextConfiguration(classes = {EclipseLinkJpaConfigForTest.class})
@DataJpaTest
public class JobInstanceBeanRepoTest {
@Autowired
private TestEntityManager entityManager;
@Autowired
private JobInstanceRepo jobInstanceRepo;
@MockBean
private IMetaStoreClient client;
@Before
public void setUp() {
setEntityManager();
}
@Test
public void testFindByJobIdWithPageable() {
Pageable pageRequest = new PageRequest(0, 10, Sort.Direction.DESC,
"tms");
List<JobInstanceBean> instances = jobInstanceRepo.findByJobId(1L,
pageRequest);
assertEquals(3, instances.size());
}
@Test
public void testFindByActiveState() {
LivySessionStates.State[] states = {STARTING, NOT_STARTED, RECOVERING,
IDLE, RUNNING, BUSY};
List<JobInstanceBean> list = jobInstanceRepo.findByActiveState(states);
assertEquals(1, list.size());
}
private void setEntityManager() {
VirtualJob job = new VirtualJob();
JobInstanceBean instance1 = new JobInstanceBean(1L, LivySessionStates
.State.SUCCESS,
"appId1", "http://domain.com/uri1", System.currentTimeMillis(),
System.currentTimeMillis());
instance1.setJob(job);
JobInstanceBean instance2 = new JobInstanceBean(2L, LivySessionStates
.State.ERROR,
"appId2", "http://domain.com/uri2", System.currentTimeMillis(),
System.currentTimeMillis());
instance2.setJob(job);
JobInstanceBean instance3 = new JobInstanceBean(2L, LivySessionStates
.State.STARTING,
"appId3", "http://domain.com/uri3", System.currentTimeMillis(),
System.currentTimeMillis());
instance3.setJob(job);
entityManager.persistAndFlush(job);
entityManager.persistAndFlush(instance1);
entityManager.persistAndFlush(instance2);
entityManager.persistAndFlush(instance3);
}
}
| 4,031 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/job/JobInstanceTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.apache.griffin.core.util.EntityMocksHelper.createFileExistPredicate;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinMeasure;
import static org.apache.griffin.core.util.EntityMocksHelper.createJobDetail;
import static org.apache.griffin.core.util.EntityMocksHelper.createSimpleTrigger;
import static org.junit.Assert.assertTrue;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.BatchJob;
import org.apache.griffin.core.job.repo.BatchJobRepo;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.griffin.core.job.repo.JobRepo;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.measure.repo.GriffinMeasureRepo;
import org.apache.griffin.core.util.JsonUtil;
import org.apache.griffin.core.util.PropertiesUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Matchers;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.Trigger;
import org.quartz.TriggerKey;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.ClassPathResource;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
public class JobInstanceTest {
@TestConfiguration
public static class jobInstanceBean {

    /** The real {@link JobInstance} bean under test. */
    @Bean
    public JobInstance instance() {
        return new JobInstance();
    }

    /** Application properties loaded from the test classpath. */
    @Bean(name = "appConf")
    public Properties sparkJobProps() {
        final String path = "application.properties";
        ClassPathResource resource = new ClassPathResource(path);
        return PropertiesUtil.getProperties(path, resource);
    }

    /** Quartz scheduler factory bean used by the job instance. */
    @Bean(name = "schedulerFactoryBean")
    public SchedulerFactoryBean factoryBean() {
        return new SchedulerFactoryBean();
    }
}
// Job instance under test, provided by the jobInstanceBean configuration.
@Autowired
private JobInstance jobInstance;
// Application properties bean ("appConf") from the test classpath.
@Autowired
@Qualifier("appConf")
private Properties appConfProps;
// Mocked persistence and scheduling collaborators.
@MockBean
private JobInstanceRepo instanceRepo;
@MockBean
private SchedulerFactoryBean factory;
@MockBean
private GriffinMeasureRepo measureRepo;
@MockBean
private BatchJobRepo jobRepo;
@MockBean
private JobRepo<AbstractJob> repo;
@Test
@SuppressWarnings("unchecked")
public void testExecute() throws Exception {
    // Fixtures: a measure serialized into the Quartz job detail and a
    // persisted batch job with an empty config map.
    JobExecutionContext context = mock(JobExecutionContext.class);
    Scheduler scheduler = mock(Scheduler.class);
    GriffinMeasure measure = createGriffinMeasure("measureName");
    JobDetail jobDetail = createJobDetail(JsonUtil.toJson(measure), "");
    BatchJob batchJob = new BatchJob(1L, "jobName",
            "qName", "qGroup", false);
    batchJob.setConfigMap(new HashMap<>());
    List<Trigger> triggers = Arrays.asList(createSimpleTrigger(2, 0));

    given(context.getJobDetail()).willReturn(jobDetail);
    given(measureRepo.findOne(Matchers.anyLong())).willReturn(measure);
    given(repo.findOne(Matchers.anyLong())).willReturn(batchJob);
    given(factory.getScheduler()).willReturn(scheduler);
    given((List<Trigger>) scheduler.getTriggersOfJob(
            Matchers.any(JobKey.class))).willReturn(triggers);
    given(scheduler.checkExists(Matchers.any(TriggerKey.class)))
            .willReturn(false);
    given(jobRepo.save(Matchers.any(BatchJob.class))).willReturn(batchJob);
    given(scheduler.checkExists(Matchers.any(JobKey.class)))
            .willReturn(false);
    Trigger contextTrigger = mock(Trigger.class);
    given(context.getTrigger()).willReturn(contextTrigger);
    given(contextTrigger.getKey()).willReturn(new TriggerKey("test"));

    jobInstance.execute(context);

    verify(measureRepo, times(1)).findOne(Matchers.anyLong());
    verify(factory, times(4)).getScheduler();
    verify(scheduler, times(1))
            .getTriggersOfJob(Matchers.any(JobKey.class));
}
@SuppressWarnings("unchecked")
@Test
public void testExecuteWithRangeLessThanZero() throws Exception {
    // execute() must complete normally when the data range is negative.
    JobExecutionContext ctx = mock(JobExecutionContext.class);
    Scheduler quartzScheduler = mock(Scheduler.class);
    GriffinMeasure griffinMeasure = createGriffinMeasure("measureName");
    JobDetail jobDetail = createJobDetail(JsonUtil.toJson(griffinMeasure), "");
    BatchJob batchJob = new BatchJob(1L, "jobName", "qName", "qGroup", false);
    List<Trigger> jobTriggers = Arrays.asList(createSimpleTrigger(2, 0));

    given(ctx.getJobDetail()).willReturn(jobDetail);
    given(measureRepo.findOne(Matchers.anyLong())).willReturn(griffinMeasure);
    given(jobRepo.findOne(Matchers.anyLong())).willReturn(batchJob);
    given(jobRepo.save(Matchers.any(BatchJob.class))).willReturn(batchJob);
    given(factory.getScheduler()).willReturn(quartzScheduler);
    given((List<Trigger>) quartzScheduler.getTriggersOfJob(
            Matchers.any(JobKey.class))).willReturn(jobTriggers);
    given(quartzScheduler.checkExists(Matchers.any(TriggerKey.class)))
            .willReturn(false);
    given(quartzScheduler.checkExists(Matchers.any(JobKey.class)))
            .willReturn(false);

    jobInstance.execute(ctx);
}
@SuppressWarnings("unchecked")
@Test
public void testExecuteWithRangeGreaterThanDataUnit() throws Exception {
    // execute() must complete when the range exceeds the data unit; the job
    // detail is read from the context exactly once.
    JobExecutionContext ctx = mock(JobExecutionContext.class);
    Scheduler quartzScheduler = mock(Scheduler.class);
    GriffinMeasure griffinMeasure = createGriffinMeasure("measureName");
    JobDetail jobDetail = createJobDetail(JsonUtil.toJson(griffinMeasure), "");
    BatchJob batchJob = new BatchJob(1L, "jobName", "qName", "qGroup", false);
    List<Trigger> jobTriggers = Arrays.asList(createSimpleTrigger(2, 0));

    given(ctx.getJobDetail()).willReturn(jobDetail);
    given(measureRepo.findOne(Matchers.anyLong())).willReturn(griffinMeasure);
    given(jobRepo.findOne(Matchers.anyLong())).willReturn(batchJob);
    given(jobRepo.save(Matchers.any(BatchJob.class))).willReturn(batchJob);
    given(factory.getScheduler()).willReturn(quartzScheduler);
    given((List<Trigger>) quartzScheduler.getTriggersOfJob(
            Matchers.any(JobKey.class))).willReturn(jobTriggers);
    given(quartzScheduler.checkExists(Matchers.any(TriggerKey.class)))
            .willReturn(false);
    given(quartzScheduler.checkExists(Matchers.any(JobKey.class)))
            .willReturn(false);

    jobInstance.execute(ctx);

    verify(ctx, times(1)).getJobDetail();
}
@SuppressWarnings("unchecked")
@Test
public void testExecuteWithPredicate() throws Exception {
    // Same flow as testExecute, but the measure carries two file-exist
    // predicates on its connectors.
    JobExecutionContext ctx = mock(JobExecutionContext.class);
    Scheduler quartzScheduler = mock(Scheduler.class);
    GriffinMeasure griffinMeasure = createGriffinMeasure("measureName",
            createFileExistPredicate(), createFileExistPredicate());
    JobDetail jobDetail = createJobDetail(JsonUtil.toJson(griffinMeasure), "");
    BatchJob batchJob = new BatchJob(1L, "jobName",
            "qName", "qGroup", false);
    List<Trigger> jobTriggers = Arrays.asList(createSimpleTrigger(2, 0));

    given(ctx.getJobDetail()).willReturn(jobDetail);
    given(measureRepo.findOne(Matchers.anyLong())).willReturn(griffinMeasure);
    given(jobRepo.findOne(Matchers.anyLong())).willReturn(batchJob);
    given(jobRepo.save(Matchers.any(BatchJob.class))).willReturn(batchJob);
    given(factory.getScheduler()).willReturn(quartzScheduler);
    given((List<Trigger>) quartzScheduler.getTriggersOfJob(
            Matchers.any(JobKey.class))).willReturn(jobTriggers);
    given(quartzScheduler.checkExists(Matchers.any(TriggerKey.class)))
            .willReturn(false);
    given(quartzScheduler.checkExists(Matchers.any(JobKey.class)))
            .willReturn(false);

    jobInstance.execute(ctx);

    verify(ctx, times(1)).getJobDetail();
}
@Test
public void testExecuteWithNullException() throws Exception {
    // The context mock returns null for its job detail; execute() is expected
    // not to propagate an exception — the test passes iff control reaches the
    // end of the method. The former assertTrue(true) was a tautology that
    // asserted nothing and has been removed.
    JobExecutionContext context = mock(JobExecutionContext.class);
    jobInstance.execute(context);
}
}
| 4,032 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.griffin.core.job.repo.JobRepo;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.quartz.*;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.Collections;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinJob;
import static org.apache.griffin.core.util.EntityMocksHelper.createJobInstance;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.internal.verification.VerificationModeFactory.times;
@RunWith(SpringRunner.class)
public class JobServiceImplTest {

    @Mock
    private JobRepo<AbstractJob> jobRepo;
    @Mock
    private SchedulerFactoryBean factory;
    @Mock
    private JobInstanceRepo instanceRepo;
    // Service under test; Mockito injects the mocks above.
    @InjectMocks
    private JobServiceImpl jobService;

    /**
     * Triggering an existing, non-deleted job schedules it exactly once and
     * returns a trigger key matching the DEFAULT group pattern.
     */
    @Test
    public void testTriggerJobById() throws SchedulerException {
        Long jobId = 1L;
        AbstractJob storedJob = createGriffinJob();
        Scheduler quartzScheduler = mock(Scheduler.class);
        ListenerManager listeners = mock(ListenerManager.class);
        JobInstanceBean instanceBean = createJobInstance();

        given(jobRepo.findByIdAndDeleted(jobId, false)).willReturn(storedJob);
        given(factory.getScheduler()).willReturn(quartzScheduler);
        given(quartzScheduler.checkExists(any(JobKey.class))).willReturn(true);
        given(quartzScheduler.getListenerManager()).willReturn(listeners);
        given(instanceRepo.findByTriggerKey(anyString()))
                .willReturn(Collections.singletonList(instanceBean));

        String triggerKey = jobService.triggerJobById(jobId);

        assertTrue(triggerKey.matches("DEFAULT\\.[0-9a-f\\-]{49}"));
        verify(quartzScheduler, times(1)).scheduleJob(any());
    }

    /** An unknown job id must surface as a NotFoundException. */
    @Test(expected = GriffinException.NotFoundException.class)
    public void testTriggerJobByIdFail() throws SchedulerException {
        Long jobId = 1L;
        given(jobRepo.findByIdAndDeleted(jobId, false)).willReturn(null);
        jobService.triggerJobById(jobId);
    }
}
| 4,033 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinMeasure;
import java.util.ArrayList;
import java.util.List;
import org.apache.griffin.core.event.EventSourceType;
import org.apache.griffin.core.event.EventType;
import org.apache.griffin.core.event.GriffinEvent;
import org.apache.griffin.core.event.GriffinHook;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.entity.BatchJob;
import org.apache.griffin.core.job.entity.JobDataSegment;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.measure.entity.Measure;
import org.apache.griffin.core.util.EntityMocksHelper;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@DataJpaTest
@ComponentScan("org.apache.griffin.core")
public class EventServiceTest {

    @Autowired
    private JobService jobService;
    @Autowired
    private TestEntityManager entityManager;
    // Populated by TestJobEventHook below as job events are fired.
    @Autowired
    private List<GriffinEvent> eventList;
    // Mocked so no Hive metastore connection is attempted.
    @MockBean
    private IMetaStoreClient client;

    @Before
    public void setup() throws Exception {
        entityManager.clear();
        entityManager.flush();
        setEntityManager();
    }

    /**
     * Adding a batch job must emit two events: the first a CREATION_EVENT and
     * the second sourced from the JOB domain.
     */
    @Test
    public void testAddJobEvent() throws Exception {
        // Renamed from batch_Job to follow lowerCamelCase convention.
        BatchJob batchJob = EntityMocksHelper.createGriffinJob();
        batchJob.setCronExpression("0 0 12 * * ?");
        batchJob.setTimeZone("Asia/Shanghai");
        JobDataSegment jds = new JobDataSegment();
        jds.setAsTsBaseline(true);
        jds.setDataConnectorName("target_name");
        // Typed list instead of a raw List to avoid unchecked warnings.
        List<JobDataSegment> segments = new ArrayList<>();
        segments.add(jds);
        batchJob.setSegments(segments);
        jobService.addJob(batchJob);
        Assert.assertEquals(2, eventList.size());
        Assert.assertEquals(EventType.CREATION_EVENT, eventList.get(0).getType());
        Assert.assertEquals(EventSourceType.JOB, eventList.get(1).getSourceType());
    }

    /** Seeds the batch measure that the job created in the test refers to. */
    public void setEntityManager() throws Exception {
        Measure measure1 = createGriffinMeasure("m1");
        measure1.setOrganization("org1");
        ((GriffinMeasure) measure1).setProcessType(GriffinMeasure.ProcessType.BATCH);
        entityManager.persistAndFlush(measure1);
    }

    /** Test-only hook that records every event it receives. */
    @Configuration(value = "GriffinTestJobEventHook")
    public static class TestJobEventHook implements GriffinHook {
        private List<GriffinEvent> eventList = new ArrayList<>();

        @Override
        public void onEvent(GriffinEvent event) throws GriffinException {
            eventList.add(event);
        }

        @Bean
        public List<GriffinEvent> getReceivedEvents() {
            return eventList;
        }
    }
}
| 4,034 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import org.apache.griffin.core.config.PropertiesConfig;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.SegmentPredicate;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.util.JsonUtil;
import org.apache.griffin.core.util.PropertiesUtil;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Matchers;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.ClassPathResource;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.web.client.RestTemplate;
import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;
import static org.apache.griffin.core.util.EntityMocksHelper.*;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@RunWith(SpringRunner.class)
public class SparkSubmitJobTest {
// Minimal Spring context: the job under test plus its Livy/Spark config beans.
@TestConfiguration
public static class SchedulerServiceConfiguration {
@Bean
public SparkSubmitJob sparkSubmitJobBean() {
return new SparkSubmitJob();
}
// Livy connection properties read from sparkJob.properties on the classpath.
@Bean(name = "livyConf")
public Properties sparkJobProps() {
String path = "sparkJob.properties";
return PropertiesUtil.getProperties(path,
new ClassPathResource(path));
}
@Bean
public PropertiesConfig sparkConf() {
return new PropertiesConfig("src/test/resources", null);
}
}
// Bean under test, wired from the configuration above.
@Autowired
private SparkSubmitJob sparkSubmitJob;
// Collaborators are mocked so no HTTP call, DB access or Livy submission
// happens during the tests.
@MockBean
private RestTemplate restTemplate;
@MockBean
private JobInstanceRepo jobInstanceRepo;
@MockBean
private JobServiceImpl jobService;
@MockBean
private BatchJobOperatorImpl batchJobOp;
@MockBean
private LivyTaskSubmitHelper livyTaskSubmitHelper;
@Before
public void setUp() {
}
// Trigger fire count (4) greater than repeat count (5)? The trigger is built
// with (4, 5); execute() must read the job detail and look up the predicate
// instance exactly once.
@Test
public void testExecuteWithPredicateTriggerGreaterThanRepeat()
throws Exception {
JobExecutionContext context = mock(JobExecutionContext.class);
JobInstanceBean instance = createJobInstance();
GriffinMeasure measure = createGriffinMeasure("measureName");
SegmentPredicate predicate = createFileExistPredicate();
JobDetail jd = createJobDetail(JsonUtil.toJson(measure), JsonUtil.toJson
(Collections.singletonList(predicate)));
given(context.getJobDetail()).willReturn(jd);
given(context.getTrigger()).willReturn(createSimpleTrigger(4, 5));
given(jobInstanceRepo.findByPredicateName(Matchers.anyString()))
.willReturn(instance);
sparkSubmitJob.execute(context);
verify(context, times(1)).getJobDetail();
verify(jobInstanceRepo, times(1)).findByPredicateName(
Matchers.anyString());
}
// Same flow with the trigger built as (4, 4), i.e. fire count not below the
// repeat count.
@Test
public void testExecuteWithPredicateTriggerLessThanRepeat() throws Exception {
JobExecutionContext context = mock(JobExecutionContext.class);
JobInstanceBean instance = createJobInstance();
GriffinMeasure measure = createGriffinMeasure("measureName");
SegmentPredicate predicate = createFileExistPredicate();
JobDetail jd = createJobDetail(JsonUtil.toJson(measure), JsonUtil.toJson
(Collections.singletonList(predicate)));
given(context.getJobDetail()).willReturn(jd);
given(context.getTrigger()).willReturn(createSimpleTrigger(4, 4));
given(jobInstanceRepo.findByPredicateName(Matchers.anyString()))
.willReturn(instance);
sparkSubmitJob.execute(context);
verify(context, times(1)).getJobDetail();
verify(jobInstanceRepo, times(1)).findByPredicateName(
Matchers.anyString());
}
// A job without predicates goes straight to submission.
@Test
public void testExecuteWithNoPredicateSuccess() throws Exception {
// NOTE(review): 'result' is unused — presumably it was meant to stub the
// restTemplate response from Livy; confirm and either use or drop it.
String result = "{\"id\":1,\"state\":\"starting\",\"appId\":null," +
"\"appInfo\":{\"driverLogUrl\":null," +
"\"sparkUiUrl\":null},\"log\":[]}";
JobExecutionContext context = mock(JobExecutionContext.class);
JobInstanceBean instance = createJobInstance();
GriffinMeasure measure = createGriffinMeasure("measureName");
JobDetail jd = createJobDetail(JsonUtil.toJson(measure), "");
given(context.getJobDetail()).willReturn(jd);
given(jobInstanceRepo.findByPredicateName(Matchers.anyString()))
.willReturn(instance);
sparkSubmitJob.execute(context);
verify(context, times(1)).getJobDetail();
verify(jobInstanceRepo, times(1)).findByPredicateName(
Matchers.anyString());
}
// Submission failures (the mocked restTemplate returns null) must not
// propagate out of execute().
@Test
public void testExecuteWithPost2LivyException() throws Exception {
JobExecutionContext context = mock(JobExecutionContext.class);
JobInstanceBean instance = createJobInstance();
GriffinMeasure measure = createGriffinMeasure("measureName");
JobDetail jd = createJobDetail(JsonUtil.toJson(measure), "");
given(context.getJobDetail()).willReturn(jd);
given(jobInstanceRepo.findByPredicateName(Matchers.anyString()))
.willReturn(instance);
sparkSubmitJob.execute(context);
verify(context, times(1)).getJobDetail();
verify(jobInstanceRepo, times(1)).findByPredicateName(
Matchers.anyString());
}
// With nothing stubbed on the context, execute() must still return normally.
@Test
public void testExecuteWithNullException() {
JobExecutionContext context = mock(JobExecutionContext.class);
sparkSubmitJob.execute(context);
}
// Two predicates that both evaluate true: the instance is looked up once and
// saved once.
@Test
public void testMultiplePredicatesWhichReturnsTrue() throws Exception {
JobExecutionContext context = mock(JobExecutionContext.class);
JobInstanceBean instance = createJobInstance();
GriffinMeasure measure = createGriffinMeasure("measureName");
SegmentPredicate predicate = createMockPredicate();
SegmentPredicate secondPredicate = createMockPredicate();
JobDetail jd = createJobDetail(JsonUtil.toJson(measure), JsonUtil.toJson
(Arrays.asList(predicate, secondPredicate)));
given(context.getJobDetail()).willReturn(jd);
given(context.getTrigger()).willReturn(createSimpleTrigger(4, 5));
given(jobInstanceRepo.findByPredicateName(Matchers.anyString()))
.willReturn(instance);
sparkSubmitJob.execute(context);
verify(context, times(1)).getJobDetail();
verify(jobInstanceRepo, times(1)).findByPredicateName(
Matchers.anyString());
verify(jobInstanceRepo, times(1)).save(instance);
}
}
| 4,035 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
// Placeholder test class for JobSchedule; no scenarios are implemented yet.
public class JobScheduleTest {
}
| 4,036 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.repo;
import static org.junit.Assert.assertEquals;
import java.util.List;
import org.apache.griffin.core.config.EclipseLinkJpaConfigForTest;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.BatchJob;
import org.apache.griffin.core.job.entity.VirtualJob;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@DataJpaTest
@ContextConfiguration(classes = {EclipseLinkJpaConfigForTest.class})
public class JobRepoTest {

    @Autowired
    private TestEntityManager entityManager;
    // Parameterized (was raw JobRepo) to avoid unchecked calls below.
    @Autowired
    private JobRepo<AbstractJob> jobRepo;
    // Mocked so the Hive metastore is never contacted during JPA tests.
    @MockBean
    private IMetaStoreClient client;

    @Before
    public void setup() {
        entityManager.clear();
        entityManager.flush();
        setEntityManager();
    }

    // All assertEquals calls below follow the JUnit convention of
    // expected-first; the original had the arguments reversed, which produces
    // misleading failure messages ("expected <actual> but was <expected>").
    @Test
    public void testCountByJobNameAndDeleted() {
        int count = jobRepo.countByJobNameAndDeleted("griffinJobName1", false);
        assertEquals(1, count);
    }

    @Test
    public void testFindByDeleted() {
        List<AbstractJob> jobs = jobRepo.findByDeleted(false);
        assertEquals(4, jobs.size());
    }

    @Test
    public void findByJobNameAndDeleted() {
        List<AbstractJob> jobs = jobRepo
                .findByJobNameAndDeleted("griffinJobName1", false);
        assertEquals(1, jobs.size());
    }

    @Test
    public void findByMeasureIdAndDeleted() {
        List<AbstractJob> jobs = jobRepo.findByMeasureIdAndDeleted(1L, false);
        assertEquals(4, jobs.size());
    }

    @Test
    public void findByIdAndDeleted() {
        AbstractJob job = jobRepo.findByIdAndDeleted(1L, true);
        // Was a bare Java `assert`, which only runs with -ea; use a JUnit
        // assertion (assertEquals is the statically imported one) instead.
        assertEquals(null, job);
    }

    /** Seeds two batch jobs and two virtual jobs, all tied to measure id 1. */
    public void setEntityManager() {
        AbstractJob job1 = new BatchJob(1L, "griffinJobName1", "qName1",
                "qGroup1", false);
        AbstractJob job2 = new BatchJob(1L, "griffinJobName2", "qName2",
                "qGroup2", false);
        AbstractJob job3 = new VirtualJob("virtualJobName1", 1L, "metricName1");
        AbstractJob job4 = new VirtualJob("virtualJobName2", 1L, "metricName2");
        entityManager.persistAndFlush(job1);
        entityManager.persistAndFlush(job2);
        entityManager.persistAndFlush(job3);
        entityManager.persistAndFlush(job4);
    }
}
| 4,037 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.repo;
import static org.apache.griffin.core.job.entity.LivySessionStates.State;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.BUSY;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.FINDING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.IDLE;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_FOUND;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_STARTED;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.RECOVERING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.RUNNING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.STARTING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.SUCCESS;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.ArrayList;
import java.util.List;
import org.apache.avro.generic.GenericData;
import org.apache.griffin.core.config.EclipseLinkJpaConfigForTest;
import org.apache.griffin.core.job.entity.BatchJob;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.StreamingJob;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@DataJpaTest
@ContextConfiguration(classes = {EclipseLinkJpaConfigForTest.class})
public class JobInstanceRepoTest {
@Autowired
private TestEntityManager entityManager;
@Autowired
private JobInstanceRepo jobInstanceRepo;
// Mocked so the Hive metastore is never contacted during JPA tests.
@MockBean
private IMetaStoreClient client;
// Ids of the four persisted instances, in persist order; currently only the
// first is consumed (testFindByInstanceId).
private List<Long> entityIds;
@Before
public void setup() {
entityManager.clear();
entityManager.flush();
setEntityManager();
}
// Of the four seeded instances only bean1 (FINDING) is in an "active" state
// per the repository's query for the given state set.
@Test
public void testFindByActiveState() {
State[] states = {STARTING, NOT_STARTED, RECOVERING, IDLE, RUNNING,
BUSY};
List<JobInstanceBean> beans = jobInstanceRepo.findByActiveState(states);
assertThat(beans.size()).isEqualTo(1);
}
@Test
public void testFindByPredicateName() {
JobInstanceBean bean = jobInstanceRepo.findByPredicateName("pName1");
assertThat(bean).isNotNull();
}
@Test
public void testFindByInstanceId() {
JobInstanceBean bean = jobInstanceRepo.findByInstanceId(entityIds.get(0));
assertThat(bean).isNotNull();
}
// Two of the seeded instances expire at or before 1516004640092.
@Test
public void testFindByExpireTmsLessThanEqual() {
List<JobInstanceBean> beans = jobInstanceRepo
.findByExpireTmsLessThanEqual(1516004640092L);
assertThat(beans.size()).isEqualTo(2);
}
@Test
public void testDeleteByExpireTimestamp() {
int count = jobInstanceRepo.deleteByExpireTimestamp(1516004640092L);
assertThat(count).isEqualTo(2);
}
// Seeds four instances in varying states attached to one batch and one
// streaming job. Jobs are persisted BEFORE their instances so the foreign
// keys resolve — keep this ordering.
private void setEntityManager() {
JobInstanceBean bean1 = new JobInstanceBean(
FINDING,
"pName1",
"pGroup1",
null,
1516004640092L);
JobInstanceBean bean2 = new JobInstanceBean(
NOT_FOUND,
"pName2",
"pGroup2",
null,
1516004640093L);
JobInstanceBean bean3 = new JobInstanceBean(
RUNNING,
"pName3",
"pGroup3",
null,
1516004640082L);
JobInstanceBean bean4 = new JobInstanceBean(
SUCCESS,
"pName4",
"pGroup4",
null,
1516004640094L);
BatchJob job1 = new BatchJob();
StreamingJob job2 = new StreamingJob();
bean1.setJob(job1);
bean2.setJob(job1);
bean3.setJob(job2);
bean4.setJob(job2);
entityManager.persistAndFlush(job1);
entityManager.persistAndFlush(job2);
entityManager.persistAndFlush(bean1);
entityManager.persistAndFlush(bean2);
entityManager.persistAndFlush(bean3);
entityManager.persistAndFlush(bean4);
entityIds = new ArrayList<>();
entityIds.add(bean1.getId());
entityIds.add(bean2.getId());
entityIds.add(bean3.getId());
entityIds.add(bean4.getId());
}
}
| 4,038 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.factory;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.FileExistPredicator;
import org.apache.griffin.core.job.Predicator;
import org.apache.griffin.core.job.entity.SegmentPredicate;
import org.apache.griffin.core.util.PredicatorMock;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.junit4.SpringRunner;
import java.io.IOException;
import java.util.HashMap;
import static org.apache.griffin.core.util.EntityMocksHelper.createFileExistPredicate;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@RunWith(SpringRunner.class)
public class PredicatorFactoryTest {

    /** A file-exist predicate maps to a FileExistPredicator instance. */
    @Test
    public void testFileExistPredicatorCreation() throws IOException {
        Predicator created =
                PredicatorFactory.newPredicateInstance(createFileExistPredicate());
        assertNotNull(created);
        assertTrue(created instanceof FileExistPredicator);
    }

    /** Unrecognised predicate types must be rejected with NotFoundException. */
    @Test(expected = GriffinException.NotFoundException.class)
    public void testUnknownPredicator() throws JsonProcessingException {
        SegmentPredicate unknown = new SegmentPredicate("unknown", null);
        PredicatorFactory.newPredicateInstance(unknown);
    }

    /** A "custom" predicate instantiates the class named in its config map. */
    @Test
    public void testPluggablePredicator() throws JsonProcessingException {
        HashMap<String, Object> config = new HashMap<>();
        config.put("class", "org.apache.griffin.core.util.PredicatorMock");
        SegmentPredicate custom = new SegmentPredicate("custom", null);
        custom.setConfigMap(config);
        Predicator created = PredicatorFactory.newPredicateInstance(custom);
        assertNotNull(created);
        assertTrue(created instanceof PredicatorMock);
    }
}
| 4,039 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.measure;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.hasSize;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.griffin.core.util.URLHelper;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
@RunWith(SpringRunner.class)
@WebMvcTest(value = MeasureOrgController.class, secure = false)
public class MeasureOrgControllerTest {

    @Autowired
    private MockMvc mockMvc;
    // Service layer is mocked; only the controller's HTTP mapping is tested.
    @MockBean
    private MeasureOrgService measureOrgService;

    /** GET /org returns the list of organization names. */
    @Test
    public void testGetOrgs() throws Exception {
        String orgName = "orgName";
        when(measureOrgService.getOrgs()).thenReturn(Arrays.asList(orgName));
        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/org"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.[0]", is(orgName)));
    }

    /** GET /org/{org} returns the metric names belonging to that org. */
    @Test
    public void testGetMetricNameListByOrg() throws Exception {
        String orgName = "hadoop";
        when(measureOrgService.getMetricNameListByOrg(orgName))
                .thenReturn(Arrays.asList(orgName));
        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/org/{org}", orgName))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.[0]", is(orgName)));
    }

    /** GET /org/measure/names groups measure names by organization. */
    @Test
    public void testGetMeasureNamesGroupByOrg() throws Exception {
        Map<String, List<String>> grouped = new HashMap<>();
        grouped.put("orgName", Arrays.asList("measureName"));
        when(measureOrgService.getMeasureNamesGroupByOrg()).thenReturn(grouped);
        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/org/measure/names"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.orgName", hasSize(1)));
    }
}
| 4,040 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.measure;
import static org.apache.griffin.core.util.EntityMocksHelper.createExternalMeasure;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinMeasure;
import static org.junit.Assert.assertEquals;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.measure.entity.ExternalMeasure;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.measure.entity.Measure;
import org.apache.griffin.core.measure.repo.MeasureRepo;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.quartz.SchedulerException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@Component
public class MeasureServiceImplTest {

    /** Service under test; the mocks below are injected into it. */
    @InjectMocks
    private MeasureServiceImpl service;

    /** Operator the service delegates to for {@link ExternalMeasure}s. */
    @Mock
    private MeasureOperator externalOp;

    /** Operator the service delegates to for {@link GriffinMeasure}s. */
    @Mock
    private MeasureOperator griffinOp;

    @Mock
    private MeasureRepo<Measure> measureRepo;

    @Value("${hive.hmshandler.retry.attempts}")
    private String attempts;

    @Test
    public void test() {
        System.out.println(attempts);
    }

    @Before
    public void setup() {
    }

    @Test
    public void testGetAllMeasures() throws Exception {
        Measure measure = createGriffinMeasure("view_item_hourly");
        given(measureRepo.findByDeleted(false)).willReturn(Collections
                .singletonList(measure));
        List<? extends Measure> measures = service.getAllAliveMeasures("");
        // NOTE: JUnit's assertEquals takes (expected, actual) — fixed the
        // reversed argument order so failure messages read correctly.
        assertEquals(1, measures.size());
        assertEquals("view_item_hourly", measures.get(0).getName());
    }

    @Test
    public void testGetMeasuresById() throws Exception {
        Measure measure = createGriffinMeasure("view_item_hourly");
        given(measureRepo.findByIdAndDeleted(1L, false)).willReturn(measure);
        Measure m = service.getMeasureById(1);
        assertEquals(measure.getName(), m.getName());
    }

    @Test(expected = GriffinException.NotFoundException.class)
    public void testGetMeasuresByIdWithFileNotFoundException() {
        given(measureRepo.findByIdAndDeleted(1L, false)).willReturn(null);
        service.getMeasureById(1);
    }

    @Test
    public void testGetAliveMeasuresByOwner() throws Exception {
        String owner = "test";
        Measure measure = createGriffinMeasure("view_item_hourly");
        given(measureRepo.findByOwnerAndDeleted(owner, false))
                .willReturn(Collections.singletonList(measure));
        List<Measure> measures = service.getAliveMeasuresByOwner(owner);
        assertEquals(measure.getName(), measures.get(0).getName());
    }

    @Test
    public void testDeleteMeasureByIdForGriffinSuccess() throws Exception {
        GriffinMeasure measure = createGriffinMeasure("view_item_hourly");
        measure.setId(1L);
        given(measureRepo.findByIdAndDeleted(measure.getId(), false))
                .willReturn(measure);
        doNothing().when(griffinOp).delete(measure);
        service.deleteMeasureById(measure.getId());
        verify(griffinOp, times(1)).delete(measure);
    }

    @Test
    public void testDeleteMeasureByIdForExternalSuccess() throws
            SchedulerException {
        ExternalMeasure measure = createExternalMeasure("externalMeasure");
        measure.setId(1L);
        given(measureRepo.findByIdAndDeleted(measure.getId(), false))
                .willReturn(measure);
        doNothing().when(externalOp).delete(measure);
        service.deleteMeasureById(1L);
        verify(externalOp, times(1)).delete(measure);
    }

    @Test(expected = GriffinException.NotFoundException.class)
    public void testDeleteMeasureByIdFailureWithNotFound() throws
            SchedulerException {
        given(measureRepo.findByIdAndDeleted(1L, false)).willReturn(null);
        service.deleteMeasureById(1L);
    }

    @Test(expected = GriffinException.ServiceException.class)
    public void testDeleteMeasureByIdForGriffinFailureWithException() throws
            Exception {
        GriffinMeasure measure = createGriffinMeasure("externalMeasure");
        measure.setId(1L);
        given(measureRepo.findByIdAndDeleted(measure.getId(), false))
                .willReturn(measure);
        doThrow(new GriffinException.ServiceException("Failed to delete job",
                new Exception()))
                .when(griffinOp).delete(measure);
        service.deleteMeasureById(1L);
    }

    @Test
    public void testDeleteMeasuresForGriffinSuccess() throws Exception {
        GriffinMeasure measure = createGriffinMeasure("view_item_hourly");
        measure.setId(1L);
        given(measureRepo.findByDeleted(false)).willReturn(Collections
                .singletonList(measure));
        doNothing().when(griffinOp).delete(measure);
        service.deleteMeasures();
    }

    @Test
    public void testDeleteMeasuresForExternalSuccess() throws SchedulerException {
        ExternalMeasure measure = createExternalMeasure("externalMeasure");
        measure.setId(1L);
        given(measureRepo.findByDeleted(false)).willReturn(Collections
                .singletonList(measure));
        doNothing().when(externalOp).delete(measure);
        service.deleteMeasures();
    }

    @Test(expected = GriffinException.ServiceException.class)
    public void testDeleteMeasuresForGriffinFailureWithException() throws
            Exception {
        GriffinMeasure measure = createGriffinMeasure("externalMeasure");
        measure.setId(1L);
        given(measureRepo.findByDeleted(false)).willReturn(Collections
                .singletonList(measure));
        doThrow(new GriffinException.ServiceException("Failed to delete job",
                new Exception()))
                .when(griffinOp).delete(measure);
        service.deleteMeasures();
    }

    @Test
    public void testCreateMeasureForGriffinSuccess() throws Exception {
        String measureName = "view_item_hourly";
        GriffinMeasure griffinMeasure = createGriffinMeasure(measureName);
        given(measureRepo.findByNameAndDeleted(measureName, false))
                .willReturn(new ArrayList<>());
        given(griffinOp.create(griffinMeasure)).willReturn(griffinMeasure);
        Measure measure = service.createMeasure(griffinMeasure);
        assertEquals(griffinMeasure.getName(), measure.getName());
    }

    @Test
    public void testCreateMeasureForExternalSuccess() {
        String measureName = "view_item_hourly";
        ExternalMeasure externalMeasure = createExternalMeasure(measureName);
        given(measureRepo.findByNameAndDeleted(measureName, false))
                .willReturn(new ArrayList<>());
        given(externalOp.create(externalMeasure)).willReturn(externalMeasure);
        Measure measure = service.createMeasure(externalMeasure);
        assertEquals(externalMeasure.getName(), measure.getName());
    }

    @Test(expected = GriffinException.ConflictException.class)
    public void testCreateMeasureForFailureWithDuplicate() throws Exception {
        String measureName = "view_item_hourly";
        GriffinMeasure measure = createGriffinMeasure(measureName);
        given(measureRepo.findByNameAndDeleted(measureName, false))
                .willReturn(Collections.singletonList(measure));
        service.createMeasure(measure);
    }

    @Test
    public void testUpdateMeasureForGriffinSuccess() throws Exception {
        Measure measure = createGriffinMeasure("view_item_hourly");
        given(measureRepo.findByIdAndDeleted(measure.getId(), false))
                .willReturn(measure);
        // BUG FIX: the stub must target griffinOp — the operator a
        // GriffinMeasure is routed to (and the one this test verifies).
        // The original stubbed externalOp, which is never invoked here.
        doReturn(measure).when(griffinOp).update(measure);
        service.updateMeasure(measure);
        verify(griffinOp, times(1)).update(measure);
    }

    @Test(expected = GriffinException.BadRequestException.class)
    public void testUpdateMeasureForGriffinFailureWithDiffType() throws
            Exception {
        Measure griffinMeasure = createGriffinMeasure("view_item_hourly");
        Measure externalMeasure = createExternalMeasure("externalName");
        given(measureRepo.findByIdAndDeleted(griffinMeasure.getId(), false))
                .willReturn(externalMeasure);
        service.updateMeasure(griffinMeasure);
    }

    @Test(expected = GriffinException.NotFoundException.class)
    public void testUpdateMeasureForFailureWithNotFound() throws Exception {
        Measure measure = createGriffinMeasure("view_item_hourly");
        given(measureRepo.findByIdAndDeleted(measure.getId(), false))
                .willReturn(null);
        service.updateMeasure(measure);
    }

    @Test
    public void testUpdateMeasureForExternal() {
        ExternalMeasure measure = createExternalMeasure
                ("external_view_item_hourly");
        given(measureRepo.findByIdAndDeleted(measure.getId(), false))
                .willReturn(measure);
        doReturn(measure).when(externalOp).update(measure);
        service.updateMeasure(measure);
        verify(externalOp, times(1)).update(measure);
    }
}
| 4,041 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/measure/ExternalMeasureOperatorImplTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.measure;
import static org.apache.griffin.core.util.EntityMocksHelper.createExternalMeasure;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.entity.VirtualJob;
import org.apache.griffin.core.job.repo.VirtualJobRepo;
import org.apache.griffin.core.measure.entity.ExternalMeasure;
import org.apache.griffin.core.measure.repo.ExternalMeasureRepo;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
public class ExternalMeasureOperatorImplTest {

    /** Operator under test; the repository mocks below are injected into it. */
    @InjectMocks
    private ExternalMeasureOperatorImpl operator;

    @Mock
    private ExternalMeasureRepo measureRepo;

    @Mock
    private VirtualJobRepo jobRepo;

    @Before
    public void setup() {
    }

    @Test
    public void testCreateForSuccess() {
        ExternalMeasure measure = createExternalMeasure("view_item_hourly");
        given(measureRepo.save(measure)).willReturn(measure);
        given(jobRepo.save(Matchers.any(VirtualJob.class))).willReturn(
                new VirtualJob());
        operator.create(measure);
        // Creating an external measure also registers a virtual job for it.
        verify(jobRepo, times(1)).save(new VirtualJob());
    }

    @Test(expected = GriffinException.BadRequestException.class)
    public void testCreateForFailureWithBlankMetricName() {
        String measureName = "view_item_hourly";
        ExternalMeasure measure = createExternalMeasure(measureName);
        measure.setMetricName(" ");
        operator.create(measure);
    }

    @Test
    public void testUpdateForSuccess() {
        ExternalMeasure measure = createExternalMeasure("view_item_hourly");
        measure.setId(1L);
        given(measureRepo.findOne(1L)).willReturn(measure);
        given(measureRepo.save(Matchers.any(ExternalMeasure.class)))
                .willReturn(measure);
        // BUG FIX: exercise update(), not create() — the original invoked the
        // wrong operation, so the update path (including the findOne stub set
        // up above) was never actually tested.
        operator.update(measure);
        verify(measureRepo, times(1)).save(
                Matchers.any(ExternalMeasure.class));
    }

    @Test(expected = GriffinException.BadRequestException.class)
    public void testUpdateForFailureWithBlankMetricName() {
        String measureName = "view_item_hourly";
        ExternalMeasure measure = createExternalMeasure(measureName);
        measure.setMetricName(" ");
        operator.update(measure);
    }

    @Test
    public void testDeleteForSuccess() {
        ExternalMeasure measure = createExternalMeasure("view_item_hourly");
        given(measureRepo.save(measure)).willReturn(measure);
        operator.delete(measure);
        // Deletion is a soft delete persisted via save().
        verify(measureRepo, times(1)).save(measure);
    }
}
| 4,042 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/measure/MeasureOrgServiceImplTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.measure;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinMeasure;
import static org.apache.griffin.core.util.EntityMocksHelper.createJobDetailMap;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.measure.repo.GriffinMeasureRepo;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
public class MeasureOrgServiceImplTest {

    /** Service under test; the repository mock below is injected into it. */
    @InjectMocks
    private MeasureOrgServiceImpl service;

    @Mock
    private GriffinMeasureRepo measureRepo;

    @Test
    public void testGetOrgs() {
        String orgName = "orgName";
        given(measureRepo.findOrganizations(false)).willReturn(Arrays
                .asList(orgName));
        List<String> orgs = service.getOrgs();
        assertThat(orgs.size()).isEqualTo(1);
        assertThat(orgs.get(0)).isEqualTo(orgName);
    }

    @Test
    public void testGetMetricNameListByOrg() {
        String orgName = "orgName";
        String measureName = "measureName";
        given(measureRepo.findNameByOrganization(orgName, false))
                .willReturn(Arrays.asList(measureName));
        List<String> measureNames = service.getMetricNameListByOrg(orgName);
        assertThat(measureNames.size()).isEqualTo(1);
        assertThat(measureNames.get(0)).isEqualTo(measureName);
    }

    @Test
    public void testGetMeasureNamesGroupByOrg() throws Exception {
        GriffinMeasure measure = createGriffinMeasure("measure");
        when(measureRepo.findByDeleted(false)).thenReturn(Arrays
                .asList(measure));
        Map<String, List<String>> map = service.getMeasureNamesGroupByOrg();
        assertThat(map.size()).isEqualTo(1);
    }

    @Test
    public void testGetMeasureNamesGroupByOrgWithNull() {
        when(measureRepo.findByDeleted(false)).thenReturn(new ArrayList<>());
        Map<String, List<String>> map = service.getMeasureNamesGroupByOrg();
        // BUG FIX: the original used the bare `assert` keyword, which is a
        // no-op unless the JVM runs with -ea; use AssertJ so the check always
        // executes.
        assertThat(map).isEmpty();
    }

    @Test
    public void testGetMeasureWithJobDetailsGroupByOrgForSuccess()
            throws Exception {
        String measureName = "measureName";
        String measureId = "1";
        GriffinMeasure measure = createGriffinMeasure(measureName);
        measure.setOrganization("org");
        measure.setId(Long.valueOf(measureId));
        given(measureRepo.findByDeleted(false)).willReturn(Arrays
                .asList(measure));
        Map<String, Object> jobDetail = createJobDetailMap();
        List<Map<String, Object>> jobList = Arrays.asList(jobDetail);
        Map<String, List<Map<String, Object>>> measuresById = new HashMap<>();
        measuresById.put(measureId, jobList);
        Map<String, Map<String, List<Map<String, Object>>>> map = service
                .getMeasureWithJobDetailsGroupByOrg(measuresById);
        assertThat(map.size()).isEqualTo(1);
        assertThat(map).containsKey("org");
        assertThat(map.get("org").get(measureName)).isEqualTo(jobList);
    }

    @Test
    public void testGetMeasureWithJobDetailsGroupByOrgForFailure()
            throws Exception {
        // FIX: use fully parameterized types instead of raw Map, matching the
        // service signature exercised in the success test above.
        Map<String, List<Map<String, Object>>> detail = new HashMap<>();
        given(measureRepo.findByDeleted(false)).willReturn(null);
        Map<String, Map<String, List<Map<String, Object>>>> map =
                service.getMeasureWithJobDetailsGroupByOrg(detail);
        // BUG FIX: bare `assert` replaced with an always-on AssertJ check.
        assertThat(map).isNull();
    }
}
| 4,043 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/measure/GriffinMeasureOperatorImplTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.measure;
import static org.apache.griffin.core.util.EntityMocksHelper.createDataConnector;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinMeasure;
import static org.junit.Assert.assertEquals;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.JobServiceImpl;
import org.apache.griffin.core.measure.entity.DataConnector;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.measure.entity.Measure;
import org.apache.griffin.core.measure.repo.DataConnectorRepo;
import org.apache.griffin.core.measure.repo.MeasureRepo;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
public class GriffinMeasureOperatorImplTest {

    /** Operator under test; the mocks below are injected into it. */
    @InjectMocks
    private GriffinMeasureOperatorImpl operator;

    @Mock
    private MeasureRepo<Measure> measureRepo;

    @Mock
    private DataConnectorRepo dcRepo;

    @Mock
    private JobServiceImpl jobService;

    @Before
    public void setup() {
    }

    @Test
    public void testCreateForSuccess() throws Exception {
        Measure measure = createGriffinMeasure("view_item_hourly");
        given(measureRepo.save(measure)).willReturn(measure);
        Measure m = operator.create(measure);
        // NOTE: JUnit's assertEquals takes (expected, actual) — fixed the
        // reversed argument order so failure messages read correctly.
        assertEquals(measure.getName(), m.getName());
    }

    @Test(expected = GriffinException.BadRequestException.class)
    public void testCreateForFailureWithConnectorNull() throws Exception {
        String measureName = "view_item_hourly";
        DataConnector dcSource = createDataConnector(null, "default",
                "test_data_src", "dt=#YYYYMMdd# AND hour =#HH#");
        DataConnector dcTarget = createDataConnector(null, "default",
                "test_data_tgt", "dt=#YYYYMMdd# AND hour =#HH#");
        GriffinMeasure measure = createGriffinMeasure(measureName, dcSource,
                dcTarget);
        operator.create(measure);
    }

    @Test
    public void testUpdateForSuccess() throws Exception {
        Measure measure = createGriffinMeasure("view_item_hourly");
        given(measureRepo.save(measure)).willReturn(measure);
        operator.update(measure);
        verify(measureRepo, times(1)).save(measure);
    }

    @Test
    public void testDeleteForSuccess() throws Exception {
        Measure measure = createGriffinMeasure("view_item_hourly");
        measure.setId(1L);
        doNothing().when(jobService).deleteJobsRelateToMeasure(1L);
        given(measureRepo.save(measure)).willReturn(measure);
        // BUG FIX: invoke delete(), not update() — the original exercised the
        // wrong operation, so the delete path (and the jobService stub above)
        // was never actually tested.
        operator.delete(measure);
        verify(measureRepo, times(1)).save(measure);
    }

    @Test(expected = GriffinException.ServiceException.class)
    public void testDeleteForFailureWithPauseJob() throws Exception {
        Measure measure = createGriffinMeasure("view_item_hourly");
        measure.setId(1L);
        doThrow(new GriffinException.ServiceException("Service exception",
                new RuntimeException()))
                .when(jobService).deleteJobsRelateToMeasure(1L);
        operator.delete(measure);
    }
}
| 4,044 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.measure;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinMeasure;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.exception.GriffinExceptionHandler;
import org.apache.griffin.core.exception.GriffinExceptionMessage;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.measure.entity.Measure;
import org.apache.griffin.core.util.JsonUtil;
import org.apache.griffin.core.util.URLHelper;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
@RunWith(SpringRunner.class)
public class MeasureControllerTest {
private MockMvc mvc;
@Mock
private MeasureServiceImpl service;
@InjectMocks
private MeasureController controller;
@Before
public void setup() {
mvc = MockMvcBuilders
.standaloneSetup(controller)
.setControllerAdvice(new GriffinExceptionHandler())
.build();
}
@Test
public void testGetAllMeasures() throws Exception {
Measure measure = createGriffinMeasure("view_item_hourly");
Mockito.<List<? extends Measure>>when(service.getAllAliveMeasures(""))
.thenReturn(Collections.singletonList(measure));
mvc.perform(get(URLHelper.API_VERSION_PATH + "/measures"))
.andExpect(status().isOk())
.andExpect(jsonPath("$.[0].name", is("view_item_hourly")));
}
@Test
public void testGetMeasuresById() throws Exception {
Measure measure = createGriffinMeasure("view_item_hourly");
given(service.getMeasureById(1L)).willReturn(measure);
mvc.perform(get(URLHelper.API_VERSION_PATH + "/measures/1"))
.andExpect(status().isOk())
.andExpect(jsonPath("$.name", is("view_item_hourly")));
}
@Test
public void testDeleteMeasureByIdForSuccess() throws Exception {
doNothing().when(service).deleteMeasureById(1L);
mvc.perform(delete(URLHelper.API_VERSION_PATH + "/measures/1"))
.andExpect(status().isNoContent());
}
@Test
public void testDeleteMeasureByIdForNotFound() throws Exception {
doThrow(new GriffinException.NotFoundException(GriffinExceptionMessage
.MEASURE_ID_DOES_NOT_EXIST))
.when(service).deleteMeasureById(1L);
mvc.perform(delete(URLHelper.API_VERSION_PATH + "/measures/1"))
.andExpect(status().isNotFound());
}
@Test
public void testDeleteMeasureByIdForGriffinFailureWithException() throws
Exception {
doThrow(new GriffinException.ServiceException("Failed to delete job",
new Exception()))
.when(service).deleteMeasureById(1L);
mvc.perform(delete(URLHelper.API_VERSION_PATH + "/measures/1"))
.andExpect(status().isInternalServerError());
}
@Test
public void testDeleteMeasuresForSuccess() throws Exception {
doNothing().when(service).deleteMeasures();
mvc.perform(delete(URLHelper.API_VERSION_PATH + "/measures"))
.andExpect(status().isNoContent());
}
@Test
public void testDeleteMeasuresForNotFound() throws Exception {
doThrow(new GriffinException.NotFoundException(GriffinExceptionMessage
.MEASURE_ID_DOES_NOT_EXIST))
.when(service).deleteMeasures();
mvc.perform(delete(URLHelper.API_VERSION_PATH + "/measures"))
.andExpect(status().isNotFound());
}
@Test
public void testDeleteMeasuresForGriffinFailureWithException()
throws Exception {
doThrow(new GriffinException.ServiceException("Failed to delete job",
new Exception()))
.when(service).deleteMeasures();
mvc.perform(delete(URLHelper.API_VERSION_PATH + "/measures"))
.andExpect(status().isInternalServerError());
}
@Test
public void testUpdateMeasureForSuccess() throws Exception {
Measure measure = createGriffinMeasure("view_item_hourly");
String measureJson = JsonUtil.toJson(measure);
doReturn(measure).when(service).updateMeasure(measure);
mvc.perform(put(URLHelper.API_VERSION_PATH + "/measures")
.contentType(MediaType.APPLICATION_JSON).content(measureJson))
.andExpect(status().isOk())
.andExpect(jsonPath("$.name", is("view_item_hourly")));
}
@Test
public void testUpdateMeasureForNotFound() throws Exception {
Measure measure = createGriffinMeasure("view_item_hourly");
String measureJson = JsonUtil.toJson(measure);
doThrow(new GriffinException.NotFoundException(GriffinExceptionMessage
.MEASURE_ID_DOES_NOT_EXIST))
.when(service).updateMeasure(measure);
mvc.perform(put(URLHelper.API_VERSION_PATH + "/measures")
.contentType(MediaType.APPLICATION_JSON).content(measureJson))
.andExpect(status().isNotFound());
}
@Test
public void testUpdateMeasureForTypeMismatch() throws Exception {
Measure measure = createGriffinMeasure("view_item_hourly");
String measureJson = JsonUtil.toJson(measure);
doThrow(new GriffinException.BadRequestException(GriffinExceptionMessage
.MEASURE_TYPE_DOES_NOT_MATCH))
.when(service).updateMeasure(measure);
mvc.perform(put(URLHelper.API_VERSION_PATH + "/measures")
.contentType(MediaType.APPLICATION_JSON).content(measureJson))
.andExpect(status().isBadRequest());
}
@Test
public void testGetAllMeasuresByOwner() throws Exception {
String owner = "test";
List<Measure> measureList = new LinkedList<>();
Measure measure = createGriffinMeasure("view_item_hourly");
measureList.add(measure);
given(service.getAliveMeasuresByOwner(owner)).willReturn(measureList);
mvc.perform(get(URLHelper.API_VERSION_PATH
+ "/measures/owner/" + owner)
.contentType(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
.andExpect(jsonPath("$.[0].name", is("view_item_hourly")))
;
}
@Test
public void testCreateNewMeasureForSuccess() throws Exception {
GriffinMeasure measure = createGriffinMeasure("view_item_hourly");
String measureJson = JsonUtil.toJson(measure);
given(service.createMeasure(measure)).willReturn(measure);
mvc.perform(post(URLHelper.API_VERSION_PATH + "/measures")
.contentType(MediaType.APPLICATION_JSON).content(measureJson))
.andExpect(status().isCreated())
.andExpect(jsonPath("$.name", is("view_item_hourly")));
}
@Test
public void testCreateNewMeasureForFailWithDuplicate() throws Exception {
Measure measure = createGriffinMeasure("view_item_hourly");
String measureJson = JsonUtil.toJson(measure);
doThrow(new GriffinException.ConflictException(GriffinExceptionMessage
.MEASURE_NAME_ALREADY_EXIST))
.when(service).createMeasure(measure);
mvc.perform(post(URLHelper.API_VERSION_PATH + "/measures")
.contentType(MediaType.APPLICATION_JSON).content(measureJson))
.andExpect(status().isConflict());
}
@Test
public void testCreateNewMeasureForFailWithInvalidParams() throws Exception {
Measure measure = createGriffinMeasure("view_item_hourly");
String measureJson = JsonUtil.toJson(measure);
doThrow(new GriffinException.BadRequestException(GriffinExceptionMessage
.MISSING_METRIC_NAME))
.when(service).createMeasure(measure);
mvc.perform(post(URLHelper.API_VERSION_PATH + "/measures")
.contentType(MediaType.APPLICATION_JSON).content(measureJson))
.andExpect(status().isBadRequest());
}
}
| 4,045 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/measure | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.measure.repo;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinMeasure;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import java.util.List;
import org.apache.griffin.core.config.EclipseLinkJpaConfigForTest;
import org.apache.griffin.core.measure.entity.*;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@DataJpaTest
@ContextConfiguration(classes = {EclipseLinkJpaConfigForTest.class})
public class MeasureRepoTest {

    @Autowired
    private TestEntityManager entityManager;

    @Autowired
    private MeasureRepo measureRepo;

    // Mocked so the JPA test slice can start without a live Hive metastore.
    @MockBean
    private IMetaStoreClient client;

    @Before
    public void setup() throws Exception {
        entityManager.clear();
        entityManager.flush();
        setEntityManager();
    }

    @Test
    public void testFindByNameAndDeleted() {
        String name = "m1";
        List<Measure> measures = measureRepo.findByNameAndDeleted(name, false);
        GriffinMeasure m = (GriffinMeasure) measures.get(0);
        List<DataSource> sources = m.getDataSources();
        DataConnector connector = sources.get(0).getConnector();
        Rule rule = m.getEvaluateRule().getRules().get(0);
        // JUnit convention: expected value first, actual value second,
        // so that failure messages read correctly.
        assertEquals(2, m.getSinksList().size());
        assertThat(sources.get(0).isBaseline()).isTrue();
        assertEquals(1, sources.get(0).getCheckpointMap().size());
        assertEquals("kafka", connector.getDataFrameName());
        assertEquals(3, connector.getConfigMap().size());
        assertEquals(DqType.ACCURACY, rule.getDqType());
        assertEquals("in", rule.getInDataFrameName());
        assertEquals("out", rule.getOutDataFrameName());
        assertEquals(1, rule.getDetailsMap().size());
        assertEquals(2, rule.getOutList().size());
    }

    @Test
    public void testFindByDeleted() {
        List<Measure> measures = measureRepo.findByDeleted(false);
        assertThat(measures.size()).isEqualTo(3);
    }

    @Test
    public void testFindByOwnerAndDeleted() {
        List<Measure> measures = measureRepo.findByOwnerAndDeleted("test",
                false);
        assertThat(measures.size()).isEqualTo(2);
    }

    @Test
    public void testFindByIdAndDeleted() {
        Measure measure = measureRepo.findByIdAndDeleted(1L, true);
        assertThat(measure).isNull();
    }

    @Test
    public void testFindOrganizations() {
        List<String> organizations = measureRepo.findOrganizations(false);
        assertThat(organizations.size()).isEqualTo(3);
    }

    @Test
    public void testFindNameByOrganization() {
        List<String> names = measureRepo.findNameByOrganization("org1", false);
        assertThat(names.size()).isEqualTo(1);
    }

    /**
     * Seeds three measures (organizations org1..org3, the third with a
     * distinct owner) that the query tests above rely on.
     */
    public void setEntityManager() throws Exception {
        Measure measure1 = createGriffinMeasure("m1");
        measure1.setOrganization("org1");
        entityManager.persistAndFlush(measure1);

        Measure measure2 = createGriffinMeasure("m2");
        measure2.setOrganization("org2");
        entityManager.persistAndFlush(measure2);

        Measure measure3 = createGriffinMeasure("m3");
        measure3.setOrganization("org3");
        measure3.setOwner("owner");
        entityManager.persistAndFlush(measure3);
    }
}
| 4,046 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/measure | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/measure/repo/DataConnectorRepoTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.measure.repo;
import static org.apache.griffin.core.util.EntityMocksHelper.createDataConnector;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.griffin.core.config.EclipseLinkJpaConfigForTest;
import org.apache.griffin.core.measure.entity.DataConnector;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@DataJpaTest
@ContextConfiguration(classes = {EclipseLinkJpaConfigForTest.class})
public class DataConnectorRepoTest {

    @Autowired
    private TestEntityManager entityManager;

    @Autowired
    private DataConnectorRepo dcRepo;

    // Mocked so the JPA test slice can start without a live Hive metastore.
    @MockBean
    private IMetaStoreClient client;

    @Before
    public void setup() throws Exception {
        entityManager.clear();
        entityManager.flush();
        setEntityManager();
    }

    @Test
    public void testFindByConnectorNames() {
        List<DataConnector> connectors = dcRepo.findByConnectorNames(Arrays
                .asList("name1", "name2"));
        // JUnit convention: expected value first, actual value second.
        assertEquals(2, connectors.size());
    }

    @Test
    public void testFindByConnectorNamesWithEmpty() {
        // An empty name list must yield an empty result, not an error.
        List<DataConnector> connectors = dcRepo.findByConnectorNames(
                new ArrayList<>());
        assertEquals(0, connectors.size());
    }

    /**
     * Seeds three connectors; only name1/name2 are queried by the tests.
     */
    public void setEntityManager() throws Exception {
        DataConnector dc1 = createDataConnector("name1", "database1", "table1",
                "/dt=#YYYYMM#");
        entityManager.persistAndFlush(dc1);

        DataConnector dc2 = createDataConnector("name2", "database2", "table2",
                "/dt=#YYYYMM#");
        entityManager.persistAndFlush(dc2);

        DataConnector dc3 = createDataConnector("name3", "database3", "table3",
                "/dt=#YYYYMM#");
        entityManager.persistAndFlush(dc3);
    }
}
| 4,047 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metric/MetricControllerTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.metric;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_METRIC_VALUE_FORMAT;
import static org.apache.griffin.core.measure.entity.DqType.ACCURACY;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.mockito.BDDMockito.given;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.exception.GriffinExceptionHandler;
import org.apache.griffin.core.metric.model.Metric;
import org.apache.griffin.core.metric.model.MetricValue;
import org.apache.griffin.core.util.JsonUtil;
import org.apache.griffin.core.util.URLHelper;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
@RunWith(SpringRunner.class)
public class MetricControllerTest {

    private MockMvc mvc;

    @InjectMocks
    private MetricController controller;

    @Mock
    private MetricServiceImpl service;

    @Before
    public void setup() {
        // Standalone setup avoids loading a full Spring context while still
        // routing exceptions through the shared GriffinExceptionHandler.
        mvc = MockMvcBuilders
                .standaloneSetup(controller)
                .setControllerAdvice(new GriffinExceptionHandler())
                .build();
    }

    @Test
    public void testGetAllMetricsSuccess() throws Exception {
        Metric metric = new Metric("metricName", ACCURACY, "owner", Collections
                .emptyList());
        given(service.getAllMetrics()).willReturn(
                Collections.singletonMap("measureName", Collections
                        .singletonList(metric)));

        mvc.perform(get(URLHelper.API_VERSION_PATH + "/metrics"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.measureName", hasSize(1)));
    }

    @Test
    public void testGetAllMetricsFailureWithException() throws Exception {
        given(service.getAllMetrics())
                .willThrow(new GriffinException.ServiceException(
                        "Failed to get metrics", new RuntimeException()));

        mvc.perform(get(URLHelper.API_VERSION_PATH + "/metrics"))
                .andExpect(status().isInternalServerError());
    }

    @Test
    public void testGetMetricValuesSuccess() throws Exception {
        MetricValue value = new MetricValue("jobName", 1L, new HashMap<>());
        given(service.getMetricValues(Matchers.anyString(), Matchers.anyInt(),
                Matchers.anyInt(), Matchers.anyLong()))
                .willReturn(Collections.singletonList(value));

        // Assert the status as well as the payload, consistent with the
        // other success tests in this class.
        mvc.perform(get(URLHelper.API_VERSION_PATH + "/metrics/values")
                .param("metricName", "jobName")
                .param("size", "5"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.[0].name", is("jobName")));
    }

    @Test
    public void testGetMetricValuesFailureWithException() throws Exception {
        given(service.getMetricValues(Matchers.anyString(), Matchers.anyInt(),
                Matchers.anyInt(), Matchers.anyLong()))
                .willThrow(new GriffinException.ServiceException(
                        "Failed to get metric values", new IOException()));

        mvc.perform(get(URLHelper.API_VERSION_PATH + "/metrics/values")
                .param("metricName", "jobName")
                .param("size", "5"))
                .andExpect(status().isInternalServerError());
    }

    @Test
    public void testAddMetricValuesSuccess() throws Exception {
        List<MetricValue> values = Collections.singletonList(new MetricValue());
        given(service.addMetricValues(Matchers.any()))
                .willReturn(
                        new ResponseEntity<>(
                                "{\"errors\": false, \"items\": []}",
                                HttpStatus.OK));

        mvc.perform(
                post(URLHelper.API_VERSION_PATH + "/metrics/values")
                        .contentType(MediaType.APPLICATION_JSON)
                        .content(JsonUtil.toJson(values)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.errors", is(false)));
    }

    @Test
    public void testAddMetricValuesFailureWithException() throws Exception {
        List<MetricValue> values = Collections.singletonList(new MetricValue());
        given(service.addMetricValues(Matchers.any()))
                .willThrow(new GriffinException.ServiceException(
                        "Failed to add metric values", new IOException()));

        mvc.perform(post(URLHelper.API_VERSION_PATH + "/metrics/values")
                .contentType(MediaType.APPLICATION_JSON)
                .content(JsonUtil.toJson(values)))
                .andExpect(status().isInternalServerError());
    }

    @Test
    public void testAddMetricValuesFailureWithInvalidFormat() throws Exception {
        List<MetricValue> values = Collections.singletonList(new MetricValue());
        given(service.addMetricValues(Matchers.any()))
                .willThrow(new GriffinException.BadRequestException
                        (INVALID_METRIC_VALUE_FORMAT));

        mvc.perform(post(URLHelper.API_VERSION_PATH + "/metrics/values")
                .contentType(MediaType.APPLICATION_JSON)
                .content(JsonUtil.toJson(values)))
                .andExpect(status().isBadRequest());
    }

    @Test
    public void testDeleteMetricValuesSuccess() throws Exception {
        given(service.deleteMetricValues("metricName"))
                .willReturn(new ResponseEntity<>("{\"failures\": []}",
                        HttpStatus.OK));

        mvc.perform(delete(URLHelper.API_VERSION_PATH + "/metrics/values")
                .param("metricName", "metricName"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.failures", hasSize(0)));
    }

    @Test
    public void testDeleteMetricValuesFailureWithException() throws Exception {
        given(service.deleteMetricValues("metricName"))
                .willThrow(new GriffinException.ServiceException(
                        "Failed to delete metric values.",
                        new IOException()));

        mvc.perform(delete(URLHelper.API_VERSION_PATH + "/metrics/values")
                .param("metricName", "metricName"))
                .andExpect(status().isInternalServerError());
    }
}
| 4,048 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metric/MetricStoreImplTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.metric;
import org.apache.griffin.core.metric.model.MetricValue;
import org.apache.http.HttpEntity;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URISyntaxException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.BDDMockito.*;
@RunWith(PowerMockRunner.class)
@PrepareForTest({RestClient.class, RestClientBuilder.class})
@PowerMockIgnore("javax.management.*")
public class MetricStoreImplTest {

    private static final String INDEX = "griffin";
    private static final String TYPE = "accuracy";
    private static final String urlBase = String.format("/%s/%s", INDEX, TYPE);
    private static final String urlGet = urlBase.concat("/_search?filter_path=hits.hits._source");

    private RestClient restClientMock;

    @Before
    public void setup() {
        // Intercept the static RestClient.builder(...) call so that
        // MetricStoreImpl receives a mock client instead of opening a real
        // Elasticsearch connection.
        PowerMockito.mockStatic(RestClient.class);
        restClientMock = PowerMockito.mock(RestClient.class);
        RestClientBuilder restClientBuilderMock = PowerMockito.mock(RestClientBuilder.class);
        given(RestClient.builder(anyVararg())).willReturn(restClientBuilderMock);
        given(restClientBuilderMock.build()).willReturn(restClientMock);
    }

    @Test
    public void testBuildBasicAuthString()
            throws NoSuchMethodException, InvocationTargetException,
            IllegalAccessException {
        // buildBasicAuthString is private static; invoke it via reflection.
        Method m = MetricStoreImpl.class.getDeclaredMethod
                ("buildBasicAuthString", String.class,
                        String.class);
        m.setAccessible(true);
        String authStr = (String) m.invoke(null, "user", "password");
        // assertEquals reports both values on failure, unlike
        // assertTrue(a.equals(b)).
        assertEquals("Basic dXNlcjpwYXNzd29yZA==", authStr);
    }

    @Test
    public void testMetricGetting() throws IOException, URISyntaxException {
        // given: a canned Elasticsearch response loaded from the classpath
        Response responseMock = PowerMockito.mock(Response.class);
        HttpEntity httpEntityMock = PowerMockito.mock(HttpEntity.class);
        InputStream is = Thread.currentThread().getContextClassLoader()
                .getResourceAsStream("metricvalue.json");
        Map<String, String> map = new HashMap<>();
        map.put("q", "metadata.applicationId:application_1549876136110_0018");
        Map<String, Object> value = new HashMap<String, Object>(){{
            put("total", 74);
            put("miss", 0);
            put("matched", 74);
            put("matchedFraction", 1);
        }};
        MetricValue expectedMetric = new MetricValue("de_demo_results_comparision",
                1549985089648L,
                Collections.singletonMap("applicationId", "application_1549876136110_0018"),
                value);
        given(restClientMock.performRequest(eq("GET"), eq(urlGet), eq(map), anyVararg())).willReturn(responseMock);
        given(responseMock.getEntity()).willReturn(httpEntityMock);
        given(httpEntityMock.getContent()).willReturn(is);

        // when
        MetricStoreImpl metricStore = new MetricStoreImpl("localhost", 0, "", "", "");
        MetricValue metric = metricStore.getMetric("application_1549876136110_0018");

        // then
        assertEquals(expectedMetric, metric);
    }
}
| 4,049 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metric/MetricServiceImplTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.metric;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinJob;
import static org.apache.griffin.core.util.EntityMocksHelper.createGriffinMeasure;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.BDDMockito.given;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.LivySessionStates;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.griffin.core.job.repo.JobRepo;
import org.apache.griffin.core.measure.entity.Measure;
import org.apache.griffin.core.measure.repo.MeasureRepo;
import org.apache.griffin.core.metric.model.Metric;
import org.apache.griffin.core.metric.model.MetricValue;
import org.apache.griffin.core.util.JsonUtil;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
public class MetricServiceImplTest {

    @InjectMocks
    private MetricServiceImpl service;

    @Mock
    private MeasureRepo<Measure> measureRepo;

    @Mock
    private JobRepo<AbstractJob> jobRepo;

    @Mock
    private MetricStoreImpl metricStore;

    @Mock
    private JobInstanceRepo jobInstanceRepo;

    @Autowired
    private Environment env;

    @Before
    public void setup() {
    }

    @Test
    public void test() {
        // Sanity print of the configured JDBC driver class. NOTE(review):
        // this asserts nothing; consider removing or turning it into a real
        // assertion.
        System.out.println(env
                .getProperty("spring.datasource.driver-class-name"));
    }

    @Test
    public void testGetAllMetricsSuccess() throws Exception {
        Measure measure = createGriffinMeasure("measureName");
        measure.setId(1L);
        AbstractJob job = createGriffinJob();
        MetricValue value = new MetricValue("jobName", 1L, new HashMap<>());
        given(jobRepo.findByDeleted(false)).willReturn(Collections
                .singletonList(job));
        given(measureRepo.findByDeleted(false)).willReturn(Collections
                .singletonList(measure));
        given(metricStore.getMetricValues(Matchers.anyString(),
                Matchers.anyInt(), Matchers.anyInt(),
                Matchers.anyLong()))
                .willReturn(Collections.singletonList(value));

        Map<String, List<Metric>> metricMap = service.getAllMetrics();
        // JUnit convention: expected value first, actual value second.
        assertEquals("jobName", metricMap.get("measureName").get(0).getName());
    }

    @Test(expected = GriffinException.ServiceException.class)
    public void testGetAllMetricsFailureWithException() throws Exception {
        Measure measure = createGriffinMeasure("measureName");
        measure.setId(1L);
        AbstractJob job = createGriffinJob();
        given(jobRepo.findByDeleted(false)).willReturn(Collections
                .singletonList(job));
        given(measureRepo.findByDeleted(false)).willReturn(Collections
                .singletonList(measure));
        given(metricStore.getMetricValues(Matchers.anyString(),
                Matchers.anyInt(), Matchers.anyInt(),
                Matchers.anyLong()))
                .willThrow(new IOException());

        service.getAllMetrics();
    }

    @Test
    public void testGetMetricValuesSuccess() throws IOException {
        MetricValue value = new MetricValue("jobName", 1L, new HashMap<>());
        given(metricStore.getMetricValues(Matchers.anyString(),
                Matchers.anyInt(), Matchers.anyInt(),
                Matchers.anyLong()))
                .willReturn(Collections.singletonList(value));

        List<MetricValue> values = service.getMetricValues("jobName", 0, 300,
                0);
        assertEquals(1, values.size());
        assertEquals("jobName", values.get(0).getName());
    }

    @Test(expected = GriffinException.ServiceException.class)
    public void testGetMetricValuesFailureWithException() throws IOException {
        given(metricStore.getMetricValues(Matchers.anyString(),
                Matchers.anyInt(), Matchers.anyInt(),
                Matchers.anyLong()))
                .willThrow(new IOException());

        service.getMetricValues("jobName", 0, 300, 0);
    }

    @Test
    public void testAddMetricValuesSuccess() throws IOException {
        Map<String, Object> value = new HashMap<>();
        value.put("total", 10000);
        value.put("matched", 10000);
        List<MetricValue> values = Collections.singletonList(
                new MetricValue("jobName", 1L, value));
        given(metricStore.addMetricValues(values))
                .willReturn(
                        new ResponseEntity<>(
                                "{\"errors\": false, \"items\": []}",
                                HttpStatus.OK));

        ResponseEntity<?> response = service.addMetricValues(values);
        Map body = JsonUtil.toEntity(response.getBody().toString(), Map.class);
        assertEquals(HttpStatus.OK, response.getStatusCode());
        assertNotNull(body);
        assertEquals("false", body.get("errors").toString());
    }

    @Test(expected = GriffinException.BadRequestException.class)
    public void testAddMetricValuesFailureWithInvalidFormat() {
        // A MetricValue with no name/timestamp/value is rejected up front.
        List<MetricValue> values = Collections.singletonList(new MetricValue());
        service.addMetricValues(values);
    }

    @Test(expected = GriffinException.ServiceException.class)
    public void testAddMetricValuesFailureWithException() throws IOException {
        Map<String, Object> value = new HashMap<>();
        value.put("total", 10000);
        value.put("matched", 10000);
        List<MetricValue> values = Collections.singletonList(
                new MetricValue("jobName", 1L, value));
        given(metricStore.addMetricValues(values)).willThrow(new IOException());

        service.addMetricValues(values);
    }

    @Test
    public void testDeleteMetricValuesSuccess() throws IOException {
        given(metricStore.deleteMetricValues("metricName"))
                .willReturn(new ResponseEntity<>("{\"failures\": []}",
                        HttpStatus.OK));

        ResponseEntity<?> response = service.deleteMetricValues("metricName");
        Map body = JsonUtil.toEntity(response.getBody().toString(), Map.class);
        assertEquals(HttpStatus.OK, response.getStatusCode());
        assertNotNull(body);
        assertEquals(Collections.emptyList(), body.get("failures"));
    }

    @Test(expected = GriffinException.ServiceException.class)
    public void testDeleteMetricValuesFailureWithException()
            throws IOException {
        given(metricStore.deleteMetricValues("metricName"))
                .willThrow(new IOException());

        service.deleteMetricValues("metricName");
    }

    @Test
    public void testFindMetricSuccess() throws IOException {
        Long id = 1L;
        String appId = "application";
        MetricValue expectedMetric = new MetricValue(
                "name", 1234L, Collections.singletonMap("applicationId", appId), new HashMap<>());
        given(jobInstanceRepo.findByInstanceId(id))
                .willReturn(new JobInstanceBean(LivySessionStates.State.RUNNING, 12L, 32L, appId));
        given(metricStore.getMetric(appId))
                .willReturn(expectedMetric);

        MetricValue actualMetric = service.findMetric(id);
        assertEquals(expectedMetric, actualMetric);
    }

    @Test(expected = GriffinException.NotFoundException.class)
    public void testFailedToFindJobInstance() throws IOException {
        Long id = 1L;
        given(jobInstanceRepo.findByInstanceId(id))
                .willReturn(null);

        service.findMetric(id);
    }

    @Test(expected = GriffinException.ServiceException.class)
    public void testFindMetricFailure() throws IOException {
        Long id = 1L;
        String appId = "application";
        given(jobInstanceRepo.findByInstanceId(id))
                .willReturn(new JobInstanceBean(LivySessionStates.State.RUNNING, 12L, 32L, appId));
        given(metricStore.getMetric(appId))
                .willThrow(new GriffinException.ServiceException("", new RuntimeException()));

        service.findMetric(id);
    }
}
| 4,050 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metastore | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImplTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.metastore.kafka;
import static org.assertj.core.api.Assertions.fail;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import io.confluent.kafka.schemaregistry.client.rest.entities.Config;
import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.ResponseEntity;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.web.client.RestTemplate;
@RunWith(SpringRunner.class)
public class KafkaSchemaServiceImplTest {

    @MockBean
    private KafkaSchemaServiceImpl service;

    @Before
    public void setup() {
        // The service itself is a mock; only its RestTemplate collaborator
        // is replaced so the stubbed HTTP calls below can be wired up.
        service.restTemplate = mock(RestTemplate.class);
    }

    @Test
    public void testGetSchemaString() {
        try {
            SchemaString ss = new SchemaString();
            ResponseEntity entity = mock(ResponseEntity.class);
            when(service.restTemplate.getForEntity(
                    "${kafka.schema.registry.url}/schemas/ids/1",
                    SchemaString.class)).thenReturn(entity);
            when(entity.getBody()).thenReturn(ss);
            service.getSchemaString(1);
        } catch (Throwable t) {
            // Message describes this test, not the Hive test it was
            // copy-pasted from.
            fail("Failed to get schema string by id");
        }
    }

    @Test
    public void testGetSubjects() {
        try {
            ResponseEntity entity = mock(ResponseEntity.class);
            when(service.restTemplate.getForEntity(
                    "${kafka.schema.registry.url}/subjects",
                    String[].class)).thenReturn(entity);
            when(entity.getBody()).thenReturn(new String[]{"aaa", "bbb"});
            service.getSubjects();
        } catch (Throwable t) {
            fail("Failed to get subjects");
        }
    }

    @Test
    public void testGetSubjectVersions() {
        try {
            ResponseEntity entity = mock(ResponseEntity.class);
            when(service.restTemplate.getForEntity(
                    "${kafka.schema.registry.url}/subjects/sub/versions",
                    Integer[].class)).thenReturn(entity);
            when(entity.getBody()).thenReturn(new Integer[]{1, 2});
            service.getSubjectVersions("sub");
        } catch (Throwable t) {
            fail("Failed to get subject versions");
        }
    }

    @Test
    public void testGetSubjectSchema() {
        try {
            Schema schema = mock(Schema.class);
            ResponseEntity entity = mock(ResponseEntity.class);
            when(service.restTemplate.getForEntity(
                    "${kafka.schema.registry.url}/subjects/sub/versions/ver",
                    Schema.class)).thenReturn(entity);
            when(entity.getBody()).thenReturn(schema);
            service.getSubjectSchema("sub", "ver");
        } catch (Throwable t) {
            fail("Failed to get subject schema");
        }
    }

    @Test
    public void testGetTopLevelConfig() {
        try {
            Config config = mock(Config.class);
            ResponseEntity entity = mock(ResponseEntity.class);
            when(service.restTemplate.getForEntity(
                    "${kafka.schema.registry.url}/config",
                    Config.class)).thenReturn(entity);
            when(entity.getBody()).thenReturn(config);
            service.getTopLevelConfig();
        } catch (Throwable t) {
            fail("Failed to get top-level config");
        }
    }

    @Test
    public void testGetSubjectLevelConfig() {
        try {
            Config config = mock(Config.class);
            ResponseEntity entity = mock(ResponseEntity.class);
            when(service.restTemplate.getForEntity(
                    "${kafka.schema.registry.url}/config/subject",
                    Config.class)).thenReturn(entity);
            when(entity.getBody()).thenReturn(config);
            service.getSubjectLevelConfig("subject");
        } catch (Throwable t) {
            fail("Failed to get subject-level config");
        }
    }
}
| 4,051 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metastore | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaControllerTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.metastore.kafka;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.Arrays;
import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString;
import org.apache.griffin.core.util.URLHelper;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
@RunWith(SpringRunner.class)
@WebMvcTest(value = KafkaSchemaController.class, secure = false)
public class KafkaSchemaControllerTest {

    private static final String API_PATH = URLHelper.API_VERSION_PATH +
            URLHelper.KAFKA_API_PATH;

    @Autowired
    private MockMvc mockMvc;

    @MockBean
    KafkaSchemaServiceImpl kafkaSchemaService;

    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
    }

    @Test
    public void test_getSubjects() throws Exception {
        int id = 1;
        SchemaString ss = new SchemaString();
        when(kafkaSchemaService.getSchemaString(id)).thenReturn(ss);

        mockMvc.perform(get(API_PATH + "/schema/{id}", id))
                .andExpect(status().isOk());
        verify(kafkaSchemaService).getSchemaString(id);
    }

    @Test
    public void test_getSchemaString() throws Exception {
        when(kafkaSchemaService.getSubjects()).thenReturn(null);

        mockMvc.perform(get(API_PATH + "/subject"))
                .andExpect(status().isOk());
        verify(kafkaSchemaService).getSubjects();
    }

    @Test
    public void test_getSubjectVersions() throws Exception {
        String subject = "s-1";
        when(kafkaSchemaService.getSubjectVersions(subject)).thenReturn(Arrays
                .asList(1, 2, 3));

        mockMvc.perform(get(API_PATH + "/versions")
                .param("subject", subject))
                .andExpect(status().isOk());
        verify(kafkaSchemaService).getSubjectVersions(subject);
    }

    @Test
    public void test_getSubjectSchema() throws Exception {
        String subject = "ss.s";
        String version = "ss";
        when(kafkaSchemaService.getSubjectSchema(subject, version))
                .thenReturn(null);

        // The URL has no template placeholders; subject/version are sent as
        // request params only (stray uriVars removed).
        mockMvc.perform(get(API_PATH + "/subjectSchema")
                .param("subject", subject)
                .param("version", version))
                .andExpect(status().isOk());
        verify(kafkaSchemaService).getSubjectSchema(subject, version);
    }

    @Test
    public void test_getTopLevelConfig() throws Exception {
        when(kafkaSchemaService.getTopLevelConfig()).thenReturn(null);

        mockMvc.perform(get(API_PATH + "/config"))
                .andExpect(status().isOk());
        verify(kafkaSchemaService).getTopLevelConfig();
    }

    @Test
    public void test_getSubjectLevelConfig() throws Exception {
        String subject = "sss";
        when(kafkaSchemaService.getSubjectLevelConfig(subject))
                .thenReturn(null);

        mockMvc.perform(get(API_PATH + "/config/{subject}", subject))
                .andExpect(status().isOk());
        verify(kafkaSchemaService).getSubjectLevelConfig(subject);
    }
}
| 4,052 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metastore | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceJDBCImplTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.metastore.hive;
import org.apache.griffin.core.config.CacheConfig;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Table;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.powermock.api.mockito.PowerMockito.when;
@RunWith(SpringRunner.class)
public class HiveMetastoreServiceJDBCImplTest {
@TestConfiguration
@EnableCaching
public static class HiveMetaStoreServiceConfiguration extends CacheConfig {
@Bean("hiveMetaStoreServiceJdbcImpl")
public HiveMetaStoreServiceJdbcImpl serviceJDBC() {
return new HiveMetaStoreServiceJdbcImpl();
}
@Bean
CacheManager cacheManager() {
return new ConcurrentMapCacheManager("jdbcHive");
}
}
private HiveMetaStoreServiceJdbcImpl serviceJdbc = new HiveMetaStoreServiceJdbcImpl();
@Mock
private Connection conn;
@Mock
private Statement stmt;
@Mock
private ResultSet rs;
@Before
public void setUp() throws SQLException {
serviceJdbc.setConn(conn);
serviceJdbc.setHiveClassName("org.apache.hive.jdbc.HiveDriver");
serviceJdbc.setNeedKerberos("true");
serviceJdbc.setKeytabPath("/path/to/keytab");
serviceJdbc.setKeytabUser("user");
}
@Test
public void testGetComment() {
String colStr = "`session_date` string COMMENT 'this is session date'";
String comment = serviceJdbc.getComment(colStr);
assert (comment.equals("this is session date"));
colStr = "`session_date` string COMMENT ''";
comment = serviceJdbc.getComment(colStr);
Assert.assertTrue(comment.isEmpty());
}
@Test
public void testgetAllDatabases() throws SQLException {
when(conn.createStatement()).thenReturn(stmt);
when(stmt.executeQuery(anyString())).thenReturn(rs);
when(rs.next()).thenReturn(true).thenReturn(false);
when(rs.getString(anyInt())).thenReturn("default");
Iterable<String> res = serviceJdbc.getAllDatabases();
for (String s : res) {
Assert.assertEquals(s, "default");
break;
}
}
@Test
public void testGetAllTableNames() throws SQLException {
when(conn.createStatement()).thenReturn(stmt);
when(stmt.executeQuery(anyString())).thenReturn(rs);
when(rs.next()).thenReturn(true).thenReturn(true).thenReturn(false);
when(rs.getString(anyInt())).thenReturn("session_data").thenReturn("session_summary");
Iterable<String> res = serviceJdbc.getAllTableNames("default");
StringBuilder sb = new StringBuilder();
for (String s : res) {
sb.append(s).append(",");
}
Assert.assertEquals(sb.toString(), "session_data,session_summary,");
}
@Test
public void testGetTable() throws SQLException {
String meta = "CREATE EXTERNAL TABLE `default.session_data`( `session_date` string COMMENT 'this is session date', `site_id` int COMMENT '', `guid` string COMMENT '', `user_id` string COMMENT '')COMMENT 'session_data for session team' PARTITIONED BY ( `dt` string, `place` int) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' LOCATION 'hdfs://localhost/session/common/session_data'TBLPROPERTIES ( 'COLUMN_STATS_ACCURATE'='false', 'avro.schema.url'='hdfs://localhost/griffin/session/avro/session-data-1.0.avsc', 'transient_lastDdlTime'='1535651637')";
when(conn.createStatement()).thenReturn(stmt);
when(stmt.executeQuery(anyString())).thenReturn(rs);
when(rs.next()).thenReturn(true).thenReturn(false);
when(rs.getString(anyInt())).thenReturn(meta);
Table res = serviceJdbc.getTable("default", "session_data");
assert (res.getDbName().equals("default"));
assert (res.getTableName().equals("session_data"));
assert (res.getSd().getLocation().equals("hdfs://localhost/session/common/session_data"));
List<FieldSchema> fieldSchemas = res.getSd().getCols();
for (FieldSchema fieldSchema : fieldSchemas) {
Assert.assertEquals(fieldSchema.getName(),"session_date");
Assert.assertEquals(fieldSchema.getType(),"string");
Assert.assertEquals(fieldSchema.getComment(),"this is session date");
break;
}
}
} | 4,053 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metastore | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.metastore.hive;
import org.apache.griffin.core.util.URLHelper;
import org.apache.hadoop.hive.metastore.api.Table;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import java.util.*;
import static org.hamcrest.Matchers.*;
import static org.mockito.BDDMockito.given;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@RunWith(SpringRunner.class)
@WebMvcTest(value = HiveMetaStoreController.class, secure = false)
public class HiveMetaStoreControllerTest {

    @Autowired
    private MockMvc mockMvc;

    @MockBean
    @Qualifier(value = "metastoreSvc")
    private HiveMetaStoreService hiveMetaStoreService;

    @Before
    public void setup() {
    }

    /** GET /metadata/hive/dbs returns the list of database names. */
    @Test
    public void testGetAllDatabases() throws Exception {
        final String database = "default";
        given(hiveMetaStoreService.getAllDatabases())
                .willReturn(Collections.singletonList(database));
        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/metadata/hive/dbs"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.[0]", is(database)));
    }

    /** GET /metadata/hive/tables/names?db=... returns table names. */
    @Test
    public void testGetAllTableNames() throws Exception {
        final String database = "default";
        final String tableName = "table";
        given(hiveMetaStoreService.getAllTableNames(database))
                .willReturn(Collections.singletonList(tableName));
        mockMvc.perform(get(URLHelper.API_VERSION_PATH +
                "/metadata/hive/tables/names").param("db", database))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.[0]", is(tableName)));
    }

    /** GET /metadata/hive/tables?db=... returns full table objects. */
    @Test
    public void testGetAllTablesWithDb() throws Exception {
        final String database = "default";
        given(hiveMetaStoreService.getAllTable(database))
                .willReturn(Collections.singletonList(new Table()));
        mockMvc.perform(get(URLHelper.API_VERSION_PATH +
                "/metadata/hive/tables").param("db", database))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.[0].tableName", is(nullValue())));
    }

    /** GET /metadata/hive/dbs/tables maps each database to its tables. */
    @Test
    public void testGetAllTables() throws Exception {
        Map<String, List<Table>> tablesByDb = new HashMap<>();
        tablesByDb.put("table", new ArrayList<>());
        given(hiveMetaStoreService.getAllTable()).willReturn(tablesByDb);
        mockMvc.perform(get(URLHelper.API_VERSION_PATH +
                "/metadata/hive/dbs/tables"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.table", hasSize(0)));
    }

    /** GET /metadata/hive/table?db=...&table=... returns one table. */
    @Test
    public void testGetTable() throws Exception {
        final String database = "default";
        final String tableName = "table";
        Table stubbed = new Table(tableName, null, null, 0, 0, 0, null, null,
                null, null, null, null);
        given(hiveMetaStoreService.getTable(database, tableName))
                .willReturn(stubbed);
        mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/metadata/hive/table")
                .param("db", database).param("table",
                        tableName))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.tableName", is(tableName)));
    }
}
| 4,054 |
0 | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metastore | Create_ds/griffin/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.metastore.hive;
import org.apache.griffin.core.config.CacheConfig;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.thrift.TException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.*;
@RunWith(SpringRunner.class)
@ContextConfiguration
public class HiveMetaStoreServiceImplTest {

    @Configuration
    @EnableCaching
    public static class HiveMetaStoreServiceConfiguration extends CacheConfig {
        @Bean("hiveMetaStoreServiceImpl")
        public HiveMetaStoreServiceImpl service() {
            return new HiveMetaStoreServiceImpl();
        }

        @Bean
        CacheManager cacheManager() {
            return new ConcurrentMapCacheManager("hive");
        }
    }

    @MockBean
    private HiveMetaStoreClient client;

    @Autowired
    private HiveMetaStoreService service;

    @Autowired
    private CacheManager cacheManager;

    @Before
    public void setup() {
        // Start every test with an empty "hive" cache so caching behaviour
        // is asserted explicitly per test.
        cacheManager.getCache("hive").clear();
    }

    // NOTE: assertEquals calls below use the JUnit convention of
    // (expected, actual) — the original had the arguments reversed, which
    // produces misleading failure messages.

    @Test
    public void testGetAllDatabasesForNormalRun() throws TException {
        given(client.getAllDatabases()).willReturn(Arrays.asList("default"));
        assertTrue(service.getAllDatabases().iterator().hasNext());
    }

    /** A MetaException triggers reconnect() and a null (uncached) result. */
    @Test
    public void testGetAllDatabasesForMetaException() throws TException {
        given(client.getAllDatabases()).willThrow(MetaException.class);
        doNothing().when(client).reconnect();
        assertTrue(service.getAllDatabases() == null);
        verify(client).getAllDatabases();
        verify(client).reconnect();
        // check it's not cached
        service.getAllDatabases();
        verify(client, times(2)).reconnect();
        verify(client, times(2)).getAllDatabases();
    }

    @Test
    public void testGetAllTableNamesForNormalRun() throws MetaException {
        String dbName = "default";
        given(client.getAllTables(dbName)).willReturn(Arrays.asList(dbName));
        assertTrue(service.getAllTableNames(dbName).iterator().hasNext());
    }

    /** A MetaException triggers reconnect() and a null (uncached) result. */
    @Test
    public void testGetAllTableNamesForMetaException() throws MetaException {
        String dbName = "default";
        given(client.getAllTables(dbName)).willThrow(MetaException.class);
        doNothing().when(client).reconnect();
        assertTrue(service.getAllTableNames(dbName) == null);
        verify(client).reconnect();
        verify(client).getAllTables(dbName);
        // check it's not cached
        service.getAllTableNames(dbName);
        verify(client, times(2)).reconnect();
        verify(client, times(2)).getAllTables(dbName);
    }

    @Test
    public void testGetAllTableByDBNameForNormalRun() throws TException {
        String useDbName = "default";
        String tableName = "table";
        given(client.getAllTables(useDbName)).willReturn(Arrays
                .asList(tableName));
        given(client.getTable(useDbName, tableName)).willReturn(new Table());
        assertEquals(1, service.getAllTable(useDbName).size());
    }

    /** On MetaException the per-db table list degrades to empty, uncached. */
    @Test
    public void testGetAllTableByDBNameForMetaException() throws TException {
        String useDbName = "default";
        given(client.getAllTables(useDbName)).willThrow(MetaException.class);
        doNothing().when(client).reconnect();
        assertEquals(0, service.getAllTable(useDbName).size());
        verify(client).reconnect();
        verify(client).getAllTables(useDbName);
        // check it's not cached
        service.getAllTable(useDbName);
        verify(client, times(2)).reconnect();
        verify(client, times(2)).getAllTables(useDbName);
    }

    @Test
    public void testGetAllTableForNormalRun() throws TException {
        String useDbName = "default";
        String tableName = "table";
        List<String> databases = Arrays.asList(useDbName);
        given(client.getAllDatabases()).willReturn(databases);
        given(client.getAllTables(databases.get(0))).willReturn(Arrays
                .asList(tableName));
        given(client.getTable(useDbName, tableName)).willReturn(new Table());
        assertEquals(1, service.getAllTable().size());
    }

    /** Failure listing tables of one db leaves that db mapped to 0 tables. */
    @Test
    public void testGetAllTableForMetaException1() throws TException {
        String useDbName = "default";
        List<String> databases = Arrays.asList(useDbName);
        given(client.getAllDatabases()).willReturn(databases);
        given(client.getAllTables(useDbName)).willThrow(MetaException.class);
        doNothing().when(client).reconnect();
        assertEquals(0, service.getAllTable().get(useDbName).size());
    }

    /** Failure listing databases yields an empty overall map. */
    @Test
    public void testGetAllTableForMetaException2() throws TException {
        given(client.getAllDatabases()).willThrow(MetaException.class);
        doNothing().when(client).reconnect();
        assertEquals(0, service.getAllTable().size());
    }

    @Test
    public void testGetTableForNormalRun() throws Exception {
        String dbName = "default";
        String tableName = "tableName";
        given(client.getTable(dbName, tableName)).willReturn(new Table());
        assertTrue(service.getTable(dbName, tableName) != null);
    }

    /** A missing table triggers reconnect() and a null (uncached) result. */
    @Test
    public void testGetTableForException() throws Exception {
        String dbName = "default";
        String tableName = "tableName";
        given(client.getTable(dbName, tableName)).willThrow(NoSuchObjectException.class);
        doNothing().when(client).reconnect();
        assertTrue(service.getTable(dbName, tableName) == null);
        verify(client).reconnect();
        verify(client).getTable(dbName, tableName);
        // check it's not cached
        service.getTable(dbName, tableName);
        verify(client, times(2)).reconnect();
        verify(client, times(2)).getTable(dbName, tableName);
    }

    /** evictHiveCache clears cached results so the client is hit again. */
    @Test
    public void testEvictHiveCache() throws Exception {
        String useDbName = "default";
        String tableName = "tableName";
        List<String> databases = Arrays.asList(useDbName);
        given(client.getAllDatabases()).willReturn(databases);
        given(client.getAllTables(databases.get(0))).willReturn(Arrays
                .asList(tableName));
        given(client.getTable(useDbName, tableName)).willReturn(new Table());
        // populate cache
        assertEquals(1, service.getAllTable().size());
        verify(client).getAllDatabases();
        verify(client).getAllTables(useDbName);
        verify(client).getTable(useDbName, tableName);
        // verify cached
        service.getAllTable();
        verifyNoMoreInteractions(client);
        // reset the cache, verify values are cached again
        service.evictHiveCache();
        service.getAllTable().size();
        service.getAllTable().size();
        verify(client, times(2)).getAllDatabases();
        verify(client, times(2)).getAllTables(useDbName);
        verify(client, times(2)).getTable(useDbName, tableName);
    }
}
| 4,055 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core;
import org.apache.griffin.core.common.SimpleCORSFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.scheduling.annotation.EnableScheduling;
@SpringBootApplication
@EnableScheduling
public class GriffinWebApplication {

    private static final Logger LOG =
            LoggerFactory.getLogger(GriffinWebApplication.class);

    /** Boots the Spring context and starts the embedded server. */
    public static void main(String[] args) {
        SpringApplication.run(GriffinWebApplication.class, args);
        LOG.info("application started");
    }

    /** Registers the CORS filter applied to incoming HTTP requests. */
    @Bean
    public SimpleCORSFilter simpleFilter() {
        return new SimpleCORSFilter();
    }
}
| 4,056 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/util/JsonUtil.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class JsonUtil {
    private static final Logger LOGGER = LoggerFactory
            .getLogger(JsonUtil.class);

    // ObjectMapper is thread-safe once fully configured; sharing these two
    // instances avoids allocating and configuring a mapper (an expensive
    // object) on every call.
    private static final ObjectMapper MAPPER = new ObjectMapper();
    private static final ObjectMapper LENIENT_MAPPER = new ObjectMapper()
            .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
                    false);

    /**
     * Serializes an object to a compact JSON string.
     *
     * @param obj object to serialize; {@code null} logs a warning and
     *            returns {@code null}
     * @return JSON string, or {@code null} for null input
     * @throws JsonProcessingException if serialization fails
     */
    public static String toJson(Object obj) throws JsonProcessingException {
        if (obj == null) {
            LOGGER.warn("Object cannot be empty!");
            return null;
        }
        return MAPPER.writeValueAsString(obj);
    }

    /**
     * Serializes an object to pretty-printed JSON.
     *
     * @param obj object to serialize; {@code null} logs a warning and
     *            returns {@code null}
     * @return formatted JSON string, or {@code null} for null input
     * @throws JsonProcessingException if serialization fails
     */
    public static String toJsonWithFormat(Object obj)
            throws JsonProcessingException {
        if (obj == null) {
            LOGGER.warn("Object to be formatted cannot be empty!");
            return null;
        }
        ObjectWriter writer = MAPPER.writer().withDefaultPrettyPrinter();
        return writer.writeValueAsString(obj);
    }

    /**
     * Deserializes a JSON string into an instance of {@code type}.
     * Unknown JSON properties are ignored (lenient mapper), matching the
     * original per-call configuration.
     *
     * @param jsonStr JSON text; empty input logs a warning and returns null
     * @param type    target class
     * @return deserialized instance, or {@code null} for empty input
     * @throws IOException if parsing fails
     */
    public static <T> T toEntity(String jsonStr, Class<T> type)
            throws IOException {
        if (StringUtils.isEmpty(jsonStr)) {
            LOGGER.warn("Json string {} is empty!", type);
            return null;
        }
        return LENIENT_MAPPER.readValue(jsonStr, type);
    }

    /**
     * Deserializes the JSON content of a file into the generic type
     * described by {@code type}.
     *
     * @param file JSON file; {@code null} logs a warning and returns null
     * @param type Jackson type reference for the target generic type
     * @return deserialized instance, or {@code null} for null input
     * @throws IOException if reading or parsing fails
     */
    public static <T> T toEntity(File file, TypeReference type)
            throws IOException {
        if (file == null) {
            LOGGER.warn("File cannot be empty!");
            return null;
        }
        return MAPPER.readValue(file, type);
    }

    /**
     * Deserializes JSON read from an input stream.
     *
     * @param in   stream of JSON text; must not be {@code null}
     * @param type Jackson type reference for the target generic type
     * @return deserialized instance
     * @throws NullPointerException if {@code in} is {@code null}
     * @throws IOException          if reading or parsing fails
     */
    public static <T> T toEntity(InputStream in, TypeReference type)
            throws IOException {
        if (in == null) {
            throw new NullPointerException("Input stream cannot be null.");
        }
        return MAPPER.readValue(in, type);
    }

    /**
     * Deserializes a JSON string into the generic type described by
     * {@code type}.
     *
     * <p>NOTE(review): unlike {@link #toEntity(String, Class)}, this
     * overload keeps strict unknown-property handling, exactly as the
     * original code did.
     *
     * @param jsonStr JSON text; empty input logs a warning and returns null
     * @param type    Jackson type reference for the target generic type
     * @return deserialized instance, or {@code null} for empty input
     * @throws IOException if parsing fails
     */
    public static <T> T toEntity(String jsonStr, TypeReference type)
            throws IOException {
        if (StringUtils.isEmpty(jsonStr)) {
            LOGGER.warn("Json string {} is empty!", type);
            return null;
        }
        return MAPPER.readValue(jsonStr, type);
    }
}
| 4,057 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/util/FileUtil.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import java.io.File;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FileUtil {
    private static final Logger LOGGER = LoggerFactory
            .getLogger(FileUtil.class);

    /**
     * Resolves the full path of a file named {@code name} inside the
     * directory {@code location}.
     *
     * @param name     file name to look for
     * @param location directory expected to contain the file
     * @return the resolved path, or {@code null} when the location is
     *         empty, does not exist, or does not contain the file —
     *         callers then fall back to their default path
     */
    public static String getFilePath(String name, String location) {
        if (StringUtils.isEmpty(location)) {
            LOGGER.info("Location is empty. Read from default path.");
            return null;
        }
        File file = new File(location);
        // Parameterized logging instead of string concatenation.
        LOGGER.info("File absolute path:{}", file.getAbsolutePath());
        File[] files = file.listFiles();
        if (files == null) {
            // listFiles() returns null when the path does not exist or is
            // not a directory. Message previously rendered as
            // "...exist.Read fromdefault path." due to missing spaces.
            LOGGER.warn("The external location '{}' does not exist. "
                    + "Read from default path.", location);
            return null;
        }
        return getFilePath(name, files, location);
    }

    /**
     * Scans {@code files} for an entry matching {@code name} and builds its
     * path under {@code location}; returns {@code null} if no entry matches.
     */
    private static String getFilePath(String name, File[] files,
                                      String location) {
        String path = null;
        for (File f : files) {
            if (f.getName().equals(name)) {
                path = location + File.separator + name;
                LOGGER.info("config real path: {}", path);
            }
        }
        return path;
    }
}
| 4,058 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/util/FSUtil.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.HDFS_FILE_NOT_EXIST;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.Charsets;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@Component
public class FSUtil {
private static final Logger LOGGER = LoggerFactory.getLogger(FSUtil.class);
private static final int SAMPLE_ROW_COUNT = 100;
private static String fsDefaultName;
private static FileSystem fileSystem;
private static FileSystem defaultFS = getDefaultFileSystem();
private static FileSystem getDefaultFileSystem() {
FileSystem fs = null;
Configuration conf = new Configuration();
try {
fs = FileSystem.get(conf);
} catch (Exception e) {
LOGGER.error("Can not get default hdfs file system. {}", e);
}
return fs;
}
private static FileSystem getFileSystem() {
if (fileSystem == null) {
initFileSystem();
}
return fileSystem;
}
public FSUtil(@Value("${fs.defaultFS}") String defaultName) {
fsDefaultName = defaultName;
}
private static void initFileSystem() {
Configuration conf = new Configuration();
if (!StringUtils.isEmpty(fsDefaultName)) {
conf.set("fs.defaultFS", fsDefaultName);
LOGGER.info("Setting fs.defaultFS:{}", fsDefaultName);
}
if (StringUtils.isEmpty(conf.get("fs.hdfs.impl"))) {
LOGGER.info("Setting fs.hdfs.impl:{}", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
}
if (StringUtils.isEmpty(conf.get("fs.file.impl"))) {
LOGGER.info("Setting fs.file.impl:{}", org.apache.hadoop.fs.LocalFileSystem.class.getName());
conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
}
try {
fileSystem = FileSystem.get(conf);
} catch (Exception e) {
LOGGER.error("Can not get hdfs file system. {}", e);
fileSystem = defaultFS;
}
}
/**
* list all sub dir of a dir
*/
public static List<String> listSubDir(String dir) throws IOException {
checkHDFSConf();
List<String> fileList = new ArrayList<>();
Path path = new Path(dir);
if (fileSystem.isFile(path)) {
return fileList;
}
FileStatus[] statuses = fileSystem.listStatus(path);
for (FileStatus fileStatus : statuses) {
if (fileStatus.isDirectory()) {
fileList.add(fileStatus.getPath().toString());
}
}
return fileList;
}
/**
* get all file status of a dir.
*/
public static List<FileStatus> listFileStatus(String dir) throws IOException {
checkHDFSConf();
List<FileStatus> fileStatusList = new ArrayList<>();
Path path = new Path(dir);
if (fileSystem.isFile(path)) {
return fileStatusList;
}
FileStatus[] statuses = fileSystem.listStatus(path);
for (FileStatus fileStatus : statuses) {
if (!fileStatus.isDirectory()) {
fileStatusList.add(fileStatus);
}
}
return fileStatusList;
}
/**
* touch file
*/
public static void touch(String filePath) throws IOException {
checkHDFSConf();
Path path = new Path(filePath);
FileStatus st;
if (fileSystem.exists(path)) {
st = fileSystem.getFileStatus(path);
if (st.isDirectory()) {
throw new IOException(filePath + " is a directory");
} else if (st.getLen() != 0) {
throw new IOException(filePath + " must be a zero-length file");
}
}
FSDataOutputStream out = null;
try {
out = fileSystem.create(path);
} finally {
if (out != null) {
out.close();
}
}
}
public static boolean isFileExist(String path) throws IOException {
checkHDFSConf();
Path hdfsPath = new Path(path);
return fileSystem.isFile(hdfsPath) || fileSystem.isDirectory(hdfsPath);
}
public static InputStream getSampleInputStream(String path)
throws IOException {
checkHDFSConf();
if (isFileExist(path)) {
FSDataInputStream missingData = fileSystem.open(new Path(path));
BufferedReader bufReader = new BufferedReader(
new InputStreamReader(missingData, Charsets.UTF_8));
try {
String line = null;
int rowCnt = 0;
StringBuilder output = new StringBuilder(1024);
while ((line = bufReader.readLine()) != null) {
if (rowCnt < SAMPLE_ROW_COUNT) {
output.append(line);
output.append("\n");
}
rowCnt++;
}
return IOUtils.toInputStream(output, Charsets.UTF_8);
} finally {
bufReader.close();
}
} else {
LOGGER.warn("HDFS file does not exist.", path);
throw new GriffinException.NotFoundException(HDFS_FILE_NOT_EXIST);
}
}
private static void checkHDFSConf() {
if (getFileSystem() == null) {
throw new NullPointerException("FileSystem is null. " +
"Please check your hdfs config default name.");
}
}
/**
 * Scans the files directly under the given directory and returns the URI of
 * the first one whose path contains "missrecord" (case-insensitive).
 *
 * @param hdfsDir HDFS directory to scan
 * @return URI string of the first miss-record file, or null when none found
 * @throws Exception on HDFS access failure
 */
public static String getFirstMissRecordPath(String hdfsDir)
    throws Exception {
    for (FileStatus status : listFileStatus(hdfsDir)) {
        String uri = status.getPath().toUri().toString();
        if (uri.toLowerCase().contains("missrecord")) {
            return uri;
        }
    }
    return null;
}
/**
 * Returns a sample stream over the first miss-record file found under the
 * given path. When the path has sub-directories, only the first one is
 * inspected (see FIXME below); otherwise the path itself is scanned.
 *
 * @param path HDFS directory expected to contain miss-record output
 * @return sample input stream over the located miss-record file
 * @throws Exception on HDFS access failure or when the file is not found
 */
public static InputStream getMissSampleInputStream(String path)
    throws Exception {
    List<String> subDirList = listSubDir(path);
    //FIXME: only handle 1-sub dir here now
    // NOTE(review): this loop returns on its first iteration, so only the
    // first sub-directory is ever examined — apparently deliberate per the
    // FIXME above; confirm before generalizing.
    for (int i = 0; i < subDirList.size(); i++) {
        return getSampleInputStream(getFirstMissRecordPath(
            subDirList.get(i)));
    }
    // No sub-directories: look for the miss-record file directly under path.
    return getSampleInputStream(getFirstMissRecordPath(path));
}
}
| 4,059 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/util/PropertiesUtil.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import static org.apache.griffin.core.util.FileUtil.getFilePath;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
/**
 * Utility for loading {@link Properties} either from the classpath or from
 * an external, user-configured location.
 */
public class PropertiesUtil {
    private static final Logger LOGGER = LoggerFactory.getLogger(
            PropertiesUtil.class);

    /**
     * Loads properties from the given Spring resource.
     *
     * @param path     path string, used only for log messages
     * @param resource resource to read the properties from
     * @return the loaded Properties, or null when reading fails
     */
    public static Properties getProperties(String path, Resource resource) {
        PropertiesFactoryBean propFactoryBean = new PropertiesFactoryBean();
        Properties properties = null;
        try {
            propFactoryBean.setLocation(resource);
            propFactoryBean.afterPropertiesSet();
            properties = propFactoryBean.getObject();
            LOGGER.info("Read properties successfully from {}.", path);
        } catch (IOException e) {
            // Fix: pass the exception as the trailing argument with no
            // placeholder so SLF4J logs the full stack trace instead of
            // consuming it as an e.toString() parameter.
            LOGGER.error("Get properties from {} failed.", path, e);
        }
        return properties;
    }

    /**
     * @param name        properties name like quartz.properties
     * @param defaultPath properties classpath like /quartz.properties
     * @param location    custom properties path
     * @return Properties
     * @throws FileNotFoundException location setting is wrong that there is no
     *                               target file.
     */
    public static Properties getConf(String name, String defaultPath,
                                     String location)
            throws FileNotFoundException {
        String path = getConfPath(name, location);
        Resource resource;
        if (path == null) {
            // Not found at the external location: fall back to classpath.
            resource = new ClassPathResource(defaultPath);
            path = defaultPath;
        } else {
            resource = new InputStreamResource(new FileInputStream(path));
        }
        return getProperties(path, resource);
    }

    /** Resolves the external config file path, or null when not present. */
    public static String getConfPath(String name, String location) {
        return getFilePath(name, location);
    }
}
| 4,060 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/util/TimeUtil.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Helpers for parsing duration strings (e.g. "1d2h3m") into milliseconds and
 * for expanding {@code #...#} date-format placeholders in templates.
 */
public class TimeUtil {
    private static final Logger LOGGER = LoggerFactory.getLogger(TimeUtil
            .class);
    // Unit suffix patterns. MILLISECONDS is matched before MINUTES so that
    // "ms" resolves to milliseconds while a bare "m" resolves to minutes.
    private static final String MILLISECONDS_PATTERN =
            "(?i)m(illi)?s(ec(ond)?)?";
    private static final String SECONDS_PATTERN =
            "(?i)s(ec(ond)?)?";
    private static final String MINUTES_PATTERN =
            "(?i)m(in(ute)?)?";
    private static final String HOURS_PATTERN =
            "(?i)h((ou)?r)?";
    private static final String DAYS_PATTERN =
            "(?i)d(ay)?";

    /** A scalar amount paired with its unit suffix, e.g. (2, "h"). */
    private static class TimeUnitPair {
        private long t;
        private String unit;

        TimeUnitPair(long t, String unit) {
            this.t = t;
            this.unit = unit;
        }
    }

    /**
     * Parses a duration string such as "1d2h3m" into milliseconds. A leading
     * '-' negates the whole duration; null yields 0 with a warning.
     *
     * @param timeStr duration string, e.g. "1h30m"
     * @return total duration in milliseconds
     */
    public static Long str2Long(String timeStr) {
        if (timeStr == null) {
            LOGGER.warn("Time string can not be empty.");
            return 0L;
        }
        String trimTimeStr = timeStr.trim();
        boolean positive = true;
        if (trimTimeStr.startsWith("-")) {
            trimTimeStr = trimTimeStr.substring(1);
            positive = false;
        }
        List<TimeUnitPair> list = getTimeUnitPairs(trimTimeStr);
        return str2Long(positive, list);
    }

    /** Sums the unit pairs, negating every term when positive is false. */
    private static Long str2Long(boolean positive, List<TimeUnitPair> list) {
        long time = 0;
        for (TimeUnitPair tu : list) {
            long t = milliseconds(tu);
            if (positive) {
                time += t;
            } else {
                time -= t;
            }
        }
        return time;
    }

    private static List<TimeUnitPair> getTimeUnitPairs(String timeStr) {
        // "1d2h3m" -> "1d", "2h", "3m"
        String timePattern = "(?i)(\\d+)([a-zA-Z]+)";
        Pattern pattern = Pattern.compile(timePattern);
        Matcher matcher = pattern.matcher(timeStr);
        List<TimeUnitPair> list = new ArrayList<>();
        while (matcher.find()) {
            String num = matcher.group(1);
            String unit = matcher.group(2);
            TimeUnitPair tu = new TimeUnitPair(Long.valueOf(num), unit);
            list.add(tu);
        }
        return list;
    }

    /** Converts one amount+unit pair to milliseconds; unknown units yield 0. */
    private static Long milliseconds(TimeUnitPair tu) {
        long t = tu.t;
        String unit = tu.unit;
        if (unit.matches(MILLISECONDS_PATTERN)) {
            return milliseconds(t, TimeUnit.MILLISECONDS);
        } else if (unit.matches(SECONDS_PATTERN)) {
            return milliseconds(t, TimeUnit.SECONDS);
        } else if (unit.matches(MINUTES_PATTERN)) {
            return milliseconds(t, TimeUnit.MINUTES);
        } else if (unit.matches(HOURS_PATTERN)) {
            return milliseconds(t, TimeUnit.HOURS);
        } else if (unit.matches(DAYS_PATTERN)) {
            return milliseconds(t, TimeUnit.DAYS);
        } else {
            LOGGER.warn("Time string format ERROR. " +
                    "It only supports d(day),h(hour), m(minute), " +
                    "s(second), ms(millisecond). " +
                    "Please check your time format.");
            return 0L;
        }
    }

    private static Long milliseconds(long duration, TimeUnit unit) {
        return unit.toMillis(duration);
    }

    /**
     * Expands every {@code #pattern#} segment in the template with the given
     * time formatted by that SimpleDateFormat pattern; {@code \#} escapes a
     * literal hash.
     *
     * @param timeFormat template containing #...# date-format segments
     * @param time       epoch milliseconds to format
     * @param timeZone   time zone applied to the formatting
     * @return template with all segments replaced
     */
    public static String format(String timeFormat, long time, TimeZone timeZone) {
        String timePattern = "#(?:\\\\#|[^#])*#";
        Date t = new Date(time);
        Pattern ptn = Pattern.compile(timePattern);
        Matcher matcher = ptn.matcher(timeFormat);
        StringBuffer sb = new StringBuffer();
        while (matcher.find()) {
            String group = matcher.group();
            String content = group.substring(1, group.length() - 1);
            String pattern = refreshEscapeHashTag(content);
            SimpleDateFormat sdf = new SimpleDateFormat(pattern);
            sdf.setTimeZone(timeZone);
            // Fix: quoteReplacement prevents '$' or '\' characters in the
            // formatted output from being interpreted as group references
            // by appendReplacement.
            matcher.appendReplacement(sb,
                    Matcher.quoteReplacement(sdf.format(t)));
        }
        matcher.appendTail(sb);
        return refreshEscapeHashTag(sb.toString());
    }

    /** Replaces every escaped {@code \#} with a literal hash. */
    private static String refreshEscapeHashTag(String str) {
        String escapeHashTagPattern = "\\\\#";
        String hashTag = "#";
        return str.replaceAll(escapeHashTagPattern, hashTag);
    }

    /** Returns the JVM default zone when the id is empty, else the named zone. */
    public static TimeZone getTimeZone(String timezone) {
        if (StringUtils.isEmpty(timezone)) {
            return TimeZone.getDefault();
        }
        return TimeZone.getTimeZone(timezone);
    }
}
| 4,061 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/util/MeasureUtil.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_CONNECTOR_NAME;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_MEASURE_PREDICATE;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.MISSING_METRIC_NAME;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.entity.SegmentPredicate;
import org.apache.griffin.core.job.factory.PredicatorFactory;
import org.apache.griffin.core.measure.entity.DataConnector;
import org.apache.griffin.core.measure.entity.DataSource;
import org.apache.griffin.core.measure.entity.ExternalMeasure;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.measure.entity.Measure;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Validation helpers for Griffin and external measures.
 */
public class MeasureUtil {
    private static final Logger LOGGER = LoggerFactory
            .getLogger(MeasureUtil.class);

    /**
     * Validates a measure according to its concrete type; measures of any
     * other type pass through unchecked.
     *
     * @param measure measure to validate
     * @throws GriffinException.BadRequestException when validation fails
     */
    public static void validateMeasure(Measure measure) {
        if (measure instanceof GriffinMeasure) {
            validateGriffinMeasure((GriffinMeasure) measure);
            return;
        }
        if (measure instanceof ExternalMeasure) {
            validateExternalMeasure((ExternalMeasure) measure);
        }
    }

    /** Checks connector names are unique and all predicates instantiate. */
    private static void validateGriffinMeasure(GriffinMeasure measure) {
        if (getConnectorNamesIfValid(measure) == null) {
            throw new GriffinException.BadRequestException
                (INVALID_CONNECTOR_NAME);
        }
        if (!validatePredicates(measure)) {
            throw new GriffinException.BadRequestException(INVALID_MEASURE_PREDICATE);
        }
    }

    /** Returns false when any segment predicate cannot be instantiated. */
    private static boolean validatePredicates(GriffinMeasure measure) {
        for (DataSource source : measure.getDataSources()) {
            for (SegmentPredicate predicate
                    : source.getConnector().getPredicates()) {
                try {
                    PredicatorFactory.newPredicateInstance(predicate);
                } catch (Exception e) {
                    return false;
                }
            }
        }
        return true;
    }

    /** External measures must carry a non-blank metric name. */
    private static void validateExternalMeasure(ExternalMeasure measure) {
        if (StringUtils.isBlank(measure.getMetricName())) {
            LOGGER.warn("Failed to create external measure {}. " +
                    "Its metric name is blank.", measure.getName());
            throw new GriffinException.BadRequestException(MISSING_METRIC_NAME);
        }
    }

    /**
     * Collects connector names across all data sources; returns null when
     * any name is missing or duplicated (set size below source count).
     */
    private static List<String> getConnectorNamesIfValid(GriffinMeasure measure) {
        List<DataSource> sources = measure.getDataSources();
        Set<String> names = new HashSet<>();
        for (DataSource source : sources) {
            DataConnector connector = source.getConnector();
            if (connector != null && connector.getName() != null) {
                names.add(connector.getName());
            }
        }
        if (names.isEmpty() || names.size() < sources.size()) {
            LOGGER.warn("Connector names cannot be repeated or empty.");
            return null;
        }
        return new ArrayList<>(names);
    }
}
| 4,062 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/util/YarnNetUtil.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.util;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.DEAD;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.apache.commons.lang.StringUtils;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.LivySessionStates;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.RestTemplate;
/**
 * Thin REST client over the YARN ResourceManager web service, used to kill
 * applications and refresh job-instance states.
 */
public class YarnNetUtil {
    private static final Logger LOGGER = LoggerFactory
            .getLogger(YarnNetUtil.class);
    private static RestTemplate restTemplate = new RestTemplate();

    /**
     * delete app task scheduling by yarn.
     *
     * @param url   prefix part of whole url
     * @param appId application id
     */
    public static void delete(String url, String appId) {
        try {
            if (appId != null) {
                LOGGER.info("{} will delete by yarn", appId);
                restTemplate.put(url + "ws/v1/cluster/apps/"
                                + appId + "/state",
                        "{\"state\": \"KILLED\"}");
            }
        } catch (HttpClientErrorException e) {
            LOGGER.warn("client error {} from yarn: {}",
                    e.getMessage(), e.getResponseBodyAsString());
        } catch (Exception e) {
            // Fix: pass the throwable as the trailing argument (no
            // placeholder) so SLF4J logs the full stack trace.
            LOGGER.error("delete exception happens by yarn.", e);
        }
    }

    /**
     * update app task scheduling by yarn.
     *
     * @param url      prefix part of whole url
     * @param instance job instance
     * @return true when the state was refreshed, or the app is gone and the
     *         instance was marked DEAD; false on any other failure
     */
    public static boolean update(String url, JobInstanceBean instance) {
        try {
            url += "/ws/v1/cluster/apps/" + instance.getAppId();
            String result = restTemplate.getForObject(url, String.class);
            JsonObject state = parse(result);
            if (state != null) {
                instance.setState(LivySessionStates.toLivyState(state));
            }
            return true;
        } catch (HttpClientErrorException e) {
            LOGGER.warn("client error {} from yarn: {}",
                    e.getMessage(), e.getResponseBodyAsString());
            if (e.getStatusCode() == HttpStatus.NOT_FOUND) {
                // in sync with Livy behavior, see com.cloudera.livy.utils.SparkYarnApp
                instance.setState(DEAD);
                return true;
            }
        } catch (Exception e) {
            // Fix: log with stack trace instead of a stringified throwable.
            LOGGER.error("update exception happens by yarn.", e);
        }
        return false;
    }

    /**
     * parse json string and get app json object.
     *
     * @param json json string
     * @return the "app" object, or null when the input is empty
     */
    public static JsonObject parse(String json) {
        if (StringUtils.isEmpty(json)) {
            LOGGER.warn("Input string is empty.");
            return null;
        }
        JsonParser parser = new JsonParser();
        return parser.parse(json).getAsJsonObject().getAsJsonObject("app");
    }
}
| 4,063 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/config/PropertiesConfig.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.config;
import static org.apache.griffin.core.config.EnvConfig.getBatchEnv;
import static org.apache.griffin.core.config.EnvConfig.getStreamingEnv;
import static org.apache.griffin.core.util.JsonUtil.toEntity;
import static org.apache.griffin.core.util.PropertiesUtil.getConf;
import static org.apache.griffin.core.util.PropertiesUtil.getConfPath;
import com.fasterxml.jackson.core.type.TypeReference;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import java.util.Properties;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
/**
 * PropertiesConfig is responsible for initializing configuration objects
 * from property files.
 *
 * @see EnvConfig
 * @see org.apache.griffin.core.util.PropertiesUtil
 */
@Configuration
public class PropertiesConfig {
    private static final Logger LOGGER = LoggerFactory
            .getLogger(PropertiesConfig.class);

    // Livy spark properties, loaded once by genLivyConf() during init().
    public static Map<String, Object> livyConfMap;

    private String configLocation;
    private String envLocation;

    public PropertiesConfig(
            @Value("${external.config.location}") String configLocation,
            @Value("${external.env.location}") String envLocation) {
        LOGGER.info("external.config.location : {}",
                configLocation != null ? configLocation : "null");
        LOGGER.info("external.env.location : {}",
                envLocation != null ? envLocation : "null");
        this.configLocation = configLocation;
        this.envLocation = envLocation;
    }

    /** Eagerly loads batch/streaming env JSON and the Livy spark properties. */
    @PostConstruct
    public void init() throws IOException {
        String batchName = "env_batch.json";
        String batchPath = "env/" + batchName;
        String streamingName = "env_streaming.json";
        String streamingPath = "env/" + streamingName;
        String livyConfName = "sparkProperties.json";
        getBatchEnv(batchName, batchPath, envLocation);
        getStreamingEnv(streamingName, streamingPath, envLocation);
        genLivyConf(livyConfName, livyConfName, configLocation);
    }

    /**
     * Config quartz.properties will be replaced if it's found in external
     * .config.location setting.
     *
     * @return Properties
     * @throws FileNotFoundException It'll throw FileNotFoundException
     *                               when path is wrong.
     */
    @Bean(name = "quartzConf")
    public Properties quartzConf() throws FileNotFoundException {
        String name = "quartz.properties";
        String defaultPath = "/" + name;
        return getConf(name, defaultPath, configLocation);
    }

    private static void genLivyConf(
            String name,
            String defaultPath,
            String location) throws IOException {
        if (livyConfMap != null) {
            return;
        }
        String path = getConfPath(name, location);
        if (path == null) {
            livyConfMap = readPropertiesFromResource(defaultPath);
        } else {
            // Fix: try-with-resources — the original leaked this stream.
            try (FileInputStream in = new FileInputStream(path)) {
                // Fix: parameterized TypeReference replaces the raw Map.
                livyConfMap = toEntity(in,
                        new TypeReference<Map<String, Object>>() {
                        });
            }
        }
    }

    /**
     * read env config from resource
     *
     * @param path resource path
     * @return Map
     * @throws IOException io exception
     */
    private static Map<String, Object> readPropertiesFromResource(String path)
            throws IOException {
        if (path == null) {
            LOGGER.warn("Parameter path is null.");
            return null;
        }
        // Be careful, here we use getInputStream() to convert path file to
        // stream. It'll cause FileNotFoundException if you use getFile()
        // to convert path file to File Object
        try (InputStream in = new ClassPathResource(path).getInputStream()) {
            return toEntity(in, new TypeReference<Map<String, Object>>() {
            });
        }
    }
}
| 4,064 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/config/SchedulerConfig.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.config;
import java.util.Properties;
import javax.sql.DataSource;
import org.apache.griffin.core.job.factory.AutowiringSpringBeanJobFactory;
import org.quartz.spi.JobFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
/**
 * Wires the Quartz scheduler into Spring: a Spring-aware job factory plus a
 * SchedulerFactoryBean backed by the application data source.
 */
@Configuration
public class SchedulerConfig {
    // Quartz properties exposed by the "quartzConf" bean in PropertiesConfig.
    private final Properties quartzConf;

    @Autowired
    public SchedulerConfig(@Qualifier("quartzConf") Properties quartzConf) {
        this.quartzConf = quartzConf;
    }

    /** Job factory that autowires Spring beans into Quartz job instances. */
    @Bean
    public JobFactory jobFactory(ApplicationContext applicationContext) {
        AutowiringSpringBeanJobFactory springAwareFactory =
                new AutowiringSpringBeanJobFactory();
        springAwareFactory.setApplicationContext(applicationContext);
        return springAwareFactory;
    }

    /** Quartz scheduler persisting jobs in the application data source. */
    @Bean
    public SchedulerFactoryBean schedulerFactoryBean(DataSource dataSource,
                                                     JobFactory jobFactory) {
        SchedulerFactoryBean schedulerFactory = new SchedulerFactoryBean();
        schedulerFactory.setOverwriteExistingJobs(true);
        schedulerFactory.setDataSource(dataSource);
        schedulerFactory.setJobFactory(jobFactory);
        schedulerFactory.setQuartzProperties(quartzConf);
        return schedulerFactory;
    }
}
| 4,065 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/config/CacheConfig.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.config;
import org.apache.commons.lang.StringUtils;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.interceptor.KeyGenerator;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Enables Spring caching and supplies the key generator used by cached
 * methods: keys are methodName + the joined parameter values, so repeated
 * calls with identical arguments hit the same cache entry.
 */
@Configuration
@EnableCaching
public class CacheConfig {
    @Bean
    public KeyGenerator cacheKeyGenerator() {
        return (target, method, params) -> {
            StringBuilder key = new StringBuilder(method.getName());
            String joinedParams = StringUtils.join(params);
            if (!StringUtils.isEmpty(joinedParams)) {
                key.append(joinedParams);
            }
            return key.toString();
        };
    }
}
| 4,066 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/config/EnvConfig.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.config;
import static org.apache.griffin.core.util.FileUtil.getFilePath;
import static org.apache.griffin.core.util.JsonUtil.toEntity;
import static org.apache.griffin.core.util.JsonUtil.toJsonWithFormat;
import com.fasterxml.jackson.core.type.TypeReference;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ClassPathResource;
/**
 * Loads and caches the batch/streaming env JSON, preferring a file at the
 * externally configured location and falling back to bundled resources.
 */
public class EnvConfig {
    private static final Logger LOGGER = LoggerFactory
            .getLogger(EnvConfig.class);
    // Cached formatted-JSON env strings; populated on first access.
    public static String ENV_BATCH;
    public static String ENV_STREAMING;

    /**
     * read env config from resource.
     *
     * @param path resource path
     * @return String
     * @throws IOException io exception
     */
    private static String readEnvFromResource(String path)
            throws IOException {
        if (path == null) {
            LOGGER.warn("Parameter path is null.");
            return null;
        }
        // Be careful, here we use getInputStream() to convert
        // path file to stream.
        // It'll cause FileNotFoundException if you use getFile()
        // to convert path file to File Object
        try (InputStream in = new ClassPathResource(path).getInputStream()) {
            Object result = toEntity(in, new TypeReference<Object>() {
            });
            return toJsonWithFormat(result);
        }
    }

    /**
     * read env config from an absolute file path.
     *
     * @param path absolute file path
     * @return String
     * @throws IOException io exception
     */
    private static String readEnvFromAbsolutePath(String path)
            throws IOException {
        if (path == null) {
            LOGGER.warn("Parameter path is null.");
            return null;
        }
        try (FileInputStream in = new FileInputStream(path)) {
            Object result = toEntity(in, new TypeReference<Object>() {
            });
            return toJsonWithFormat(result);
        }
    }

    /**
     * Shared lookup extracted from getBatchEnv/getStreamingEnv: prefer the
     * file found at the custom location, otherwise fall back to the bundled
     * resource at defaultPath.
     */
    private static String loadEnv(String name, String defaultPath,
                                  String location) throws IOException {
        String path = getFilePath(name, location);
        if (path == null) {
            return readEnvFromResource(defaultPath);
        }
        return readEnvFromAbsolutePath(path);
    }

    /**
     * read batch env.
     *
     * @param name        batch env name that you need to search
     * @param defaultPath If there is no target file in location directory,
     *                    it'll read from default path.
     * @param location    env path that you configure in application.properties
     * @return String
     * @throws IOException io exception
     */
    static String getBatchEnv(String name, String defaultPath, String location)
            throws IOException {
        if (ENV_BATCH != null) {
            return ENV_BATCH;
        }
        ENV_BATCH = loadEnv(name, defaultPath, location);
        LOGGER.info(ENV_BATCH);
        return ENV_BATCH;
    }

    /** Streaming counterpart of {@link #getBatchEnv}; caches ENV_STREAMING. */
    static String getStreamingEnv(String name,
                                  String defaultPath,
                                  String location)
            throws IOException {
        if (ENV_STREAMING != null) {
            return ENV_STREAMING;
        }
        ENV_STREAMING = loadEnv(name, defaultPath, location);
        LOGGER.info(ENV_STREAMING);
        return ENV_STREAMING;
    }
}
| 4,067 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/config/LoginConfig.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.config;
import org.apache.griffin.core.login.LoginService;
import org.apache.griffin.core.login.LoginServiceDefaultImpl;
import org.apache.griffin.core.login.LoginServiceLdapImpl;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class LoginConfig {
    // Selects the LoginService implementation: "default" or "ldap".
    @Value("${login.strategy}")
    private String strategy;
    // The LDAP settings below are only consulted when login.strategy=ldap.
    @Value("${ldap.url}")
    private String url;
    @Value("${ldap.email}")
    private String email;
    @Value("${ldap.searchBase}")
    private String searchBase;
    @Value("${ldap.searchPattern}")
    private String searchPattern;
    // Defaults to false when ldap.sslSkipVerify is not configured.
    @Value("${ldap.sslSkipVerify:false}")
    private boolean sslSkipVerify;
    // Optional bind credentials; empty by default (anonymous bind presumably
    // intended — confirm against LoginServiceLdapImpl).
    @Value("${ldap.bindDN:}")
    private String bindDN;
    @Value("${ldap.bindPassword:}")
    private String bindPassword;

    /**
     * Builds the LoginService matching the configured strategy.
     * NOTE(review): an unknown strategy yields a null bean, which will only
     * surface later as a wiring failure — confirm whether failing fast here
     * would be preferable.
     */
    @Bean
    public LoginService loginService() {
        switch (strategy) {
            case "default":
                return new LoginServiceDefaultImpl();
            case "ldap":
                return new LoginServiceLdapImpl(url, email, searchBase,
                        searchPattern, sslSkipVerify, bindDN, bindPassword);
            default:
                return null;
        }
    }
}
| 4,068 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/config/EclipseLinkJpaConfig.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.config;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.eclipse.persistence.config.PersistenceUnitProperties;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.orm.jpa.JpaBaseConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.JpaProperties;
import org.springframework.boot.autoconfigure.transaction.TransactionManagerCustomizers;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.vendor.AbstractJpaVendorAdapter;
import org.springframework.orm.jpa.vendor.EclipseLinkJpaVendorAdapter;
import org.springframework.transaction.jta.JtaTransactionManager;
@Configuration
@ComponentScan("org.apache.griffin.core")
public class EclipseLinkJpaConfig extends JpaBaseConfiguration {
    protected EclipseLinkJpaConfig(
            DataSource ds, JpaProperties properties,
            ObjectProvider<JtaTransactionManager> jtm,
            ObjectProvider<TransactionManagerCustomizers> tmc) {
        super(ds, properties, jtm, tmc);
    }

    /** Uses EclipseLink as the JPA provider instead of Spring Boot's default. */
    @Override
    protected AbstractJpaVendorAdapter createJpaVendorAdapter() {
        return new EclipseLinkJpaVendorAdapter();
    }

    /** EclipseLink-specific persistence-unit properties. */
    @Override
    protected Map<String, Object> getVendorProperties() {
        Map<String, Object> map = new HashMap<>();
        // Disable dynamic weaving (no load-time agent is configured).
        map.put(PersistenceUnitProperties.WEAVING, "false");
        // Create missing tables/columns without dropping existing data.
        map.put(PersistenceUnitProperties.DDL_GENERATION,
                "create-or-extend-tables");
        return map;
    }
}
| 4,069 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/info/GriffinInfoController.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.info;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("/api/v1")
public class GriffinInfoController {

    /** Service version reported by the {@code /version} endpoint. */
    private static final String VERSION = "0.5.0";

    /**
     * Returns the Griffin service version string.
     *
     * @return the current version, e.g. {@code "0.5.0"}
     */
    @RequestMapping(value = "/version", method = RequestMethod.GET)
    public String greeting() {
        return VERSION;
    }
}
| 4,070 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/common/SimpleCORSFilter.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.common;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletResponse;
public class SimpleCORSFilter implements Filter {

    /**
     * Stamps permissive CORS headers on every response, then hands the
     * request to the next filter in the chain.
     *
     * NOTE(review): {@code Access-Control-Allow-Origin: *} allows any origin;
     * confirm this is intended for production deployments.
     */
    @Override
    public void doFilter(final ServletRequest req,
                         final ServletResponse res,
                         final FilterChain chain)
        throws IOException, ServletException {
        final HttpServletResponse httpRes = (HttpServletResponse) res;
        httpRes.setHeader("Access-Control-Allow-Origin", "*");
        httpRes.setHeader("Access-Control-Allow-Methods",
            "POST, GET, OPTIONS, DELETE,PUT");
        httpRes.setHeader("Access-Control-Max-Age", "3600");
        httpRes.setHeader("Access-Control-Allow-Headers",
            "X-PINGOTHER, Origin, X-Requested-With, Content-Type, Accept");
        chain.doFilter(req, res);
    }

    /** No initialization needed for this stateless filter. */
    @Override
    public void init(final FilterConfig filterConfig) {
    }

    /** No resources to release. */
    @Override
    public void destroy() {
    }
}
| 4,071 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/StreamingJobOperatorImpl.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_JOB_NAME;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.STREAMING_JOB_IS_RUNNING;
import static org.apache.griffin.core.job.JobServiceImpl.START;
import static org.apache.griffin.core.job.JobServiceImpl.STOP;
import static org.apache.griffin.core.job.entity.LivySessionStates.State;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.STOPPED;
import static org.apache.griffin.core.job.entity.LivySessionStates.convert2QuartzState;
import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.STREAMING;
import static org.quartz.TriggerKey.triggerKey;
import java.util.List;
import javax.annotation.PostConstruct;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.JobHealth;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.JobState;
import org.apache.griffin.core.job.entity.StreamingJob;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.griffin.core.job.repo.StreamingJobRepo;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.util.YarnNetUtil;
import org.quartz.JobKey;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerKey;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.env.Environment;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import org.springframework.web.client.ResourceAccessException;
import org.springframework.web.client.RestClientException;
@Service
public class StreamingJobOperatorImpl implements JobOperator {
    private static final Logger LOGGER = LoggerFactory
        .getLogger(StreamingJobOperatorImpl.class);
    @Autowired
    private StreamingJobRepo streamingJobRepo;
    @Autowired
    private Environment env;
    @Autowired
    private JobServiceImpl jobService;
    @Autowired
    private JobInstanceRepo instanceRepo;
    @Autowired
    @Qualifier("schedulerFactoryBean")
    private SchedulerFactoryBean factory;
    @Autowired
    private LivyTaskSubmitHelper livyTaskSubmitHelper;
    // Base URI of the Livy REST endpoint, read once from "livy.uri".
    private String livyUri;

    /** Caches the Livy URI after the Spring environment is available. */
    @PostConstruct
    public void init() {
        livyUri = env.getProperty("livy.uri");
    }

    /**
     * Registers a new streaming job: validates the job name, derives the
     * Quartz name/group, persists the job, and schedules it as a STREAMING
     * process.
     *
     * @param job     the job definition to register (must be a StreamingJob)
     * @param measure the measure the job evaluates (unused here; kept for the
     *                JobOperator contract)
     * @return the persisted streaming job
     * @throws Exception if the name is invalid, the trigger already exists,
     *                   or scheduling fails
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public AbstractJob add(AbstractJob job, GriffinMeasure measure) throws
        Exception {
        validateParams(job);
        String qName = jobService.getQuartzName(job);
        String qGroup = jobService.getQuartzGroup();
        TriggerKey triggerKey = jobService.getTriggerKeyIfValid(qName, qGroup);
        StreamingJob streamingJob = genStreamingJobBean(job, qName, qGroup);
        streamingJob = streamingJobRepo.save(streamingJob);
        jobService.addJob(triggerKey, streamingJob, STREAMING);
        return streamingJob;
    }

    /** Copies the Quartz name/group onto the job and uses the job name as metric name. */
    private StreamingJob genStreamingJobBean(AbstractJob job, String qName,
                                             String qGroup) {
        StreamingJob streamingJob = (StreamingJob) job;
        streamingJob.setMetricName(job.getJobName());
        streamingJob.setGroup(qGroup);
        streamingJob.setName(qName);
        return streamingJob;
    }

    /**
     * active states: NOT_STARTED, STARTING, RECOVERING, IDLE, RUNNING, BUSY
     * inactive states: SHUTTING_DOWN, ERROR, DEAD, SUCCESS
     *
     * @param job streaming job
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void start(AbstractJob job) throws Exception {
        StreamingJob streamingJob = (StreamingJob) job;
        // Refuse to start if the job is already scheduled or an instance is running.
        verifyJobState(streamingJob);
        streamingJob = streamingJobRepo.save(streamingJob);
        String qName = jobService.getQuartzName(job);
        String qGroup = jobService.getQuartzGroup();
        TriggerKey triggerKey = triggerKey(qName, qGroup);
        jobService.addJob(triggerKey, streamingJob, STREAMING);
    }

    /**
     * Throws STREAMING_JOB_IS_RUNNING if the job is already scheduled in
     * Quartz or has a live (non-deleted, running) instance. Instances that
     * are startable are soft-deleted as a side effect.
     */
    private void verifyJobState(AbstractJob job) throws SchedulerException {
        /* Firstly you should check whether job is scheduled.
        If it is scheduled, triggers are empty. */
        List<? extends Trigger> triggers = jobService.getTriggers(
            job.getName(),
            job.getGroup());
        if (!CollectionUtils.isEmpty(triggers)) {
            throw new GriffinException.BadRequestException
                (STREAMING_JOB_IS_RUNNING);
        }
        /* Secondly you should check whether job instance is running. */
        List<JobInstanceBean> instances = instanceRepo.findByJobId(job.getId());
        instances.stream().filter(instance -> !instance.isDeleted()).forEach
            (instance -> {
                State state = instance.getState();
                String quartzState = convert2QuartzState(state);
                if (!getStartStatus(quartzState)) {
                    throw new GriffinException.BadRequestException
                        (STREAMING_JOB_IS_RUNNING);
                }
                instance.setDeleted(true);
            });
    }

    /** Stops the job's running instances without deleting the job itself. */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void stop(AbstractJob job) throws SchedulerException {
        StreamingJob streamingJob = (StreamingJob) job;
        stop(streamingJob, false);
    }

    /** Stops the job's running instances and marks the job deleted. */
    @Override
    public void delete(AbstractJob job) throws SchedulerException {
        StreamingJob streamingJob = (StreamingJob) job;
        stop(streamingJob, true);
    }

    /**
     * Accumulates this job into the health summary: increments the total job
     * count and, if the job is healthy, the healthy count.
     */
    @Override
    public JobHealth getHealth(JobHealth jobHealth, AbstractJob job) {
        jobHealth.setJobCount(jobHealth.getJobCount() + 1);
        if (jobService.isJobHealthy(job.getId())) {
            jobHealth.setHealthyJobCount(jobHealth.getHealthyJobCount() + 1);
        }
        return jobHealth;
    }

    /**
     * Derives the job's UI-facing state from its first live instance (if
     * any), then fills in a default state based on the requested action.
     */
    @Override
    public JobState getState(AbstractJob job, String action) {
        JobState jobState = new JobState();
        List<JobInstanceBean> instances = instanceRepo
            .findByJobId(job.getId());
        for (JobInstanceBean instance : instances) {
            State state = instance.getState();
            if (!instance.isDeleted() && state != null) {
                String quartzState = convert2QuartzState(state);
                jobState.setState(quartzState);
                jobState.setToStart(getStartStatus(quartzState));
                jobState.setToStop(getStopStatus(quartzState));
                // Only the first non-deleted instance with a state is used.
                break;
            }
        }
        setStateIfNull(action, jobState);
        return jobState;
    }

    /** Supplies a default state when no live instance provided one. */
    private void setStateIfNull(String action, JobState jobState) {
        if (jobState.getState() == null && START.equals(action)) {
            jobState.setState("NORMAL");
            jobState.setToStop(true);
        } else if (jobState.getState() == null || STOP.equals(action)) {
            jobState.setState("NONE");
            jobState.setToStart(true);
        }
    }

    /**
     * NORMAL or BLOCKED state of job cannot be started
     *
     * @param state job state
     * @return true: job can be started, false: job is running which cannot be
     * started
     */
    private boolean getStartStatus(String state) {
        return !"NORMAL".equals(state) && !"BLOCKED".equals(state);
    }

    /**
     * COMPLETE or ERROR state of job cannot be stopped
     *
     * @param state job state
     * @return true: job can be stopped, false: job is running which cannot be
     * stopped
     */
    private boolean getStopStatus(String state) {
        return !"COMPLETE".equals(state) && !"ERROR".equals(state);
    }

    /**
     * Deletes the instance's Livy batch session. Falls back to killing the
     * YARN application directly when the Livy call fails with a client error.
     */
    private void deleteByLivy(JobInstanceBean instance) {
        Long sessionId = instance.getSessionId();
        if (sessionId == null) {
            LOGGER.warn("Session id of instance({},{}) is null.", instance
                .getPredicateGroup(), instance.getPredicateName
                ());
            return;
        }
        String url = livyUri + "/" + instance.getSessionId();
        try {
            // Use livy helper to interact with livy
            livyTaskSubmitHelper.deleteByLivy(url);
            LOGGER.info("Job instance({}) has been deleted. {}", instance
                .getSessionId(), url);
        } catch (ResourceAccessException e) {
            LOGGER.error("Your url may be wrong. Please check {}.\n {}",
                livyUri, e.getMessage());
        } catch (RestClientException e) {
            LOGGER.warn("sessionId({}) appId({}) {}.", instance.getSessionId(),
                instance.getAppId(), e.getMessage());
            // Livy rejected the request; try to kill the app via YARN instead.
            YarnNetUtil.delete(env.getProperty("yarn.uri"),
                instance.getAppId());
        }
    }

    /**
     * @param job streaming job
     * @param delete true: delete job, false: only stop instance, but not delete
     * job
     */
    private void stop(StreamingJob job, boolean delete) throws
        SchedulerException {
        pauseJob(job);
        /* to prevent situation that streaming job is submitted
        before pause or when pausing. */
        List<JobInstanceBean> instances = instanceRepo
            .findByJobId(job.getId());
        instances.stream().filter(instance -> !instance.isDeleted())
            .forEach(instance -> {
                State state = instance.getState();
                String quartzState = convert2QuartzState(state);
                if (getStopStatus(quartzState)) {
                    deleteByLivy(instance);
                }
                instance.setState(STOPPED);
                instance.setDeleted(true);
            });
        job.setDeleted(delete);
        streamingJobRepo.save(job);
    }

    /** Pauses the Quartz job if it is currently scheduled (has triggers). */
    private void pauseJob(StreamingJob job) throws SchedulerException {
        String name = job.getName();
        String group = job.getGroup();
        List<? extends Trigger> triggers = jobService.getTriggers(name, group);
        if (!CollectionUtils.isEmpty(triggers)) {
            factory.getScheduler().pauseJob(JobKey.jobKey(name, group));
        }
    }

    /** Rejects job names that fail the shared naming validation. */
    private void validateParams(AbstractJob job) {
        if (!jobService.isValidJobName(job.getJobName())) {
            throw new GriffinException.BadRequestException(INVALID_JOB_NAME);
        }
    }
}
| 4,072 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/Predicator.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import java.io.IOException;
/**
 * Predicator is an object that judges if one condition is met.
 * This interface only has one method {@link #predicate()}, making it a
 * functional interface usable as a lambda target.
 */
@FunctionalInterface
public interface Predicator {
    /**
     * Evaluates the condition this predicator guards.
     *
     * @return {@code true} if the condition is met, otherwise {@code false}
     * @throws IOException if evaluating the condition requires I/O that fails
     */
    boolean predicate() throws IOException;
}
| 4,073 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/JobService.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import java.util.List;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.JobHealth;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.quartz.SchedulerException;
public interface JobService {

    /** Returns the non-deleted jobs, optionally filtered by job type. */
    List<AbstractJob> getAliveJobs(String type);

    /** Creates and schedules a new job from the given definition. */
    AbstractJob addJob(AbstractJob js) throws Exception;

    /** Returns the stored configuration of the job with the given id. */
    AbstractJob getJobConfig(Long jobId);

    /** Performs an action (e.g. start/stop) on the job with the given id. */
    AbstractJob onAction(Long jobId, String action) throws Exception;

    /** Deletes the job identified by id, unscheduling it if necessary. */
    void deleteJob(Long jobId) throws SchedulerException;

    /** Deletes the job identified by name, unscheduling it if necessary. */
    void deleteJob(String jobName) throws SchedulerException;

    /** Returns a page of instances for the given job. */
    List<JobInstanceBean> findInstancesOfJob(Long jobId, int page, int size);

    /** Returns instances associated with the given Quartz trigger key. */
    List<JobInstanceBean> findInstancesByTriggerKey(String triggerKey);

    /** Returns an aggregated health summary across jobs. */
    JobHealth getHealthInfo();

    /** Returns the HDFS sink path for the named job at the given timestamp. */
    String getJobHdfsSinksPath(String jobName, long timestamp);

    /** Returns the job instance with the given instance id. */
    JobInstanceBean findInstance(Long id);

    /** Triggers the job immediately; returns an identifier for the trigger. */
    String triggerJobById(Long id) throws SchedulerException;
}
| 4,074 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/LivyTaskSubmitHelper.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.apache.griffin.core.config.PropertiesConfig.livyConfMap;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_FOUND;
import static org.apache.griffin.core.util.JsonUtil.toEntity;
import static org.apache.griffin.core.util.JsonUtil.toJsonWithFormat;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.PostConstruct;
import org.apache.commons.collections.map.HashedMap;
import org.quartz.JobDetail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.env.Environment;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.security.kerberos.client.KerberosRestTemplate;
import org.springframework.stereotype.Component;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.RestTemplate;
@Component
public class LivyTaskSubmitHelper {

    private static final Logger LOGGER = LoggerFactory.getLogger(LivyTaskSubmitHelper.class);
    // Header Livy requires on mutating requests when CSRF protection is on.
    private static final String REQUEST_BY_HEADER = "X-Requested-By";
    public static final int DEFAULT_QUEUE_SIZE = 20000;
    // Milliseconds slept between polls/retries.
    private static final int SLEEP_TIME = 300;

    @Autowired
    private SparkSubmitJob sparkSubmitJob;

    // Tracks Livy batch ids currently counted in curConcurrentTaskNum.
    private ConcurrentMap<Long, Integer> taskAppIdMap = new ConcurrentHashMap<>();

    // Current number of tasks
    private AtomicInteger curConcurrentTaskNum = new AtomicInteger(0);

    // Prefix used in log messages to identify this worker.
    private String workerNamePre;
    private RestTemplate restTemplate = new RestTemplate();

    // queue for pub or sub
    private BlockingQueue<JobDetail> queue;

    // Base URI of the Livy REST endpoint, read from "livy.uri".
    private String uri;

    @Value("${livy.task.max.concurrent.count:20}")
    private int maxConcurrentTaskCount;
    @Value("${livy.task.submit.interval.second:3}")
    private int batchIntervalSecond;

    @Autowired
    private Environment env;

    /**
     * Initialize related parameters and open consumer threads.
     */
    @PostConstruct
    public void init() {
        startWorker();
        uri = env.getProperty("livy.uri");
        LOGGER.info("Livy uri : {}", uri);
    }

    public LivyTaskSubmitHelper() {
        this.workerNamePre = "livy-task-submit-worker";
    }

    /**
     * Initialize blocking queues and start consumer threads.
     */
    public void startWorker() {
        queue = new LinkedBlockingQueue<>(DEFAULT_QUEUE_SIZE);
        ExecutorService executorService = Executors.newSingleThreadExecutor();
        TaskInner taskInner = new TaskInner(executorService);
        executorService.execute(taskInner);
    }

    /**
     * Put job detail into the queue.
     *
     * @param jd job detail.
     */
    public void addTaskToWaitingQueue(JobDetail jd) throws IOException {
        if (jd == null) {
            LOGGER.warn("task is blank, workerNamePre: {}", workerNamePre);
            return;
        }
        if (queue.remainingCapacity() <= 0) {
            // Queue is full: drop the task and record a NOT_FOUND instance.
            LOGGER.warn("task is discard, workerNamePre: {}, task: {}", workerNamePre, jd);
            sparkSubmitJob.saveJobInstance(null, NOT_FOUND);
            return;
        }
        queue.add(jd);
        LOGGER.info("add_task_to_waiting_queue_success, workerNamePre: {}, task: {}",
            workerNamePre, jd);
    }

    /**
     * Consumer thread.
     * Drains the waiting queue, submitting one task at a time subject to the
     * concurrency cap and the minimum interval between submissions.
     */
    class TaskInner implements Runnable {
        private ExecutorService es;

        public TaskInner(ExecutorService es) {
            this.es = es;
        }

        public void run() {
            long insertTime = System.currentTimeMillis();
            while (true) {
                try {
                    // Submit only when below the concurrency cap AND the
                    // batch interval has elapsed since the last submission.
                    if (curConcurrentTaskNum.get() < maxConcurrentTaskCount
                        && (System.currentTimeMillis() - insertTime) >= batchIntervalSecond * 1000) {
                        JobDetail jd = queue.take();
                        sparkSubmitJob.saveJobInstance(jd);
                        insertTime = System.currentTimeMillis();
                    } else {
                        Thread.sleep(SLEEP_TIME);
                    }
                } catch (Exception e) {
                    LOGGER.error("Async_worker_doTask_failed, {}", e.getMessage(), e);
                    // NOTE(review): this re-queues the runnable on the same
                    // single-thread executor while the current run()'s loop
                    // keeps going, so the queued copy never executes — confirm
                    // whether the intent was to restart the loop instead.
                    es.execute(this);
                }
            }
        }
    }

    /**
     * Add the batch id returned by Livy.
     *
     * @param scheduleId livy batch id.
     */
    public void increaseCurTaskNum(Long scheduleId) {
        curConcurrentTaskNum.incrementAndGet();
        if (scheduleId != null) {
            taskAppIdMap.put(scheduleId, 1);
        }
    }

    /**
     * Remove tasks after job status updates.
     *
     * @param scheduleId livy batch id.
     */
    public void decreaseCurTaskNum(Long scheduleId) {
        if (scheduleId != null && taskAppIdMap.containsKey(scheduleId)) {
            curConcurrentTaskNum.decrementAndGet();
            taskAppIdMap.remove(scheduleId);
        }
    }

    /**
     * Polls Livy until the batch result contains an "appId", up to
     * appIdRetryCount attempts (SLEEP_TIME ms apart).
     *
     * @param result          the JSON body of the initial Livy response
     * @param appIdRetryCount maximum number of polls; returns null if <= 0
     * @return the parsed result map (possibly still without "appId"), or null
     * @throws IOException if a response body cannot be parsed
     */
    protected Map<String, Object> retryLivyGetAppId(String result, int appIdRetryCount)
        throws IOException {
        int retryCount = appIdRetryCount;
        TypeReference<HashMap<String, Object>> type =
            new TypeReference<HashMap<String, Object>>() {
            };
        Map<String, Object> resultMap = toEntity(result, type);
        if (retryCount <= 0) {
            return null;
        }
        if (resultMap.get("appId") != null) {
            return resultMap;
        }
        Object livyBatchesId = resultMap.get("id");
        if (livyBatchesId == null) {
            // Without a batch id there is nothing to poll.
            return resultMap;
        }
        while (retryCount-- > 0) {
            try {
                Thread.sleep(SLEEP_TIME);
            } catch (InterruptedException e) {
                LOGGER.error(e.getMessage(), e);
            }
            resultMap = getResultByLivyId(livyBatchesId, type);
            LOGGER.info("retry get livy resultMap: {}, batches id : {}", resultMap, livyBatchesId);
            if (resultMap.get("appId") != null) {
                break;
            }
        }
        return resultMap;
    }

    /** Fetches and parses the state of one Livy batch by id. */
    private Map<String, Object> getResultByLivyId(Object livyBatchesId, TypeReference<HashMap<String, Object>> type)
        throws IOException {
        Map<String, Object> resultMap = new HashedMap();
        String livyUri = uri + "/" + livyBatchesId;
        String result = getFromLivy(livyUri);
        LOGGER.info(result);
        return result == null ? resultMap : toEntity(result, type);
    }

    /**
     * POSTs the shared Livy configuration (livyConfMap) to the given URI,
     * with or without Kerberos depending on "livy.need.kerberos".
     *
     * @param uri target Livy endpoint
     * @return the response body, or null on failure / missing configuration
     */
    public String postToLivy(String uri) {
        LOGGER.info("Post To Livy URI is: " + uri);
        String needKerberos = env.getProperty("livy.need.kerberos");
        LOGGER.info("Need Kerberos:" + needKerberos);
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        headers.set(REQUEST_BY_HEADER, "admin");
        if (needKerberos == null || needKerberos.isEmpty()) {
            LOGGER.error("The property \"livy.need.kerberos\" is empty");
            return null;
        }
        if (needKerberos.equalsIgnoreCase("false")) {
            LOGGER.info("The livy server doesn't need Kerberos Authentication");
            String result = null;
            try {
                HttpEntity<String> springEntity = new HttpEntity<>(toJsonWithFormat(livyConfMap), headers);
                result = restTemplate.postForObject(uri, springEntity, String.class);
                LOGGER.info(result);
            } catch (HttpClientErrorException e) {
                LOGGER.error("Post to livy ERROR. \n response status : " + e.getMessage()
                    + "\n response header : " + e.getResponseHeaders()
                    + "\n response body : " + e.getResponseBodyAsString());
            } catch (JsonProcessingException e) {
                LOGGER.error("Json Parsing failed, {}", e.getMessage(), e);
            } catch (Exception e) {
                LOGGER.error("Post to livy ERROR. \n {}", e);
            }
            return result;
        } else {
            LOGGER.info("The livy server needs Kerberos Authentication");
            String userPrincipal = env.getProperty("livy.server.auth.kerberos.principal");
            String keyTabLocation = env.getProperty("livy.server.auth.kerberos.keytab");
            LOGGER.info("principal:{}, lcoation:{}", userPrincipal, keyTabLocation);
            // Kerberos-aware template authenticates with the configured keytab.
            KerberosRestTemplate restTemplate = new KerberosRestTemplate(keyTabLocation, userPrincipal);
            HttpEntity<String> springEntity = null;
            try {
                springEntity = new HttpEntity<>(toJsonWithFormat(livyConfMap), headers);
            } catch (HttpClientErrorException e) {
                LOGGER.error("Post to livy ERROR. \n response status : " + e.getMessage()
                    + "\n response header : " + e.getResponseHeaders()
                    + "\n response body : " + e.getResponseBodyAsString());
            } catch (JsonProcessingException e) {
                LOGGER.error("Json Parsing failed, {}", e.getMessage(), e);
            } catch (Exception e) {
                LOGGER.error("Post to livy ERROR. {}", e.getMessage(), e);
            }
            String result = restTemplate.postForObject(uri, springEntity, String.class);
            LOGGER.info(result);
            return result;
        }
    }

    /**
     * GETs the given Livy URI, with or without Kerberos depending on
     * "livy.need.kerberos".
     *
     * @param uri target Livy endpoint
     * @return the response body, or null when the property is missing
     */
    public String getFromLivy(String uri) {
        LOGGER.info("Get From Livy URI is: " + uri);
        String needKerberos = env.getProperty("livy.need.kerberos");
        LOGGER.info("Need Kerberos:" + needKerberos);
        if (needKerberos == null || needKerberos.isEmpty()) {
            LOGGER.error("The property \"livy.need.kerberos\" is empty");
            return null;
        }
        if (needKerberos.equalsIgnoreCase("false")) {
            LOGGER.info("The livy server doesn't need Kerberos Authentication");
            return restTemplate.getForObject(uri, String.class);
        } else {
            LOGGER.info("The livy server needs Kerberos Authentication");
            String userPrincipal = env.getProperty("livy.server.auth.kerberos.principal");
            String keyTabLocation = env.getProperty("livy.server.auth.kerberos.keytab");
            LOGGER.info("principal:{}, lcoation:{}", userPrincipal, keyTabLocation);
            KerberosRestTemplate restTemplate = new KerberosRestTemplate(keyTabLocation, userPrincipal);
            String result = restTemplate.getForObject(uri, String.class);
            LOGGER.info(result);
            return result;
        }
    }

    /**
     * DELETEs the given Livy URI (kills a batch session), with or without
     * Kerberos depending on "livy.need.kerberos". Silently returns when the
     * property is missing.
     *
     * @param uri target Livy batch endpoint
     */
    public void deleteByLivy(String uri) {
        LOGGER.info("Delete by Livy URI is: " + uri);
        String needKerberos = env.getProperty("livy.need.kerberos");
        LOGGER.info("Need Kerberos:" + needKerberos);
        if (needKerberos == null || needKerberos.isEmpty()) {
            LOGGER.error("The property \"livy.need.kerberos\" is empty");
            return;
        }
        if (needKerberos.equalsIgnoreCase("false")) {
            LOGGER.info("The livy server doesn't need Kerberos Authentication");
            new RestTemplate().delete(uri);
        } else {
            LOGGER.info("The livy server needs Kerberos Authentication");
            String userPrincipal = env.getProperty("livy.server.auth.kerberos.principal");
            String keyTabLocation = env.getProperty("livy.server.auth.kerberos.keytab");
            LOGGER.info("principal:{}, lcoation:{}", userPrincipal, keyTabLocation);
            KerberosRestTemplate restTemplate = new KerberosRestTemplate(keyTabLocation, userPrincipal);
            restTemplate.delete(uri);
        }
    }
}
| 4,075 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/JobOperator.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.JobHealth;
import org.apache.griffin.core.job.entity.JobState;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.quartz.SchedulerException;
public interface JobOperator {

    /** Registers and schedules a new job for the given measure. */
    AbstractJob add(AbstractJob job, GriffinMeasure measure)
        throws Exception;

    /** Starts (schedules) the given job. */
    void start(AbstractJob job) throws Exception;

    /** Stops the job's execution without deleting it. */
    void stop(AbstractJob job) throws SchedulerException;

    /** Stops the job and marks it as deleted. */
    void delete(AbstractJob job) throws SchedulerException;

    /** Accumulates this job's health into the given summary and returns it. */
    JobHealth getHealth(JobHealth jobHealth, AbstractJob job)
        throws SchedulerException;

    /** Returns the job's current state relative to the requested action. */
    JobState getState(AbstractJob job, String action)
        throws SchedulerException;
}
| 4,076 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/JobServiceImpl.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static java.util.TimeZone.getTimeZone;
import static org.apache.griffin.core.config.EnvConfig.ENV_BATCH;
import static org.apache.griffin.core.config.EnvConfig.ENV_STREAMING;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.INSTANCE_ID_DOES_NOT_EXIST;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_MEASURE_ID;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_ID_DOES_NOT_EXIST;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_NAME_DOES_NOT_EXIST;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_TYPE_DOES_NOT_SUPPORT;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.MEASURE_TYPE_DOES_NOT_SUPPORT;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.NO_SUCH_JOB_ACTION;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.QUARTZ_JOB_ALREADY_EXIST;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.BUSY;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.DEAD;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.IDLE;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_STARTED;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.RECOVERING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.RUNNING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.STARTING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.SUCCESS;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.UNKNOWN;
import static org.apache.griffin.core.job.entity.LivySessionStates.isActive;
import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.BATCH;
import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.STREAMING;
import static org.quartz.CronScheduleBuilder.cronSchedule;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.JobKey.jobKey;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
import static org.quartz.TriggerBuilder.newTrigger;
import static org.quartz.TriggerKey.triggerKey;
import com.fasterxml.jackson.core.type.TypeReference;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.TimeZone;
import org.apache.commons.lang.StringUtils;
import org.apache.griffin.core.event.GriffinEventManager;
import org.apache.griffin.core.event.JobEvent;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.BatchJob;
import org.apache.griffin.core.job.entity.JobHealth;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.JobState;
import org.apache.griffin.core.job.entity.JobType;
import org.apache.griffin.core.job.entity.LivySessionStates;
import org.apache.griffin.core.job.entity.LivySessionStates.State;
import org.apache.griffin.core.job.entity.StreamingJob;
import org.apache.griffin.core.job.repo.BatchJobRepo;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.griffin.core.job.repo.JobRepo;
import org.apache.griffin.core.job.repo.StreamingJobRepo;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType;
import org.apache.griffin.core.measure.repo.GriffinMeasureRepo;
import org.apache.griffin.core.util.JsonUtil;
import org.apache.griffin.core.util.YarnNetUtil;
import org.json.JSONArray;
import org.json.JSONObject;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.TriggerKey;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.env.Environment;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.ResourceAccessException;
/**
 * Default {@link JobService} implementation. Owns the lifecycle of griffin
 * measurement jobs (batch and streaming): creation, start/stop, deletion,
 * Quartz scheduling, and synchronizing persisted job-instance state with
 * Livy sessions and (as a fallback) the YARN cluster.
 */
@Service
public class JobServiceImpl implements JobService {
    private static final Logger LOGGER = LoggerFactory
            .getLogger(JobServiceImpl.class);
    /** Quartz JobDataMap key under which the griffin job id is stored. */
    public static final String GRIFFIN_JOB_ID = "griffinJobId";
    // Paging guards for job-instance queries.
    private static final int MAX_PAGE_SIZE = 1024;
    private static final int DEFAULT_PAGE_SIZE = 10;
    // Supported values of the "action" parameter in onAction().
    static final String START = "start";
    static final String STOP = "stop";

    @Autowired
    @Qualifier("schedulerFactoryBean")
    private SchedulerFactoryBean factory;
    @Autowired
    private JobInstanceRepo instanceRepo;
    @Autowired
    private Environment env;
    @Autowired
    private GriffinMeasureRepo measureRepo;
    @Autowired
    private BatchJobRepo batchJobRepo;
    @Autowired
    private StreamingJobRepo streamingJobRepo;
    @Autowired
    private JobRepo<AbstractJob> jobRepo;
    @Autowired
    private BatchJobOperatorImpl batchJobOp;
    @Autowired
    private StreamingJobOperatorImpl streamingJobOp;
    @Autowired
    private GriffinEventManager eventManager;
    @Autowired
    private LivyTaskSubmitHelper livyTaskSubmitHelper;

    public JobServiceImpl() {
    }

    /**
     * Lists non-deleted jobs, optionally filtered by type.
     *
     * @param type "batch", "streaming", or anything else for all jobs
     * @return jobs with their current {@link JobState} attached
     */
    @Override
    public List<AbstractJob> getAliveJobs(String type) {
        List<? extends AbstractJob> jobs;
        if (JobType.BATCH.getName().equals(type)) {
            jobs = batchJobRepo.findByDeleted(false);
        } else if (JobType.STREAMING.getName().equals(type)) {
            jobs = streamingJobRepo.findByDeleted(false);
        } else {
            // Unknown/empty type falls back to all job kinds.
            jobs = jobRepo.findByDeleted(false);
        }
        return getJobDataBeans(jobs);
    }

    // Attaches the live scheduler state to each job bean.
    private List<AbstractJob> getJobDataBeans(List<? extends AbstractJob> jobs) {
        List<AbstractJob> dataList = new ArrayList<>();
        try {
            for (AbstractJob job : jobs) {
                JobState jobState = genJobState(job);
                job.setJobState(jobState);
                dataList.add(job);
            }
        } catch (SchedulerException e) {
            LOGGER.error("Failed to get RUNNING jobs.", e);
            throw new GriffinException
                    .ServiceException("Failed to get RUNNING jobs.", e);
        }
        return dataList;
    }

    /**
     * Creates a new job for an existing measure, firing before/after
     * creation events to registered listeners.
     *
     * @param job job definition posted by the client
     * @return the persisted job
     * @throws Exception if validation or scheduling fails
     */
    @Override
    public AbstractJob addJob(AbstractJob job) throws Exception {
        JobEvent jobEvent = JobEvent.yieldJobEventBeforeCreation(job);
        eventManager.notifyListeners(jobEvent);
        Long measureId = job.getMeasureId();
        GriffinMeasure measure = getMeasureIfValid(measureId);
        // The measure's process type (BATCH/STREAMING) selects the operator.
        JobOperator op = getJobOperator(measure.getProcessType());
        AbstractJob jobSaved = op.add(job, measure);
        jobEvent = JobEvent.yieldJobEventAfterCreation(jobSaved);
        eventManager.notifyListeners(jobEvent);
        return jobSaved;
    }

    /**
     * Returns the stored configuration of a non-deleted job.
     *
     * @throws GriffinException.NotFoundException if no such job exists
     */
    @Override
    public AbstractJob getJobConfig(Long jobId) {
        AbstractJob job = jobRepo.findByIdAndDeleted(jobId, false);
        if (job == null) {
            LOGGER.warn("Job id {} does not exist.", jobId);
            throw new GriffinException
                    .NotFoundException(JOB_ID_DOES_NOT_EXIST);
        }
        return job;
    }

    /**
     * @param jobId  job id
     * @param action job operation: start job, stop job
     */
    @Override
    public AbstractJob onAction(Long jobId, String action) throws Exception {
        AbstractJob job = jobRepo.findByIdAndDeleted(jobId, false);
        validateJobExist(job);
        JobOperator op = getJobOperator(job);
        doAction(action, job, op);
        JobState jobState = genJobState(job, action);
        job.setJobState(jobState);
        return job;
    }

    // Dispatches "start"/"stop" to the operator; anything else is a 404.
    private void doAction(String action, AbstractJob job, JobOperator op)
            throws Exception {
        switch (action) {
            case START:
                op.start(job);
                break;
            case STOP:
                op.stop(job);
                break;
            default:
                throw new GriffinException
                        .NotFoundException(NO_SUCH_JOB_ACTION);
        }
    }

    /**
     * logically delete
     * 1. pause these jobs
     * 2. set these jobs as deleted status
     *
     * @param jobId griffin job id
     */
    @Override
    public void deleteJob(Long jobId) throws SchedulerException {
        AbstractJob job = jobRepo.findByIdAndDeleted(jobId, false);
        validateJobExist(job);
        JobEvent event = JobEvent.yieldJobEventBeforeRemoval(job);
        eventManager.notifyListeners(event);
        JobOperator op = getJobOperator(job);
        op.delete(job);
        event = JobEvent.yieldJobEventAfterRemoval(job);
        eventManager.notifyListeners(event);
    }

    /**
     * logically delete
     *
     * @param name griffin job name which may not be unique.
     */
    @Override
    public void deleteJob(String name) throws SchedulerException {
        List<AbstractJob> jobs = jobRepo.findByJobNameAndDeleted(name, false);
        if (CollectionUtils.isEmpty(jobs)) {
            LOGGER.warn("There is no job with '{}' name.", name);
            throw new GriffinException
                    .NotFoundException(JOB_NAME_DOES_NOT_EXIST);
        }
        // Names may be shared by several jobs; remove every match.
        for (AbstractJob job : jobs) {
            JobEvent event = JobEvent.yieldJobEventBeforeRemoval(job);
            eventManager.notifyListeners(event);
            JobOperator op = getJobOperator(job);
            op.delete(job);
            event = JobEvent.yieldJobEventAfterRemoval(job);
            eventManager.notifyListeners(event);
        }
    }

    /**
     * Pages through a job's instances, newest first, refreshing stale
     * (UNKNOWN or still-active) instance states from Livy on the way out.
     *
     * @param page zero-based page index
     * @param size page size; clamped to (0, MAX_PAGE_SIZE], defaulted when <= 0
     */
    @Override
    public List<JobInstanceBean> findInstancesOfJob(
            Long jobId,
            int page,
            int size) {
        AbstractJob job = jobRepo.findByIdAndDeleted(jobId, false);
        if (job == null) {
            LOGGER.warn("Job id {} does not exist.", jobId);
            throw new GriffinException
                    .NotFoundException(JOB_ID_DOES_NOT_EXIST);
        }
        // Clamp the requested page size into a sane range.
        size = size > MAX_PAGE_SIZE ? MAX_PAGE_SIZE : size;
        size = size <= 0 ? DEFAULT_PAGE_SIZE : size;
        Pageable pageable = new PageRequest(page, size,
                Sort.Direction.DESC, "tms");
        List<JobInstanceBean> instances = instanceRepo.findByJobId(jobId,
                pageable);
        return updateState(instances);
    }

    /**
     * Returns a single job instance by its id.
     *
     * @throws GriffinException.NotFoundException if no such instance exists
     */
    @Override
    public JobInstanceBean findInstance(Long id) {
        JobInstanceBean bean = instanceRepo.findByInstanceId(id);
        if (bean == null) {
            LOGGER.warn("Instance id {} does not exist.", id);
            throw new GriffinException
                    .NotFoundException(INSTANCE_ID_DOES_NOT_EXIST);
        }
        return bean;
    }

    // Re-syncs from Livy any instance whose state may still change.
    private List<JobInstanceBean> updateState(List<JobInstanceBean> instances) {
        for (JobInstanceBean instance : instances) {
            State state = instance.getState();
            if (state.equals(UNKNOWN) || isActive(state)) {
                syncInstancesOfJob(instance);
            }
        }
        return instances;
    }

    /** Lists all instances that were fired under the given Quartz trigger key. */
    @Override
    public List<JobInstanceBean> findInstancesByTriggerKey(String triggerKey) {
        return instanceRepo.findByTriggerKey(triggerKey);
    }

    /**
     * a job is regard as healthy job when its latest instance is in healthy
     * state.
     *
     * @return job healthy statistics
     */
    @Override
    public JobHealth getHealthInfo() {
        JobHealth jobHealth = new JobHealth();
        List<AbstractJob> jobs = jobRepo.findByDeleted(false);
        for (AbstractJob job : jobs) {
            JobOperator op = getJobOperator(job);
            try {
                jobHealth = op.getHealth(jobHealth, job);
            } catch (SchedulerException e) {
                // NOTE(review): the exception fills the "{}" placeholder here,
                // so only its toString is logged, not the stack trace.
                LOGGER.error("Job schedule exception. {}", e);
                throw new GriffinException
                        .ServiceException("Fail to Get HealthInfo", e);
            }
        }
        return jobHealth;
    }

    /**
     * Periodic cleanup: pauses and removes job instances whose expiration
     * timestamp has passed. Runs on a fixed delay configured by
     * {@code jobInstance.expired.milliseconds}.
     */
    @Scheduled(fixedDelayString = "${jobInstance.expired.milliseconds}")
    public void deleteExpiredJobInstance() {
        Long timeMills = System.currentTimeMillis();
        List<JobInstanceBean> instances = instanceRepo
                .findByExpireTmsLessThanEqual
                        (timeMills);
        // Only delete instances we managed to pause; otherwise retry next run.
        if (!batchJobOp.pauseJobInstances(instances)) {
            LOGGER.error("Pause job failure.");
            return;
        }
        int count = instanceRepo.deleteByExpireTimestamp(timeMills);
        LOGGER.info("Delete {} expired job instances.", count);
    }

    // Throws 404 when the looked-up job was null (missing or deleted).
    private void validateJobExist(AbstractJob job) {
        if (job == null) {
            LOGGER.warn("Griffin job does not exist.");
            throw new GriffinException.NotFoundException(JOB_ID_DOES_NOT_EXIST);
        }
    }

    // Operator selection by concrete job entity type.
    private JobOperator getJobOperator(AbstractJob job) {
        if (job instanceof BatchJob) {
            return batchJobOp;
        } else if (job instanceof StreamingJob) {
            return streamingJobOp;
        }
        throw new GriffinException.BadRequestException
                (JOB_TYPE_DOES_NOT_SUPPORT);
    }

    // Operator selection by measure process type.
    private JobOperator getJobOperator(ProcessType type) {
        if (type == BATCH) {
            return batchJobOp;
        } else if (type == STREAMING) {
            return streamingJobOp;
        }
        throw new GriffinException.BadRequestException
                (MEASURE_TYPE_DOES_NOT_SUPPORT);
    }

    /**
     * Builds a trigger key and verifies it is not already registered in
     * Quartz; conflicts are reported as HTTP 409.
     */
    TriggerKey getTriggerKeyIfValid(String qName, String qGroup) throws
            SchedulerException {
        TriggerKey triggerKey = triggerKey(qName, qGroup);
        if (factory.getScheduler().checkExists(triggerKey)) {
            throw new GriffinException.ConflictException
                    (QUARTZ_JOB_ALREADY_EXIST);
        }
        return triggerKey;
    }

    /**
     * Fetches the Quartz triggers of a job, or null if either identity
     * component is missing (i.e. the job was never scheduled).
     */
    List<? extends Trigger> getTriggers(String name, String group) throws
            SchedulerException {
        if (name == null || group == null) {
            return null;
        }
        JobKey jobKey = new JobKey(name, group);
        Scheduler scheduler = factory.getScheduler();
        return scheduler.getTriggersOfJob(jobKey);
    }

    // Computes and attaches the job's current scheduler-derived state.
    private JobState genJobState(AbstractJob job, String action) throws
            SchedulerException {
        JobOperator op = getJobOperator(job);
        JobState state = op.getState(job, action);
        job.setJobState(state);
        return state;
    }

    private JobState genJobState(AbstractJob job) throws SchedulerException {
        return genJobState(job, null);
    }

    /**
     * Registers the job detail and schedules its trigger in Quartz.
     * BATCH jobs get a cron trigger; STREAMING jobs fire once immediately.
     */
    void addJob(TriggerKey tk, AbstractJob job, ProcessType type) throws
            Exception {
        JobDetail jobDetail = addJobDetail(tk, job);
        Trigger trigger = genTriggerInstance(tk, jobDetail, job, type);
        factory.getScheduler().scheduleJob(trigger);
    }

    // Quartz names are made unique by appending the creation timestamp.
    String getQuartzName(AbstractJob job) {
        return job.getJobName() + "_" + System.currentTimeMillis();
    }

    String getQuartzGroup() {
        return "BA";
    }

    /**
     * A job name is valid when it is non-empty and not already used by a
     * live (non-deleted) job.
     */
    boolean isValidJobName(String jobName) {
        if (StringUtils.isEmpty(jobName)) {
            LOGGER.warn("Job name cannot be empty.");
            return false;
        }
        int size = jobRepo.countByJobNameAndDeleted(jobName, false);
        if (size > 0) {
            // NOTE(review): "exits" is a typo for "exists" in this log line.
            LOGGER.warn("Job name already exits.");
            return false;
        }
        return true;
    }

    // Resolves the measure or rejects the request with 400.
    private GriffinMeasure getMeasureIfValid(Long measureId) {
        GriffinMeasure measure = measureRepo.findByIdAndDeleted(measureId,
                false);
        if (measure == null) {
            LOGGER.warn("The measure id {} isn't valid. Maybe it doesn't " +
                            "exist or is external measure type.",
                    measureId);
            throw new GriffinException.BadRequestException(INVALID_MEASURE_ID);
        }
        return measure;
    }

    // Builds the Quartz trigger appropriate for the job's process type.
    private Trigger genTriggerInstance(TriggerKey tk, JobDetail jd, AbstractJob
            job, ProcessType type) {
        TriggerBuilder builder = newTrigger().withIdentity(tk).forJob(jd);
        if (type == BATCH) {
            // Cron schedule evaluated in the job's own time zone.
            TimeZone timeZone = getTimeZone(job.getTimeZone());
            return builder.withSchedule(cronSchedule(job.getCronExpression())
                    .inTimeZone(timeZone)).build();
        } else if (type == STREAMING) {
            // Streaming jobs are launched once; the Spark job runs on.
            return builder.startNow().withSchedule(simpleSchedule()
                    .withRepeatCount(0)).build();
        }
        throw new GriffinException.BadRequestException
                (JOB_TYPE_DOES_NOT_SUPPORT);
    }

    // Creates (or reuses) the durable Quartz JobDetail carrying the job id.
    private JobDetail addJobDetail(TriggerKey triggerKey, AbstractJob job)
            throws SchedulerException {
        Scheduler scheduler = factory.getScheduler();
        JobKey jobKey = jobKey(triggerKey.getName(), triggerKey.getGroup());
        JobDetail jobDetail;
        Boolean isJobKeyExist = scheduler.checkExists(jobKey);
        if (isJobKeyExist) {
            jobDetail = scheduler.getJobDetail(jobKey);
        } else {
            jobDetail = newJob(JobInstance.class).storeDurably().withIdentity
                    (jobKey).build();
        }
        setJobDataMap(jobDetail, job);
        // replace=true only when the key already existed.
        scheduler.addJob(jobDetail, isJobKeyExist);
        return jobDetail;
    }

    private void setJobDataMap(JobDetail jd, AbstractJob job) {
        JobDataMap jobDataMap = jd.getJobDataMap();
        jobDataMap.put(GRIFFIN_JOB_ID, job.getId().toString());
    }

    /**
     * deleteJobsRelateToMeasure
     * 1. search jobs related to measure
     * 2. deleteJob
     *
     * @param measureId measure id
     */
    public void deleteJobsRelateToMeasure(Long measureId) throws
            SchedulerException {
        List<AbstractJob> jobs = jobRepo.findByMeasureIdAndDeleted(measureId,
                false);
        if (CollectionUtils.isEmpty(jobs)) {
            LOGGER.info("Measure id {} has no related jobs.", measureId);
            return;
        }
        for (AbstractJob job : jobs) {
            JobOperator op = getJobOperator(job);
            op.delete(job);
        }
    }

    /**
     * Periodic sync: refreshes from Livy every instance that is still in an
     * active state. Delay configured by
     * {@code jobInstance.fixedDelay.in.milliseconds}.
     */
    @Scheduled(fixedDelayString = "${jobInstance.fixedDelay.in.milliseconds}")
    public void syncInstancesOfAllJobs() {
        LivySessionStates.State[] states = {STARTING, NOT_STARTED, RECOVERING,
                IDLE, RUNNING, BUSY};
        List<JobInstanceBean> beans = instanceRepo.findByActiveState(states);
        for (JobInstanceBean jobInstance : beans) {
            syncInstancesOfJob(jobInstance);
        }
    }

    /**
     * call livy to update part of job instance table data associated with group
     * and jobName in mysql.
     *
     * @param instance job instance livy info
     */
    private void syncInstancesOfJob(JobInstanceBean instance) {
        if (instance.getSessionId() == null) {
            return;
        }
        String uri = env.getProperty("livy.uri") + "/"
                + instance.getSessionId();
        TypeReference<HashMap<String, Object>> type =
                new TypeReference<HashMap<String, Object>>() {
                };
        try {
            String resultStr = livyTaskSubmitHelper.getFromLivy(uri);
            LOGGER.info(resultStr);
            HashMap<String, Object> resultMap = JsonUtil.toEntity(resultStr,
                    type);
            setJobInstanceIdAndUri(instance, resultMap);
        } catch (ResourceAccessException e) {
            // Livy endpoint unreachable — likely a configuration problem.
            LOGGER.error("Your url may be wrong. Please check {}.\n {}", uri, e
                    .getMessage());
        } catch (HttpClientErrorException e) {
            // 4xx from Livy: the session may be gone; fall back to YARN.
            LOGGER.warn("sessionId({}) appId({}) {}.", instance.getSessionId(),
                    instance.getAppId(), e.getMessage());
            setStateByYarn(instance, e);
            livyTaskSubmitHelper.decreaseCurTaskNum(instance.getId());
        } catch (Exception e) {
            LOGGER.error(e.getMessage());
        }
    }

    // Decides whether YARN should be consulted after a Livy client error.
    private void setStateByYarn(JobInstanceBean instance,
                                HttpClientErrorException e) {
        if (!checkStatus(instance, e)) {
            int code = e.getStatusCode().value();
            boolean match = (code == 400 || code == 404)
                    && instance.getAppId() != null;
            //this means your url is correct,but your param is wrong or livy
            //session may be overdue.
            if (match) {
                setStateByYarn(instance);
            }
        }
    }

    /**
     * Check instance status in case that session id is overdue and app id is
     * null and so we cannot update instance state
     * .
     *
     * @param instance job instance bean
     * @param e        HttpClientErrorException
     * @return boolean
     */
    private boolean checkStatus(JobInstanceBean instance,
                                HttpClientErrorException e) {
        int code = e.getStatusCode().value();
        String appId = instance.getAppId();
        String responseBody = e.getResponseBodyAsString();
        Long sessionId = instance.getSessionId();
        // Default to -1 so the contains() check below never NPEs.
        sessionId = sessionId != null ? sessionId : -1;
        // If code is 404 and appId is null and response body is like 'Session
        // {id} not found',this means instance may not be scheduled for
        // a long time by spark for too many tasks. It may be dead.
        if (code == 404 && appId == null && (responseBody != null &&
                responseBody.contains(sessionId.toString()))) {
            instance.setState(DEAD);
            instance.setDeleted(true);
            instanceRepo.save(instance);
            return true;
        }
        return false;
    }

    // Fallback path: ask YARN for the application state directly.
    private void setStateByYarn(JobInstanceBean instance) {
        LOGGER.warn("Spark session {} may be overdue! " +
                "Now we use yarn to update state.", instance.getSessionId());
        String yarnUrl = env.getProperty("yarn.uri");
        boolean success = YarnNetUtil.update(yarnUrl, instance);
        if (!success) {
            // Avoid a redundant save when the state is already UNKNOWN.
            if (instance.getState().equals(UNKNOWN)) {
                return;
            }
            instance.setState(UNKNOWN);
        }
        instanceRepo.save(instance);
    }

    // Applies the Livy response map (state/appId) to the instance and saves it.
    private void setJobInstanceIdAndUri(JobInstanceBean instance, HashMap<String
            , Object> resultMap) {
        if (resultMap != null) {
            Object state = resultMap.get("state");
            Object appId = resultMap.get("appId");
            instance.setState(state == null ? null : LivySessionStates.State
                    .valueOf(state.toString().toUpperCase
                            ()));
            instance.setAppId(appId == null ? null : appId.toString());
            instance.setAppUri(appId == null ? null : env
                    .getProperty("yarn.uri") + "/cluster/app/" + appId);
            instanceRepo.save(instance);
            // A terminal Livy state (SUCCESS or DEAD) frees one task slot.
            if (instance.getState().equals(SUCCESS) || instance.getState().equals(DEAD)) {
                livyTaskSubmitHelper.decreaseCurTaskNum(instance.getSessionId());
            }
        }
    }

    /**
     * A job is healthy when its most recent instance is in a healthy Livy
     * state; a job without instances is not healthy.
     */
    public Boolean isJobHealthy(Long jobId) {
        Pageable pageable = new PageRequest(0, 1, Sort.Direction.DESC, "tms");
        List<JobInstanceBean> instances = instanceRepo.findByJobId(jobId,
                pageable);
        return !CollectionUtils.isEmpty(instances) && LivySessionStates
                .isHealthy(instances.get(0).getState());
    }

    /**
     * Resolves the HDFS sink path for a job run, read from the batch or
     * streaming environment config depending on the job's type.
     *
     * @return the path, or null if the job is unknown or has no hdfs sink
     */
    @Override
    public String getJobHdfsSinksPath(String jobName, long timestamp) {
        List<AbstractJob> jobList = jobRepo.findByJobNameAndDeleted(
                jobName, false);
        if (jobList.size() == 0) {
            return null;
        }
        if (jobList.get(0).getType().toLowerCase().equals("batch")) {
            return getSinksPath(ENV_BATCH)
                    + "/" + jobName + "/" + timestamp + "";
        }
        return getSinksPath(ENV_STREAMING)
                + "/" + jobName + "/" + timestamp + "";
    }

    // Extracts the "path" of the first hdfs-typed sink from an env JSON blob.
    private String getSinksPath(String jsonString) {
        try {
            JSONObject obj = new JSONObject(jsonString);
            JSONArray persistArray = obj.getJSONArray("sinks");
            for (int i = 0; i < persistArray.length(); i++) {
                Object type = persistArray.getJSONObject(i).get("type");
                if (type instanceof String
                        && "hdfs".equalsIgnoreCase(String.valueOf(type))) {
                    return persistArray.getJSONObject(i)
                            .getJSONObject("config").getString("path");
                }
            }
            return null;
        } catch (Exception ex) {
            LOGGER.error("Fail to get Persist path from {}", jsonString, ex);
            return null;
        }
    }

    /**
     * Fires an already-scheduled job immediately by attaching a one-shot
     * trigger to its Quartz job key.
     *
     * @return the new trigger's key as a string
     * @throws GriffinException.NotFoundException if the job is unknown or
     *         not registered in the scheduler
     */
    @Override
    public String triggerJobById(Long id) throws SchedulerException {
        AbstractJob job = jobRepo.findByIdAndDeleted(id, false);
        validateJobExist(job);
        Scheduler scheduler = factory.getScheduler();
        JobKey jobKey = jobKey(job.getName(), job.getGroup());
        if (scheduler.checkExists(jobKey)) {
            Trigger trigger = TriggerBuilder.newTrigger()
                    .forJob(jobKey)
                    .startNow()
                    .build();
            scheduler.scheduleJob(trigger);
            return trigger.getKey().toString();
        } else {
            throw new GriffinException.NotFoundException(JOB_ID_DOES_NOT_EXIST);
        }
    }
}
| 4,077 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/JobController.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.JobHealth;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.util.FSUtil;
import org.quartz.SchedulerException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoints for griffin job management under {@code /api/v1}.
 * All business logic is delegated to {@link JobService}.
 */
@RestController
@RequestMapping("/api/v1")
public class JobController {

    @Autowired
    private JobService jobService;

    /** Lists live jobs, optionally filtered by type ("batch"/"streaming"). */
    @RequestMapping(value = "/jobs", method = RequestMethod.GET)
    public List<AbstractJob> getJobs(@RequestParam(value = "type",
            defaultValue = "") String type) {
        return jobService.getAliveJobs(type);
    }

    /** Creates a new job; responds 201 on success. */
    @RequestMapping(value = "/jobs", method = RequestMethod.POST)
    @ResponseStatus(HttpStatus.CREATED)
    public AbstractJob addJob(@RequestBody AbstractJob job) throws Exception {
        return jobService.addJob(job);
    }

    /** Returns the stored configuration of a job. */
    @RequestMapping(value = "/jobs/config")
    public AbstractJob getJobConfig(@RequestParam("jobId") Long jobId) {
        return jobService.getJobConfig(jobId);
    }

    /** Starts or stops a job; {@code action} is "start" or "stop". */
    @RequestMapping(value = "/jobs/{id}", method = RequestMethod.PUT)
    @ResponseStatus(HttpStatus.OK)
    public AbstractJob onActions(
            @PathVariable("id") Long jobId,
            @RequestParam String action) throws Exception {
        return jobService.onAction(jobId, action);
    }

    /** Logically deletes every job carrying the given name. */
    @RequestMapping(value = "/jobs", method = RequestMethod.DELETE)
    @ResponseStatus(HttpStatus.NO_CONTENT)
    public void deleteJob(@RequestParam("jobName") String jobName)
            throws SchedulerException {
        jobService.deleteJob(jobName);
    }

    /** Logically deletes a single job by id. */
    @RequestMapping(value = "/jobs/{id}", method = RequestMethod.DELETE)
    @ResponseStatus(HttpStatus.NO_CONTENT)
    public void deleteJob(@PathVariable("id") Long id)
            throws SchedulerException {
        jobService.deleteJob(id);
    }

    /** Pages through a job's run instances, newest first. */
    @RequestMapping(value = "/jobs/instances", method = RequestMethod.GET)
    public List<JobInstanceBean> findInstancesOfJob(
            @RequestParam("jobId") Long id,
            @RequestParam("page") int page,
            @RequestParam("size") int size) {
        return jobService.findInstancesOfJob(id, page, size);
    }

    /** Looks up one run instance by its id. */
    @RequestMapping(value = "/jobs/instances/{instanceId}", method = RequestMethod.GET)
    public JobInstanceBean findInstanceByInstanceId(@PathVariable("instanceId") Long id) {
        return jobService.findInstance(id);
    }

    /** Aggregated healthy/total job counters. */
    @RequestMapping(value = "/jobs/health", method = RequestMethod.GET)
    public JobHealth getHealthInfo() {
        return jobService.getHealthInfo();
    }

    /**
     * Streams the missing-sample data file for a job run as an attachment
     * named {@code sampleMissingData.json}.
     */
    @RequestMapping(path = "/jobs/download", method = RequestMethod.GET)
    public ResponseEntity<Resource> download(
            @RequestParam("jobName") String jobName,
            @RequestParam("ts") long timestamp)
            throws Exception {
        String path = jobService.getJobHdfsSinksPath(jobName, timestamp);
        InputStreamResource resource = new InputStreamResource(
                FSUtil.getMissSampleInputStream(path));
        return ResponseEntity.ok().
                header("content-disposition",
                        "attachment; filename = sampleMissingData.json")
                .contentType(MediaType.APPLICATION_OCTET_STREAM)
                .body(resource);
    }

    /**
     * Fires a scheduled job immediately; the request body is accepted but
     * currently unused. Returns the new trigger key.
     */
    @RequestMapping(value = "/jobs/trigger/{id}", method = RequestMethod.POST)
    @ResponseStatus(HttpStatus.OK)
    public Map<String, Object> triggerJob(@PathVariable("id") Long id, @RequestBody(required = false) String request) throws SchedulerException {
        return Collections.singletonMap("triggerKey", jobService.triggerJobById(id));
    }

    /** Lists instances fired by a specific trigger key (dots allowed in key). */
    @RequestMapping(value = "jobs/triggerKeys/{triggerKey:.+}", method = RequestMethod.GET)
    public List<JobInstanceBean> findInstanceByTriggerKey(@PathVariable("triggerKey") String triggerKey) {
        return jobService.findInstancesByTriggerKey(triggerKey);
    }
}
| 4,078 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/BatchJobOperatorImpl.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_CONNECTOR_NAME;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_CRON_EXPRESSION;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.INVALID_JOB_NAME;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_IS_NOT_IN_PAUSED_STATUS;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_IS_NOT_SCHEDULED;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.JOB_KEY_DOES_NOT_EXIST;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.MISSING_BASELINE_CONFIG;
import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.BATCH;
import static org.quartz.CronExpression.isValidExpression;
import static org.quartz.JobKey.jobKey;
import static org.quartz.Trigger.TriggerState;
import static org.quartz.Trigger.TriggerState.BLOCKED;
import static org.quartz.Trigger.TriggerState.NORMAL;
import static org.quartz.Trigger.TriggerState.PAUSED;
import static org.quartz.TriggerKey.triggerKey;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.BatchJob;
import org.apache.griffin.core.job.entity.JobDataSegment;
import org.apache.griffin.core.job.entity.JobHealth;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.JobState;
import org.apache.griffin.core.job.entity.LivySessionStates;
import org.apache.griffin.core.job.repo.BatchJobRepo;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.griffin.core.measure.entity.DataSource;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerKey;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
@Service
public class BatchJobOperatorImpl implements JobOperator {
private static final Logger LOGGER = LoggerFactory
.getLogger(BatchJobOperatorImpl.class);
@Autowired
@Qualifier("schedulerFactoryBean")
private SchedulerFactoryBean factory;
@Autowired
private JobInstanceRepo instanceRepo;
@Autowired
private BatchJobRepo batchJobRepo;
@Autowired
private JobServiceImpl jobService;
/**
 * Creates and schedules a new batch job for the given measure.
 * Runs in one transaction: validation, quartz-identity allocation,
 * persistence, and scheduling all succeed or roll back together.
 *
 * @param job     the job definition to register (must be a BatchJob)
 * @param measure the measure this job evaluates
 * @return the input job (now carrying its quartz identity)
 * @throws Exception if validation fails or Quartz scheduling fails
 */
@Override
@Transactional(rollbackFor = Exception.class)
public AbstractJob add(AbstractJob job, GriffinMeasure measure)
        throws Exception {
    validateParams(job, measure);
    // Quartz name is made unique per creation; group is fixed.
    String qName = jobService.getQuartzName(job);
    String qGroup = jobService.getQuartzGroup();
    TriggerKey triggerKey = jobService.getTriggerKeyIfValid(qName, qGroup);
    BatchJob batchJob = genBatchJobBean(job, qName, qGroup);
    // Save first so the persisted id is available when scheduling.
    batchJob = batchJobRepo.save(batchJob);
    jobService.addJob(triggerKey, batchJob, BATCH);
    return job;
}
/**
 * Narrows the incoming job to a {@link BatchJob} and stamps it with its
 * Quartz identity; the metric name mirrors the job name.
 */
private BatchJob genBatchJobBean(AbstractJob job,
                                 String qName,
                                 String qGroup) {
    BatchJob result = (BatchJob) job;
    result.setMetricName(result.getJobName());
    result.setName(qName);
    result.setGroup(qGroup);
    return result;
}
/**
 * Resumes a paused batch job in Quartz.
 *
 * All trigger states: BLOCKED COMPLETE ERROR NONE NORMAL PAUSED.
 * Only PAUSED may be started; BLOCKED/NORMAL are the stoppable states.
 *
 * @param job batch job to resume
 * @throws GriffinException.BadRequestException if the job is unscheduled
 *         or not currently paused
 */
@Override
public void start(AbstractJob job) {
    String jobName = job.getName();
    String jobGroup = job.getGroup();
    TriggerState triggerState = getTriggerState(jobName, jobGroup);
    if (triggerState == null) {
        throw new GriffinException.BadRequestException(
                JOB_IS_NOT_SCHEDULED);
    }
    // A job that is not paused may already be running — refuse to resume.
    if (triggerState != PAUSED) {
        throw new GriffinException.BadRequestException
                (JOB_IS_NOT_IN_PAUSED_STATUS);
    }
    try {
        factory.getScheduler().resumeJob(jobKey(jobName, jobGroup));
    } catch (SchedulerException e) {
        throw new GriffinException.ServiceException(
                "Failed to start job.", e);
    }
}
/** Pauses the job without flagging it as deleted, so it can be resumed. */
@Override
public void stop(AbstractJob job) {
    BatchJob batchJob = (BatchJob) job;
    pauseJob(batchJob, false);
}
/** Pauses the job and marks it logically deleted, inside a transaction. */
@Override
@Transactional
public void delete(AbstractJob job) {
    BatchJob batchJob = (BatchJob) job;
    pauseJob(batchJob, true);
}
/**
 * Accumulates this job into the running health statistics: every scheduled
 * job bumps the total count, and healthy ones bump the healthy count.
 *
 * @return the same {@code jobHealth} accumulator, updated
 */
@Override
public JobHealth getHealth(JobHealth jobHealth, AbstractJob job)
        throws SchedulerException {
    List<? extends Trigger> scheduled = jobService
            .getTriggers(job.getName(), job.getGroup());
    if (CollectionUtils.isEmpty(scheduled)) {
        // Unscheduled jobs do not contribute to the statistics.
        return jobHealth;
    }
    jobHealth.setJobCount(jobHealth.getJobCount() + 1);
    if (jobService.isJobHealthy(job.getId())) {
        jobHealth.setHealthyJobCount(jobHealth.getHealthyJobCount() + 1);
    }
    return jobHealth;
}
/**
 * Builds a snapshot of the job's Quartz state, including whether it can be
 * started/stopped and its previous/next fire times.
 *
 * @param action unused here; part of the {@link JobOperator} contract
 * @return the state snapshot, or null when the job was never scheduled
 */
@Override
public JobState getState(AbstractJob job, String action)
        throws SchedulerException {
    if (job.getGroup() == null || job.getName() == null) {
        // No quartz identity means the job was never scheduled.
        return null;
    }
    Scheduler scheduler = factory.getScheduler();
    TriggerState triggerState =
            scheduler.getTriggerState(triggerKey(job.getName(), job.getGroup()));
    JobState jobState = new JobState();
    jobState.setState(triggerState.toString());
    jobState.setToStart(getStartStatus(triggerState));
    jobState.setToStop(getStopStatus(triggerState));
    setTriggerTime(job, jobState);
    return jobState;
}
/**
 * Copies the previous/next fire times of the job's first trigger into the
 * state bean, using -1 when a time is unavailable.
 */
private void setTriggerTime(AbstractJob job, JobState jobState)
        throws SchedulerException {
    List<? extends Trigger> triggers = jobService
            .getTriggers(job.getName(), job.getGroup());
    // Empty triggers mean the job completed (trigger state NONE) or was
    // never scheduled — leave the fire times unset.
    if (CollectionUtils.isEmpty(triggers)) {
        return;
    }
    Trigger first = triggers.get(0);
    Date next = first.getNextFireTime();
    Date previous = first.getPreviousFireTime();
    jobState.setNextFireTime(next == null ? -1 : next.getTime());
    jobState.setPreviousFireTime(previous == null ? -1 : previous.getTime());
}
/**
 * Whether the job may be started: only a PAUSED trigger can be resumed.
 *
 * @param state current trigger state (may be null-safe compared)
 * @return true iff the job is paused and therefore startable
 */
private boolean getStartStatus(TriggerState state) {
    return PAUSED.equals(state);
}
/**
 * Whether the job may be stopped: only NORMAL or BLOCKED triggers are
 * actively scheduled and therefore stoppable.
 *
 * @param state current trigger state
 * @return true iff the job can be paused
 */
private boolean getStopStatus(TriggerState state) {
    return state == BLOCKED || state == NORMAL;
}
/**
 * Looks up the quartz trigger state for the given job name/group.
 *
 * @param name  quartz job name
 * @param group quartz job group
 * @return the state of the job's first trigger, or {@code null} when
 *         the job has no triggers
 * @throws GriffinException.ServiceException if the scheduler lookup fails
 */
private TriggerState getTriggerState(String name, String group) {
    try {
        List<? extends Trigger> triggers = jobService.getTriggers(name,
            group);
        if (CollectionUtils.isEmpty(triggers)) {
            return null;
        }
        TriggerKey key = triggers.get(0).getKey();
        return factory.getScheduler().getTriggerState(key);
    } catch (SchedulerException e) {
        // Fixed copy-paste message: this path reads trigger state, it
        // does not delete anything.
        LOGGER.error("Failed to get trigger state", e);
        throw new GriffinException
            .ServiceException("Failed to get trigger state", e);
    }
}
/**
 * Pauses the quartz job and all of its predicate jobs, persisting the
 * result. When {@code delete} is {@code true} the job is additionally
 * flagged as deleted; otherwise it is merely paused.
 *
 * @param job    griffin batch job to pause
 * @param delete whether the job should also be marked deleted
 */
private void pauseJob(BatchJob job, boolean delete) {
    try {
        pauseJob(job.getGroup(), job.getName());
        pausePredicateJob(job);
        job.setDeleted(delete);
        batchJobRepo.save(job);
    } catch (Exception e) {
        // Wrap anything unexpected so callers see one exception type.
        LOGGER.error("Job schedule happens exception.", e);
        throw new GriffinException.ServiceException("Job schedule " +
            "happens exception.", e);
    }
}
/**
 * Deletes the predicate job of every instance of the given job and
 * downgrades instances still in FINDING state to NOT_FOUND.
 */
private void pausePredicateJob(BatchJob job) throws SchedulerException {
    for (JobInstanceBean instance : instanceRepo.findByJobId(job.getId())) {
        if (instance.isPredicateDeleted()) {
            continue;
        }
        deleteJob(instance.getPredicateGroup(),
            instance.getPredicateName());
        instance.setPredicateDeleted(true);
        if (instance.getState().equals(LivySessionStates.State.FINDING)) {
            instance.setState(LivySessionStates.State.NOT_FOUND);
        }
    }
}
/**
 * Deletes the quartz job identified by group/name. A missing job is
 * logged and silently ignored.
 */
public void deleteJob(String group, String name) throws SchedulerException {
    Scheduler scheduler = factory.getScheduler();
    JobKey jobKey = new JobKey(name, group);
    if (scheduler.checkExists(jobKey)) {
        scheduler.deleteJob(jobKey);
    } else {
        LOGGER.info("Job({},{}) does not exist.", jobKey.getGroup(),
            jobKey.getName());
    }
}
/**
 * Pauses the quartz job identified by group/name. Blank identifiers
 * are ignored; an unknown job key raises NotFoundException.
 */
private void pauseJob(String group, String name) throws SchedulerException {
    if (StringUtils.isEmpty(group) || StringUtils.isEmpty(name)) {
        return;
    }
    Scheduler scheduler = factory.getScheduler();
    JobKey jobKey = new JobKey(name, group);
    if (!scheduler.checkExists(jobKey)) {
        LOGGER.warn("Job({},{}) does not exist.", jobKey.getGroup(),
            jobKey.getName());
        throw new GriffinException.NotFoundException(JOB_KEY_DOES_NOT_EXIST);
    }
    scheduler.pauseJob(jobKey);
}
/**
 * Pauses the predicate jobs of all given instances. Every instance is
 * attempted even if an earlier one fails; the successfully handled
 * ones are persisted in a single batch.
 *
 * @return {@code true} only when every instance was paused
 */
public boolean pauseJobInstances(List<JobInstanceBean> instances) {
    if (CollectionUtils.isEmpty(instances)) {
        return true;
    }
    List<JobInstanceBean> updated = new ArrayList<>();
    boolean allPaused = true;
    for (JobInstanceBean instance : instances) {
        // Call first, then combine, so a failure never short-circuits
        // the remaining instances.
        if (!pauseJobInstance(instance, updated)) {
            allPaused = false;
        }
    }
    instanceRepo.saveAll(updated);
    return allPaused;
}
/**
 * Deletes the predicate job behind a single instance and records the
 * instance for later persistence.
 *
 * @return {@code false} when the scheduler refused the deletion
 */
private boolean pauseJobInstance(JobInstanceBean instance,
                                 List<JobInstanceBean> deletedInstances) {
    String pGroup = instance.getPredicateGroup();
    String pName = instance.getPredicateName();
    if (instance.isPredicateDeleted()) {
        // Already handled earlier; nothing to do.
        return true;
    }
    try {
        deleteJob(pGroup, pName);
    } catch (SchedulerException e) {
        LOGGER.error("Failed to pause predicate job({},{}).", pGroup,
            pName);
        return false;
    }
    instance.setPredicateDeleted(true);
    deletedInstances.add(instance);
    return true;
}
/**
 * Validates job name, cron expression, baseline configuration and
 * connector names before a job is created or updated.
 *
 * @throws GriffinException.BadRequestException on the first violation
 */
private void validateParams(AbstractJob job, GriffinMeasure measure) {
    if (!jobService.isValidJobName(job.getJobName())) {
        throw new GriffinException.BadRequestException(INVALID_JOB_NAME);
    }
    if (!isValidCronExpression(job.getCronExpression())) {
        throw new GriffinException.BadRequestException(
            INVALID_CRON_EXPRESSION);
    }
    if (!isValidBaseLine(job.getSegments())) {
        throw new GriffinException.BadRequestException(
            MISSING_BASELINE_CONFIG);
    }
    if (!isValidConnectorNames(job.getSegments(),
            getConnectorNames(measure))) {
        throw new GriffinException.BadRequestException(
            INVALID_CONNECTOR_NAME);
    }
}
/**
 * Checks that the cron expression is non-empty and parseable.
 */
private boolean isValidCronExpression(String cronExpression) {
    if (StringUtils.isEmpty(cronExpression)) {
        LOGGER.warn("Cron Expression is empty.");
        return false;
    }
    if (isValidExpression(cronExpression)) {
        return true;
    }
    LOGGER.warn("Cron Expression is invalid: {}", cronExpression);
    return false;
}
/**
 * Ensures at least one segment is flagged as the timestamp baseline
 * (the {@code as.baseline} field).
 */
private boolean isValidBaseLine(List<JobDataSegment> segments) {
    assert segments != null;
    boolean hasBaseline = segments.stream()
        .anyMatch(JobDataSegment::isAsTsBaseline);
    if (!hasBaseline) {
        LOGGER.warn("Please set segment timestamp baseline " +
            "in as.baseline field.");
    }
    return hasBaseline;
}
/**
 * Validates that every segment references a known measure connector
 * and that no connector name is referenced twice.
 */
private boolean isValidConnectorNames(List<JobDataSegment> segments,
                                      List<String> names) {
    assert segments != null;
    Set<String> seen = new HashSet<>();
    for (JobDataSegment segment : segments) {
        String dcName = segment.getDataConnectorName();
        seen.add(dcName);
        boolean known = names.stream().anyMatch(name -> name.equals
            (dcName));
        if (!known) {
            LOGGER.warn("Param {} is a illegal string. " +
                "Please input one of strings in {}.", dcName, names);
            return false;
        }
    }
    // Fewer distinct names than segments means a duplicate reference.
    if (seen.size() < segments.size()) {
        LOGGER.warn("Connector names in job data segment " +
            "cannot duplicate.");
        return false;
    }
    return true;
}
/**
 * Collects the distinct connector names of a measure. Returns an empty
 * list when any connector name is repeated across data sources.
 */
private List<String> getConnectorNames(GriffinMeasure measure) {
    List<DataSource> sources = measure.getDataSources();
    Set<String> unique = new HashSet<>();
    for (DataSource source : sources) {
        unique.add(source.getConnector().getName());
    }
    if (unique.size() == sources.size()) {
        return new ArrayList<>(unique);
    }
    LOGGER.warn("Connector names cannot be repeated.");
    return Collections.emptyList();
}
}
| 4,079 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/JobInstance.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.apache.griffin.core.exception.GriffinExceptionMessage.QUARTZ_JOB_ALREADY_EXIST;
import static org.apache.griffin.core.job.JobServiceImpl.GRIFFIN_JOB_ID;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.FINDING;
import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.BATCH;
import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.STREAMING;
import static org.apache.griffin.core.util.JsonUtil.toEntity;
import static org.apache.griffin.core.util.JsonUtil.toJson;
import static org.quartz.JobBuilder.newJob;
import static org.quartz.JobKey.jobKey;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
import static org.quartz.TriggerBuilder.newTrigger;
import static org.quartz.TriggerKey.triggerKey;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.griffin.core.exception.GriffinException;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.apache.griffin.core.job.entity.JobDataSegment;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.SegmentPredicate;
import org.apache.griffin.core.job.entity.SegmentRange;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.griffin.core.job.repo.JobRepo;
import org.apache.griffin.core.measure.entity.DataConnector;
import org.apache.griffin.core.measure.entity.DataSource;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType;
import org.apache.griffin.core.measure.repo.GriffinMeasureRepo;
import org.apache.griffin.core.util.TimeUtil;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobKey;
import org.quartz.PersistJobDataAfterExecution;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerKey;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.env.Environment;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.transaction.annotation.Transactional;
/**
 * Quartz job fired by a griffin job's cron trigger. On each firing it
 * loads the job and its measure, derives the concrete data partitions
 * and done-file predicates for the current window, and schedules a
 * short-lived "predicate" job ({@link SparkSubmitJob}) that polls for
 * done-files before submitting the spark task via livy.
 */
@PersistJobDataAfterExecution
@DisallowConcurrentExecution
public class JobInstance implements Job {
    private static final Logger LOGGER = LoggerFactory
        .getLogger(JobInstance.class);
    // Keys used in the quartz JobDataMap handed to SparkSubmitJob.
    public static final String MEASURE_KEY = "measure";
    public static final String PREDICATES_KEY = "predicts";
    public static final String PREDICATE_JOB_NAME = "predicateJobName";
    private static final String TRIGGER_KEY = "trigger";
    static final String JOB_NAME = "jobName";
    static final String PATH_CONNECTOR_CHARACTER = ",";
    public static final String INTERVAL = "interval";
    public static final String REPEAT = "repeat";
    public static final String CHECK_DONEFILE_SCHEDULE =
        "checkdonefile.schedule";
    @Autowired
    @Qualifier("schedulerFactoryBean")
    private SchedulerFactoryBean factory;
    @Autowired
    private GriffinMeasureRepo measureRepo;
    @Autowired
    private JobRepo<AbstractJob> jobRepo;
    @Autowired
    private JobInstanceRepo instanceRepo;
    @Autowired
    private Environment env;
    // Per-firing state populated by initParam(); quartz instantiates a
    // fresh Job object for every execution.
    private GriffinMeasure measure;
    private AbstractJob job;
    private List<SegmentPredicate> mPredicates;
    private Long jobStartTime;

    /**
     * Entry point: resolves job/measure, computes partitions and
     * predicates, then schedules the predicate job. Any failure is
     * logged and intentionally swallowed so quartz does not retry.
     */
    @Override
    @Transactional
    public void execute(JobExecutionContext context) {
        try {
            initParam(context);
            setSourcesPartitionsAndPredicates(measure.getDataSources());
            createJobInstance(job.getConfigMap());
        } catch (Exception e) {
            LOGGER.error("Create predicate job failure.", e);
        }
    }

    // Loads the job and measure referenced by the quartz data map and
    // stores the firing trigger key in the job's config map.
    private void initParam(JobExecutionContext context)
        throws SchedulerException {
        mPredicates = new ArrayList<>();
        JobDetail jobDetail = context.getJobDetail();
        Long jobId = jobDetail.getJobDataMap().getLong(GRIFFIN_JOB_ID);
        job = jobRepo.findOne(jobId);
        Long measureId = job.getMeasureId();
        measure = measureRepo.findOne(measureId);
        setJobStartTime(jobDetail);
        if (job.getConfigMap() == null) {
            job.setConfigMap(new HashMap<>());
        }
        job.getConfigMap().put(TRIGGER_KEY, context.getTrigger().getKey().toString());
    }

    // Uses the first trigger's previous fire time as the logical job
    // start time.
    // NOTE(review): assumes the job has at least one trigger that has
    // already fired; triggers.get(0) / triggerTime.getTime() would
    // otherwise throw — confirm upstream guarantees this.
    @SuppressWarnings("unchecked")
    private void setJobStartTime(JobDetail jobDetail)
        throws SchedulerException {
        Scheduler scheduler = factory.getScheduler();
        JobKey jobKey = jobDetail.getKey();
        List<Trigger> triggers =
            (List<Trigger>) scheduler.getTriggersOfJob(jobKey);
        Date triggerTime = triggers.get(0).getPreviousFireTime();
        jobStartTime = triggerTime.getTime();
    }

    // Applies the first baseline segment's offset to the measure
    // timestamp, then configures every connector matching each segment.
    private void setSourcesPartitionsAndPredicates(List<DataSource> sources) {
        boolean isFirstBaseline = true;
        for (JobDataSegment jds : job.getSegments()) {
            if (jds.isAsTsBaseline() && isFirstBaseline) {
                Long tsOffset = TimeUtil.str2Long(
                    jds.getSegmentRange().getBegin());
                measure.setTimestamp(jobStartTime + tsOffset);
                isFirstBaseline = false;
            }
            for (DataSource ds : sources) {
                setDataConnectorPartitions(jds, ds.getConnector());
            }
        }
    }

    // Configures one connector when its name matches the segment's
    // connector name; otherwise a no-op.
    private void setDataConnectorPartitions(
        JobDataSegment jds,
        DataConnector dc) {
        String dcName = jds.getDataConnectorName();
        if (dcName.equals(dc.getName())) {
            Long[] sampleTs = genSampleTs(jds.getSegmentRange(), dc);
            setConnectorConf(dc, sampleTs);
            setConnectorPredicates(dc, sampleTs);
        }
    }

    /**
     * split data into several part and get every part start timestamp
     *
     * @param segRange config of data
     * @param dc data connector
     * @return split timestamps of data
     */
    private Long[] genSampleTs(SegmentRange segRange, DataConnector dc) {
        Long offset = TimeUtil.str2Long(segRange.getBegin());
        Long range = TimeUtil.str2Long(segRange.getLength());
        String unit = dc.getDataUnit();
        Long dataUnit = TimeUtil.str2Long(StringUtils.isEmpty(unit) ? dc
            .getDefaultDataUnit() : unit);
        //offset usually is negative
        Long dataStartTime = jobStartTime + offset;
        // A negative range means the window extends backwards in time.
        if (range < 0) {
            dataStartTime += range;
            range = Math.abs(range);
        }
        // Single sample when the unit covers the whole range (or is 0,
        // which would otherwise divide by zero below).
        if (Math.abs(dataUnit) >= range || dataUnit == 0) {
            return new Long[]{dataStartTime};
        }
        int count = (int) (range / dataUnit);
        Long[] timestamps = new Long[count];
        for (int index = 0; index < count; index++) {
            timestamps[index] = dataStartTime + index * dataUnit;
        }
        return timestamps;
    }

    /**
     * set data connector predicates
     *
     * @param dc data connector
     * @param sampleTs collection of data split start timestamp
     */
    private void setConnectorPredicates(DataConnector dc, Long[] sampleTs) {
        List<SegmentPredicate> predicates = dc.getPredicates();
        for (SegmentPredicate predicate : predicates) {
            genConfMap(predicate.getConfigMap(),
                sampleTs,
                dc.getDataTimeZone());
            //Do not forget to update origin string config
            predicate.setConfigMap(predicate.getConfigMap());
            mPredicates.add(predicate);
        }
    }

    // Expands timestamp placeholders in the connector's own config.
    private void setConnectorConf(DataConnector dc, Long[] sampleTs) {
        genConfMap(dc.getConfigMap(), sampleTs, dc.getDataTimeZone());
        dc.setConfigMap(dc.getConfigMap());
    }

    /**
     * Expands each String config value once per sample timestamp and
     * joins the results with a comma, in place.
     *
     * @param conf config map
     * @param sampleTs collection of data split start timestamp
     * @return all config data combine,like {"where": "year=2017 AND month=11
     * AND dt=15 AND hour=09,year=2017 AND month=11 AND
     * dt=15 AND hour=10"}
     * or like {"path": "/year=2017/month=11/dt=15/hour=09/_DONE
     * ,/year=2017/month=11/dt=15/hour=10/_DONE"}
     */
    private void genConfMap(Map<String, Object> conf, Long[] sampleTs, String
        timezone) {
        if (conf == null) {
            LOGGER.warn("Predicate config is null.");
            return;
        }
        for (Map.Entry<String, Object> entry : conf.entrySet()) {
            // in case entry value is a json object instead of a string
            if (entry.getValue() instanceof String) {
                String value = (String) entry.getValue();
                // Set deduplicates identical expansions.
                Set<String> set = new HashSet<>();
                if (StringUtils.isEmpty(value)) {
                    continue;
                }
                for (Long timestamp : sampleTs) {
                    set.add(TimeUtil.format(value, timestamp,
                        TimeUtil.getTimeZone(timezone)));
                }
                conf.put(entry.getKey(), StringUtils.join(set,
                    PATH_CONNECTOR_CHARACTER));
            }
        }
    }

    // Creates and schedules the predicate job; the done-file schedule
    // (interval/repeat) comes from the normalized config map.
    @SuppressWarnings("unchecked")
    private void createJobInstance(Map<String, Object> confMap)
        throws Exception {
        confMap = checkConfMap(confMap != null ? confMap : new HashMap<>());
        Map<String, Object> config = (Map<String, Object>) confMap
            .get(CHECK_DONEFILE_SCHEDULE);
        Long interval = TimeUtil.str2Long((String) config.get(INTERVAL));
        Integer repeat = Integer.valueOf(config.get(REPEAT).toString());
        String groupName = "PG";
        // Timestamp suffix keeps predicate job names unique per firing.
        String jobName = job.getJobName() + "_predicate_"
            + System.currentTimeMillis();
        TriggerKey tk = triggerKey(jobName, groupName);
        if (factory.getScheduler().checkExists(tk)) {
            throw new GriffinException.ConflictException(QUARTZ_JOB_ALREADY_EXIST);
        }
        String triggerKey = (String) confMap.get(TRIGGER_KEY);
        saveJobInstance(jobName, groupName, triggerKey);
        createJobInstance(tk, interval, repeat, jobName);
    }

    // Ensures the config map carries a done-file schedule, falling back
    // to env properties and then to hard defaults (5m / 12).
    // NOTE(review): in the else-branch, config.get(REPEAT)/get(INTERVAL)
    // would NPE on toString() when either key is missing, making the
    // subsequent null checks dead code — confirm and guard upstream.
    @SuppressWarnings("unchecked")
    Map<String, Object> checkConfMap(Map<String, Object> confMap) {
        Map<String, Object> config = (Map<String, Object>) confMap.get
            (CHECK_DONEFILE_SCHEDULE);
        String interval = env.getProperty("predicate.job.interval");
        interval = interval != null ? interval : "5m";
        String repeat = env.getProperty("predicate.job.repeat.count");
        repeat = repeat != null ? repeat : "12";
        if (config == null) {
            Map<String, Object> map = new HashMap<>();
            map.put(INTERVAL, interval);
            map.put(REPEAT, repeat);
            confMap.put(CHECK_DONEFILE_SCHEDULE, map);
        } else { // replace if interval or repeat is not null
            String confRepeat = config.get(REPEAT).toString();
            String confInterval = config.get(INTERVAL).toString();
            interval = confInterval != null ? confInterval : interval;
            repeat = confRepeat != null ? confRepeat : repeat;
            config.put(INTERVAL, interval);
            config.put(REPEAT, repeat);
        }
        return confMap;
    }

    // Persists a FINDING instance record for the new predicate job;
    // expiry defaults to 7 days (604800000 ms) unless configured.
    private void saveJobInstance(String pName, String pGroup, String triggerKey) {
        ProcessType type = measure.getProcessType() == BATCH ? BATCH :
            STREAMING;
        Long tms = System.currentTimeMillis();
        String expired = env.getProperty("jobInstance.expired.milliseconds");
        Long expireTms = Long.valueOf(expired != null ? expired : "604800000")
            + tms;
        JobInstanceBean instance = new JobInstanceBean(FINDING, pName, pGroup,
            tms, expireTms, type);
        instance.setJob(job);
        instance.setTriggerKey(triggerKey);
        instanceRepo.save(instance);
    }

    // Builds the quartz detail + trigger pair and schedules it.
    private void createJobInstance(TriggerKey tk, Long interval, Integer
        repeatCount, String pJobName) throws Exception {
        JobDetail jobDetail = addJobDetail(tk, pJobName);
        Trigger trigger = genTriggerInstance(tk, jobDetail, interval,
            repeatCount);
        factory.getScheduler().scheduleJob(trigger);
    }

    // Simple repeating trigger: fires immediately, then every
    // `interval` ms up to `repeatCount` more times.
    private Trigger genTriggerInstance(TriggerKey tk, JobDetail jd, Long
        interval, Integer repeatCount) {
        return newTrigger().withIdentity(tk).forJob(jd).startNow()
            .withSchedule(simpleSchedule().withIntervalInMilliseconds
                (interval).withRepeatCount(repeatCount))
            .build();
    }

    // Creates (or reuses) the durable SparkSubmitJob detail and fills
    // its data map; addJob(..., true) replaces an existing detail.
    private JobDetail addJobDetail(TriggerKey tk, String pJobName)
        throws SchedulerException, IOException {
        Scheduler scheduler = factory.getScheduler();
        JobKey jobKey = jobKey(tk.getName(), tk.getGroup());
        JobDetail jobDetail;
        Boolean isJobKeyExist = scheduler.checkExists(jobKey);
        if (isJobKeyExist) {
            jobDetail = scheduler.getJobDetail(jobKey);
        } else {
            jobDetail = newJob(SparkSubmitJob.class)
                .storeDurably()
                .withIdentity(jobKey)
                .build();
        }
        setJobDataMap(jobDetail, pJobName);
        scheduler.addJob(jobDetail, isJobKeyExist);
        return jobDetail;
    }

    // Serializes measure and predicates into the data map consumed by
    // SparkSubmitJob (see MEASURE_KEY / PREDICATES_KEY).
    private void setJobDataMap(JobDetail jobDetail, String pJobName)
        throws IOException {
        JobDataMap dataMap = jobDetail.getJobDataMap();
        preProcessMeasure();
        String result = toJson(measure);
        dataMap.put(MEASURE_KEY, result);
        dataMap.put(PREDICATES_KEY, toJson(mPredicates));
        dataMap.put(JOB_NAME, job.getJobName());
        dataMap.put(PREDICATE_JOB_NAME, pJobName);
    }

    // Substitutes ${JOB_NAME}/${SOURCE_NAME}/${TARGET_NAME} placeholders
    // inside each streaming source's checkpoint config.
    private void preProcessMeasure() throws IOException {
        for (DataSource source : measure.getDataSources()) {
            Map cacheMap = source.getCheckpointMap();
            //to skip batch job
            if (cacheMap == null) {
                return;
            }
            String cache = toJson(cacheMap);
            cache = cache.replaceAll("\\$\\{JOB_NAME}", job.getJobName());
            cache = cache.replaceAll("\\$\\{SOURCE_NAME}", source.getName());
            cache = cache.replaceAll("\\$\\{TARGET_NAME}", source.getName());
            cacheMap = toEntity(cache, Map.class);
            source.setCheckpointMap(cacheMap);
        }
    }
}
| 4,080 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/FileExistPredicator.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.apache.griffin.core.job.JobInstance.PATH_CONNECTOR_CHARACTER;
import java.io.IOException;
import java.util.Map;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.griffin.core.job.entity.SegmentPredicate;
import org.apache.griffin.core.util.FSUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Predicator that reports readiness only when every configured
 * done-file exists on the file system.
 *
 * <p>Expected predicate config keys: {@code root.path} (common path
 * prefix) and {@code path} (comma-separated relative paths).
 */
public class FileExistPredicator implements Predicator {
    private static final Logger LOGGER = LoggerFactory
        .getLogger(FileExistPredicator.class);

    private static final String PREDICT_PATH = "path";
    private static final String PREDICT_ROOT_PATH = "root.path";

    private SegmentPredicate predicate;

    public FileExistPredicator(SegmentPredicate predicate) {
        this.predicate = predicate;
    }

    /**
     * @return {@code true} only when every configured path exists
     * @throws IOException          if the file-system check fails
     * @throws NullPointerException if root.path or path is missing/blank
     */
    @Override
    public boolean predicate() throws IOException {
        Map<String, Object> config = predicate.getConfigMap();
        String[] paths = null;
        String rootPath = null;
        if (config != null && !StringUtils.isEmpty((String) config.get(PREDICT_PATH))) {
            paths = ((String) config.get(PREDICT_PATH))
                .split(PATH_CONNECTOR_CHARACTER);
            rootPath = (String) config.get(PREDICT_ROOT_PATH);
        }
        if (ArrayUtils.isEmpty(paths) || StringUtils.isEmpty(rootPath)) {
            LOGGER.error("Predicate path is null.Please check predicates " +
                "config root.path and path.");
            // Carry the reason in the exception instead of a bare NPE.
            throw new NullPointerException("Predicate config must contain "
                + "non-empty 'root.path' and 'path' entries.");
        }
        for (String path : paths) {
            String hdfsPath = rootPath + path;
            LOGGER.info("Predicate path: {}", hdfsPath);
            if (!FSUtil.isFileExist(hdfsPath)) {
                // Parameterized logging; emitted text is unchanged.
                LOGGER.info("Predicate path: {} doesn't exist.", hdfsPath);
                return false;
            }
            LOGGER.info("Predicate path: {} exists.", hdfsPath);
        }
        return true;
    }
}
| 4,081 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/SparkSubmitJob.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job;
import static org.apache.griffin.core.config.EnvConfig.ENV_BATCH;
import static org.apache.griffin.core.config.EnvConfig.ENV_STREAMING;
import static org.apache.griffin.core.config.PropertiesConfig.livyConfMap;
import static org.apache.griffin.core.job.JobInstance.JOB_NAME;
import static org.apache.griffin.core.job.JobInstance.MEASURE_KEY;
import static org.apache.griffin.core.job.JobInstance.PREDICATES_KEY;
import static org.apache.griffin.core.job.JobInstance.PREDICATE_JOB_NAME;
import static org.apache.griffin.core.job.entity.LivySessionStates.State;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.FOUND;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_FOUND;
import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.BATCH;
import static org.apache.griffin.core.util.JsonUtil.toEntity;
import com.fasterxml.jackson.core.type.TypeReference;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.griffin.core.job.entity.JobInstanceBean;
import org.apache.griffin.core.job.entity.SegmentPredicate;
import org.apache.griffin.core.job.factory.PredicatorFactory;
import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.griffin.core.measure.entity.GriffinMeasure;
import org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType;
import org.apache.griffin.core.util.JsonUtil;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.PersistJobDataAfterExecution;
import org.quartz.SchedulerException;
import org.quartz.SimpleTrigger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
/**
 * Simple implementation of the Quartz Job interface, submitting the
 * griffin job to spark cluster via livy
 *
 * @see LivyTaskSubmitHelper#postToLivy(String)
 * @see Job#execute(JobExecutionContext)
 */
@PersistJobDataAfterExecution
@DisallowConcurrentExecution
@Component
public class SparkSubmitJob implements Job {
    private static final Logger LOGGER =
        LoggerFactory.getLogger(SparkSubmitJob.class);
    @Autowired
    private JobInstanceRepo jobInstanceRepo;
    @Autowired
    private BatchJobOperatorImpl batchJobOp;
    @Autowired
    private Environment env;
    @Autowired
    private LivyTaskSubmitHelper livyTaskSubmitHelper;
    // When true, submissions go through the livy waiting queue to
    // respect the livy batch limit.
    @Value("${livy.need.queue:false}")
    private boolean isNeedLivyQueue;
    @Value("${livy.task.appId.retry.count:3}")
    private int appIdRetryCount;
    // Per-firing state populated by initParam().
    private GriffinMeasure measure;
    private String livyUri;
    private List<SegmentPredicate> mPredicates;
    private JobInstanceBean jobInstance;

    /**
     * Entry point: either queues the submission (livy batch limit) or
     * submits directly. Failures are logged and swallowed so quartz
     * does not retry.
     */
    @Override
    public void execute(JobExecutionContext context) {
        JobDetail jd = context.getJobDetail();
        try {
            if (isNeedLivyQueue) {
                //livy batch limit
                livyTaskSubmitHelper.addTaskToWaitingQueue(jd);
            } else {
                saveJobInstance(jd);
            }
        } catch (Exception e) {
            LOGGER.error("Post spark task ERROR.", e);
        }
    }

    // Marks the instance NOT_FOUND once the predicate trigger has used
    // up all of its repeats.
    // NOTE(review): the only caller passes a JobDetail cast to
    // JobExecutionContext (see saveJobInstance below); that cast can
    // only fail with ClassCastException at runtime — confirm and fix.
    private void updateJobInstanceState(JobExecutionContext context)
        throws IOException {
        SimpleTrigger simpleTrigger = (SimpleTrigger) context.getTrigger();
        int repeatCount = simpleTrigger.getRepeatCount();
        int fireCount = simpleTrigger.getTimesTriggered();
        if (fireCount > repeatCount) {
            saveJobInstance(null, NOT_FOUND);
        }
    }

    // Thin wrapper over the livy submission helper.
    private String post2Livy() {
        return livyTaskSubmitHelper.postToLivy(livyUri);
    }

    // True when every predicate passes; any predicate exception is
    // treated as "not ready yet" rather than an error.
    private boolean success(List<SegmentPredicate> predicates) {
        if (CollectionUtils.isEmpty(predicates)) {
            return true;
        }
        for (SegmentPredicate segPredicate : predicates) {
            Predicator predicator = PredicatorFactory
                .newPredicateInstance(segPredicate);
            try {
                if (predicator != null && !predicator.predicate()) {
                    return false;
                }
            } catch (Exception e) {
                return false;
            }
        }
        return true;
    }

    // Restores measure/predicates/instance from the quartz data map.
    private void initParam(JobDetail jd) throws IOException {
        mPredicates = new ArrayList<>();
        jobInstance = jobInstanceRepo.findByPredicateName(jd.getJobDataMap()
            .getString(PREDICATE_JOB_NAME));
        measure = toEntity(jd.getJobDataMap().getString(MEASURE_KEY),
            GriffinMeasure.class);
        livyUri = env.getProperty("livy.uri");
        setPredicates(jd.getJobDataMap().getString(PREDICATES_KEY));
        // in order to keep metric name unique, we set job name
        // as measure name at present
        measure.setName(jd.getJobDataMap().getString(JOB_NAME));
    }

    // Deserializes the predicate list from JSON; empty input is a no-op.
    @SuppressWarnings({"unchecked", "rawtypes"})
    private void setPredicates(String json) throws IOException {
        if (StringUtils.isEmpty(json)) {
            return;
        }
        List<SegmentPredicate> predicates = toEntity(json,
            new TypeReference<List<SegmentPredicate>>() {
            });
        if (predicates != null) {
            mPredicates.addAll(predicates);
        }
    }

    // Prefixes every occurrence of `regex` with a backslash.
    private String escapeCharacter(String str, String regex) {
        if (StringUtils.isEmpty(str)) {
            return str;
        }
        String escapeCh = "\\" + regex;
        return str.replaceAll(regex, escapeCh);
    }

    // Picks the batch or streaming env template and substitutes the
    // job name placeholder.
    private String genEnv() {
        ProcessType type = measure.getProcessType();
        String env = type == BATCH ? ENV_BATCH : ENV_STREAMING;
        return env.replaceAll("\\$\\{JOB_NAME}", measure.getName());
    }

    private void setLivyConf() throws IOException {
        setLivyArgs();
    }

    // Builds the livy "args" list: env json, measure json, output mode.
    private void setLivyArgs() throws IOException {
        List<String> args = new ArrayList<>();
        args.add(genEnv());
        String measureJson = JsonUtil.toJsonWithFormat(measure);
        // to fix livy bug: character will be ignored by livy
        String finalMeasureJson = escapeCharacter(measureJson, "\\`");
        LOGGER.info(finalMeasureJson);
        args.add(finalMeasureJson);
        args.add("raw,raw");
        livyConfMap.put("args", args);
    }

    // Submits to livy (once predicates pass), deletes the one-shot
    // predicate job, and records the resulting instance state.
    protected void saveJobInstance(JobDetail jd) throws SchedulerException,
        IOException {
        // If result is null, it may livy uri is wrong
        // or livy parameter is wrong.
        initParam(jd);
        setLivyConf();
        if (!success(mPredicates)) {
            // NOTE(review): jd is a JobDetail, not a JobExecutionContext;
            // this cast throws ClassCastException at runtime — confirm
            // intent and rework updateJobInstanceState's input.
            updateJobInstanceState((JobExecutionContext) jd);
            return;
        }
        Map<String, Object> resultMap = post2LivyWithRetry();
        String group = jd.getKey().getGroup();
        String name = jd.getKey().getName();
        batchJobOp.deleteJob(group, name);
        LOGGER.info("Delete predicate job({},{}) SUCCESS.", group, name);
        setJobInstance(resultMap, FOUND);
        jobInstanceRepo.save(jobInstance);
    }

    // Posts to livy, then retries resolution of the yarn appId and
    // bumps the running-task counter on success.
    private Map<String, Object> post2LivyWithRetry()
        throws IOException {
        String result = post2Livy();
        Map<String, Object> resultMap = null;
        if (result != null) {
            resultMap = livyTaskSubmitHelper.retryLivyGetAppId(result, appIdRetryCount);
            if (resultMap != null) {
                livyTaskSubmitHelper.increaseCurTaskNum(Long.valueOf(
                    String.valueOf(resultMap.get("id"))).longValue());
            }
        }
        return resultMap;
    }

    // Persists the instance with the given livy response (may be null)
    // and fallback state.
    protected void saveJobInstance(String result, State state)
        throws IOException {
        TypeReference<HashMap<String, Object>> type =
            new TypeReference<HashMap<String, Object>>() {
            };
        Map<String, Object> resultMap = null;
        if (result != null) {
            resultMap = toEntity(result, type);
        }
        setJobInstance(resultMap, state);
        jobInstanceRepo.save(jobInstance);
    }

    // Copies state/session/appId out of the livy response map into the
    // instance; the livy "state" field overrides the fallback state.
    private void setJobInstance(Map<String, Object> resultMap, State state) {
        jobInstance.setState(state);
        jobInstance.setPredicateDeleted(true);
        if (resultMap != null) {
            Object status = resultMap.get("state");
            Object id = resultMap.get("id");
            Object appId = resultMap.get("appId");
            jobInstance.setState(status == null ? null : State.valueOf(status
                .toString().toUpperCase()));
            jobInstance.setSessionId(id == null ? null : Long.parseLong(id
                .toString()));
            jobInstance.setAppId(appId == null ? null : appId.toString());
        }
    }
}
| 4,082 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/JobHealth.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
/**
 * Mutable counter pair describing overall job health: how many jobs
 * are scheduled and how many of them are currently healthy.
 */
public class JobHealth {
    private int healthyJobCount;
    private int jobCount;

    /** Creates a holder with both counters at zero. */
    public JobHealth() {
    }

    /**
     * @param healthyJobCount number of healthy jobs
     * @param jobCount        total number of jobs
     */
    public JobHealth(int healthyJobCount, int jobCount) {
        this.healthyJobCount = healthyJobCount;
        this.jobCount = jobCount;
    }

    public int getHealthyJobCount() {
        return healthyJobCount;
    }

    public void setHealthyJobCount(int healthyJobCount) {
        this.healthyJobCount = healthyJobCount;
    }

    public int getJobCount() {
        return jobCount;
    }

    public void setJobCount(int jobCount) {
        this.jobCount = jobCount;
    }
}
| 4,083 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/BatchJob.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import java.util.List;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
/**
 * A batch-mode Griffin job, stored in the shared {@code job} table with
 * discriminator value {@code griffinBatchJob}.
 */
@Entity
@DiscriminatorValue("griffinBatchJob")
public class BatchJob extends AbstractJob {

    private static final long serialVersionUID = -1114269860236729008L;

    public BatchJob() {
        super();
    }

    /**
     * Creates a batch job bound to Quartz identifiers; the metric name
     * defaults to the job name.
     */
    public BatchJob(Long measureId, String jobName, String name, String group,
                    boolean deleted) {
        super(measureId, jobName, name, group, deleted);
        this.metricName = jobName;
    }

    /** Same as the Quartz-identifier constructor, with a known persistent id. */
    public BatchJob(Long jobId, Long measureId, String jobName, String qJobName,
                    String qGroupName, boolean deleted) {
        this(measureId, jobName, qJobName, qGroupName, deleted);
        setId(jobId);
    }

    /** Creates a cron-scheduled batch job over the given data segments. */
    public BatchJob(Long measureId, String jobName, String cronExpression,
                    String timeZone, List<JobDataSegment> segments,
                    boolean deleted) {
        super(measureId, jobName, cronExpression, timeZone, segments, deleted);
    }

    /** @return the type name {@code "batch"}. */
    @Override
    public String getType() {
        return JobType.BATCH.getName();
    }
}
| 4,084 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/JobInstanceBean.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import static org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType.BATCH;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import org.apache.griffin.core.job.entity.LivySessionStates.State;
import org.apache.griffin.core.measure.entity.AbstractAuditableEntity;
import org.apache.griffin.core.measure.entity.GriffinMeasure.ProcessType;
/**
 * Persistent record of a single run (instance) of a job: its Livy session,
 * Livy/YARN state, application id/URI, firing and expiry timestamps, and the
 * optional predicate job that gated it. Indexed by {@code triggerKey} for
 * lookup by Quartz trigger.
 */
@Entity
@Table(indexes = {@Index(columnList = "triggerKey")})
public class JobInstanceBean extends AbstractAuditableEntity {

    private static final long serialVersionUID = -4748881017029815874L;

    // Livy batch/session id assigned when the instance is submitted.
    private Long sessionId;

    // Livy-derived state; stored as its enum name, not its ordinal.
    @Enumerated(EnumType.STRING)
    private State state;

    // Process type of the owning measure; defaults to batch.
    @Enumerated(EnumType.STRING)
    private ProcessType type = BATCH;

    /**
     * The application id of this session.
     **/
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private String appId;

    // Tracking URL of the application; column widened to 2 KB for long URIs.
    @Column(length = 2 * 1024)
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private String appUri;

    // Trigger/firing timestamp; exposed as "timestamp" in JSON (see getter).
    @Column(name = "timestamp")
    private Long tms;

    // Expiry timestamp; exposed as "expireTimestamp" in JSON (see getter).
    @Column(name = "expire_timestamp")
    private Long expireTms;

    // Quartz group/name of the predicate job that guarded this instance,
    // if any; hidden from JSON when null.
    @Column(name = "predicate_group_name")
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private String predicateGroup;

    @Column(name = "predicate_job_name")
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private String predicateName;

    // Soft-delete flag of the predicate job; internal only, never serialized.
    @Column(name = "predicate_job_deleted")
    @JsonIgnore
    private boolean predicateDeleted = false;

    // Soft-delete flag of this instance; internal only, never serialized.
    @JsonIgnore
    private boolean deleted = false;

    // Owning job; required, not serialized to avoid cycles in JSON.
    @ManyToOne
    @JoinColumn(name = "job_id", nullable = false)
    @JsonIgnore
    private AbstractJob job;

    // Quartz trigger key that fired this instance (indexed, see @Table).
    private String triggerKey;

    public AbstractJob getJob() {
        return job;
    }

    public void setJob(AbstractJob job) {
        this.job = job;
    }

    public Long getSessionId() {
        return sessionId;
    }

    public void setSessionId(Long sessionId) {
        this.sessionId = sessionId;
    }

    public State getState() {
        return state;
    }

    public void setState(State state) {
        this.state = state;
    }

    public ProcessType getType() {
        return type;
    }

    public void setType(ProcessType type) {
        this.type = type;
    }

    public String getAppId() {
        return appId;
    }

    public void setAppId(String appId) {
        this.appId = appId;
    }

    public String getAppUri() {
        return appUri;
    }

    public void setAppUri(String appUri) {
        this.appUri = appUri;
    }

    /** JSON name differs from the field: serialized as {@code timestamp}. */
    @JsonProperty("timestamp")
    public Long getTms() {
        return tms;
    }

    public void setTms(Long tms) {
        this.tms = tms;
    }

    /** JSON name differs from the field: serialized as {@code expireTimestamp}. */
    @JsonProperty("expireTimestamp")
    public Long getExpireTms() {
        return expireTms;
    }

    public void setExpireTms(Long expireTms) {
        this.expireTms = expireTms;
    }

    public String getPredicateGroup() {
        return predicateGroup;
    }

    public void setPredicateGroup(String predicateGroup) {
        this.predicateGroup = predicateGroup;
    }

    public String getPredicateName() {
        return predicateName;
    }

    public void setPredicateName(String predicateName) {
        this.predicateName = predicateName;
    }

    public boolean isPredicateDeleted() {
        return predicateDeleted;
    }

    public void setPredicateDeleted(boolean predicateDeleted) {
        this.predicateDeleted = predicateDeleted;
    }

    public boolean isDeleted() {
        return deleted;
    }

    public void setDeleted(boolean deleted) {
        this.deleted = deleted;
    }

    public String getTriggerKey() {
        return triggerKey;
    }

    public void setTriggerKey(String triggerKey) {
        this.triggerKey = triggerKey;
    }

    /** Required by JPA. */
    public JobInstanceBean() {
    }

    public JobInstanceBean(State state, Long tms, Long expireTms, String appId) {
        this.state = state;
        this.tms = tms;
        this.expireTms = expireTms;
        this.appId = appId;
    }

    public JobInstanceBean(State state, Long tms, Long expireTms) {
        this.state = state;
        this.tms = tms;
        this.expireTms = expireTms;
    }

    public JobInstanceBean(State state, String pName, String pGroup, Long tms,
                           Long expireTms) {
        this.state = state;
        this.predicateName = pName;
        this.predicateGroup = pGroup;
        this.tms = tms;
        this.expireTms = expireTms;
    }

    public JobInstanceBean(State state, String pName, String pGroup, Long tms,
                           Long expireTms, AbstractJob job) {
        this(state, pName, pGroup, tms, expireTms);
        this.job = job;
    }

    public JobInstanceBean(State state, String pName, String pGroup, Long tms,
                           Long expireTms, ProcessType type) {
        this(state, pName, pGroup, tms, expireTms);
        this.type = type;
    }

    public JobInstanceBean(Long sessionId, State state, String appId,
                           String appUri, Long timestamp, Long expireTms) {
        this.sessionId = sessionId;
        this.state = state;
        this.appId = appId;
        this.appUri = appUri;
        this.tms = timestamp;
        this.expireTms = expireTms;
    }
}
| 4,085 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/VirtualJob.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
/**
 * A virtual (placeholder) Griffin job, stored in the shared {@code job}
 * table with discriminator value {@code virtualJob}.
 */
@Entity
@DiscriminatorValue("virtualJob")
public class VirtualJob extends AbstractJob {

    private static final long serialVersionUID = 1130038058433818835L;

    public VirtualJob() {
        super();
    }

    public VirtualJob(String jobName, Long measureId, String metricName) {
        super(jobName, measureId, metricName);
    }

    /** @return the type name {@code "virtual"}. */
    @Override
    public String getType() {
        return JobType.VIRTUAL.getName();
    }
}
| 4,086 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/AbstractJob.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.DiscriminatorColumn;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
import javax.persistence.OneToMany;
import javax.persistence.PostLoad;
import javax.persistence.PrePersist;
import javax.persistence.PreUpdate;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.constraints.NotNull;
import org.apache.commons.lang.StringUtils;
import org.apache.griffin.core.measure.entity.AbstractAuditableEntity;
import org.apache.griffin.core.util.JsonUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base persistent entity for all Griffin jobs (batch, streaming, virtual),
 * mapped single-table with a {@code type} discriminator column. Jackson
 * polymorphism mirrors the JPA hierarchy via the {@code job.type} property.
 * The free-form predicate configuration map is stored as a JSON string and
 * (de)serialized in the JPA lifecycle callbacks {@link #save()} and
 * {@link #load()}.
 */
@Entity
@Table(name = "job")
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY,
        property = "job.type")
@JsonSubTypes({@JsonSubTypes.Type(value = BatchJob.class, name = "batch"),
        @JsonSubTypes.Type(
                value = StreamingJob.class,
                name = "streaming"),
        @JsonSubTypes.Type(
                value = VirtualJob.class,
                name = "virtual")})
@DiscriminatorColumn(name = "type")
public abstract class AbstractJob extends AbstractAuditableEntity {

    private static final long serialVersionUID = 7569493377868453677L;

    private static final Logger LOGGER = LoggerFactory
            .getLogger(AbstractJob.class);

    // Id of the measure this job evaluates (JSON: "measure.id").
    protected Long measureId;

    // User-visible job name; must be non-empty (enforced in setJobName).
    protected String jobName;

    // Metric name; subclasses typically default it to the job name.
    protected String metricName;

    // Quartz job name; omitted from JSON when null.
    @Column(name = "quartz_job_name")
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private String name;

    // Quartz group name; omitted from JSON when null.
    @Column(name = "quartz_group_name")
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private String group;

    // Soft-delete flag; never serialized.
    @JsonIgnore
    protected boolean deleted = false;

    @JsonInclude(JsonInclude.Include.NON_NULL)
    private String cronExpression;

    // Runtime-only scheduler state; not persisted (transient), hidden when null.
    @Transient
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private JobState jobState;

    @NotNull
    private String timeZone;

    // JSON-serialized form of configMap; persisted, hidden from API responses.
    @JsonIgnore
    private String predicateConfig;

    // In-memory form of predicateConfig; not persisted directly.
    @Transient
    private Map<String, Object> configMap;

    // Owned data segments; eagerly loaded and cascaded with the job.
    @NotNull
    @OneToMany(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST,
            CascadeType.REMOVE, CascadeType.MERGE})
    @JoinColumn(name = "job_id")
    private List<JobDataSegment> segments = new ArrayList<>();

    @JsonProperty("measure.id")
    public Long getMeasureId() {
        return measureId;
    }

    public void setMeasureId(Long measureId) {
        this.measureId = measureId;
    }

    @JsonProperty("job.name")
    public String getJobName() {
        return jobName;
    }

    /**
     * Sets the job name.
     *
     * @throws NullPointerException if {@code jobName} is null or empty
     */
    public void setJobName(String jobName) {
        if (StringUtils.isEmpty(jobName)) {
            LOGGER.warn("Job name cannot be empty.");
            throw new NullPointerException();
        }
        this.jobName = jobName;
    }

    @JsonProperty("cron.expression")
    public String getCronExpression() {
        return cronExpression;
    }

    public void setCronExpression(String cronExpression) {
        this.cronExpression = cronExpression;
    }

    @JsonProperty("job.state")
    public JobState getJobState() {
        return jobState;
    }

    public void setJobState(JobState jobState) {
        this.jobState = jobState;
    }

    @JsonProperty("cron.time.zone")
    public String getTimeZone() {
        return timeZone;
    }

    public void setTimeZone(String timeZone) {
        this.timeZone = timeZone;
    }

    @JsonProperty("data.segments")
    public List<JobDataSegment> getSegments() {
        return segments;
    }

    public void setSegments(List<JobDataSegment> segments) {
        this.segments = segments;
    }

    @JsonProperty("predicate.config")
    public Map<String, Object> getConfigMap() {
        return configMap;
    }

    public void setConfigMap(Map<String, Object> configMap) {
        this.configMap = configMap;
    }

    private String getPredicateConfig() {
        return predicateConfig;
    }

    private void setPredicateConfig(String config) {
        this.predicateConfig = config;
    }

    @JsonProperty("metric.name")
    public String getMetricName() {
        return metricName;
    }

    public void setMetricName(String metricName) {
        this.metricName = metricName;
    }

    public boolean isDeleted() {
        return deleted;
    }

    public void setDeleted(boolean deleted) {
        this.deleted = deleted;
    }

    @JsonProperty("quartz.name")
    public String getName() {
        return name;
    }

    public void setName(String quartzName) {
        this.name = quartzName;
    }

    @JsonProperty("quartz.group")
    public String getGroup() {
        return group;
    }

    public void setGroup(String quartzGroup) {
        this.group = quartzGroup;
    }

    /** @return the discriminating type name ("batch", "streaming", "virtual"). */
    @JsonProperty("job.type")
    public abstract String getType();

    /**
     * JPA lifecycle hook: serializes {@link #configMap} to JSON into
     * {@link #predicateConfig} before insert/update, so the map survives
     * persistence.
     *
     * @throws JsonProcessingException if the map cannot be serialized
     */
    @PrePersist
    @PreUpdate
    public void save() throws JsonProcessingException {
        if (configMap != null) {
            this.predicateConfig = JsonUtil.toJson(configMap);
        }
    }

    /**
     * JPA lifecycle hook: deserializes {@link #predicateConfig} back into
     * {@link #configMap} after the entity is loaded.
     *
     * @throws IOException if the stored JSON cannot be parsed
     */
    @PostLoad
    public void load() throws IOException {
        if (!StringUtils.isEmpty(predicateConfig)) {
            this.configMap = JsonUtil.toEntity(predicateConfig,
                    new TypeReference<Map<String, Object>>() {
                    });
        }
    }

    AbstractJob() {
    }

    AbstractJob(Long measureId, String jobName, String name, String group,
                boolean deleted) {
        this.measureId = measureId;
        this.jobName = jobName;
        this.name = name;
        this.group = group;
        this.deleted = deleted;
    }

    AbstractJob(Long measureId, String jobName, String cronExpression,
                String timeZone, List<JobDataSegment> segments,
                boolean deleted) {
        this.measureId = measureId;
        this.jobName = jobName;
        this.metricName = jobName;
        this.cronExpression = cronExpression;
        this.timeZone = timeZone;
        this.segments = segments;
        this.deleted = deleted;
    }

    AbstractJob(String jobName, Long measureId, String metricName) {
        this.jobName = jobName;
        this.measureId = measureId;
        this.metricName = metricName;
    }
}
| 4,087 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/JobType.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
/**
 * The kind of a Griffin job, carrying the external name used in JSON
 * payloads and type discriminators.
 */
public enum JobType {

    BATCH("batch"),
    STREAMING("streaming"),
    VIRTUAL("virtual");

    /** External name of this job type. */
    private final String name;

    JobType(String name) {
        this.name = name;
    }

    /** @return the external name of this job type. */
    public String getName() {
        return name;
    }
}
| 4,088 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/SegmentRange.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import javax.persistence.Column;
import javax.persistence.Entity;
import org.apache.griffin.core.measure.entity.AbstractAuditableEntity;
/**
 * A relative time window for a data segment, described by a begin offset
 * and a length, both as duration strings (defaults: begin "-1h", length "1h").
 */
@Entity
public class SegmentRange extends AbstractAuditableEntity {

    private static final long serialVersionUID = -8929713841303669564L;

    // Column renamed to "data_begin" — presumably to avoid the reserved
    // SQL word BEGIN; confirm against the schema.
    @Column(name = "data_begin")
    private String begin = "-1h";

    private String length = "1h";

    /** Package-private: used by JPA and for default-valued ranges. */
    SegmentRange() {
    }

    public SegmentRange(String begin, String length) {
        this.begin = begin;
        this.length = length;
    }

    public String getBegin() {
        return begin;
    }

    public void setBegin(String begin) {
        this.begin = begin;
    }

    public String getLength() {
        return length;
    }

    public void setLength(String length) {
        this.length = length;
    }
}
| 4,089 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/JobDataSegment.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import com.fasterxml.jackson.annotation.JsonProperty;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
import javax.validation.constraints.NotNull;
import org.apache.commons.lang.StringUtils;
import org.apache.griffin.core.measure.entity.AbstractAuditableEntity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * One data-source slice of a job: names the measure's data connector to
 * read and the relative time range ({@link SegmentRange}) to evaluate.
 * A segment may be flagged as the timestamp baseline for the job.
 */
@Entity
public class JobDataSegment extends AbstractAuditableEntity {

    private static final long serialVersionUID = -9056531122243340484L;

    private static final Logger LOGGER = LoggerFactory
            .getLogger(JobDataSegment.class);

    // Connector name; non-null/non-empty enforced in setDataConnectorName.
    @NotNull
    private String dataConnectorName;

    // When true, this segment acts as the timestamp baseline (JSON: "as.baseline").
    private boolean asTsBaseline = false;

    // Owned range, defaulting to -1h/1h; persisted/removed/merged with the segment.
    @OneToOne(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST,
            CascadeType.REMOVE, CascadeType.MERGE})
    @JoinColumn(name = "segment_range_id")
    private SegmentRange segmentRange = new SegmentRange();

    @JsonProperty("as.baseline")
    public boolean isAsTsBaseline() {
        return asTsBaseline;
    }

    public void setAsTsBaseline(boolean asTsBaseline) {
        this.asTsBaseline = asTsBaseline;
    }

    @JsonProperty("segment.range")
    public SegmentRange getSegmentRange() {
        return segmentRange;
    }

    public void setSegmentRange(SegmentRange segmentRange) {
        this.segmentRange = segmentRange;
    }

    @JsonProperty("data.connector.name")
    public String getDataConnectorName() {
        return dataConnectorName;
    }

    /**
     * Sets the connector name.
     *
     * @throws NullPointerException if {@code dataConnectorName} is null or empty
     */
    public void setDataConnectorName(String dataConnectorName) {
        if (StringUtils.isEmpty(dataConnectorName)) {
            LOGGER.warn(" Data connector name is invalid. " +
                    "Please check your connector name.");
            throw new NullPointerException();
        }
        this.dataConnectorName = dataConnectorName;
    }

    public JobDataSegment() {
    }

    public JobDataSegment(String dataConnectorName, boolean baseline) {
        this.dataConnectorName = dataConnectorName;
        this.asTsBaseline = baseline;
    }

    public JobDataSegment(String dataConnectorName, boolean baseline,
                          SegmentRange segmentRange) {
        this.dataConnectorName = dataConnectorName;
        this.asTsBaseline = baseline;
        this.segmentRange = segmentRange;
    }
}
| 4,090 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/StreamingJob.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
/**
 * A streaming-mode Griffin job, stored in the shared {@code job} table
 * with discriminator value {@code griffinStreamingJob}.
 */
@Entity
@DiscriminatorValue("griffinStreamingJob")
public class StreamingJob extends AbstractJob {

    private static final long serialVersionUID = 3292253488392308505L;

    public StreamingJob() {
    }

    /** Creates a streaming job; the metric name defaults to the job name. */
    public StreamingJob(Long measureId, String jobName, String name,
                        String group, boolean deleted) {
        super(measureId, jobName, name, group, deleted);
        this.metricName = jobName;
    }

    /** @return the type name {@code "streaming"}. */
    @Override
    public String getType() {
        return JobType.STREAMING.getName();
    }
}
| 4,091 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/LivyConf.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
/**
 * Mutable payload describing a Livy batch submission: the application jar,
 * entry class, arguments, resource sizing, and extra Spark configuration.
 * Field names follow Livy's REST batch API.
 */
public class LivyConf implements Serializable {

    /** Path of the application file (jar) to execute. */
    private String file;
    /** Fully-qualified main class. */
    private String className;
    /** Command-line arguments passed to the application. */
    private List<String> args;
    /** Session/application name. */
    private String name;
    /** YARN queue to submit into. */
    private String queue;
    /** Number of executors to launch. */
    private Long numExecutors;
    /** Cores per executor. */
    private Long executorCores;
    /** Driver memory, e.g. "1g". */
    private String driverMemory;
    /** Memory per executor, e.g. "1g". */
    private String executorMemory;
    /** Extra Spark configuration properties. */
    private Map<String, String> conf;
    /** Additional jars placed on the classpath. */
    private List<String> jars;
    /** Additional files shipped with the job. */
    private List<String> files;

    public String getFile() {
        return file;
    }

    public void setFile(String file) {
        this.file = file;
    }

    public String getClassName() {
        return className;
    }

    public void setClassName(String className) {
        this.className = className;
    }

    public List<String> getArgs() {
        return args;
    }

    public void setArgs(List<String> args) {
        this.args = args;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getQueue() {
        return queue;
    }

    public void setQueue(String queue) {
        this.queue = queue;
    }

    public Long getNumExecutors() {
        return numExecutors;
    }

    public void setNumExecutors(Long numExecutors) {
        this.numExecutors = numExecutors;
    }

    public Long getExecutorCores() {
        return executorCores;
    }

    public void setExecutorCores(Long executorCores) {
        this.executorCores = executorCores;
    }

    public String getDriverMemory() {
        return driverMemory;
    }

    public void setDriverMemory(String driverMemory) {
        this.driverMemory = driverMemory;
    }

    public String getExecutorMemory() {
        return executorMemory;
    }

    public void setExecutorMemory(String executorMemory) {
        this.executorMemory = executorMemory;
    }

    public Map<String, String> getConf() {
        return conf;
    }

    public void setConf(Map<String, String> conf) {
        this.conf = conf;
    }

    public List<String> getJars() {
        return jars;
    }

    public void setJars(List<String> jars) {
        this.jars = jars;
    }

    public List<String> getFiles() {
        return files;
    }

    public void setFiles(List<String> files) {
        this.files = files;
    }
}
| 4,092 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/LivySessionStates.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.DEAD;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.FINDING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.FOUND;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_FOUND;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.NOT_STARTED;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.RUNNING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.SHUTTING_DOWN;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.STARTING;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.STOPPED;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.SUCCESS;
import static org.apache.griffin.core.job.entity.LivySessionStates.State.UNKNOWN;
import com.cloudera.livy.sessions.SessionState;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
/**
 * Maps between Livy/YARN session states, Griffin's own {@link State} enum,
 * and Quartz trigger states, and answers liveness/health questions about
 * a state.
 */
public class LivySessionStates {

    /**
     * UNKNOWN is used to represent the state that server get null from Livy.
     * the other state is just same as com.cloudera.livy.sessions.SessionState.
     */
    public enum State {
        NOT_STARTED,
        STARTING,
        RECOVERING,
        IDLE,
        RUNNING,
        BUSY,
        SHUTTING_DOWN,
        ERROR,
        DEAD,
        SUCCESS,
        UNKNOWN,
        STOPPED,
        FINDING,
        NOT_FOUND,
        FOUND
    }

    /**
     * Converts a Griffin state to its Livy {@code SessionState} counterpart.
     * Returns null for states that have no Livy equivalent (UNKNOWN, STOPPED,
     * FINDING, NOT_FOUND, FOUND) or for a null input.
     */
    private static SessionState toSessionState(State state) {
        if (state == null) {
            return null;
        }
        switch (state) {
            case NOT_STARTED:
                return new SessionState.NotStarted();
            case STARTING:
                return new SessionState.Starting();
            case RECOVERING:
                return new SessionState.Recovering();
            case IDLE:
                return new SessionState.Idle();
            case RUNNING:
                return new SessionState.Running();
            case BUSY:
                return new SessionState.Busy();
            case SHUTTING_DOWN:
                return new SessionState.ShuttingDown();
            case ERROR:
                return new SessionState.Error(System.nanoTime());
            case DEAD:
                return new SessionState.Dead(System.nanoTime());
            case SUCCESS:
                return new SessionState.Success(System.nanoTime());
            default:
                // UNKNOWN / STOPPED / FINDING / NOT_FOUND / FOUND
                return null;
        }
    }

    /**
     * Derives a Griffin state from a Livy/YARN JSON response. Prefers the
     * "state" field; when it parses to null (e.g. "FINISHED"), falls back
     * to "finalStatus". Returns UNKNOWN for a null response.
     */
    public static State toLivyState(JsonObject object) {
        if (object != null) {
            JsonElement state = object.get("state");
            JsonElement finalStatus = object.get("finalStatus");
            State finalState = parseState(state);
            return finalState != null ? finalState : parseState(finalStatus);
        }
        return UNKNOWN;
    }

    /**
     * Parses a single YARN status string into a Griffin state. "FINISHED"
     * deliberately yields null so that toLivyState consults the final
     * status instead; unrecognized strings yield UNKNOWN.
     */
    private static State parseState(JsonElement state) {
        if (state == null) {
            return null;
        }
        switch (state.getAsString()) {
            case "NEW":
            case "NEW_SAVING":
            case "SUBMITTED":
                return NOT_STARTED;
            case "ACCEPTED":
                return STARTING;
            case "RUNNING":
                return RUNNING;
            case "SUCCEEDED":
                return SUCCESS;
            case "FAILED":
                return DEAD;
            case "KILLED":
                return SHUTTING_DOWN;
            case "FINISHED":
                // Not terminal by itself; caller falls back to finalStatus.
                return null;
            default:
                return UNKNOWN;
        }
    }

    /**
     * Whether the state represents a session that is still alive. FINDING is
     * treated as active; UNKNOWN, STOPPED, NOT_FOUND and FOUND as inactive;
     * everything else defers to Livy's SessionState.isActive().
     */
    public static boolean isActive(State state) {
        if (UNKNOWN.equals(state) || STOPPED.equals(state) || NOT_FOUND.equals
                (state) || FOUND.equals(state)) {
            // set UNKNOWN isActive() as false.
            return false;
        } else if (FINDING.equals(state)) {
            return true;
        }
        SessionState sessionState = toSessionState(state);
        return sessionState != null && sessionState.isActive();
    }

    /**
     * Maps a Griffin state onto a Quartz trigger state string:
     * COMPLETE for finished (STOPPED/SUCCESS), ERROR for unknown or
     * inactive states, NORMAL otherwise.
     */
    public static String convert2QuartzState(State state) {
        SessionState sessionState = toSessionState(state);
        if (STOPPED.equals(state) || SUCCESS.equals(state)) {
            return "COMPLETE";
        }
        if (UNKNOWN.equals(state) || NOT_FOUND.equals(state)
                || FOUND.equals(state) || sessionState == null
                || !sessionState.isActive()) {
            return "ERROR";
        }
        return "NORMAL";
    }

    /**
     * Whether the state counts as healthy: everything except ERROR, DEAD,
     * SHUTTING_DOWN and the lookup states FINDING/NOT_FOUND/FOUND.
     */
    public static boolean isHealthy(State state) {
        return !(State.ERROR.equals(state) || State.DEAD.equals(state)
                || State.SHUTTING_DOWN.equals(state)
                || State.FINDING.equals(state)
                || State.NOT_FOUND.equals(state)
                || State.FOUND.equals(state));
    }
}
| 4,093 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/JobState.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import com.fasterxml.jackson.annotation.JsonInclude;
/**
* Encapsulating job scheduler state to reduce job startup and stop logical
* processing
*/
/**
 * Snapshot of a job's scheduler state, bundled so callers can decide in one
 * place whether the job may be started or stopped.
 */
public class JobState {

    /** Quartz trigger state string of the job. */
    private String state;

    /** True when the job is eligible to be started. */
    private boolean toStart = false;

    /** True when the job is eligible to be stopped. */
    private boolean toStop = false;

    /** Next scheduled fire time in epoch millis; hidden from JSON when null. */
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private Long nextFireTime;

    /** Previous fire time in epoch millis; hidden from JSON when null. */
    @JsonInclude(JsonInclude.Include.NON_NULL)
    private Long previousFireTime;

    public String getState() {
        return state;
    }

    public void setState(String state) {
        this.state = state;
    }

    public boolean isToStart() {
        return toStart;
    }

    public void setToStart(boolean toStart) {
        this.toStart = toStart;
    }

    public boolean isToStop() {
        return toStop;
    }

    public void setToStop(boolean toStop) {
        this.toStop = toStop;
    }

    public Long getNextFireTime() {
        return nextFireTime;
    }

    public void setNextFireTime(Long nextFireTime) {
        this.nextFireTime = nextFireTime;
    }

    public Long getPreviousFireTime() {
        return previousFireTime;
    }

    public void setPreviousFireTime(Long previousFireTime) {
        this.previousFireTime = previousFireTime;
    }
}
| 4,094 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/entity/SegmentPredicate.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.entity;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import javax.persistence.Entity;
import javax.persistence.PostLoad;
import javax.persistence.PrePersist;
import javax.persistence.PreUpdate;
import javax.persistence.Transient;

import org.apache.commons.lang.StringUtils;
import org.apache.griffin.core.measure.entity.AbstractAuditableEntity;
import org.apache.griffin.core.util.JsonUtil;
/**
 * A persisted predicate attached to a job data segment. The predicate
 * configuration is exposed to callers as a {@code Map} but stored in the
 * database as a JSON string; the JPA lifecycle callbacks {@link #save()} and
 * {@link #load()} keep the two representations in sync.
 */
@Entity
public class SegmentPredicate extends AbstractAuditableEntity {

    private static final long serialVersionUID = 1942715275465116154L;

    /** Predicate type identifier. */
    private String type;

    /** JSON-serialized form of {@link #configMap}; this is the persisted column. */
    @JsonIgnore
    private String config;

    /** In-memory view of the predicate configuration (not persisted directly). */
    @Transient
    private Map<String, Object> configMap;

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    @JsonProperty("config")
    public Map<String, Object> getConfigMap() {
        return configMap;
    }

    public void setConfigMap(Map<String, Object> configMap) {
        this.configMap = configMap;
    }

    private String getConfig() {
        return config;
    }

    public void setConfig(String config) {
        this.config = config;
    }

    /**
     * Serializes {@link #configMap} into the persisted {@link #config} column
     * before the entity is inserted or updated.
     *
     * @throws JsonProcessingException if the map cannot be rendered as JSON
     */
    @PrePersist
    @PreUpdate
    public void save() throws JsonProcessingException {
        if (configMap != null) {
            this.config = JsonUtil.toJson(configMap);
        }
    }

    /**
     * Deserializes the persisted {@link #config} JSON back into
     * {@link #configMap} after the entity is loaded from the database.
     *
     * @throws IOException if the stored JSON cannot be parsed
     */
    @PostLoad
    public void load() throws IOException {
        if (!StringUtils.isEmpty(config)) {
            this.configMap = JsonUtil.toEntity(config,
                new TypeReference<Map<String, Object>>() {
                });
        }
    }

    public SegmentPredicate() {
    }

    /**
     * Creates a predicate of the given type with the given configuration.
     *
     * @param type      predicate type identifier
     * @param configMap predicate configuration; may be null
     * @throws JsonProcessingException if the map cannot be rendered as JSON
     */
    public SegmentPredicate(String type, Map<String, String> configMap)
        throws JsonProcessingException {
        this.type = type;
        this.config = JsonUtil.toJson(configMap);
        // Bug fix: previously only the JSON string was set, so getConfigMap()
        // returned null until the entity went through a @PostLoad round trip.
        // Populate the transient map (as a defensive copy) up front as well.
        this.configMap = configMap == null ? null : new HashMap<>(configMap);
    }
}
| 4,095 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/repo/JobRepo.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.repo;
import org.apache.griffin.core.job.entity.AbstractJob;
import org.springframework.data.jpa.repository.Query;
import java.util.List;
/**
 * Repository of persisted jobs, parameterized on the concrete
 * {@link AbstractJob} subtype so each job flavor gets its own typed repository.
 *
 * @param <T> concrete job entity type
 */
public interface JobRepo<T extends AbstractJob> extends BaseJpaRepository<T, Long> {

    /**
     * Counts jobs with the given name and deleted flag. The SpEL expression
     * {@code #{#entityName}} expands to the concrete entity name, so the query
     * works for every {@link AbstractJob} subtype.
     *
     * @param jobName job name to match
     * @param deleted deleted flag to match
     * @return number of matching jobs
     */
    @Query("select count(j) from #{#entityName} j " +
        "where j.jobName = ?1 and j.deleted = ?2")
    int countByJobNameAndDeleted(String jobName, Boolean deleted);

    /** Returns all jobs whose deleted flag matches the argument. */
    List<T> findByDeleted(boolean deleted);

    /** Returns jobs matching both the given name and deleted flag. */
    List<T> findByJobNameAndDeleted(String jobName, boolean deleted);

    /** Returns jobs bound to the given measure id with the given deleted flag. */
    List<T> findByMeasureIdAndDeleted(Long measureId, boolean deleted);

    /** Returns the job with the given id and deleted flag, if any. */
    T findByIdAndDeleted(Long jobId, boolean deleted);
}
| 4,096 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/repo/VirtualJobRepo.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.repo;
import org.apache.griffin.core.job.entity.VirtualJob;
/**
 * Repository for {@link VirtualJob} entities; inherits all of its query
 * methods from {@link JobRepo} and adds none of its own.
 */
public interface VirtualJobRepo extends JobRepo<VirtualJob> {
}
| 4,097 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/repo/JobDataSegmentRepo.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.repo;
import org.apache.griffin.core.job.entity.JobDataSegment;
import org.springframework.data.repository.CrudRepository;
/**
 * Basic CRUD repository for {@link JobDataSegment} entities.
 */
public interface JobDataSegmentRepo extends CrudRepository<JobDataSegment, Long> {
}
| 4,098 |
0 | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job | Create_ds/griffin/service/src/main/java/org/apache/griffin/core/job/repo/BaseJpaRepository.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.griffin.core.job.repo;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.NoRepositoryBean;
/**
 * Shared base repository that keeps the Spring Boot 1.5.x style
 * {@code findOne(id)} API available on top of the Spring Data 2.x
 * {@code findById} API.
 *
 * @param <T>  entity type managed by the repository
 * @param <ID> type of the entity's primary key
 */
@NoRepositoryBean
public interface BaseJpaRepository<T, ID> extends JpaRepository<T, ID> {

    /**
     * Looks up an entity by id, mirroring the Spring Boot 1.5.x
     * {@code findOne} contract.
     *
     * @param id primary key to look up
     * @return the entity with the given id, or {@code null} if none exists
     */
    default T findOne(ID id) {
        // findById(id) already yields Optional<T>, so the original (T) cast
        // was redundant and has been removed.
        return findById(id).orElse(null);
    }
}
| 4,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.