index
int64
0
0
repo_id
stringlengths
9
205
file_path
stringlengths
31
246
content
stringlengths
1
12.2M
__index_level_0__
int64
0
10k
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/PutMediaWorker.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
   Licensed under the Apache License, Version 2.0 (the "License").
   You may not use this file except in compliance with the License.
   A copy of the License is located at http://aws.amazon.com/apache2.0/
   or in the "license" file accompanying this file.
   This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
   either express or implied. See the License for the specific language governing permissions
   and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.examples;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesisvideo.AmazonKinesisVideo;
import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoPutMedia;
import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoPutMediaClient;
import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoPutMediaClientBuilder;
import com.amazonaws.services.kinesisvideo.PutMediaAckResponseHandler;
import com.amazonaws.services.kinesisvideo.model.APIName;
import com.amazonaws.services.kinesisvideo.model.AckEvent;
import com.amazonaws.services.kinesisvideo.model.AckEventType;
import com.amazonaws.services.kinesisvideo.model.FragmentTimecodeType;
import com.amazonaws.services.kinesisvideo.model.GetDataEndpointRequest;
import com.amazonaws.services.kinesisvideo.model.PutMediaRequest;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;

import java.io.InputStream;
import java.util.Date;
import java.util.concurrent.CountDownLatch;

/**
 * Worker used to make a PutMedia call to Kinesis Video for a stream and stream in some video.
 */
@Slf4j
public class PutMediaWorker extends KinesisVideoCommon implements Runnable {
    /** Source of the MKV payload streamed to Kinesis Video. Closed by the caller, not by this worker. */
    private final InputStream inputStream;
    /** Dedicated PutMedia client bound to the stream's data endpoint. Closed in run(). */
    private final AmazonKinesisVideoPutMedia putMedia;
    /** Count of fragments acknowledged as PERSISTED. Incremented only from the ack callback thread. */
    @Getter
    private long numFragmentsPersisted = 0;

    private PutMediaWorker(Regions region,
                           AWSCredentialsProvider credentialsProvider,
                           String streamName,
                           InputStream inputStream,
                           String endPoint) {
        super(region, credentialsProvider, streamName);
        this.inputStream = inputStream;
        AmazonKinesisVideoPutMediaClientBuilder builder =
                AmazonKinesisVideoPutMediaClient.builder().withEndpoint(endPoint);
        conifgurePutMediaClient(builder);
        this.putMedia = builder.build();
    }

    /**
     * Creates a PutMediaWorker after resolving the PUT_MEDIA data endpoint for the stream.
     *
     * @param region AWS region of the stream
     * @param credentialsProvider credentials used for the PutMedia call
     * @param streamName name of the Kinesis Video stream to write to
     * @param inputStream source of the video payload
     * @param amazonKinesisVideo control-plane client used to look up the data endpoint
     * @return a worker ready to be submitted to an executor
     */
    public static PutMediaWorker create(Regions region,
                                        AWSCredentialsProvider credentialsProvider,
                                        String streamName,
                                        InputStream inputStream,
                                        AmazonKinesisVideo amazonKinesisVideo) {
        String endPoint = amazonKinesisVideo.getDataEndpoint(
                new GetDataEndpointRequest().withAPIName(APIName.PUT_MEDIA)
                        .withStreamName(streamName)).getDataEndpoint();
        return new PutMediaWorker(region, credentialsProvider, streamName, inputStream, endPoint);
    }

    /**
     * Starts the PutMedia call and blocks until the asynchronous response handler signals
     * completion or failure, then closes the PutMedia client.
     */
    @Override
    public void run() {
        CountDownLatch latch = new CountDownLatch(1);
        putMedia.putMedia(new PutMediaRequest().withStreamName(streamName)
                        .withFragmentTimecodeType(FragmentTimecodeType.RELATIVE)
                        .withProducerStartTimestamp(new Date())
                        .withPayload(inputStream),
                new PutMediaAckResponseHandler() {
                    @Override
                    public void onAckEvent(AckEvent event) {
                        log.info("PutMedia Ack for stream {}: {} ", streamName, event.toString());
                        if (AckEventType.Values.PERSISTED.equals(event.getAckEventType().getEnumValue())) {
                            numFragmentsPersisted++;
                        }
                    }

                    @Override
                    public void onFailure(Throwable t) {
                        // BUGFIX: the message previously contained a second "{}" placeholder, so SLF4J
                        // consumed the Throwable as a format argument and the stack trace was lost.
                        // With no placeholder for it, the last argument is logged as a throwable.
                        log.error("PutMedia for {} has suffered error", streamName, t);
                        latch.countDown();
                    }

                    @Override
                    public void onComplete() {
                        log.info("PutMedia for {} is complete ", streamName);
                        latch.countDown();
                    }
                });
        log.info("Made PutMedia call for stream {}", streamName);
        try {
            latch.await();
            log.info("PutMedia worker exiting for stream {} number of fragments persisted {} ",
                    streamName, numFragmentsPersisted);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException("Failure while waiting for PutMedia to finish", e);
        } finally {
            putMedia.close();
        }
    }
}
5,400
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/KinesisVideoCommon.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.examples; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.regions.Regions; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoPutMediaClientBuilder; import lombok.Getter; import lombok.RequiredArgsConstructor; /** * Abstract class for all example classes that use the Kinesis Video clients. */ @RequiredArgsConstructor @Getter public abstract class KinesisVideoCommon { private final Regions region; private final AWSCredentialsProvider credentialsProvider; protected final String streamName; protected void configureClient(AwsClientBuilder clientBuilder) { clientBuilder.withCredentials(credentialsProvider).withRegion(region); } protected void conifgurePutMediaClient(AmazonKinesisVideoPutMediaClientBuilder builder) { builder.withCredentials(credentialsProvider).withRegion(region); } }
5,401
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/KinesisVideoRendererExample.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
   Licensed under the Apache License, Version 2.0 (the "License").
   You may not use this file except in compliance with the License.
   A copy of the License is located at http://aws.amazon.com/apache2.0/
   or in the "license" file accompanying this file.
   This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
   either express or implied. See the License for the specific language governing permissions
   and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.examples;

import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.kinesisvideo.parser.utilities.FragmentMetadataVisitor;
import com.amazonaws.kinesisvideo.parser.utilities.FrameVisitor;
import com.amazonaws.kinesisvideo.parser.utilities.H264FrameRenderer;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesisvideo.model.StartSelector;
import com.amazonaws.services.kinesisvideo.model.StartSelectorType;
import lombok.Builder;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;

/*
 * Example for integrating with Kinesis Video.
 * This example does:
 * 1. Create a stream, deleting and recreating if the stream of the same name already exists.
 *    It sets the retention period of the created stream to 48 hours.
 * 2. Call PutMedia to stream video fragments into the stream.
 * 3. Calls GetMedia to stream video fragments out of the stream.
 * 4. It uses the StreamingMkvParser to parse the returned the stream and perform these steps:
 *    2.1 The GetMedia output stream has one mkv segment for each fragment. Merge the mkv segments
 *        that share track information into a single segment.
 *    2.2 Decodes the frames using h264 decoder (using JCodec) and
 *    2.3 It renders the image using JFrame for viewing
 *
 * To run the example:
 * Run the Unit test KinesisVideoRendererExampleTest
 */
@Slf4j
public class KinesisVideoRendererExample extends KinesisVideoCommon {

    // Dimensions of the JFrame viewer the decoded frames are rendered into.
    private static final int FRAME_WIDTH = 640;
    private static final int FRAME_HEIGHT = 480;

    // Sample video fed to the PutMedia worker; unused when noSampleInputRequired is true.
    private final InputStream inputStream;
    // Helper that creates the stream (and holds the AmazonKinesisVideo client).
    private final StreamOps streamOps;
    // Fixed pool of 2: one thread for the PutMedia worker, one for the GetMedia worker.
    private final ExecutorService executorService;
    private KinesisVideoRendererExample.GetMediaProcessingArguments getMediaProcessingArguments;
    // When true, a BasicMkvTagProcessor is attached so fragment metadata tags are processed.
    private boolean renderFragmentMetadata = true;
    // When true, no PutMedia worker is started — fragments are expected from an external producer.
    private boolean noSampleInputRequired = false;

    @Builder
    private KinesisVideoRendererExample(Regions region, String streamName,
                                        AWSCredentialsProvider credentialsProvider,
                                        InputStream inputVideoStream,
                                        boolean renderFragmentMetadata,
                                        boolean noSampleInputRequired) {
        super(region, credentialsProvider, streamName);
        this.inputStream = inputVideoStream;
        this.streamOps = new StreamOps(region, streamName, credentialsProvider);
        this.executorService = Executors.newFixedThreadPool(2);
        this.renderFragmentMetadata = renderFragmentMetadata;
        this.noSampleInputRequired = noSampleInputRequired;
    }

    /**
     * This method executes the example.
     *
     * @throws InterruptedException the thread is interrupted while waiting for the streamOps to
     *         enter the correct state.
     * @throws IOException fails to read video from the input stream or write to the output stream.
     */
    public void execute() throws InterruptedException, IOException {
        streamOps.createStreamIfNotExist();

        getMediaProcessingArguments = KinesisVideoRendererExample.GetMediaProcessingArguments.create(
                renderFragmentMetadata
                        ? Optional.of(new FragmentMetadataVisitor.BasicMkvTagProcessor())
                        : Optional.empty());
        try (KinesisVideoRendererExample.GetMediaProcessingArguments getMediaProcessingArgumentsLocal =
                     getMediaProcessingArguments) {
            if (!noSampleInputRequired) {
                //Start a PutMedia worker to write data to a Kinesis Video Stream.
                PutMediaWorker putMediaWorker = PutMediaWorker.create(getRegion(),
                        getCredentialsProvider(),
                        getStreamName(),
                        inputStream,
                        streamOps.amazonKinesisVideo);
                executorService.submit(putMediaWorker);
            }

            //Start a GetMedia worker to read and process data from the Kinesis Video Stream.
            // StartSelectorType.NOW: only fragments produced after this call are consumed.
            GetMediaWorker getMediaWorker = GetMediaWorker.create(getRegion(),
                    getCredentialsProvider(),
                    getStreamName(),
                    new StartSelector().withStartSelectorType(StartSelectorType.NOW),
                    streamOps.amazonKinesisVideo,
                    getMediaProcessingArgumentsLocal.getFrameVisitor());
            executorService.submit(getMediaWorker);

            //Wait for the workers to finish.
            executorService.shutdown();
            executorService.awaitTermination(180, TimeUnit.SECONDS);
            if (!executorService.isTerminated()) {
                log.warn("Shutting down executor service by force");
                executorService.shutdownNow();
            } else {
                log.info("Executor service is shutdown");
            }
        }
    }

    // Bundles the FrameVisitor used by the GetMedia worker; Closeable so it can participate in
    // the try-with-resources in execute() (close() is currently a no-op).
    private static class GetMediaProcessingArguments implements Closeable {

        @Getter
        private final FrameVisitor frameVisitor;

        GetMediaProcessingArguments(FrameVisitor frameVisitor) {
            this.frameVisitor = frameVisitor;
        }

        private static GetMediaProcessingArguments create(
                Optional<FragmentMetadataVisitor.MkvTagProcessor> tagProcessor) throws IOException {
            KinesisVideoFrameViewer kinesisVideoFrameViewer =
                    new KinesisVideoFrameViewer(FRAME_WIDTH, FRAME_HEIGHT);
            return new GetMediaProcessingArguments(
                    FrameVisitor.create(H264FrameRenderer.create(kinesisVideoFrameViewer),
                            tagProcessor, Optional.of(1L))); // Video track number
        }

        @Override
        public void close() throws IOException {
        }
    }
}
5,402
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/ContinuousGetMediaWorker.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
   Licensed under the Apache License, Version 2.0 (the "License").
   You may not use this file except in compliance with the License.
   A copy of the License is located at http://aws.amazon.com/apache2.0/
   or in the "license" file accompanying this file.
   This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
   either express or implied. See the License for the specific language governing permissions
   and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.examples;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.kinesisvideo.parser.mkv.MkvElementVisitException;
import com.amazonaws.kinesisvideo.parser.utilities.FragmentMetadata;
import com.amazonaws.kinesisvideo.parser.mkv.FrameProcessException;
import com.amazonaws.kinesisvideo.parser.utilities.consumer.GetMediaResponseStreamConsumer;
import com.amazonaws.kinesisvideo.parser.utilities.consumer.GetMediaResponseStreamConsumerFactory;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesisvideo.AmazonKinesisVideo;
import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoMedia;
import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoMediaClientBuilder;
import com.amazonaws.services.kinesisvideo.model.APIName;
import com.amazonaws.services.kinesisvideo.model.GetDataEndpointRequest;
import com.amazonaws.services.kinesisvideo.model.GetMediaRequest;
import com.amazonaws.services.kinesisvideo.model.GetMediaResult;
import com.amazonaws.services.kinesisvideo.model.StartSelector;
import com.amazonaws.services.kinesisvideo.model.StartSelectorType;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.Validate;

import java.io.IOException;
import java.io.InputStream;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Worker used to make a GetMedia call to Kinesis Video and stream in data and parse it and
 * apply a visitor. Reconnects in a loop, resuming after the last processed fragment, until
 * {@link #stop()} is called.
 */
@Slf4j
public class ContinuousGetMediaWorker extends KinesisVideoCommon implements Runnable {
    private static final int HTTP_STATUS_OK = 200;

    private final AmazonKinesisVideoMedia videoMedia;
    private final GetMediaResponseStreamConsumerFactory consumerFactory;
    /** Selector used for the very first GetMedia call; later calls resume after the last fragment. */
    private final StartSelector startSelector;
    /** Last fragment number successfully processed; used to resume after a reconnect. */
    private Optional<String> fragmentNumberToStartAfter = Optional.empty();
    // BUGFIX: was "private volatile AtomicBoolean" — `volatile` on an AtomicBoolean reference is
    // redundant (the AtomicBoolean itself provides the cross-thread visibility) and the field is
    // never reassigned, so it should be final.
    private final AtomicBoolean shouldStop = new AtomicBoolean(false);

    private ContinuousGetMediaWorker(Regions region,
                                     AWSCredentialsProvider credentialsProvider,
                                     String streamName,
                                     StartSelector startSelector,
                                     String endPoint,
                                     GetMediaResponseStreamConsumerFactory consumerFactory) {
        super(region, credentialsProvider, streamName);
        AmazonKinesisVideoMediaClientBuilder builder = AmazonKinesisVideoMediaClientBuilder.standard()
                .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endPoint, region.getName()))
                .withCredentials(getCredentialsProvider());
        this.videoMedia = builder.build();
        this.consumerFactory = consumerFactory;
        this.startSelector = startSelector;
    }

    /**
     * Creates a worker after resolving the GET_MEDIA data endpoint for the stream.
     */
    public static ContinuousGetMediaWorker create(Regions region,
                                                  AWSCredentialsProvider credentialsProvider,
                                                  String streamName,
                                                  StartSelector startSelector,
                                                  AmazonKinesisVideo amazonKinesisVideo,
                                                  GetMediaResponseStreamConsumerFactory consumer) {
        String endPoint = amazonKinesisVideo.getDataEndpoint(
                new GetDataEndpointRequest().withAPIName(APIName.GET_MEDIA)
                        .withStreamName(streamName)).getDataEndpoint();
        return new ContinuousGetMediaWorker(region, credentialsProvider, streamName, startSelector,
                endPoint, consumer);
    }

    /** Signals the run loop to exit after the current GetMedia iteration. */
    public void stop() {
        log.info("Stop ContinuousGetMediaWorker");
        shouldStop.set(true);
    }

    @Override
    public void run() {
        log.info("Start ContinuousGetMedia worker for stream {}", streamName);
        while (!shouldStop.get()) {
            GetMediaResult getMediaResult = null;
            try {
                // Resume after the last processed fragment if we have one; otherwise use the
                // caller-supplied start selector.
                StartSelector selectorToUse = fragmentNumberToStartAfter
                        .map(fn -> new StartSelector()
                                .withStartSelectorType(StartSelectorType.FRAGMENT_NUMBER)
                                .withAfterFragmentNumber(fn))
                        .orElse(startSelector);

                getMediaResult = videoMedia.getMedia(
                        new GetMediaRequest().withStreamName(streamName).withStartSelector(selectorToUse));
                log.info("Start processing GetMedia called for stream {} response {} requestId {}",
                        streamName,
                        getMediaResult.getSdkHttpMetadata().getHttpStatusCode(),
                        getMediaResult.getSdkResponseMetadata().getRequestId());

                if (getMediaResult.getSdkHttpMetadata().getHttpStatusCode() == HTTP_STATUS_OK) {
                    // The consumer processes the MKV payload and reports each fragment's metadata,
                    // which we use to advance the resume point.
                    try (GetMediaResponseStreamConsumer consumer = consumerFactory.createConsumer()) {
                        consumer.process(getMediaResult.getPayload(), this::updateFragmentNumberToStartAfter);
                    }
                } else {
                    // Back off briefly on a non-OK response before retrying.
                    Thread.sleep(200);
                }
            } catch (FrameProcessException e) {
                // A frame-processing failure is not recoverable by reconnecting; give up.
                log.error("FrameProcessException in ContinuousGetMedia worker for stream: " + streamName, e);
                break;
            } catch (IOException | MkvElementVisitException e) {
                // Transient parse/IO failures: log and reconnect on the next loop iteration.
                log.error("Failure in ContinuousGetMedia worker for stream: " + streamName, e);
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(ie);
            } catch (Throwable t) {
                // Deliberately broad: keep the continuous worker alive across unexpected errors.
                log.error("Throwable", t);
            } finally {
                closeGetMediaResponse(getMediaResult);
                log.info("Exit processing GetMedia called for stream {}", streamName);
            }
        }
        log.info("Exit ContinuousGetMedia worker for stream {}", streamName);
    }

    /** Closes the GetMedia payload stream if present, ignoring close failures. */
    private void closeGetMediaResponse(final GetMediaResult getMediaResult) {
        if (getMediaResult != null) {
            final InputStream payload = getMediaResult.getPayload();
            if (payload != null) {
                try {
                    payload.close();
                } catch (final IOException ignored) {
                    // Ignore close exception; the connection is being torn down anyway.
                }
            }
        }
    }

    /**
     * Records the fragment number of a processed fragment so the next GetMedia call resumes
     * after it. Fragment numbers must be strictly increasing.
     */
    private void updateFragmentNumberToStartAfter(FragmentMetadata f) {
        Validate.isTrue(!fragmentNumberToStartAfter.isPresent()
                || f.getFragmentNumberString().compareTo(fragmentNumberToStartAfter.get()) > 0);
        fragmentNumberToStartAfter = Optional.of(f.getFragmentNumberString());
    }
}
5,403
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/KinesisVideoRekognitionIntegrationExample.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
   Licensed under the Apache License, Version 2.0 (the "License").
   You may not use this file except in compliance with the License.
   A copy of the License is located at http://aws.amazon.com/apache2.0/
   or in the "license" file accompanying this file.
   This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
   either express or implied. See the License for the specific language governing permissions
   and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.examples;

import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.kinesisvideo.parser.kinesis.KinesisDataStreamsWorker;
import com.amazonaws.kinesisvideo.parser.rekognition.pojo.RekognitionInput;
import com.amazonaws.kinesisvideo.parser.rekognition.pojo.RekognizedFragmentsIndex;
import com.amazonaws.kinesisvideo.parser.rekognition.processor.RekognitionStreamProcessor;
import com.amazonaws.kinesisvideo.parser.utilities.FrameVisitor;
import com.amazonaws.kinesisvideo.parser.utilities.H264BoundingBoxFrameRenderer;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesisvideo.model.StartSelector;
import com.amazonaws.services.kinesisvideo.model.StartSelectorType;
import lombok.Builder;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;

/**
 * This examples demonstrates how to integrate KVS with Rekognition and draw bounding boxes while
 * rendering each frame in KinesisVideoFrameViewer.
 */
@Slf4j
public class KinesisVideoRekognitionIntegrationExample extends KinesisVideoCommon {

    // Default viewer dimensions; can be overridden via the width/height setters.
    private static final int DEFAULT_FRAME_WIDTH = 640;
    private static final int DEFAULT_FRAME_HEIGHT = 480;
    // Delay (ms) before starting the GetMedia worker so KVS and KDS data line up.
    private static final int INITIAL_DELAY = 10_000;

    private final StreamOps streamOps;
    // Optional sample video for the PutMedia worker; null means an external producer is used.
    private final InputStream inputStream;
    // Fixed pool of 3: KinesisDataStreams worker, GetMedia worker, optional PutMedia worker.
    private final ExecutorService executorService;
    private RekognitionStreamProcessor rekognitionStreamProcessor;
    private KinesisDataStreamsWorker kinesisDataStreamsWorker;
    private GetMediaWorker getMediaWorker;
    // Kinesis Data Stream carrying Rekognition face-search results.
    private String kdsStreamName;
    private RekognitionInput rekognitionInput;
    @Setter
    private Integer rekognitionMaxTimeoutInMillis;
    @Setter
    private int width = DEFAULT_FRAME_WIDTH;
    @Setter
    private int height = DEFAULT_FRAME_HEIGHT;
    // Shared index: the KDS worker writes rekognized fragments, the renderer reads them.
    private RekognizedFragmentsIndex rekognizedFragmentsIndex = new RekognizedFragmentsIndex();

    @Builder
    private KinesisVideoRekognitionIntegrationExample(Regions region,
                                                      InputStream inputStream,
                                                      String kvsStreamName,
                                                      String kdsStreamName,
                                                      RekognitionInput rekognitionInput,
                                                      AWSCredentialsProvider credentialsProvider) {
        super(region, credentialsProvider, kvsStreamName);
        this.streamOps = new StreamOps(region, kvsStreamName, credentialsProvider);
        this.inputStream = inputStream;
        this.kdsStreamName = kdsStreamName;
        this.rekognitionInput = rekognitionInput;
        this.executorService = Executors.newFixedThreadPool(3);
    }

    /**
     * This method executes the example.
     *
     * @param timeOutinSec Timeout in seconds
     * @throws InterruptedException the thread is interrupted while waiting for the stream to enter
     *         the correct state.
     * @throws IOException fails to read video from the input stream or write to the output stream.
     */
    public void execute(Long timeOutinSec) throws InterruptedException, IOException {
        // Start the RekognitionStreamProcessor and the KinesisDataStreams worker to read and
        // process rekognized face results.
        // NOTE: Starting up KinesisClientLibrary can take some time, so start that first.
        startRekognitionProcessor();
        startKinesisDataStreamsWorker();

        // Adding some initial delay to sync both KVS and KDS data
        Thread.sleep(INITIAL_DELAY);

        // Start a GetMedia worker to read and render KVS fragments.
        startGetMediaWorker();

        // Start a PutMedia worker to write data to a Kinesis Video Stream. NOTE: Video fragments
        // can also be ingested using real-time producer like the Kinesis Video GStreamer sample
        // app or AmazonKinesisVideoDemoApp
        if (inputStream != null) {
            startPutMediaWorker();
        }

        // Wait for the workers to finish.
        waitForTermination(timeOutinSec);
        cleanup();
    }

    // Submits a PutMedia worker that streams the sample video into the KVS stream.
    private void startPutMediaWorker() {
        PutMediaWorker putMediaWorker = PutMediaWorker.create(getRegion(),
                getCredentialsProvider(),
                getStreamName(),
                inputStream,
                streamOps.getAmazonKinesisVideo());
        executorService.submit(putMediaWorker);
    }

    // Submits a GetMedia worker that decodes frames and renders them with bounding boxes.
    private void startGetMediaWorker() {
        final KinesisVideoBoundingBoxFrameViewer kinesisVideoBoundingBoxFrameViewer =
                new KinesisVideoBoundingBoxFrameViewer(width, height);
        final H264BoundingBoxFrameRenderer h264BoundingBoxFrameRenderer =
                H264BoundingBoxFrameRenderer.create(kinesisVideoBoundingBoxFrameViewer,
                        rekognizedFragmentsIndex);
        if (rekognitionMaxTimeoutInMillis != null) {
            // Change the below timeout value to if the frames need to be rendered with low latency
            // when rekognition results are not present.
            h264BoundingBoxFrameRenderer.setMaxTimeout(rekognitionMaxTimeoutInMillis);
        }
        final FrameVisitor frameVisitor = FrameVisitor.create(h264BoundingBoxFrameRenderer);
        this.getMediaWorker = GetMediaWorker.create(getRegion(),
                getCredentialsProvider(),
                getStreamName(),
                new StartSelector().withStartSelectorType(StartSelectorType.NOW),
                streamOps.getAmazonKinesisVideo(),
                frameVisitor);
        executorService.submit(getMediaWorker);
    }

    // Creates and starts the Rekognition stream processor for the configured input.
    private void startRekognitionProcessor() {
        this.rekognitionStreamProcessor = RekognitionStreamProcessor.create(getRegion(),
                getCredentialsProvider(),
                rekognitionInput);
        this.rekognitionStreamProcessor.process();
    }

    // Submits the KDS worker that populates rekognizedFragmentsIndex from Rekognition results.
    private void startKinesisDataStreamsWorker() {
        this.kinesisDataStreamsWorker = KinesisDataStreamsWorker.create(getRegion(),
                getCredentialsProvider(),
                kdsStreamName,
                rekognizedFragmentsIndex);
        executorService.submit(kinesisDataStreamsWorker);
    }

    // Waits up to timeOutinSec for all submitted workers to finish.
    private void waitForTermination(final Long timeOutinSec) throws InterruptedException {
        executorService.shutdown();
        executorService.awaitTermination(timeOutinSec, TimeUnit.SECONDS);
    }

    // Forces executor shutdown if needed and stops the Rekognition stream processor.
    private void cleanup() {
        if (!executorService.isTerminated()) {
            log.warn("Shutting down executor service by force");
            executorService.shutdownNow();
        } else {
            log.info("Executor service is shutdown");
        }
        this.rekognitionStreamProcessor.stopStreamProcessor();
    }
}
5,404
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/BoundingBoxImagePanel.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
   Licensed under the Apache License, Version 2.0 (the "License").
   You may not use this file except in compliance with the License.
   A copy of the License is located at http://aws.amazon.com/apache2.0/
   or in the "license" file accompanying this file.
   This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
   either express or implied. See the License for the specific language governing permissions
   and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.examples;

import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;

import com.amazonaws.kinesisvideo.parser.rekognition.pojo.BoundingBox;
import com.amazonaws.kinesisvideo.parser.rekognition.pojo.FaceType;
import com.amazonaws.kinesisvideo.parser.rekognition.pojo.MatchedFace;
import com.amazonaws.kinesisvideo.parser.rekognition.pojo.RekognizedOutput;
import lombok.extern.slf4j.Slf4j;

/**
 * Panel which is used for rendering frames and embedding bounding boxes on the frames.
 */
@Slf4j
public class BoundingBoxImagePanel extends ImagePanel {
    // Separator expected in External Image Id, e.g. "PersonName1-Criminal".
    private static final String DELIMITER = "-";

    @Override
    public void paintComponent(final Graphics g) {
        super.paintComponent(g);
    }

    /**
     * Draws a bounding box and title for every detected face in the given Rekognition output.
     * Bounding-box coordinates are fractions of the frame, scaled by width/height here.
     *
     * @param g2 graphics context of the frame image
     * @param width frame width in pixels
     * @param height frame height in pixels
     * @param rekognizedOutput Rekognition results for this frame; may be null (no-op)
     */
    public void processRekognitionOutput(final Graphics2D g2, final int width, final int height,
                                         final RekognizedOutput rekognizedOutput) {
        if (rekognizedOutput != null) {

            // Draw bounding boxes for faces.
            if (rekognizedOutput.getFaceSearchOutputs() != null) {
                log.debug("Number of detected faces in a frame {}",
                        rekognizedOutput.getFaceSearchOutputs().size());
                for (final RekognizedOutput.FaceSearchOutput faceSearchOutput
                        : rekognizedOutput.getFaceSearchOutputs()) {
                    final FaceType detectedFaceType;
                    final String title;
                    if (!faceSearchOutput.getMatchedFaceList().isEmpty()) {
                        // Taking First match as Rekognition returns set of matched faces sorted
                        // by confidence level
                        final MatchedFace matchedFace = faceSearchOutput.getMatchedFaceList().get(0);
                        final String externalImageId = matchedFace.getFace().getExternalImageId();

                        // Rekognition doesn't allow any extra attributes/tags to be associated
                        // with the 'Face'. External Image Id is used here to draw title on top of
                        // the bounding box and change color of the bounding box (based on the
                        // FaceType). External Image Id needs to be specified in below format in
                        // order get this working.
                        // Eg: PersonName1-Criminal, PersonName2-Trusted, PersonName3-Intruder etc.
                        if (externalImageId == null) {
                            // If the external image id is not specified, then draw confidence
                            // level as title.
                            title = matchedFace.getFace().getConfidence() + "";
                            detectedFaceType = FaceType.NOT_RECOGNIZED;
                        } else {
                            final String[] imageIds = externalImageId.split(DELIMITER);
                            if (imageIds.length > 1) {
                                // "Name-Type": name becomes the title, suffix selects the color.
                                title = imageIds[0];
                                detectedFaceType = FaceType.fromString(imageIds[1]);
                            } else {
                                title = "No prefix";
                                detectedFaceType = FaceType.NOT_RECOGNIZED;
                            }
                        }
                        log.debug("Number of matched faces for the detected face {}",
                                faceSearchOutput.getMatchedFaceList().size());
                    } else {
                        // No match in the face collection: neutral color and generic title.
                        detectedFaceType = FaceType.NOT_RECOGNIZED;
                        title = "Not recognized";
                    }
                    drawFaces(g2, width, height,
                            faceSearchOutput.getDetectedFace().getBoundingBox(), title,
                            detectedFaceType.getColor());
                }
            }
        }
    }

    // Draws one face's bounding box and title in the given color, restoring the previous color.
    private void drawFaces(final Graphics2D g2, final int width, final int height,
                           final BoundingBox boundingBox, final String personName,
                           final Color color) {
        final Color c = g2.getColor();
        g2.setColor(color);
        // Draw bounding box
        drawBoundingBox(g2, width, height, boundingBox);
        // Draw title
        drawFaceTitle(g2, width, height, boundingBox, personName);
        g2.setColor(c);
    }

    // Draws the title string at the top-left corner of the (fractional) bounding box.
    private void drawFaceTitle(final Graphics2D g2, final int width, final int height,
                               final BoundingBox boundingBox, final String personName) {
        final int left = (int) (boundingBox.getLeft() * width);
        final int top = (int) (boundingBox.getTop() * height);
        g2.drawString(personName, left, top);
    }

    // Scales the fractional bounding box to pixel coordinates and draws the rectangle.
    private void drawBoundingBox(final Graphics2D g2, final int width, final int height,
                                 final BoundingBox boundingBox) {
        final int left = (int) (boundingBox.getLeft() * width);
        final int top = (int) (boundingBox.getTop() * height);
        final int bbWidth = (int) (boundingBox.getWidth() * width);
        final int bbHeight = (int) (boundingBox.getHeight() * height);

        // Draw bounding box
        g2.drawRect(left, top, bbWidth, bbHeight);
    }

    /**
     * Sets the frame image, overlays the Rekognition output onto it, and repaints the panel.
     */
    public void setImage(final BufferedImage bufferedImage, final RekognizedOutput rekognizedOutput) {
        this.image = bufferedImage;
        processRekognitionOutput(image.createGraphics(), image.getWidth(), image.getHeight(),
                rekognizedOutput);
        repaint();
    }
}
5,405
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/ListFragmentWorker.java
package com.amazonaws.kinesisvideo.parser.examples; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.Callable; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.regions.Regions; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideo; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoArchivedMedia; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoArchivedMediaClient; import com.amazonaws.services.kinesisvideo.model.*; import lombok.extern.slf4j.Slf4j; /* This worker retrieves all fragments within the specified TimestampRange from a specified Kinesis Video Stream and returns them in a list */ @Slf4j public class ListFragmentWorker extends KinesisVideoCommon implements Callable { private final FragmentSelector fragmentSelector; private final AmazonKinesisVideoArchivedMedia amazonKinesisVideoArchivedMedia; private final long fragmentsPerRequest = 100; public ListFragmentWorker(final String streamName, final AWSCredentialsProvider awsCredentialsProvider, final String endPoint, final Regions region, final FragmentSelector fragmentSelector) { super(region, awsCredentialsProvider, streamName); this.fragmentSelector = fragmentSelector; amazonKinesisVideoArchivedMedia = AmazonKinesisVideoArchivedMediaClient .builder() .withCredentials(awsCredentialsProvider) .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endPoint, region.getName())) .build(); } public static ListFragmentWorker create(final String streamName, final AWSCredentialsProvider awsCredentialsProvider, final Regions region, final AmazonKinesisVideo amazonKinesisVideo, final FragmentSelector fragmentSelector) { final GetDataEndpointRequest request = new GetDataEndpointRequest() .withAPIName(APIName.LIST_FRAGMENTS).withStreamName(streamName); final String endpoint = amazonKinesisVideo.getDataEndpoint(request).getDataEndpoint(); return 
new ListFragmentWorker( streamName, awsCredentialsProvider, endpoint, region, fragmentSelector); } @Override public List<String> call() { List<String> fragmentNumbers = new ArrayList<>(); try { log.info("Start ListFragment worker on stream {}", streamName); ListFragmentsRequest request = new ListFragmentsRequest() .withStreamName(streamName).withFragmentSelector(fragmentSelector).withMaxResults(fragmentsPerRequest); ListFragmentsResult result = amazonKinesisVideoArchivedMedia.listFragments(request); log.info("List Fragments called on stream {} response {} request ID {}", streamName, result.getSdkHttpMetadata().getHttpStatusCode(), result.getSdkResponseMetadata().getRequestId()); for (Fragment f: result.getFragments()) { fragmentNumbers.add(f.getFragmentNumber()); } String nextToken = result.getNextToken(); /* If result is truncated, keep making requests until nextToken is empty */ while (nextToken != null) { request = new ListFragmentsRequest() .withStreamName(streamName).withNextToken(nextToken); result = amazonKinesisVideoArchivedMedia.listFragments(request); for (Fragment f: result.getFragments()) { fragmentNumbers.add(f.getFragmentNumber()); } nextToken = result.getNextToken(); } Collections.sort(fragmentNumbers); for (String f: fragmentNumbers) { log.info("Retrieved fragment number {} ", f); } } catch (Throwable t) { log.error("Failure in ListFragmentWorker for streamName {} {}", streamName, t.toString()); throw t; } finally { log.info("Retrieved {} Fragments and exiting ListFragmentWorker for stream {}", fragmentNumbers.size(), streamName); return fragmentNumbers; } } }
5,406
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/KinesisVideoGStreamerPiperExample.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.examples; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.kinesisvideo.parser.utilities.consumer.MergedOutputPiperFactory; import com.amazonaws.regions.Regions; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideo; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoClientBuilder; import com.amazonaws.services.kinesisvideo.model.StartSelector; import com.amazonaws.services.kinesisvideo.model.StartSelectorType; import lombok.Builder; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; /** * Example for continuously piping the output of GetMedia calls from a Kinesis Video stream to GStreamer. */ @Slf4j public class KinesisVideoGStreamerPiperExample extends KinesisVideoCommon { private static final String DEFAULT_PATH_TO_GSTREAMER = "/usr/bin/gst-launch-1.0"; private static final String [] FDSRC_ARGS = new String[] { "-v", "fdsrc", "!" 
}; private final AmazonKinesisVideo amazonKinesisVideo; private final InputStream inputStream; private final ExecutorService executorService; private PutMediaWorker putMediaWorker; private final StreamOps streamOps; //The arguments to construct the gstreamer pipeline. //The merged output of GetMedia will be piped to the gstreamer pipeline created using these arguments. private final List<String> gStreamerPipelineArguments; @Builder private KinesisVideoGStreamerPiperExample(Regions region, String streamName, AWSCredentialsProvider credentialsProvider, InputStream inputVideoStream, String gStreamerPipelineArgument) { super(region, credentialsProvider, streamName); final AmazonKinesisVideoClientBuilder builder = AmazonKinesisVideoClientBuilder.standard(); configureClient(builder); this.amazonKinesisVideo = builder.build(); this.inputStream = inputVideoStream; this.streamOps = new StreamOps(region, streamName, credentialsProvider); this.executorService = Executors.newFixedThreadPool(2); this.gStreamerPipelineArguments = new ArrayList<>(); addGStreamerPipelineArguments(gStreamerPipelineArgument); } private void addGStreamerPipelineArguments(String gStreamerPipeLineArgument) { this.gStreamerPipelineArguments.add(pathToExecutable("PATH_TO_GSTREAMER", DEFAULT_PATH_TO_GSTREAMER)); addToPipelineArguments(FDSRC_ARGS); addToPipelineArguments(gStreamerPipeLineArgument.split("\\s+")); } private String pathToExecutable(String environmentVariable, String defaultPath) { final String environmentVariableValue = System.getenv(environmentVariable); return StringUtils.isEmpty(environmentVariableValue) ? defaultPath : environmentVariableValue; } private void addToPipelineArguments(String []pipelineArguments) { for (String pipelineArgument : pipelineArguments) { this.gStreamerPipelineArguments.add(pipelineArgument); } } /** * This method executes the example. * * @throws InterruptedException the thread is interrupted while waiting for the stream to enter the correct state. 
* @throws IOException fails to read video from the input stream or write to the output stream. */ public void execute () throws InterruptedException, IOException { //Create the Kinesis Video stream, deleting and recreating if necessary. streamOps.recreateStreamIfNecessary(); ContinuousGetMediaWorker getWorker = ContinuousGetMediaWorker.create(getRegion(), getCredentialsProvider(), getStreamName(), new StartSelector().withStartSelectorType(StartSelectorType.EARLIEST), amazonKinesisVideo, new MergedOutputPiperFactory(Optional.empty(), true, gStreamerPipelineArguments)); executorService.submit(getWorker); //Start a PutMedia worker to write data to a Kinesis Video Stream. putMediaWorker = PutMediaWorker.create(getRegion(), getCredentialsProvider(), getStreamName(), inputStream, amazonKinesisVideo); executorService.submit(putMediaWorker); Thread.sleep(3000); getWorker.stop(); executorService.shutdown(); executorService.awaitTermination(120, TimeUnit.SECONDS); if (!executorService.isTerminated()) { log.warn("Shutting down executor service by force"); executorService.shutdownNow(); } else { log.info("Executor service is shutdown"); } } }
5,407
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/GetMediaForFragmentListWorker.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.examples; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.client.builder.AwsClientBuilder; import com.amazonaws.kinesisvideo.parser.ebml.InputStreamParserByteSource; import com.amazonaws.kinesisvideo.parser.mkv.MkvElementVisitException; import com.amazonaws.kinesisvideo.parser.mkv.MkvElementVisitor; import com.amazonaws.kinesisvideo.parser.mkv.StreamingMkvReader; import com.amazonaws.regions.Regions; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideo; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoArchivedMedia; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoArchivedMediaClient; import com.amazonaws.services.kinesisvideo.model.APIName; import com.amazonaws.services.kinesisvideo.model.GetDataEndpointRequest; import com.amazonaws.services.kinesisvideo.model.GetMediaForFragmentListRequest; import com.amazonaws.services.kinesisvideo.model.GetMediaForFragmentListResult; import lombok.extern.slf4j.Slf4j; import java.util.List; @Slf4j public class GetMediaForFragmentListWorker extends KinesisVideoCommon implements Runnable { private final AmazonKinesisVideoArchivedMedia amazonKinesisVideoArchivedMedia; private final MkvElementVisitor elementVisitor; private final List<String> fragmentNumbers; public GetMediaForFragmentListWorker(final String streamName, final List<String> fragmentNumbers, final AWSCredentialsProvider 
awsCredentialsProvider, final String endPoint, final Regions region, final MkvElementVisitor elementVisitor) { super(region, awsCredentialsProvider, streamName); this.fragmentNumbers = fragmentNumbers; this.elementVisitor = elementVisitor; amazonKinesisVideoArchivedMedia = AmazonKinesisVideoArchivedMediaClient .builder() .withCredentials(awsCredentialsProvider) .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endPoint, region.getName())) .build(); } public static GetMediaForFragmentListWorker create(final String streamName, final List<String> fragmentNumbers, final AWSCredentialsProvider awsCredentialsProvider, final Regions region, final AmazonKinesisVideo amazonKinesisVideo, final MkvElementVisitor elementVisitor) { final GetDataEndpointRequest request = new GetDataEndpointRequest() .withAPIName(APIName.GET_MEDIA_FOR_FRAGMENT_LIST).withStreamName(streamName); final String endpoint = amazonKinesisVideo.getDataEndpoint(request).getDataEndpoint(); return new GetMediaForFragmentListWorker( streamName, fragmentNumbers, awsCredentialsProvider, endpoint, region, elementVisitor); } @Override public void run() { try { log.info("Start GetMediaForFragmentList worker on stream {}", streamName); final GetMediaForFragmentListResult result = amazonKinesisVideoArchivedMedia.getMediaForFragmentList( new GetMediaForFragmentListRequest() .withFragments(fragmentNumbers) .withStreamName(streamName)); log.info("GetMediaForFragmentList called on stream {} response {} requestId {}", streamName, result.getSdkHttpMetadata().getHttpStatusCode(), result.getSdkResponseMetadata().getRequestId()); final StreamingMkvReader mkvStreamReader = StreamingMkvReader.createDefault( new InputStreamParserByteSource(result.getPayload())); log.info("StreamingMkvReader created for stream {} ", streamName); try { mkvStreamReader.apply(this.elementVisitor); } catch (final MkvElementVisitException e) { log.error("Exception while accepting visitor {}", e); } } catch (final Throwable t) { 
log.error("Failure in GetMediaForFragmentListWorker for streamName {} {}", streamName, t); throw t; } finally { log.info("Exiting GetMediaWorker for stream {}", streamName); } } }
5,408
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/KinesisVideoExample.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.examples; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.profile.ProfileCredentialsProvider; import com.amazonaws.kinesisvideo.parser.ebml.MkvTypeInfos; import com.amazonaws.kinesisvideo.parser.mkv.MkvDataElement; import com.amazonaws.kinesisvideo.parser.mkv.MkvElementVisitException; import com.amazonaws.kinesisvideo.parser.mkv.MkvElementVisitor; import com.amazonaws.kinesisvideo.parser.mkv.MkvEndMasterElement; import com.amazonaws.kinesisvideo.parser.mkv.MkvStartMasterElement; import com.amazonaws.kinesisvideo.parser.mkv.visitors.CompositeMkvElementVisitor; import com.amazonaws.kinesisvideo.parser.utilities.FragmentMetadataVisitor; import com.amazonaws.kinesisvideo.parser.utilities.OutputSegmentMerger; import com.amazonaws.regions.Regions; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideo; import com.amazonaws.services.kinesisvideo.AmazonKinesisVideoClientBuilder; import com.amazonaws.services.kinesisvideo.model.StartSelector; import com.amazonaws.services.kinesisvideo.model.StartSelectorType; import lombok.Builder; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import java.io.BufferedOutputStream; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Paths; import 
java.nio.file.StandardOpenOption; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; /** * Example for integrating with Kinesis Video. * This example does: * 1. Create a stream, deleting and recreating if the stream of the same name already exists. * It sets the retention period of the created stream to 48 hours. * 2. Call PutMedia to stream video fragments into the stream. * 3. Simultaneously call GetMedia to stream video fragments out of the stream. * 4. It uses the StreamingMkvParser to parse the returned the stream and perform these steps: * 4.1 The GetMedia output stream has one mkv segment for each fragment. Merge the mkv segments that share track * information into a single segment. * 4.2 Log when we receive the start and end of each fragment including the fragment sequence number and * millis behind now. * * */ @Slf4j public class KinesisVideoExample extends KinesisVideoCommon { private static final long SLEEP_PERIOD_MILLIS = TimeUnit.SECONDS.toMillis(3); private static final int DATA_RETENTION_IN_HOURS = 48; private final AmazonKinesisVideo amazonKinesisVideo; private final InputStream inputStream; private final ExecutorService executorService; private PutMediaWorker putMediaWorker; private final StreamOps streamOps; private GetMediaProcessingArguments getMediaProcessingArguments; private boolean noSampleInputRequired = false; @Builder private KinesisVideoExample(Regions region, String streamName, AWSCredentialsProvider credentialsProvider, InputStream inputVideoStream, boolean noSampleInputRequired) { super(region, credentialsProvider, streamName); final AmazonKinesisVideoClientBuilder builder = AmazonKinesisVideoClientBuilder.standard(); configureClient(builder); this.amazonKinesisVideo = builder.build(); this.inputStream = inputVideoStream; this.streamOps = new StreamOps(region, streamName, credentialsProvider); this.executorService = Executors.newFixedThreadPool(2); 
this.noSampleInputRequired = noSampleInputRequired; } /** * This method executes the example. * * @throws InterruptedException the thread is interrupted while waiting for the stream to enter the correct state. * @throws IOException fails to read video from the input stream or write to the output stream. */ public void execute () throws InterruptedException, IOException { //Create the Kinesis Video stream if it doesn't exist. streamOps.createStreamIfNotExist(); getMediaProcessingArguments = GetMediaProcessingArguments.create(); try (GetMediaProcessingArguments getMediaProcessingArgumentsLocal = getMediaProcessingArguments) { //Start a GetMedia worker to read and process data from the Kinesis Video Stream. GetMediaWorker getMediaWorker = GetMediaWorker.create(getRegion(), getCredentialsProvider(), getStreamName(), new StartSelector().withStartSelectorType(StartSelectorType.NOW), amazonKinesisVideo, getMediaProcessingArgumentsLocal.getMkvElementVisitor()); executorService.submit(getMediaWorker); if (!noSampleInputRequired) { //Start a PutMedia worker to write data to a Kinesis Video Stream. putMediaWorker = PutMediaWorker.create(getRegion(), getCredentialsProvider(), getStreamName(), inputStream, amazonKinesisVideo); executorService.submit(putMediaWorker); } //Wait for the workers to finish. 
executorService.shutdown(); executorService.awaitTermination(120, TimeUnit.SECONDS); if (!executorService.isTerminated()) { log.warn("Shutting down executor service by force"); executorService.shutdownNow(); } else { log.info("Executor service is shutdown"); } } } public long getFragmentsPersisted() { return putMediaWorker.getNumFragmentsPersisted(); } public long getFragmentsRead() { return getMediaProcessingArguments.getFragmentCount(); } @RequiredArgsConstructor private static class LogVisitor extends MkvElementVisitor { private final FragmentMetadataVisitor fragmentMetadataVisitor; @Getter private long fragmentCount = 0; @Override public void visit(MkvStartMasterElement startMasterElement) throws MkvElementVisitException { if (MkvTypeInfos.EBML.equals(startMasterElement.getElementMetaData().getTypeInfo())) { fragmentCount++; log.info("Start of segment {} ", fragmentCount); } } @Override public void visit(MkvEndMasterElement endMasterElement) throws MkvElementVisitException { if (MkvTypeInfos.SEGMENT.equals(endMasterElement.getElementMetaData().getTypeInfo())) { log.info("End of segment {} fragment # {} millisBehindNow {} ", fragmentCount, fragmentMetadataVisitor.getCurrentFragmentMetadata().get().getFragmentNumberString(), fragmentMetadataVisitor.getMillisBehindNow().getAsLong()); } } @Override public void visit(MkvDataElement dataElement) throws MkvElementVisitException { } } private static class GetMediaProcessingArguments implements Closeable { private final BufferedOutputStream outputStream; private final LogVisitor logVisitor; @Getter private final CompositeMkvElementVisitor mkvElementVisitor; public GetMediaProcessingArguments(BufferedOutputStream outputStream, LogVisitor logVisitor, CompositeMkvElementVisitor mkvElementVisitor) { this.outputStream = outputStream; this.mkvElementVisitor = mkvElementVisitor; this.logVisitor = logVisitor; } public static GetMediaProcessingArguments create() throws IOException { //Fragment metadata visitor to extract Kinesis 
Video fragment metadata from the GetMedia stream. FragmentMetadataVisitor fragmentMetadataVisitor = FragmentMetadataVisitor.create(); //A visitor used to log as the GetMedia stream is processed. LogVisitor logVisitor = new LogVisitor(fragmentMetadataVisitor); //An OutputSegmentMerger to combine multiple segments that share track and ebml metadata into one //mkv segment. OutputStream fileOutputStream = Files.newOutputStream(Paths.get("kinesis_video_example_merged_output2.mkv"), StandardOpenOption.WRITE, StandardOpenOption.CREATE); BufferedOutputStream outputStream = new BufferedOutputStream(fileOutputStream); OutputSegmentMerger outputSegmentMerger = OutputSegmentMerger.createDefault(outputStream); //A composite visitor to encapsulate the three visitors. CompositeMkvElementVisitor mkvElementVisitor = new CompositeMkvElementVisitor(fragmentMetadataVisitor, outputSegmentMerger, logVisitor); return new GetMediaProcessingArguments(outputStream, logVisitor, mkvElementVisitor); } @Override public void close() throws IOException { outputStream.close(); } public long getFragmentCount() { return logVisitor.fragmentCount; } } }
5,409
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/lambda/H264FrameProcessor.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.examples.lambda; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.kinesisvideo.client.KinesisVideoClient; import com.amazonaws.kinesisvideo.client.mediasource.CameraMediaSourceConfiguration; import com.amazonaws.kinesisvideo.common.exception.KinesisVideoException; import com.amazonaws.kinesisvideo.java.client.KinesisVideoJavaClientFactory; import com.amazonaws.kinesisvideo.parser.examples.BoundingBoxImagePanel; import com.amazonaws.kinesisvideo.parser.mkv.Frame; import com.amazonaws.kinesisvideo.parser.mkv.FrameProcessException; import com.amazonaws.kinesisvideo.parser.rekognition.pojo.RekognizedOutput; import com.amazonaws.kinesisvideo.parser.utilities.FragmentMetadata; import com.amazonaws.kinesisvideo.parser.utilities.FrameVisitor; import com.amazonaws.kinesisvideo.parser.utilities.H264FrameDecoder; import com.amazonaws.kinesisvideo.parser.utilities.H264FrameEncoder; import com.amazonaws.kinesisvideo.parser.utilities.MkvTrackMetadata; import com.amazonaws.kinesisvideo.parser.utilities.ProducerStreamUtil; import com.amazonaws.kinesisvideo.producer.StreamInfo; import com.amazonaws.regions.Regions; import lombok.Setter; import lombok.extern.slf4j.Slf4j; import java.awt.image.BufferedImage; import java.util.List; import java.util.Optional; import static com.google.common.base.Preconditions.checkState; @Slf4j public class H264FrameProcessor implements 
FrameVisitor.FrameProcessor { private static final int VIDEO_TRACK_NO = 1; private static final int MILLIS_IN_SEC = 1000; private static final int OFFSET_DELTA_THRESHOLD = 10; private final BoundingBoxImagePanel boundingBoxImagePanel; private final Regions regionName; private RekognizedOutput currentRekognizedOutput = null; private H264FrameEncoder h264Encoder; private H264FrameDecoder h264Decoder; private KVSMediaSource KVSMediaSource; private boolean isKVSProducerInitialized = false; private boolean isEncoderInitialized = false; private final AWSCredentialsProvider credentialsProvider; private final String outputKvsStreamName; @Setter private List<RekognizedOutput> rekognizedOutputs; @Setter private int frameBitRate = 1024; private int frameNo = 0; private int currentWidth = 0; private int currentHeight = 0; private long keyFrameTimecode; private H264FrameProcessor(final AWSCredentialsProvider credentialsProvider, final String outputKvsStreamName, final Regions regionName) { this.boundingBoxImagePanel = new BoundingBoxImagePanel(); this.credentialsProvider = credentialsProvider; this.outputKvsStreamName = outputKvsStreamName; this.regionName = regionName; this.h264Decoder = new H264FrameDecoder(); } private void initializeKinesisVideoProducer(final int width, final int height, final byte[] cpd) { try { log.info("Initializing KVS Producer with stream name {} and region : {}", outputKvsStreamName, regionName); final KinesisVideoClient kinesisVideoClient = KinesisVideoJavaClientFactory .createKinesisVideoClient(regionName, credentialsProvider); final CameraMediaSourceConfiguration configuration = new CameraMediaSourceConfiguration.Builder() .withFrameRate(30) .withRetentionPeriodInHours(1) .withCameraId("/dev/video0") .withIsEncoderHardwareAccelerated(false) .withEncodingMimeType("video/avc") .withNalAdaptationFlags(StreamInfo.NalAdaptationFlags.NAL_ADAPTATION_ANNEXB_NALS) .withIsAbsoluteTimecode(true) .withEncodingBitRate(200000) .withHorizontalResolution(width) 
.withVerticalResolution(height) .withCodecPrivateData(cpd) .build(); this.KVSMediaSource = new KVSMediaSource( ProducerStreamUtil.toStreamInfo(outputKvsStreamName, configuration)); this.KVSMediaSource.configure(configuration); // register media source with Kinesis Video Client kinesisVideoClient.registerMediaSource(KVSMediaSource); } catch (final KinesisVideoException e) { log.error("Exception while initialize KVS Producer !", e); } } public void resetEncoder() { // Reset frame count for this fragment if (this.isEncoderInitialized) { this.frameNo = 0; this.h264Encoder.setFrameNumber(frameNo); } else { throw new IllegalStateException("Encoder not initialized !"); } } public static H264FrameProcessor create(final AWSCredentialsProvider credentialsProvider, final String rekognizedStreamName, final Regions regionName) { return new H264FrameProcessor(credentialsProvider, rekognizedStreamName, regionName); } /** * Process Rekognized outputs for each rekognized output. For each kinesis event record i.e for each * fragment number create a call getMediaForFragmentList, parse fragments, decode frame, draw bounding box, * encode frame, call KVS PutFrame. 
*/ @Override public void process(final Frame frame, final MkvTrackMetadata trackMetadata, final Optional<FragmentMetadata> fragmentMetadata) throws FrameProcessException { if (rekognizedOutputs != null) { // Process only for video frames if (frame.getTrackNumber() == VIDEO_TRACK_NO) { checkState(trackMetadata.getPixelWidth().isPresent() && trackMetadata.getPixelHeight().isPresent(), "Missing video resolution in track metadata !"); checkState(fragmentMetadata.isPresent(), "FragmentMetadata should be present !"); // Decode H264 frame final BufferedImage decodedFrame = h264Decoder.decodeH264Frame(frame, trackMetadata); log.debug("Decoded frame : {} with timecode : {} and fragment metadata : {}", frameNo, frame.getTimeCode(), fragmentMetadata.get()); // Get Rekognition results for this fragment number final Optional<RekognizedOutput> rekognizedOutput = findRekognizedOutputForFrame(frame, fragmentMetadata); // Render frame with bounding box final BufferedImage compositeFrame = renderFrame(decodedFrame, rekognizedOutput); // Encode to H264 frame final EncodedFrame encodedH264Frame = encodeH264Frame(compositeFrame); encodedH264Frame.setTimeCode(fragmentMetadata.get().getProducerSideTimestampMillis() + frame.getTimeCode()); log.debug("Encoded frame : {} with timecode : {}", frameNo, encodedH264Frame.getTimeCode()); // Call PutFrame for processed encodedFrame. 
putFrame(encodedH264Frame, trackMetadata.getPixelWidth().get().intValue(), trackMetadata.getPixelHeight().get().intValue()); frameNo++; } else { log.debug("Skipping audio frames !"); } } else { log.warn("Rekognition output is empty"); } } private void putFrame(final EncodedFrame encodedH264Frame, final int width, final int height) { if (!isKVSProducerInitialized) { log.info("Initializing JNI..."); initializeKinesisVideoProducer(width, height, encodedH264Frame.getCpd().array()); isKVSProducerInitialized = true; } KVSMediaSource.putFrameData(encodedH264Frame); log.debug("PutFrame successful for frame no : {}", frameNo); } private EncodedFrame encodeH264Frame(final BufferedImage bufferedImage) { try { initializeEncoder(bufferedImage); return h264Encoder.encodeFrame(bufferedImage); } catch (final Exception e) { throw new RuntimeException("Unable to encode the bufferedImage !", e); } } private void initializeEncoder(final BufferedImage bufferedImage) { // Initialize the encoder if it's not initialized or if the current frame resolution changes from previous one. if (!isEncoderInitialized || (currentWidth != bufferedImage.getWidth() || currentHeight != bufferedImage.getHeight())) { this.h264Encoder = new H264FrameEncoder(bufferedImage.getWidth(), bufferedImage.getHeight(), frameBitRate); this.isEncoderInitialized = true; this.currentWidth = bufferedImage.getWidth(); this.currentHeight = bufferedImage.getHeight(); } } private Optional<RekognizedOutput> findRekognizedOutputForFrame(final Frame frame, final Optional<FragmentMetadata> fragmentMetadata) { Optional<RekognizedOutput> rekognizedOutput = Optional.empty(); if (fragmentMetadata.isPresent()) { final String fragmentNumber = fragmentMetadata.get().getFragmentNumberString(); // Currently Rekognition samples frames and calculates the frame offset from the fragment start time. // So, in order to match with rekognition results, we have to compute the same frame offset from the // beginning of the fragments. 
if (frame.isKeyFrame()) { keyFrameTimecode = frame.getTimeCode(); log.debug("Key frame timecode : {}", keyFrameTimecode); } final long frameOffset = (frame.getTimeCode() > keyFrameTimecode) ? frame.getTimeCode() - keyFrameTimecode : 0; log.debug("Current Fragment Number : {} Computed Frame offset : {}", fragmentNumber, frameOffset); if (log.isDebugEnabled()) { this.rekognizedOutputs .forEach(p -> log.debug("frameOffsetInSeconds from Rekognition : {}", p.getFrameOffsetInSeconds())); } // Check whether the computed offset matches the rekognized output frame offset. Rekognition // output is in seconds whereas the frame offset is calculated in milliseconds. // NOTE: Rekognition frame offset doesn't exactly match with the computed offset below. So // take the closest one possible within 10ms delta. rekognizedOutput = this.rekognizedOutputs.stream() .filter(p -> isOffsetDeltaWithinThreshold(frameOffset, p)) .findFirst(); // Remove from the index once the RekognizedOutput is processed. Else it would increase the memory // footprint and blow up the JVM. if (rekognizedOutput.isPresent()) { log.debug("Computed offset matched with retrieved offset. 
Delta : {}", Math.abs(frameOffset - (rekognizedOutput.get().getFrameOffsetInSeconds() * MILLIS_IN_SEC))); if (this.rekognizedOutputs.isEmpty()) { log.debug("All frames processed for this fragment number : {}", fragmentNumber); } } } return rekognizedOutput; } private boolean isOffsetDeltaWithinThreshold(final long frameOffset, final RekognizedOutput output) { return Math.abs(frameOffset - (output.getFrameOffsetInSeconds() * MILLIS_IN_SEC)) <= OFFSET_DELTA_THRESHOLD; } @SuppressWarnings("Duplicates") private BufferedImage renderFrame(final BufferedImage bufferedImage, final Optional<RekognizedOutput> rekognizedOutput) { if (rekognizedOutput.isPresent()) { log.debug("Rendering Rekognized sampled frame..."); boundingBoxImagePanel.processRekognitionOutput(bufferedImage.createGraphics(), bufferedImage.getWidth(), bufferedImage.getHeight(), rekognizedOutput.get()); currentRekognizedOutput = rekognizedOutput.get(); } else if (currentRekognizedOutput != null) { log.debug("Rendering non-sampled frame with previous rekognized results..."); boundingBoxImagePanel.processRekognitionOutput(bufferedImage.createGraphics(), bufferedImage.getWidth(), bufferedImage.getHeight(), currentRekognizedOutput); } else { log.debug("Rendering frame without any rekognized results..."); } return bufferedImage; } }
5,410
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/lambda/KinesisVideoRekognitionLambdaExample.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.examples.lambda; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.kinesisvideo.parser.examples.GetMediaForFragmentListWorker; import com.amazonaws.kinesisvideo.parser.examples.StreamOps; import com.amazonaws.kinesisvideo.parser.kinesis.KinesisDataStreamsWorker; import com.amazonaws.kinesisvideo.parser.rekognition.pojo.DetectedFace; import com.amazonaws.kinesisvideo.parser.rekognition.pojo.FaceSearchResponse; import com.amazonaws.kinesisvideo.parser.rekognition.pojo.MatchedFace; import com.amazonaws.kinesisvideo.parser.rekognition.pojo.RekognitionOutput; import com.amazonaws.kinesisvideo.parser.rekognition.pojo.RekognizedFragmentsIndex; import com.amazonaws.kinesisvideo.parser.rekognition.pojo.RekognizedOutput; import com.amazonaws.kinesisvideo.parser.utilities.FrameVisitor; import com.amazonaws.regions.Regions; import com.amazonaws.services.kinesis.model.Record; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import com.amazonaws.services.lambda.runtime.events.KinesisEvent; import com.fasterxml.jackson.databind.ObjectMapper; import lombok.extern.slf4j.Slf4j; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Collections; import java.util.List; import java.util.Optional; import 
java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.stream.Collectors; /** * Lambda example integratong Rekognition outputs with Kinesis Video streams fragments. This examples is triggered * when Rekognition publishes events in Kinesis Data Streams (KDS). It gets the corresponding fragments from * Kinesis Video Streams (KVS), decodes each frame, overlays bounding box on top of faces detected, encodes the * frame again (using Jcodec) and then publishes into new derived Kinesis Video streams. The new stream can be * viewed using Kinesis Video Streams console or using HLS playback. * * NOTE: For Instructions to run this Lambda, please refer README. * NOTE: As this lambda executes resource intense decoding and encoding (using Jcodec which is not optimal * https://github.com/jcodec/jcodec#performance--quality-considerations), the new Kinesis Video stream might be delayed significantly. */ @Slf4j public final class KinesisVideoRekognitionLambdaExample implements RequestHandler<KinesisEvent, Context> { private static final int NUM_RETRIES = 10; private static final int KCL_INIT_DELAY_MILLIS = 10_000; private final ExecutorService kdsWorkers = Executors.newFixedThreadPool(100); private final AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain(); private final RekognizedFragmentsIndex rekognizedFragmentsIndex = new RekognizedFragmentsIndex(); private String inputKvsStreamName; private String outputKvsStreamName; private StreamOps kvsClient; private FragmentCheckpointManager fragmentCheckpointManager; private H264FrameProcessor h264FrameProcessor; /** * Main method to test the integration locally in desktop. * * NOTE: This uses a different approach to get the KDS events using KCL to trigger lambda. 
* */ public static void main(final String[] args) throws Exception { final KinesisVideoRekognitionLambdaExample KinesisVideoRekognitionLambdaExample = new KinesisVideoRekognitionLambdaExample(); KinesisVideoRekognitionLambdaExample.initialize( System.getProperty("KVSStreamName"), Regions.fromName(System.getenv("AWS_REGION"))); KinesisVideoRekognitionLambdaExample.startKDSWorker(System.getProperty("KDSStreamName")); Thread.sleep(KCL_INIT_DELAY_MILLIS); // Initial delay to wait for KCL to initialize while (true) { // For local desktop testing. KinesisVideoRekognitionLambdaExample.processRekognizedOutputs(); } } /** * Initialize method to set variables. */ private void initialize(final String kvsStreamName, final Regions regionName) { this.inputKvsStreamName = kvsStreamName; outputKvsStreamName = kvsStreamName + "-Rekognized"; kvsClient = new StreamOps(regionName, kvsStreamName, credentialsProvider); h264FrameProcessor = H264FrameProcessor.create(credentialsProvider, outputKvsStreamName, regionName); fragmentCheckpointManager = new DDBBasedFragmentCheckpointManager(kvsClient.getRegion(), credentialsProvider); log.info("Initialized with input KVS stream: {}, output {}, region : {}", inputKvsStreamName, outputKvsStreamName, regionName); } /** * Process Rekognized outputs for each rekognized output. For each kinesis event record i.e for each * fragment number create a call getMediaForFragmentList, parse fragments, decode frame, draw bounding box, * encode frame, call KVS PutFrame. 
* * @throws InterruptedException */ private void processRekognizedOutputs() throws InterruptedException { // Get the last processed fragment number if any final Optional<FragmentCheckpoint> lastFragmentNumber = fragmentCheckpointManager .getLastProcessedItem(inputKvsStreamName); String fragmentNumber = null; while (!rekognizedFragmentsIndex.isEmpty()) { final RekognizedFragmentsIndex.RekognizedFragment rekognizedFragment = rekognizedFragmentsIndex.poll(); fragmentNumber = rekognizedFragment.getFragmentNumber(); final List<RekognizedOutput> rekognizedOutputList = rekognizedFragment.getRekognizedOutputs(); if (lastFragmentNumber.isPresent() && (fragmentNumber.equals(lastFragmentNumber.get().getFragmentNumber()) || rekognizedFragment.getServerTime() <= lastFragmentNumber.get().getServerTime())) { // If the current fragment number is equal to the last processed fragment number or if the current // fragment's server time is older than or equal than last processed fragment's server time then // skip this fragment number and proceed to next fragment. log.info("Current fragment number : {} is already processed or older than last processed fragment. " + "So skipping..", fragmentNumber); continue; } try { final FrameVisitor frameVisitor = FrameVisitor.create(h264FrameProcessor); final GetMediaForFragmentListWorker worker = GetMediaForFragmentListWorker.create( kvsClient.getStreamName(), Collections.singletonList(fragmentNumber), kvsClient.getCredentialsProvider(), kvsClient.getRegion(), kvsClient.getAmazonKinesisVideo(), frameVisitor); h264FrameProcessor.setRekognizedOutputs(rekognizedOutputList); worker.run(); // For every fragment, the rekognition output needs to be set and the encoder needs to be reset // as the JCodec encoder always treats first frame as IDR frame h264FrameProcessor.resetEncoder(); log.info("Fragment {} processed successfully ...", fragmentNumber); // Once the current fragment number is processed save it as a checkpoint. 
fragmentCheckpointManager.saveCheckPoint(inputKvsStreamName, fragmentNumber, rekognizedFragment.getProducerTime(), rekognizedFragment.getServerTime()); } catch (final Exception e) { log.error("Error while processing fragment number: {}", fragmentNumber, e); } } } /** * Start Kinesis Data Streams worker. */ public void startKDSWorker(final String kdsStreamName) { final KinesisDataStreamsWorker kinesisDataStreamsWorker = KinesisDataStreamsWorker.create(Regions.US_WEST_2, credentialsProvider, kdsStreamName, rekognizedFragmentsIndex); kdsWorkers.submit(kinesisDataStreamsWorker); } /** * Handle request for each lambda event. * * @param kinesisEvent Each kinesis event which describes the Rekognition output. * @param context Lambda context * @return context */ @Override public Context handleRequest(final KinesisEvent kinesisEvent, final Context context) { try { initialize(System.getProperty("KVSStreamName"), Regions.fromName(System.getenv("AWS_REGION"))); loadProducerJNI(context); final List<Record> records = kinesisEvent.getRecords() .stream() .map(KinesisEvent.KinesisEventRecord::getKinesis) .collect(Collectors.toList()); processRecordsWithRetries(records); processRekognizedOutputs(); } catch (final Exception e) { log.error("Unable to process lambda request !. Exiting... ", e); } return context; } /** * Load pre-built binary of Kinesis Video Streams Producer JNI. 
* * @param context */ private void loadProducerJNI(final Context context) throws IOException { log.info("Context : {}", context); log.info("Working Directory = {}", System.getProperty("user.dir")); log.info("Java library path = {}", System.getProperty("java.library.path")); log.info("Class path %s", this.getClass().getProtectionDomain().getCodeSource().getLocation()); log.info("Loading JNI .so file.."); final ClassLoader classLoader = getClass().getClassLoader(); final File cityFile = new File(classLoader.getResource("libKinesisVideoProducerJNI.so").getFile()); System.load(cityFile.getAbsolutePath()); log.info("Loaded JNI from {}", cityFile.getAbsolutePath()); } /** * Process records performing retries as needed. Skip "poison pill" records. * * @param records Data records to be processed. */ private void processRecordsWithRetries(final List<Record> records) { for (final Record record : records) { boolean processedSuccessfully = false; for (int i = 0; i < NUM_RETRIES; i++) { try { log.info("Processing single record..."); processSingleRecord(record); processedSuccessfully = true; break; } catch (final Throwable t) { log.error("Caught throwable while processing record {}", record, t); } } if (!processedSuccessfully) { log.warn("Couldn't processRekognizedOutputs record {}. Skipping the record.", record); } } log.info("Processed all {} KDS records.", records.size()); } /** * Process a single record. * * @param record The record to be processed. 
*/ private void processSingleRecord(final Record record) { String data = null; final ObjectMapper mapper = new ObjectMapper(); try { final ByteBuffer buffer = record.getData(); data = new String(buffer.array(), "UTF-8"); final RekognitionOutput output = mapper.readValue(data, RekognitionOutput.class); // Get the fragment number from Rekognition Output final String fragmentNumber = output .getInputInformation() .getKinesisVideo() .getFragmentNumber(); final Double frameOffsetInSeconds = output .getInputInformation() .getKinesisVideo() .getFrameOffsetInSeconds(); final Double serverTimestamp = output .getInputInformation() .getKinesisVideo() .getServerTimestamp(); final Double producerTimestamp = output .getInputInformation() .getKinesisVideo() .getProducerTimestamp(); final double detectedTime = output.getInputInformation().getKinesisVideo().getServerTimestamp() + output.getInputInformation().getKinesisVideo().getFrameOffsetInSeconds() * 1000L; final RekognizedOutput rekognizedOutput = RekognizedOutput.builder() .fragmentNumber(fragmentNumber) .serverTimestamp(serverTimestamp) .producerTimestamp(producerTimestamp) .frameOffsetInSeconds(frameOffsetInSeconds) .detectedTime(detectedTime) .build(); // Add face search response final List<FaceSearchResponse> responses = output.getFaceSearchResponse(); responses.forEach(response -> { final DetectedFace detectedFace = response.getDetectedFace(); final List<MatchedFace> matchedFaces = response.getMatchedFaces(); final RekognizedOutput.FaceSearchOutput faceSearchOutput = RekognizedOutput.FaceSearchOutput.builder() .detectedFace(detectedFace) .matchedFaceList(matchedFaces) .build(); rekognizedOutput.addFaceSearchOutput(faceSearchOutput); }); // Add it to the index log.info("Found Rekognized results for fragment number : {}", fragmentNumber); rekognizedFragmentsIndex.add(fragmentNumber, producerTimestamp.longValue(), serverTimestamp.longValue(), rekognizedOutput); } catch (final NumberFormatException e) { log.warn("Record does 
not match sample record format. Ignoring record with data : {}", data, e); } catch (final Exception e) { log.error("Unable to process record !", e); } } }
5,411
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/lambda/KVSMediaSource.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.examples.lambda; import com.amazonaws.kinesisvideo.client.mediasource.CameraMediaSourceConfiguration; import com.amazonaws.kinesisvideo.client.mediasource.MediaSourceState; import com.amazonaws.kinesisvideo.common.exception.KinesisVideoException; import com.amazonaws.kinesisvideo.internal.client.mediasource.MediaSource; import com.amazonaws.kinesisvideo.internal.client.mediasource.MediaSourceConfiguration; import com.amazonaws.kinesisvideo.internal.client.mediasource.MediaSourceSink; import com.amazonaws.kinesisvideo.parser.utilities.ProducerStreamUtil; import com.amazonaws.kinesisvideo.producer.KinesisVideoFrame; import com.amazonaws.kinesisvideo.producer.StreamCallbacks; import com.amazonaws.kinesisvideo.producer.StreamInfo; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import javax.annotation.Nullable; import java.nio.ByteBuffer; @Slf4j @RequiredArgsConstructor public class KVSMediaSource implements MediaSource { private static final int FRAME_FLAG_KEY_FRAME = 1; private static final int FRAME_FLAG_NONE = 0; private static final long HUNDREDS_OF_NANOS_IN_MS = 10 * 1000; private static final long FRAME_DURATION_20_MS = 20L; private CameraMediaSourceConfiguration cameraMediaSourceConfiguration; private MediaSourceState mediaSourceState; private MediaSourceSink mediaSourceSink; private int frameIndex; private final StreamInfo streamInfo; private void 
putFrame(final KinesisVideoFrame kinesisVideoFrame) { try { mediaSourceSink.onFrame(kinesisVideoFrame); } catch (final KinesisVideoException ex) { throw new RuntimeException(ex); } } @Override public MediaSourceState getMediaSourceState() { return mediaSourceState; } @Override public MediaSourceConfiguration getConfiguration() { return cameraMediaSourceConfiguration; } @Override public StreamInfo getStreamInfo() throws KinesisVideoException { return streamInfo; } @Override public void initialize(final MediaSourceSink mediaSourceSink) { this.mediaSourceSink = mediaSourceSink; } @Override public void configure(final MediaSourceConfiguration configuration) { if (!(configuration instanceof CameraMediaSourceConfiguration)) { throw new IllegalStateException("Configuration must be an instance of CameraMediaSourceConfiguration"); } this.cameraMediaSourceConfiguration = (CameraMediaSourceConfiguration) configuration; this.frameIndex = 0; } @Override public void start() { mediaSourceState = MediaSourceState.RUNNING; } public void putFrameData(final EncodedFrame encodedFrame) { final int flags = encodedFrame.isKeyFrame() ? FRAME_FLAG_KEY_FRAME : FRAME_FLAG_NONE; if (encodedFrame.getByteBuffer() != null) { final KinesisVideoFrame frame = new KinesisVideoFrame( frameIndex++, flags, encodedFrame.getTimeCode() * HUNDREDS_OF_NANOS_IN_MS, encodedFrame.getTimeCode() * HUNDREDS_OF_NANOS_IN_MS, FRAME_DURATION_20_MS * HUNDREDS_OF_NANOS_IN_MS, encodedFrame.getByteBuffer()); if (frame.getSize() == 0) { return; } putFrame(frame); } else { log.info("Frame Data is null !"); } } @Override public void stop() { mediaSourceState = MediaSourceState.STOPPED; } @Override public boolean isStopped() { return mediaSourceState == MediaSourceState.STOPPED; } @Override public void free() { } @Override public MediaSourceSink getMediaSourceSink() { return mediaSourceSink; } @Nullable @Override public StreamCallbacks getStreamCallbacks() { return null; } }
5,412
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/lambda/FragmentCheckpointManager.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.examples.lambda;

import java.util.Optional;

/**
 * FragmentCheckpoint Manager interface which manages the checkpoints for last processed fragments.
 * Implementations persist, per KVS stream, the last fragment that was fully processed so that
 * reprocessing can be skipped after a restart or re-invocation.
 */
public interface FragmentCheckpointManager {

    /**
     * Get last processed fragment details from checkpoint for given stream name.
     *
     * @param streamName KVS Stream name
     * @return Optional of last processed fragment item if checkpoint exists. Empty otherwise
     */
    Optional<FragmentCheckpoint> getLastProcessedItem(String streamName);

    /**
     * Save last processed fragment details checkpoint for the given stream name.
     * Note the parameter order: producer time comes BEFORE server time.
     *
     * @param streamName KVS Stream name
     * @param fragmentNumber Last processed fragment's fragment number
     * @param producerTime Last processed fragment's producer time
     * @param serverTime Last processed fragment's server time
     */
    void saveCheckPoint(String streamName, String fragmentNumber, Long producerTime, Long serverTime);
}
5,413
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/lambda/DDBBasedFragmentCheckpointManager.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.examples.lambda; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.kinesisvideo.parser.utilities.DynamoDBHelper; import com.amazonaws.regions.Regions; import com.amazonaws.services.dynamodbv2.model.AttributeValue; import lombok.extern.slf4j.Slf4j; import java.util.Map; import java.util.Optional; /** * DynamdDB based FragmentCheckpoint Manager which manages the checkpoints for last processed fragments. */ @Slf4j public class DDBBasedFragmentCheckpointManager implements FragmentCheckpointManager { private static final String TABLE_NAME = "FragmentCheckpoint"; private static final String KVS_STREAM_NAME = "KVSStreamName"; private static final String FRAGMENT_NUMBER = "FragmentNumber"; private static final String SERVER_TIME = "ServerTime"; private static final String PRODUCER_TIME = "ProducerTime"; private static final String UPDATED_TIME = "UpdatedTime"; private final DynamoDBHelper dynamoDBHelper; public DDBBasedFragmentCheckpointManager(final Regions region, final AWSCredentialsProvider credentialsProvider) { dynamoDBHelper = new DynamoDBHelper(region, credentialsProvider); dynamoDBHelper.createTableIfDoesntExist(); } /** * Get last processed fragment details from checkpoint for given stream name. * * @param streamName KVS Stream name * @return Optional of last processed fragment item if checkpoint exists. 
Empty otherwise */ @Override public Optional<FragmentCheckpoint> getLastProcessedItem(final String streamName) { final Map<String, AttributeValue> result = dynamoDBHelper.getItem(streamName); if (result != null && result.containsKey(FRAGMENT_NUMBER)) { return Optional.of(new FragmentCheckpoint(streamName, result.get(FRAGMENT_NUMBER).getS(), Long.parseLong(result.get(PRODUCER_TIME).getN()), Long.parseLong(result.get(SERVER_TIME).getN()), Long.parseLong(result.get(UPDATED_TIME).getN()))); } return Optional.empty(); } /** * Save last processed fragment details checkpoint for the given stream name. * * @param streamName KVS Stream name * @param fragmentNumber Last processed fragment's fragment number * @param producerTime Last processed fragment's producer time * @param serverTime Last processed fragment's server time */ @Override public void saveCheckPoint(final String streamName, final String fragmentNumber, final Long producerTime, final Long serverTime) { if (fragmentNumber != null) { if (dynamoDBHelper.getItem(streamName) != null) { log.info("Checkpoint for stream name {} already exists. So updating checkpoint with fragment number: {}", streamName, fragmentNumber); dynamoDBHelper.updateItem(streamName, fragmentNumber, producerTime, serverTime, System.currentTimeMillis()); } else { log.info("Creating checkpoint for stream name {} with fragment number: {}", streamName, fragmentNumber); dynamoDBHelper.putItem(streamName, fragmentNumber, producerTime, serverTime, System.currentTimeMillis()); } } else { log.info("Fragment number is null. Skipping save checkpoint..."); } } }
5,414
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/lambda/FragmentCheckpoint.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.examples.lambda;

import lombok.Getter;
import lombok.RequiredArgsConstructor;

/**
 * Class for the lambda checkpoint stored in DDB.
 *
 * NOTE(review): {@code @RequiredArgsConstructor} generates the constructor in field declaration
 * order: (streamName, fragmentNumber, serverTime, producerTime, updatedTime). serverTime is
 * declared BEFORE producerTime here, while FragmentCheckpointManager.saveCheckPoint takes
 * producerTime before serverTime — callers constructing this class must not swap the two Long
 * arguments.
 */
@Getter
@RequiredArgsConstructor
public class FragmentCheckpoint {
    // KVS stream this checkpoint belongs to (used as the lookup key in DDB — confirm in helper).
    private final String streamName;
    // Fragment number of the last fully processed fragment.
    private final String fragmentNumber;
    // Server-side timestamp of the last processed fragment.
    private final Long serverTime;
    // Producer-side timestamp of the last processed fragment.
    private final Long producerTime;
    // Wall-clock time (epoch millis) at which this checkpoint row was last written.
    private final Long updatedTime;
}
5,415
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/examples/lambda/EncodedFrame.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.examples.lambda;

import lombok.Builder;
import lombok.Getter;
import lombok.Setter;

import java.nio.ByteBuffer;

/**
 * Container class for H264 encoded frame
 */
@Getter
@Builder
public class EncodedFrame {
    // Encoded frame payload; may be null when no data was produced for this frame.
    private final ByteBuffer byteBuffer;
    // Codec private data (e.g. SPS/PPS) associated with the frame.
    private final ByteBuffer cpd;
    // True when this frame is an IDR/key frame.
    private final boolean isKeyFrame;
    // Frame timecode; mutable because it may be (re)assigned after construction.
    @Setter
    private long timeCode;
}
5,416
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/processor/RekognitionStreamProcessor.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.rekognition.processor; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.kinesisvideo.parser.rekognition.pojo.RekognitionInput; import com.amazonaws.regions.Regions; import com.amazonaws.services.rekognition.AmazonRekognition; import com.amazonaws.services.rekognition.AmazonRekognitionClientBuilder; import com.amazonaws.services.rekognition.model.CreateStreamProcessorRequest; import com.amazonaws.services.rekognition.model.CreateStreamProcessorResult; import com.amazonaws.services.rekognition.model.DeleteStreamProcessorRequest; import com.amazonaws.services.rekognition.model.DeleteStreamProcessorResult; import com.amazonaws.services.rekognition.model.DescribeStreamProcessorRequest; import com.amazonaws.services.rekognition.model.DescribeStreamProcessorResult; import com.amazonaws.services.rekognition.model.FaceSearchSettings; import com.amazonaws.services.rekognition.model.KinesisDataStream; import com.amazonaws.services.rekognition.model.KinesisVideoStream; import com.amazonaws.services.rekognition.model.ListStreamProcessorsRequest; import com.amazonaws.services.rekognition.model.ListStreamProcessorsResult; import com.amazonaws.services.rekognition.model.ResourceNotFoundException; import com.amazonaws.services.rekognition.model.StartStreamProcessorRequest; import com.amazonaws.services.rekognition.model.StartStreamProcessorResult; import 
com.amazonaws.services.rekognition.model.StopStreamProcessorRequest; import com.amazonaws.services.rekognition.model.StopStreamProcessorResult; import com.amazonaws.services.rekognition.model.StreamProcessor; import com.amazonaws.services.rekognition.model.StreamProcessorInput; import com.amazonaws.services.rekognition.model.StreamProcessorOutput; import com.amazonaws.services.rekognition.model.StreamProcessorSettings; import com.amazonaws.services.rekognition.model.StreamProcessorStatus; import lombok.extern.slf4j.Slf4j; /** * Rekognition Stream Processor client class which acts as a wrapper for invoking corresponding Rekognition APIs. * */ @Slf4j public class RekognitionStreamProcessor { private String streamProcessorName; private String kinesisVideoStreamArn; private String kinesisDataStreamArn; private String roleArn; private String collectionId; private float matchThreshold; private String region; private AmazonRekognition rekognitionClient; private RekognitionStreamProcessor(final Regions regions, final AWSCredentialsProvider provider, final RekognitionInput rekognitionInput) { this.streamProcessorName = rekognitionInput.getStreamingProcessorName(); this.kinesisVideoStreamArn = rekognitionInput.getKinesisVideoStreamArn(); this.kinesisDataStreamArn = rekognitionInput.getKinesisDataStreamArn(); this.roleArn = rekognitionInput.getIamRoleArn(); this.collectionId = rekognitionInput.getFaceCollectionId(); this.matchThreshold = rekognitionInput.getMatchThreshold(); rekognitionClient = AmazonRekognitionClientBuilder .standard() .withRegion(regions) .withCredentials(provider) .build(); } public static RekognitionStreamProcessor create(final Regions regions, final AWSCredentialsProvider provider, final RekognitionInput rekognitionInput) { return new RekognitionStreamProcessor(regions, provider, rekognitionInput); } /** * Creates a StreamProcess if it doesn't exist already. 
Once the stream processor is created, it's started and then * described to know the result of the stream processor. */ public void process() { // Creates a stream processor if it doesn't already exist and start. try { final DescribeStreamProcessorResult result = describeStreamProcessor(); if (!result.getStatus().equals(StreamProcessorStatus.RUNNING.toString())) { startStreamProcessor(); } } catch (final ResourceNotFoundException e) { log.info("StreamProcessor with name : {} doesnt exist. Creating...", streamProcessorName); createStreamProcessor(); startStreamProcessor(); } // Describe the Stream Processor results to log the status. describeStreamProcessor(); } public CreateStreamProcessorResult createStreamProcessor() { final KinesisVideoStream kinesisVideoStream = new KinesisVideoStream() .withArn(kinesisVideoStreamArn); final StreamProcessorInput streamProcessorInput = new StreamProcessorInput() .withKinesisVideoStream(kinesisVideoStream); final KinesisDataStream kinesisDataStream = new KinesisDataStream() .withArn(kinesisDataStreamArn); final StreamProcessorOutput streamProcessorOutput = new StreamProcessorOutput() .withKinesisDataStream(kinesisDataStream); final FaceSearchSettings faceSearchSettings = new FaceSearchSettings() .withCollectionId(collectionId) .withFaceMatchThreshold(matchThreshold); final StreamProcessorSettings streamProcessorSettings = new StreamProcessorSettings() .withFaceSearch(faceSearchSettings); final CreateStreamProcessorResult createStreamProcessorResult = rekognitionClient.createStreamProcessor(new CreateStreamProcessorRequest() .withInput(streamProcessorInput) .withOutput(streamProcessorOutput) .withSettings(streamProcessorSettings) .withRoleArn(roleArn) .withName(streamProcessorName)); log.info("StreamProcessorArn : {} ", createStreamProcessorResult.getStreamProcessorArn()); return createStreamProcessorResult; } public StartStreamProcessorResult startStreamProcessor() { final StartStreamProcessorResult startStreamProcessorResult = 
rekognitionClient.startStreamProcessor(new StartStreamProcessorRequest().withName(streamProcessorName)); log.info("SdkResponseMetadata : {} ", startStreamProcessorResult.getSdkResponseMetadata()); return startStreamProcessorResult; } public StopStreamProcessorResult stopStreamProcessor() { final StopStreamProcessorResult stopStreamProcessorResult = rekognitionClient.stopStreamProcessor(new StopStreamProcessorRequest().withName(streamProcessorName)); log.info("SdkResponseMetadata : {} ", stopStreamProcessorResult.getSdkResponseMetadata()); return stopStreamProcessorResult; } public DeleteStreamProcessorResult deleteStreamProcessor() { final DeleteStreamProcessorResult deleteStreamProcessorResult = rekognitionClient .deleteStreamProcessor(new DeleteStreamProcessorRequest().withName(streamProcessorName)); log.info("SdkResponseMetadata : {} ", deleteStreamProcessorResult.getSdkResponseMetadata()); return deleteStreamProcessorResult; } public DescribeStreamProcessorResult describeStreamProcessor() { final DescribeStreamProcessorResult describeStreamProcessorResult = rekognitionClient .describeStreamProcessor(new DescribeStreamProcessorRequest().withName(streamProcessorName)); log.info("Arn : {}", describeStreamProcessorResult.getStreamProcessorArn()); log.info("Input kinesisVideo stream : {} ", describeStreamProcessorResult.getInput().getKinesisVideoStream().getArn()); log.info("Output kinesisData stream {} ", describeStreamProcessorResult.getOutput().getKinesisDataStream().getArn()); log.info("RoleArn {} ", describeStreamProcessorResult.getRoleArn()); log.info( "CollectionId {} ", describeStreamProcessorResult.getSettings().getFaceSearch().getCollectionId()); log.info("Status {} ", describeStreamProcessorResult.getStatus()); log.info("Status message {} ", describeStreamProcessorResult.getStatusMessage()); log.info("Creation timestamp {} ", describeStreamProcessorResult.getCreationTimestamp()); log.info("Last update timestamp {} ", 
describeStreamProcessorResult.getLastUpdateTimestamp()); return describeStreamProcessorResult; } public ListStreamProcessorsResult listStreamProcessor() { final ListStreamProcessorsResult listStreamProcessorsResult = rekognitionClient.listStreamProcessors(new ListStreamProcessorsRequest().withMaxResults(100)); for (final StreamProcessor streamProcessor : listStreamProcessorsResult.getStreamProcessors()) { log.info("StreamProcessor name {} ", streamProcessor.getName()); log.info("Status {} ", streamProcessor.getStatus()); } return listStreamProcessorsResult; } }
5,417
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/KinesisVideo.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for the "KinesisVideo" element of a Rekognition stream-processor result:
 * identifies the source stream (ARN), the fragment number and the timestamps
 * associated with the analyzed frame.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class KinesisVideo implements Serializable {

    private static final long serialVersionUID = 4018546116449531242L;

    @JsonProperty("StreamArn")
    private String streamArn;
    @JsonProperty("FragmentNumber")
    private String fragmentNumber;
    @JsonProperty("ServerTimestamp")
    private Double serverTimestamp;
    @JsonProperty("ProducerTimestamp")
    private Double producerTimestamp;
    @JsonProperty("FrameOffsetInSeconds")
    private Double frameOffsetInSeconds;
    // Catch-all for JSON fields that have no dedicated property above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("StreamArn")
    public String getStreamArn() {
        return streamArn;
    }

    @JsonProperty("StreamArn")
    public void setStreamArn(String streamArn) {
        this.streamArn = streamArn;
    }

    @JsonProperty("FragmentNumber")
    public String getFragmentNumber() {
        return fragmentNumber;
    }

    @JsonProperty("FragmentNumber")
    public void setFragmentNumber(String fragmentNumber) {
        this.fragmentNumber = fragmentNumber;
    }

    @JsonProperty("ServerTimestamp")
    public Double getServerTimestamp() {
        return serverTimestamp;
    }

    @JsonProperty("ServerTimestamp")
    public void setServerTimestamp(Double serverTimestamp) {
        this.serverTimestamp = serverTimestamp;
    }

    @JsonProperty("ProducerTimestamp")
    public Double getProducerTimestamp() {
        return producerTimestamp;
    }

    @JsonProperty("ProducerTimestamp")
    public void setProducerTimestamp(Double producerTimestamp) {
        this.producerTimestamp = producerTimestamp;
    }

    @JsonProperty("FrameOffsetInSeconds")
    public Double getFrameOffsetInSeconds() {
        return frameOffsetInSeconds;
    }

    @JsonProperty("FrameOffsetInSeconds")
    public void setFrameOffsetInSeconds(Double frameOffsetInSeconds) {
        this.frameOffsetInSeconds = frameOffsetInSeconds;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("streamArn", streamArn)
                .append("fragmentNumber", fragmentNumber)
                .append("serverTimestamp", serverTimestamp)
                .append("producerTimestamp", producerTimestamp)
                .append("frameOffsetInSeconds", frameOffsetInSeconds)
                .append("additionalProperties", additionalProperties).toString();
    }

    @Override
    public int hashCode() {
        // Append order kept identical to the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(frameOffsetInSeconds)
                .append(fragmentNumber)
                .append(streamArn)
                .append(additionalProperties)
                .append(producerTimestamp)
                .append(serverTimestamp).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof KinesisVideo)) {
            return false;
        }
        KinesisVideo rhs = (KinesisVideo) other;
        return new EqualsBuilder()
                .append(frameOffsetInSeconds, rhs.frameOffsetInSeconds)
                .append(fragmentNumber, rhs.fragmentNumber)
                .append(streamArn, rhs.streamArn)
                .append(additionalProperties, rhs.additionalProperties)
                .append(producerTimestamp, rhs.producerTimestamp)
                .append(serverTimestamp, rhs.serverTimestamp).isEquals();
    }
}
5,418
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/BoundingBox.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for a Rekognition bounding box. Values are fractional coordinates
 * relative to the frame ("Top"/"Left" origin with "Width"/"Height" extents),
 * as delivered in the JSON payload.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class BoundingBox implements Serializable {

    private static final long serialVersionUID = -3845089061670074615L;

    @JsonProperty("Height")
    private Double height;
    @JsonProperty("Width")
    private Double width;
    @JsonProperty("Left")
    private Double left;
    @JsonProperty("Top")
    private Double top;
    // Catch-all for JSON fields that have no dedicated property above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("Height")
    public Double getHeight() {
        return height;
    }

    @JsonProperty("Height")
    public void setHeight(Double height) {
        this.height = height;
    }

    @JsonProperty("Width")
    public Double getWidth() {
        return width;
    }

    @JsonProperty("Width")
    public void setWidth(Double width) {
        this.width = width;
    }

    @JsonProperty("Left")
    public Double getLeft() {
        return left;
    }

    @JsonProperty("Left")
    public void setLeft(Double left) {
        this.left = left;
    }

    @JsonProperty("Top")
    public Double getTop() {
        return top;
    }

    @JsonProperty("Top")
    public void setTop(Double top) {
        this.top = top;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("height", height)
                .append("width", width)
                .append("left", left)
                .append("top", top)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order kept identical to the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(height)
                .append(additionalProperties)
                .append(width)
                .append(left)
                .append(top)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof BoundingBox)) {
            return false;
        }
        BoundingBox rhs = (BoundingBox) other;
        return new EqualsBuilder()
                .append(height, rhs.height)
                .append(additionalProperties, rhs.additionalProperties)
                .append(width, rhs.width)
                .append(left, rhs.left)
                .append(top, rhs.top)
                .isEquals();
    }
}
5,419
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/StreamProcessorInformation.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for the "StreamProcessorInformation" element of a Rekognition result;
 * carries the stream processor's "Status" string.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class StreamProcessorInformation implements Serializable {

    private static final long serialVersionUID = -4043725115310892727L;

    @JsonProperty("Status")
    private String status;
    // Catch-all for JSON fields that have no dedicated property above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("Status")
    public String getStatus() {
        return status;
    }

    @JsonProperty("Status")
    public void setStatus(String status) {
        this.status = status;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("status", status)
                .append("additionalProperties", additionalProperties).toString();
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder()
                .append(status)
                .append(additionalProperties).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof StreamProcessorInformation)) {
            return false;
        }
        StreamProcessorInformation rhs = (StreamProcessorInformation) other;
        return new EqualsBuilder()
                .append(status, rhs.status)
                .append(additionalProperties, rhs.additionalProperties).isEquals();
    }
}
5,420
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/RekognitionInput.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import lombok.Builder;
import lombok.Value;

/**
 * Input for Rekognition stream processor.
 *
 * Immutable value holder (Lombok {@code @Value}) built via the generated
 * builder ({@code @Builder}): the Kinesis Video stream to analyze, the Kinesis
 * Data stream for output, the IAM role to assume, the face collection to match
 * against, the processor name, and the face-match confidence threshold.
 */
@Value
@Builder
public class RekognitionInput {
    // ARN of the Kinesis Video stream that is the processor's video source.
    private String kinesisVideoStreamArn;
    // ARN of the Kinesis Data stream that receives the processor's results.
    private String kinesisDataStreamArn;
    // IAM role ARN granting the processor access to the streams above.
    private String iamRoleArn;
    // Rekognition face collection ID to search faces against.
    private String faceCollectionId;
    // Name assigned to the stream processor.
    private String streamingProcessorName;
    // Minimum face-match confidence threshold.
    private Float matchThreshold;
}
5,421
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/InputInformation.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for the "InputInformation" element of a Rekognition result; wraps the
 * {@link KinesisVideo} source-stream details.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class InputInformation implements Serializable {

    private static final long serialVersionUID = 4448679967188698414L;

    @JsonProperty("KinesisVideo")
    private KinesisVideo kinesisVideo;
    // Catch-all for JSON fields that have no dedicated property above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("KinesisVideo")
    public KinesisVideo getKinesisVideo() {
        return kinesisVideo;
    }

    @JsonProperty("KinesisVideo")
    public void setKinesisVideo(KinesisVideo kinesisVideo) {
        this.kinesisVideo = kinesisVideo;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("kinesisVideo", kinesisVideo)
                .append("additionalProperties", additionalProperties).toString();
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder()
                .append(kinesisVideo)
                .append(additionalProperties).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof InputInformation)) {
            return false;
        }
        InputInformation rhs = (InputInformation) other;
        return new EqualsBuilder()
                .append(kinesisVideo, rhs.kinesisVideo)
                .append(additionalProperties, rhs.additionalProperties).isEquals();
    }
}
5,422
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/RekognizedOutput.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.util.ArrayList;
import java.util.List;

import lombok.Builder;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;

/**
 * Aggregated Rekognition result for one fragment/frame: the fragment it came
 * from, its timestamps, and the face-search outputs collected for it.
 */
@Builder
@Getter
@ToString
public class RekognizedOutput {
    // Fragment the result refers to.
    private String fragmentNumber;
    private Double frameOffsetInSeconds;
    private Double serverTimestamp;
    private Double producerTimestamp;
    // Mutable after construction (only field with a Lombok setter).
    @Setter
    private String faceId;
    private double detectedTime;
    // Starts empty via @Builder.Default; populated through addFaceSearchOutput.
    @Builder.Default
    private List<FaceSearchOutput> faceSearchOutputs = new ArrayList<>();

    /**
     * Appends one face-search result to this output.
     */
    public void addFaceSearchOutput(FaceSearchOutput faceSearchOutput) {
        this.faceSearchOutputs.add(faceSearchOutput);
    }

    /**
     * One detected face together with the faces it matched in the collection.
     */
    @Getter
    @Builder
    @ToString
    public static class FaceSearchOutput {
        private DetectedFace detectedFace;
        @Builder.Default
        private List<MatchedFace> matchedFaceList = new ArrayList<>();
    }
}
5,423
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/Face.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for a face record in a Rekognition collection match: its bounding box,
 * collection face/image identifiers and the detection confidence.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Face implements Serializable {

    private static final long serialVersionUID = 4320869723686571816L;

    @JsonProperty("BoundingBox")
    private BoundingBox boundingBox;
    @JsonProperty("FaceId")
    private String faceId;
    @JsonProperty("Confidence")
    private Double confidence;
    @JsonProperty("ImageId")
    private String imageId;
    @JsonProperty("ExternalImageId")
    private String externalImageId;
    // Catch-all for JSON fields that have no dedicated property above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("BoundingBox")
    public BoundingBox getBoundingBox() {
        return boundingBox;
    }

    @JsonProperty("BoundingBox")
    public void setBoundingBox(BoundingBox boundingBox) {
        this.boundingBox = boundingBox;
    }

    @JsonProperty("FaceId")
    public String getFaceId() {
        return faceId;
    }

    @JsonProperty("FaceId")
    public void setFaceId(String faceId) {
        this.faceId = faceId;
    }

    @JsonProperty("Confidence")
    public Double getConfidence() {
        return confidence;
    }

    @JsonProperty("Confidence")
    public void setConfidence(Double confidence) {
        this.confidence = confidence;
    }

    @JsonProperty("ImageId")
    public String getImageId() {
        return imageId;
    }

    @JsonProperty("ImageId")
    public void setImageId(String imageId) {
        this.imageId = imageId;
    }

    @JsonProperty("ExternalImageId")
    public String getExternalImageId() {
        return externalImageId;
    }

    @JsonProperty("ExternalImageId")
    public void setExternalImageId(String externalImageId) {
        this.externalImageId = externalImageId;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this).append("boundingBox", boundingBox)
                .append("faceId", faceId).append("confidence", confidence)
                .append("imageId", imageId)
                .append("externalImageId", externalImageId)
                .append("additionalProperties", additionalProperties).toString();
    }

    @Override
    public int hashCode() {
        // Append order kept identical to the original so hash values are unchanged.
        return new HashCodeBuilder().append(boundingBox)
                .append(imageId)
                .append(externalImageId)
                .append(faceId)
                .append(additionalProperties).append(confidence).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof Face)) {
            return false;
        }
        Face rhs = (Face) other;
        return new EqualsBuilder().append(boundingBox, rhs.boundingBox)
                .append(imageId, rhs.imageId)
                .append(externalImageId, rhs.externalImageId)
                .append(faceId, rhs.faceId).append(additionalProperties, rhs.additionalProperties)
                .append(confidence, rhs.confidence).isEquals();
    }
}
5,424
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/RekognitionOutput.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * Top-level POJO for a Rekognition stream-processor event: the input stream
 * details, the processor status, and the list of face-search responses.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class RekognitionOutput implements Serializable {

    private static final long serialVersionUID = -4243167512470204665L;

    @JsonProperty("InputInformation")
    private InputInformation inputInformation;
    @JsonProperty("StreamProcessorInformation")
    private StreamProcessorInformation streamProcessorInformation;
    @JsonProperty("FaceSearchResponse")
    private List<FaceSearchResponse> faceSearchResponse = null;
    // Catch-all for JSON fields that have no dedicated property above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("InputInformation")
    public InputInformation getInputInformation() {
        return inputInformation;
    }

    @JsonProperty("InputInformation")
    public void setInputInformation(InputInformation inputInformation) {
        this.inputInformation = inputInformation;
    }

    @JsonProperty("StreamProcessorInformation")
    public StreamProcessorInformation getStreamProcessorInformation() {
        return streamProcessorInformation;
    }

    @JsonProperty("StreamProcessorInformation")
    public void setStreamProcessorInformation(StreamProcessorInformation streamProcessorInformation) {
        this.streamProcessorInformation = streamProcessorInformation;
    }

    @JsonProperty("FaceSearchResponse")
    public List<FaceSearchResponse> getFaceSearchResponse() {
        return faceSearchResponse;
    }

    @JsonProperty("FaceSearchResponse")
    public void setFaceSearchResponse(List<FaceSearchResponse> faceSearchResponse) {
        this.faceSearchResponse = faceSearchResponse;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("inputInformation", inputInformation)
                .append("streamProcessorInformation", streamProcessorInformation)
                .append("faceSearchResponse", faceSearchResponse)
                .append("additionalProperties", additionalProperties).toString();
    }

    @Override
    public int hashCode() {
        // Append order kept identical to the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(inputInformation)
                .append(additionalProperties)
                .append(faceSearchResponse)
                .append(streamProcessorInformation).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof RekognitionOutput)) {
            return false;
        }
        RekognitionOutput rhs = (RekognitionOutput) other;
        return new EqualsBuilder()
                .append(inputInformation, rhs.inputInformation)
                .append(additionalProperties, rhs.additionalProperties)
                .append(faceSearchResponse, rhs.faceSearchResponse)
                .append(streamProcessorInformation, rhs.streamProcessorInformation).isEquals();
    }
}
5,425
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/Landmark.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for a facial landmark in a Rekognition result: an (X, Y) position and
 * the landmark "Type" string.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Landmark implements Serializable {

    private static final long serialVersionUID = 8108892948615651543L;

    @JsonProperty("X")
    private Double x;
    @JsonProperty("Y")
    private Double y;
    @JsonProperty("Type")
    private String type;
    // Catch-all for JSON fields that have no dedicated property above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("X")
    public Double getX() {
        return x;
    }

    @JsonProperty("X")
    public void setX(Double x) {
        this.x = x;
    }

    @JsonProperty("Y")
    public Double getY() {
        return y;
    }

    @JsonProperty("Y")
    public void setY(Double y) {
        this.y = y;
    }

    @JsonProperty("Type")
    public String getType() {
        return type;
    }

    @JsonProperty("Type")
    public void setType(String type) {
        this.type = type;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("x", x)
                .append("y", y)
                .append("type", type)
                .append("additionalProperties", additionalProperties).toString();
    }

    @Override
    public int hashCode() {
        // Append order kept identical to the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(additionalProperties)
                .append(type)
                .append(y)
                .append(x).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof Landmark)) {
            return false;
        }
        Landmark rhs = (Landmark) other;
        return new EqualsBuilder()
                .append(additionalProperties, rhs.additionalProperties)
                .append(type, rhs.type)
                .append(y, rhs.y)
                .append(x, rhs.x).isEquals();
    }
}
5,426
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/Pose.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for the head pose of a detected face: "Pitch", "Roll" and "Yaw" values
 * as delivered in the Rekognition JSON payload.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Pose implements Serializable {

    private static final long serialVersionUID = 5134659150043632590L;

    @JsonProperty("Pitch")
    private Double pitch;
    @JsonProperty("Roll")
    private Double roll;
    @JsonProperty("Yaw")
    private Double yaw;
    // Catch-all for JSON fields that have no dedicated property above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("Pitch")
    public Double getPitch() {
        return pitch;
    }

    @JsonProperty("Pitch")
    public void setPitch(Double pitch) {
        this.pitch = pitch;
    }

    @JsonProperty("Roll")
    public Double getRoll() {
        return roll;
    }

    @JsonProperty("Roll")
    public void setRoll(Double roll) {
        this.roll = roll;
    }

    @JsonProperty("Yaw")
    public Double getYaw() {
        return yaw;
    }

    @JsonProperty("Yaw")
    public void setYaw(Double yaw) {
        this.yaw = yaw;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("pitch", pitch)
                .append("roll", roll)
                .append("yaw", yaw)
                .append("additionalProperties", additionalProperties).toString();
    }

    @Override
    public int hashCode() {
        // Append order kept identical to the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(yaw)
                .append(roll)
                .append(additionalProperties)
                .append(pitch).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof Pose)) {
            return false;
        }
        Pose rhs = (Pose) other;
        return new EqualsBuilder()
                .append(yaw, rhs.yaw)
                .append(roll, rhs.roll)
                .append(additionalProperties, rhs.additionalProperties)
                .append(pitch, rhs.pitch).isEquals();
    }
}
5,427
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/Quality.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for face image quality in a Rekognition result: "Brightness" and
 * "Sharpness" values as delivered in the JSON payload.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Quality implements Serializable {

    private static final long serialVersionUID = 2898836203617659983L;

    @JsonProperty("Brightness")
    private Double brightness;
    @JsonProperty("Sharpness")
    private Double sharpness;
    // Catch-all for JSON fields that have no dedicated property above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("Brightness")
    public Double getBrightness() {
        return brightness;
    }

    @JsonProperty("Brightness")
    public void setBrightness(Double brightness) {
        this.brightness = brightness;
    }

    @JsonProperty("Sharpness")
    public Double getSharpness() {
        return sharpness;
    }

    @JsonProperty("Sharpness")
    public void setSharpness(Double sharpness) {
        this.sharpness = sharpness;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("brightness", brightness)
                .append("sharpness", sharpness)
                .append("additionalProperties", additionalProperties).toString();
    }

    @Override
    public int hashCode() {
        // Append order kept identical to the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(sharpness)
                .append(brightness)
                .append(additionalProperties).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof Quality)) {
            return false;
        }
        Quality rhs = (Quality) other;
        return new EqualsBuilder()
                .append(sharpness, rhs.sharpness)
                .append(brightness, rhs.brightness)
                .append(additionalProperties, rhs.additionalProperties).isEquals();
    }
}
5,428
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/MatchedFace.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for a face matched in the collection: the matched {@link Face} record
 * and its "Similarity" score.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class MatchedFace implements Serializable {

    private static final long serialVersionUID = -5269363379216197335L;

    @JsonProperty("Similarity")
    private Double similarity;
    @JsonProperty("Face")
    private Face face;
    // Catch-all for JSON fields that have no dedicated property above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("Similarity")
    public Double getSimilarity() {
        return similarity;
    }

    @JsonProperty("Similarity")
    public void setSimilarity(Double similarity) {
        this.similarity = similarity;
    }

    @JsonProperty("Face")
    public Face getFace() {
        return face;
    }

    @JsonProperty("Face")
    public void setFace(Face face) {
        this.face = face;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("similarity", similarity)
                .append("face", face)
                .append("additionalProperties", additionalProperties).toString();
    }

    @Override
    public int hashCode() {
        // Append order kept identical to the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(face)
                .append(additionalProperties)
                .append(similarity).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof MatchedFace)) {
            return false;
        }
        MatchedFace rhs = (MatchedFace) other;
        return new EqualsBuilder()
                .append(face, rhs.face)
                .append(additionalProperties, rhs.additionalProperties)
                .append(similarity, rhs.similarity).isEquals();
    }
}
5,429
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/DetectedFace.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO mapping the "DetectedFace" section of a Rekognition streaming
 * face-search record: the face found in the video frame, with its bounding
 * box, confidence, facial landmarks, pose, and image-quality measures.
 *
 * Unknown JSON properties are captured in {@code additionalProperties} via
 * {@link JsonAnySetter} so deserialization never fails on new fields.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class DetectedFace implements Serializable {

    private static final long serialVersionUID = 4389260550207592384L;

    // Location of the face within the frame.
    @JsonProperty("BoundingBox")
    private BoundingBox boundingBox;

    // Detection confidence (percentage) reported by Rekognition.
    @JsonProperty("Confidence")
    private Double confidence;

    // Facial landmark points (eyes, nose, mouth, ...); null when absent.
    @JsonProperty("Landmarks")
    private List<Landmark> landmarks = null;

    // Head pose (pitch/roll/yaw).
    @JsonProperty("Pose")
    private Pose pose;

    // Image quality metrics (brightness/sharpness).
    @JsonProperty("Quality")
    private Quality quality;

    // Catch-all for JSON properties not explicitly modeled above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("BoundingBox")
    public BoundingBox getBoundingBox() {
        return boundingBox;
    }

    @JsonProperty("BoundingBox")
    public void setBoundingBox(BoundingBox boundingBox) {
        this.boundingBox = boundingBox;
    }

    @JsonProperty("Confidence")
    public Double getConfidence() {
        return confidence;
    }

    @JsonProperty("Confidence")
    public void setConfidence(Double confidence) {
        this.confidence = confidence;
    }

    @JsonProperty("Landmarks")
    public List<Landmark> getLandmarks() {
        return landmarks;
    }

    @JsonProperty("Landmarks")
    public void setLandmarks(List<Landmark> landmarks) {
        this.landmarks = landmarks;
    }

    @JsonProperty("Pose")
    public Pose getPose() {
        return pose;
    }

    @JsonProperty("Pose")
    public void setPose(Pose pose) {
        this.pose = pose;
    }

    @JsonProperty("Quality")
    public Quality getQuality() {
        return quality;
    }

    @JsonProperty("Quality")
    public void setQuality(Quality quality) {
        this.quality = quality;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("boundingBox", boundingBox)
                .append("confidence", confidence)
                .append("landmarks", landmarks)
                .append("pose", pose)
                .append("quality", quality)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order must stay fixed; it determines the hash value.
        return new HashCodeBuilder()
                .append(pose)
                .append(boundingBox)
                .append(landmarks)
                .append(additionalProperties)
                .append(quality)
                .append(confidence)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof DetectedFace)) {
            return false;
        }
        final DetectedFace that = (DetectedFace) other;
        return new EqualsBuilder()
                .append(pose, that.pose)
                .append(boundingBox, that.boundingBox)
                .append(landmarks, that.landmarks)
                .append(additionalProperties, that.additionalProperties)
                .append(quality, that.quality)
                .append(confidence, that.confidence)
                .isEquals();
    }
}
5,430
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/RekognizedFragmentsIndex.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;

import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;

/**
 * Index which stores results for each fragment number from Rekognition output i.e Kinesis Data Streams.
 * It normalizes each kinesis event record (which is for every sampled frame of a fragment) and stores
 * per fragment number in memory. Rekognition output can be mapped to the KVS fragments using either real-time
 * GetMedia or archived GetMediaForFragmentList call. So internally RekognizedFragmentsIndex uses two different data
 * structures for this reason.
 *
 * 1. ConcurrentLinkedQueue: preserves FIFO arrival order of fragments, used with GetMediaForFragmentList.
 * 2. ConcurrentHashMap: indexes fragments by fragment number for O(1) lookup, used with real-time GetMedia
 *    (a linear scan of the queue would degrade as unprocessed items accumulate).
 *
 * The queue and the map always hold the same set of fragments; every mutation touches both under
 * {@code synchronized} to keep them consistent.
 */
@Slf4j
@ToString
public class RekognizedFragmentsIndex {

    // Fragment-number -> fragment index for O(1) lookup (GetMedia path).
    private final ConcurrentHashMap<String, RekognizedFragment> rekognizedOutputMap = new ConcurrentHashMap<>();

    // FIFO view of the same fragments (GetMediaForFragmentList path).
    private final ConcurrentLinkedQueue<RekognizedFragment> rekognizedOutputQueue = new ConcurrentLinkedQueue<>();

    /**
     * Add Rekognized output to the index for a fragment number and its other attributes like producer time,
     * server time etc. If the fragment number is already indexed, the output is appended to the existing
     * fragment; otherwise a new fragment entry is created in both the queue and the map.
     *
     * @param fragmentNumber Fragment Number of the fragment
     * @param producerTime Producer time of the fragment
     * @param serverTime Server time of the fragment
     * @param rekognizedOutput Rekognition output of the fragment
     */
    public synchronized void add(final String fragmentNumber, final Long producerTime, final Long serverTime,
                                 final RekognizedOutput rekognizedOutput) {
        if (rekognizedOutputMap.containsKey(fragmentNumber)) {
            final RekognizedFragment rekognizedFragment = rekognizedOutputMap.get(fragmentNumber);
            rekognizedFragment.addRekognizedOutput(rekognizedOutput);
        } else {
            final RekognizedFragment rekognizedFragment =
                    new RekognizedFragment(fragmentNumber, producerTime, serverTime);
            rekognizedFragment.addRekognizedOutput(rekognizedOutput);
            rekognizedOutputQueue.add(rekognizedFragment);
            rekognizedOutputMap.put(fragmentNumber, rekognizedFragment);
        }
        log.debug("Added rekognized fragment number {} to the index.", fragmentNumber);
    }

    /**
     * Polls the index for the first available rekognized fragment, removing it from both the
     * queue and the map.
     *
     * @return RekognizedFragment if one exists; null if the index is empty.
     */
    public synchronized RekognizedFragment poll() {
        final RekognizedFragment rekognizedFragment = rekognizedOutputQueue.poll();
        // Guard against an empty queue: poll() returns null and the previous code would
        // have thrown a NullPointerException dereferencing it.
        if (rekognizedFragment != null) {
            rekognizedOutputMap.remove(rekognizedFragment.getFragmentNumber());
        }
        return rekognizedFragment;
    }

    /**
     * Returns the number of fragments currently indexed.
     *
     * @return size of the index
     * @throws IllegalStateException if the internal queue and map have diverged (invariant violation)
     */
    public int size() {
        log.debug("Rekognized index Map size : {} queue size : {}",
                rekognizedOutputMap.size(), rekognizedOutputQueue.size());
        if (rekognizedOutputMap.size() != rekognizedOutputQueue.size()) {
            throw new IllegalStateException("RekognizedFragmentsIndex map and queue size doesn't match");
        }
        return this.rekognizedOutputQueue.size();
    }

    /**
     * Checks the index for any available rekognized fragment.
     *
     * @return true if the index is empty. false otherwise.
     */
    public synchronized boolean isEmpty() {
        return rekognizedOutputQueue.isEmpty();
    }

    /**
     * Gets the list of Rekognized Output for the given fragment number.
     *
     * @param fragmentNumber Input fragment number.
     * @return List of rekognized outputs if exists. null otherwise.
     */
    public synchronized List<RekognizedOutput> getRekognizedOutputList(final String fragmentNumber) {
        return (rekognizedOutputMap.containsKey(fragmentNumber))
                ? rekognizedOutputMap.get(fragmentNumber).getRekognizedOutputs() : null;
    }

    /**
     * Removes the rekognized fragment from the index (both map and queue) for the given fragment number.
     * No-op if the fragment number is not indexed.
     *
     * @param fragmentNumber Input fragment number.
     */
    public synchronized void remove(final String fragmentNumber) {
        if (rekognizedOutputMap.containsKey(fragmentNumber)) {
            final RekognizedFragment rekognizedFragment = rekognizedOutputMap.remove(fragmentNumber);
            rekognizedOutputQueue.remove(rekognizedFragment);
        }
    }

    /**
     * Value type holding one fragment's identity, timestamps and its accumulated Rekognition outputs.
     */
    @Getter
    @ToString
    @EqualsAndHashCode
    @RequiredArgsConstructor
    public static class RekognizedFragment {
        private final String fragmentNumber;
        private final Long producerTime;
        private final Long serverTime;
        private final List<RekognizedOutput> rekognizedOutputs = new ArrayList<>();

        public void addRekognizedOutput(final RekognizedOutput rekognizedOutput) {
            this.rekognizedOutputs.add(rekognizedOutput);
        }
    }
}
5,431
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/FaceType.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.awt.Color;

import lombok.Getter;
import lombok.RequiredArgsConstructor;

/**
 * Enum which lists down the sample types of the faces detected in given frame. This list can be expanded
 * based on the face type given in external image id while creating face collection.
 *
 * For more information please refer
 * https://docs.aws.amazon.com/rekognition/latest/dg/add-faces-to-collection-procedure.html
 */
@Getter
@RequiredArgsConstructor
public enum FaceType {
    TRUSTED (Color.GREEN, "Trusted"),
    CRIMINAL (Color.RED, "Criminal"),
    UNKNOWN (Color.YELLOW, "Unknown"),
    NOT_RECOGNIZED (Color.PINK, "NotRecognized"),
    ALL (Color.BLACK, "All");

    // Color used when rendering this face type's bounding box / label.
    private final Color color;
    // Prefix encoded in the face collection's external image id.
    private final String prefix;

    /**
     * Resolves a FaceType from its prefix string, case-insensitively.
     *
     * Uses {@link String#equalsIgnoreCase(String)} instead of the previous
     * toUpperCase().equals(...) comparison: it is locale-independent (avoids the
     * Turkish-I problem) and is null-safe, so a null input now yields UNKNOWN
     * instead of throwing NullPointerException.
     *
     * @param value prefix to look up (may be null)
     * @return the matching FaceType, or UNKNOWN when no prefix matches or value is null
     */
    public static FaceType fromString(final String value) {
        for (final FaceType faceType : values()) {
            if (faceType.getPrefix().equalsIgnoreCase(value)) {
                return faceType;
            }
        }
        return UNKNOWN;
    }
}
5,432
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/FaceSearchResponse.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO mapping one face-search result in a Rekognition streaming record:
 * the face detected in the frame and the collection faces it matched.
 *
 * Unknown JSON properties are captured in {@code additionalProperties} via
 * {@link JsonAnySetter} so deserialization never fails on new fields.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class FaceSearchResponse implements Serializable {

    private static final long serialVersionUID = -5645575235038800306L;

    // Face detected in the video frame.
    @JsonProperty("DetectedFace")
    private DetectedFace detectedFace;

    // Collection faces matching the detected face; null when absent.
    @JsonProperty("MatchedFaces")
    private List<MatchedFace> matchedFaces = null;

    // Catch-all for JSON properties not explicitly modeled above.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("DetectedFace")
    public DetectedFace getDetectedFace() {
        return detectedFace;
    }

    @JsonProperty("DetectedFace")
    public void setDetectedFace(DetectedFace detectedFace) {
        this.detectedFace = detectedFace;
    }

    @JsonProperty("MatchedFaces")
    public List<MatchedFace> getMatchedFaces() {
        return matchedFaces;
    }

    @JsonProperty("MatchedFaces")
    public void setMatchedFaces(List<MatchedFace> matchedFaces) {
        this.matchedFaces = matchedFaces;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("detectedFace", detectedFace)
                .append("matchedFaces", matchedFaces)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order must stay fixed; it determines the hash value.
        return new HashCodeBuilder()
                .append(matchedFaces)
                .append(detectedFace)
                .append(additionalProperties)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof FaceSearchResponse)) {
            return false;
        }
        final FaceSearchResponse that = (FaceSearchResponse) other;
        return new EqualsBuilder()
                .append(matchedFaces, that.matchedFaces)
                .append(detectedFace, that.detectedFace)
                .append(additionalProperties, that.additionalProperties)
                .isEquals();
    }
}
5,433
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/ParserByteSource.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.ebml;

/**
 * Represents a source of bytes that can be parsed by the EBML parser.
 * It could be backed by a ByteBuffer or a netty ByteBuf or an input stream that can support these operations.
 */
public interface ParserByteSource extends ParserBulkByteSource {
    /**
     * Reads and consumes a single byte from the source.
     *
     * @return the byte read, as an int
     *         (NOTE(review): the EOF/error convention — e.g. whether -1 is returned at end of
     *         stream — is not visible from this interface; confirm against implementations)
     */
    int readByte();

    /**
     * @return the number of bytes currently available to read without blocking
     */
    int available();

    /**
     * @return true if the end of the byte source has been reached; false otherwise
     */
    boolean eof();
}
5,434
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLTypeInfo.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.ebml;

import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;

/**
 * The type information for an EBML element.
 * This specifies the semantics of the EBML elements in an EBML document.
 * For example the TypeInfo for MKV will specify the semantics for the EBML elements that make up a MKV document.
 */
@Builder
@AllArgsConstructor(access = AccessLevel.PUBLIC)
@Getter
@ToString
@EqualsAndHashCode
public class EBMLTypeInfo {
    // The element's EBML id (as parsed from the stream).
    private final int id;
    // Human-readable element name, e.g. as defined by the document type (MKV).
    private final String name;
    // Nesting level of the element; a negative level marks a "global" element (see isGlobal()).
    private final int level;
    // The element's data type (see TYPE below).
    private final TYPE type;
    // True if the element may contain elements of its own type (recursive master elements).
    @Builder.Default
    private boolean isRecursive = false;

    /**
     * @return true if this element is global, i.e. may appear at any nesting level
     *         (encoded here as a negative level value)
     */
    public boolean isGlobal() {
        return level < 0;
    }

    /**
     * The seven EBML data types plus MASTER (an element containing child elements).
     */
    public enum TYPE {
        INTEGER,
        UINTEGER,
        FLOAT,
        STRING,
        UTF_8,
        DATE,
        MASTER,
        BINARY
    }
}
5,435
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLParserInternalElement.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.ebml;

import lombok.Getter;
import lombok.ToString;
import org.apache.commons.lang3.Validate;

import java.nio.ByteBuffer;
import java.util.Optional;

import static com.amazonaws.kinesisvideo.parser.ebml.EBMLUtils.UNKNOWN_LENGTH_VALUE;

/**
 * This class is used by the parser to represent an EBML Element internally.
 *
 * The element is a small state machine that advances strictly forward through
 * ElementReadState: NEW -> ID_DONE -> SIZE_DONE -> (CONTENT_READING | CONTENT_SKIPPING) -> FINISHED.
 * Each transition is guarded by a Validate.isTrue on the current state, so calls out of order fail fast.
 */
@ToString
class EBMLParserInternalElement {
    enum ElementReadState { NEW, ID_DONE, SIZE_DONE, CONTENT_READING, CONTENT_SKIPPING, FINISHED }

    // Offset in the overall stream where this element's id begins.
    private final long startingOffset;
    // Ordinal of this element in the stream (used to tag its metadata).
    @Getter
    private final long elementCount;

    ElementReadState currentElementReadState = ElementReadState.NEW;

    // EBML id of the element, set by readId().
    @Getter
    private int id;
    // Number of bytes the id occupied on the wire.
    private long idNumBytes;
    // Declared size of the element's data; UNKNOWN_LENGTH_VALUE (-1) for unknown-length elements.
    @Getter
    private long dataSize;
    // Number of bytes the size field occupied on the wire.
    private long dataSizeNumBytes;
    // Type metadata, present only if the id was resolved by the type-info provider.
    private Optional<EBMLElementMetaData> elementMetaData = Optional.empty();

    public EBMLParserInternalElement(long startingOffset, long elementCount) {
        this.startingOffset = startingOffset;
        this.elementCount = elementCount;
    }

    /**
     * Reads the element's id from the replayable byte source. NEW -> ID_DONE (via setId callback).
     */
    public void readId(TrackingReplayableIdAndSizeByteSource idAndSizeByteSource) {
        Validate.isTrue(currentElementReadState == ElementReadState.NEW);
        // Replay from the element's start so partial reads can be retried.
        idAndSizeByteSource.setReadOffsetForReplayBuffer(startingOffset);
        EBMLUtils.readId(idAndSizeByteSource, this::setId);
    }

    /**
     * Reads the element's size field. ID_DONE -> SIZE_DONE (via setSize callback).
     */
    public void readSize(TrackingReplayableIdAndSizeByteSource idAndSizeByteSource) {
        Validate.isTrue(currentElementReadState == ElementReadState.ID_DONE);
        // The size field starts right after the id bytes.
        idAndSizeByteSource.setReadOffsetForReplayBuffer(startingOffset + idNumBytes);
        EBMLUtils.readSize(idAndSizeByteSource, this::setSize);
    }

    /**
     * Looks up the element's type by id; on success populates elementMetaData.
     * Must be called in SIZE_DONE state.
     */
    public void updateTypeInfo(EBMLTypeInfoProvider typeInfoProvider) {
        Validate.isTrue(currentElementReadState == ElementReadState.SIZE_DONE);
        Optional<EBMLTypeInfo> typeInfo = typeInfoProvider.getType(id);
        if (typeInfo.isPresent()) {
            elementMetaData = Optional.of(new EBMLElementMetaData(typeInfo.get(), elementCount));
        }
    }

    /** @return true if the type-info provider recognized this element's id */
    public boolean isKnownType() {
        return elementMetaData.isPresent();
    }

    /** @return the resolved type info; only valid when isKnownType() is true */
    public EBMLTypeInfo getTypeInfo() {
        return elementMetaData.get().getTypeInfo();
    }

    /** @return the element's metadata; validates that the type was resolved */
    public EBMLElementMetaData getMetadata() {
        Validate.isTrue(elementMetaData.isPresent(), "EBML element metadata ");
        return elementMetaData.get();
    }

    /** SIZE_DONE -> CONTENT_READING: content will be delivered to callbacks. */
    public void startReadingContent() {
        Validate.isTrue(currentElementReadState == ElementReadState.SIZE_DONE);
        currentElementReadState = ElementReadState.CONTENT_READING;
    }

    /** SIZE_DONE -> CONTENT_SKIPPING: content will be discarded. */
    public void startSkippingContent() {
        Validate.isTrue(currentElementReadState == ElementReadState.SIZE_DONE);
        currentElementReadState = ElementReadState.CONTENT_SKIPPING;
    }

    /**
     * Delivers up to maxContentBytesInOnePass content bytes to the callback's onPartialContent.
     * Transitions to FINISHED once all dataSize bytes have been consumed (never for unknown-length
     * elements, which have no definite end).
     */
    public void readContent(TrackingReplayableIdAndSizeByteSource idAndSizeByteSource,
            ParserBulkByteSource bulkByteSource,
            EBMLParserCallbacks callbacks,
            int maxContentBytesInOnePass) {
        Validate.isTrue(currentElementReadState == ElementReadState.CONTENT_READING);
        long bytesToRead = getBytesToRead(idAndSizeByteSource, maxContentBytesInOnePass);
        // Only invoke the callback when there is actual content to hand over.
        if (bytesToRead > 0) {
            callbacks.onPartialContent(elementMetaData.get(), bulkByteSource, (int) bytesToRead);
        }
        if (!isUnknownLength()
                && idAndSizeByteSource.getTotalBytesRead() >= getContentStartOffset() + dataSize) {
            currentElementReadState = ElementReadState.FINISHED;
        }
    }

    /**
     * Discards content bytes by reading them into skipBuffer, up to skipBuffer.remaining() per call.
     * NOTE(review): unlike readContent, the FINISHED check here has no !isUnknownLength() guard;
     * with dataSize == -1 the condition is trivially satisfied — confirm whether unknown-length
     * elements can ever reach CONTENT_SKIPPING.
     */
    public void skipContent(TrackingReplayableIdAndSizeByteSource idAndSizeByteSource,
            ParserBulkByteSource bulkByteSource,
            ByteBuffer skipBuffer) {
        Validate.isTrue(currentElementReadState == ElementReadState.CONTENT_SKIPPING);
        long bytesToRead = getBytesToRead(idAndSizeByteSource, skipBuffer.remaining());
        if (bytesToRead > 0) {
            bulkByteSource.readBytes(skipBuffer, (int) bytesToRead);
        }
        if (idAndSizeByteSource.getTotalBytesRead() >= getContentStartOffset() + dataSize) {
            currentElementReadState = ElementReadState.FINISHED;
        }
    }

    /** @return true if the element declared the special unknown-length size */
    public boolean isUnknownLength() {
        return dataSize == UNKNOWN_LENGTH_VALUE;
    }

    /** @return the stream offset just past this element; only valid for known-length elements */
    public long endOffSet() {
        Validate.isTrue(!isUnknownLength());
        return getContentStartOffset() + dataSize;
    }

    // Content begins after the id and size fields.
    private long getContentStartOffset() {
        return startingOffset + idNumBytes + dataSizeNumBytes;
    }

    // Callback target for EBMLUtils.readId: records id + its width, NEW -> ID_DONE.
    private void setId(int idArg, long idNumBytes) {
        Validate.isTrue(currentElementReadState == ElementReadState.NEW);
        this.id = idArg;
        this.idNumBytes = idNumBytes;
        currentElementReadState = ElementReadState.ID_DONE;
    }

    // Callback target for EBMLUtils.readSize: records size + its width, ID_DONE -> SIZE_DONE.
    private void setSize(long sizeArg, long sizeNumBytes) {
        Validate.isTrue(currentElementReadState == ElementReadState.ID_DONE);
        this.dataSize = sizeArg;
        this.dataSizeNumBytes = sizeNumBytes;
        currentElementReadState = ElementReadState.SIZE_DONE;
    }

    // Remaining content bytes, capped by both the per-pass maximum and what the source has available.
    private long getBytesToRead(TrackingReplayableIdAndSizeByteSource idAndSizeByteSource,
            int maxContentBytesInOnePass) {
        long bytesToRead = dataSize + getContentStartOffset() - (idAndSizeByteSource.getTotalBytesRead());
        bytesToRead = Math.min(bytesToRead, maxContentBytesInOnePass);
        bytesToRead = Math.min(bytesToRead, idAndSizeByteSource.availableForContent());
        return bytesToRead;
    }
}
5,436
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLUtils.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

   http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.ebml;

import org.apache.commons.lang3.Validate;

import java.math.BigInteger;
import java.nio.ByteBuffer;

/**
 * Class used to parse EBML Ids and Sizes that make up the EBML element's meta data.
 *
 * EBML ids and sizes are variable-length integers whose width is encoded in the number of
 * leading zero bits of the first byte (see http://www.matroska.org/technical/specs/rfc/index.html).
 * The Tracking...ByteSource overloads are non-blocking: they return without invoking the
 * callback when not enough bytes are buffered yet, and the caller retries via the replay buffer.
 */
public class EBMLUtils {
    // Sentinel for an element whose declared length is "unknown" (all value bits set on the wire).
    public static final long UNKNOWN_LENGTH_VALUE = -1;

    /**
     * Max length for a EBML ID
     */
    public static final int EBML_ID_MAX_BYTES = 4;

    // Max length in bytes for an EBML size field.
    public static final int EBML_SIZE_MAX_BYTES = 8;

    private static final int BYTE_MASK = 0xFF;

    /** Default constructor to make checkstyle happy */
    private EBMLUtils() {
    }

    /**
     * constant for byte with first bit set.
     */
    private static final int BYTE_WITH_FIRST_BIT_SET = 0b10000000;

    /**
     * Reads a variable-length EBML id (1 to 4 bytes, marker bit retained) and hands the id and
     * its byte count to resultAcceptor. Returns without calling the acceptor if too few bytes
     * are buffered (caller will retry from the replay buffer).
     */
    static void readId(final TrackingReplayableIdAndSizeByteSource source, IdConsumer resultAcceptor) {
        if (!isEnoughBytes(source, 1)) {
            return;
        }
        final int firstByte = readByte(source);
        // NOTE(review): readByte masks with 0xFF, so firstByte is always 0-255 and this branch
        // appears unreachable; it also lacks a return after accept. Confirm the original intent
        // (possibly a leftover EOF check from an unmasked read).
        if (firstByte == -1) {
            resultAcceptor.accept(firstByte, 1);
        }
        Validate.isTrue(firstByte >= 0, "EBML Id has negative firstByte" + firstByte);

        // Number of leading zero bits = number of additional bytes beyond the first.
        final int numAdditionalBytes = getNumLeadingZeros(firstByte);
        if (!isEnoughBytes(source, numAdditionalBytes)) {
            return;
        }
        Validate.isTrue(numAdditionalBytes <= (EBML_ID_MAX_BYTES - 1),
                "Trying to decode an EBML ID and it wants " + numAdditionalBytes
                        + " more bytes, but IDs max out at 4 bytes. firstByte was " + firstByte);

        final int rest = (int) readEbmlValueNumber(source, numAdditionalBytes);
        // Ids keep the marker bit: splice the first byte above the trailing bytes unmodified.
        resultAcceptor.accept(firstByte << (numAdditionalBytes * Byte.SIZE) | rest, numAdditionalBytes + 1);
    }

    /**
     * Read a variable-size integer that encodes its own length (EBML "VINT"); used for element sizes.
     *
     * The width is determined by the leading zero bits of the first byte (zero zeroes = 1 byte,
     * one zero = 2 bytes, ...), followed by a marker one-bit and then the value bits:
     *
     *   Width Size Representation
     *   1     2^7  1xxx xxxx
     *   2     2^14 01xx xxxx xxxx xxxx
     *   3     2^21 001x xxxx xxxx xxxx xxxx xxxx
     *   4     2^28 0001 xxxx xxxx xxxx xxxx xxxx xxxx xxxx
     *
     * A size whose value bits are all ones means "unknown length" and is reported as -1.
     * Returns without calling the acceptor if too few bytes are buffered.
     *
     * @param source buffer containing chunks of data
     * @param resultAcceptor the callback called when the size of an ebml element is identified
     * @see "http://www.matroska.org/technical/specs/rfc/index.html"
     */
    private static void readEbmlInt(final TrackingReplayableIdAndSizeByteSource source, SizeConsumer resultAcceptor) {
        if (!isEnoughBytes(source, 1)) {
            return;
        }
        final int firstByte = readByte(source);
        Validate.isTrue(firstByte >= 0, "EBML Int has negative firstByte" + firstByte);
        final int size = getNumLeadingZeros(firstByte);
        if (!isEnoughBytes(source, size)) {
            return;
        }

        // Read the rest of the bytes
        final long rest = readEbmlValueNumber(source, size);
        // Strip the marker bit from the first byte and splice it above the tail bytes.
        long value = (firstByte & ~((byte) BYTE_WITH_FIRST_BIT_SET >> size)) << (size * Byte.SIZE) | rest;

        // Build the all-value-bits-set pattern for this width to detect "unknown length".
        long unknownValue = (0xff >> (size + 1));
        unknownValue <<= size * 8;
        unknownValue |= (1L << (size * 8)) - 1;
        // Special handing for unknown length
        if (value == unknownValue) {
            value = -1;
        }
        // Slap the first byte's value onto the front (with the first one-bit unset)
        resultAcceptor.accept(value, size + 1);
    }

    /**
     * Read an EBML integer value of varying length from the provided buffer.
     * Blocking variant: assumes the buffer already holds all needed bytes.
     * NOTE(review): unlike the TrackingReplayable overload, this variant performs no
     * unknown-length (-1) translation.
     *
     * @param byteBuffer The buffer to read from.
     * @return The integer value.
     * @see "http://www.matroska.org/technical/specs/rfc/index.html"
     */
    public static long readEbmlInt(final ByteBuffer byteBuffer) {
        final int firstByte = byteBuffer.get() & BYTE_MASK;
        Validate.isTrue(firstByte >= 0, "EBML Int has negative firstByte" + firstByte);
        final int size = getNumLeadingZeros(firstByte);

        // Read the rest of the bytes
        final long rest = readUnsignedIntegerSevenBytesOrLess(byteBuffer, size);

        // Slap the first byte's value onto the front (with the first one-bit unset)
        return ((firstByte & ~((byte) BYTE_WITH_FIRST_BIT_SET >> size)) << (size * Byte.SIZE) | rest);
    }

    /**
     * An alias for readEbmlInt that makes it clear we're reading a data size value.
     */
    static void readSize(final TrackingReplayableIdAndSizeByteSource source, SizeConsumer resultAcceptor) {
        readEbmlInt(source, resultAcceptor);
    }

    // Reads one byte from the source masked to 0-255.
    private static int readByte(final TrackingReplayableIdAndSizeByteSource source) {
        return source.readByte() & BYTE_MASK;
    }

    // True when len bytes can be read (and are staged into the replay buffer for retries).
    private static boolean isEnoughBytes(final TrackingReplayableIdAndSizeByteSource source, final int len) {
        return source.checkAndReadIntoReplayBuffer(len);
    }

    /**
     * Gets the number of leading zero bits in the specified integer as if it were a byte (to avoid a cast).
     * This is the "count leading zeroes" problem: http://en.wikipedia.org/wiki/Find_first_set
     *
     * @param b byte for which we need to find the number of leading zeros.
     *          This is typed as an int but should only have the lower 8 bits set.
     * @return number of leading zeros in the byte.
     */
    private static int getNumLeadingZeros(final int b) {
        return Integer.numberOfLeadingZeros(b) - (Integer.SIZE - Byte.SIZE);
    }

    /**
     * Read a big-endian unsigned number of the given byte width from the source.
     * EBML is big endian / network byte order; a zero-byte integer represents 0.
     *
     * @param source buffer containing chunks of data
     * @param size Size of the integer in bytes (0 to EBML_SIZE_MAX_BYTES)
     * @return long value
     */
    private static long readEbmlValueNumber(final TrackingReplayableIdAndSizeByteSource source, final long size) {
        Validate.inclusiveBetween(0L, (long) EBML_SIZE_MAX_BYTES, size,
                "Asked for a numeric value of invalid size " + size);
        Validate.isTrue(isEnoughBytes(source, (int) size));
        long value = 0;
        for (int i = 0; i < size; i++) {
            // readByte(buffer) returns a value from 0-255 as an int, already masked with 0xFF
            final int result = readByte(source);
            value = (value << Byte.SIZE) | result;
        }
        return value;
    }

    /**
     * A specialized method used to read a variable length unsigned integer of size 7 bytes or less.
     * (7-byte limit keeps the result unambiguous in a signed long.)
     *
     * @param byteBuffer The byteBuffer to read from.
     * @param size The size of bytes.
     * @return The long containing the integer value.
     */
    public static long readUnsignedIntegerSevenBytesOrLess(final ByteBuffer byteBuffer, long size) {
        Validate.inclusiveBetween(0L, (long) EBML_SIZE_MAX_BYTES - 1, size,
                "Asked for a numeric value of invalid size " + size);
        Validate.isTrue(byteBuffer.remaining() >= size);
        long value = 0;
        for (int i = 0; i < size; i++) {
            final int result = byteBuffer.get() & 0xFF;
            value = (value << Byte.SIZE) | result;
        }
        return value;
    }

    /**
     * Reads a big-endian two's-complement signed integer of up to 8 bytes.
     * The accumulator is seeded with -1 when the first byte's sign bit is set,
     * so subsequent shifts sign-extend correctly.
     *
     * @param byteBuffer The buffer to read from.
     * @param size Number of bytes to read (0 to EBML_SIZE_MAX_BYTES).
     * @return the signed value
     */
    public static long readDataSignedInteger(final ByteBuffer byteBuffer, long size) {
        Validate.inclusiveBetween(0L, (long) EBML_SIZE_MAX_BYTES, size,
                "Asked for a numeric value of invalid size " + size);
        Validate.isTrue(byteBuffer.remaining() >= size);
        long value = 0;
        for (int i = 0; i < size; i++) {
            final int result = byteBuffer.get() & 0xFF;
            if (i == 0) {
                boolean positive = (result & 0x80) == 0;
                if (!positive) {
                    // Negative: start from all ones so the shifted-in high bits stay set.
                    value = -1;
                }
            }
            value = (value << Byte.SIZE) | result;
        }
        return value;
    }

    /**
     * Reads a big-endian unsigned integer of up to 8 bytes as a non-negative BigInteger
     * (a long cannot represent the full 8-byte unsigned range).
     *
     * @param byteBuffer The buffer to read from.
     * @param size Number of bytes to read (0 to EBML_SIZE_MAX_BYTES).
     * @return the unsigned value
     */
    public static BigInteger readDataUnsignedInteger(final ByteBuffer byteBuffer, long size) {
        Validate.inclusiveBetween(0L, (long) EBML_SIZE_MAX_BYTES, size,
                "Asked for a numeric value of invalid size " + size);
        Validate.isTrue(byteBuffer.remaining() >= size);
        byte[] byteArray = new byte[(int) size];
        byteBuffer.get(byteArray);
        // signum=1 forces a non-negative interpretation of the magnitude bytes.
        return new BigInteger(1, byteArray);
    }

    // Receives a parsed EBML id together with the number of bytes it occupied.
    @FunctionalInterface
    interface IdConsumer {
        void accept(int val, long idNumBytes);
    }

    // Receives a parsed EBML size together with the number of bytes it occupied.
    @FunctionalInterface
    interface SizeConsumer {
        void accept(long val, long sizeNumBytes);
    }
}
5,437
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLParser.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://aws.amazon.com/apache2.0/
or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.ebml;

import lombok.AccessLevel;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.Validate;

import java.io.Closeable;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Stack;
import java.util.stream.Collectors;

/**
 * This class is used to parse a stream of EBML.
 * It is based on the ebml specification published by the Matroska Org
 * (https://github.com/Matroska-Org/ebml-specification/blob/master/specification.markdown).
 *
 * A new instance of this object is created for a new stream of EBML (the response stream for a GetMedia call).
 * A new instance is configured with a {@link EBMLTypeInfoProvider} that provides the semantics for the EBML document
 * being parsed and a {@link EBMLParserCallbacks} that receives callbacks as the parser detects different EBML elements.
 * Once an instance of the EBML parser is created, the parse method is invoked repeatedly.
 * A stream of EBML is encapsulated by a {@link ParserByteSource} and is an argument to the parse method.
 * The parse method is non-blocking and consumes all the data passed to it in each invocation.
 * As the parser detects EBML elements it invokes methods on the {@link EBMLParserCallbacks}.
 * Once all the data in an EBML stream has being sent to the parser, the method closeParser is called to shutdown
 * the parser.
 *
 * Implementation notes: the parser is a state machine driven by the read state of the current element
 * (id -> size -> content). Master elements (whose payload is a list of child elements) are tracked on a
 * stack and closed either when their declared size is exhausted or when an element of an equal/lower
 * level is encountered. This class is not thread-safe.
 */
@Slf4j
public class EBMLParser {
    private static final int BYTE_MASK = 0xFF;
    //TODO: have it be an argument, either constructor or method
    private static final int DEFAULT_MAX_CONTENT_BYTES_IN_ONE_PASS = 8192;

    private final EBMLTypeInfoProvider typeInfoProvider;
    // Stack of master elements whose child elements are currently being read.
    private final Stack<EBMLParserInternalElement> masterElements;
    private final EBMLParserCallbacks callbacks;
    // Upper bound on content bytes handed to callbacks in a single pass.
    private final int maxContentBytesInOnePass;
    // Scratch buffer reused to discard the content of unknown elements.
    private final ByteBuffer skipBuffer;

    // Running count of elements seen; used to stamp each element with a unique number.
    private long elementCount = 0;
    // Total bytes consumed from the underlying byte source across all parse() calls.
    private long totalBytesRead = 0;

    @Getter(AccessLevel.PACKAGE)
    private boolean endOfStream;
    @Getter(AccessLevel.PUBLIC)
    private boolean closed;

    private EBMLParserInternalElement currentElement;
    // Retains the raw id+size bytes of the current element so they can be replayed to callbacks.
    private ReplayIdAndSizeBuffer replayIdAndSizeBuffer;

    /**
     * Creates a parser with the default per-pass content limit.
     *
     * @param typeInfoProvider provides the semantics (type info) for element ids
     * @param callbacks receives start/content/end callbacks as elements are detected
     */
    public EBMLParser(EBMLTypeInfoProvider typeInfoProvider, EBMLParserCallbacks callbacks) {
        this(typeInfoProvider, callbacks, DEFAULT_MAX_CONTENT_BYTES_IN_ONE_PASS);
    }

    /**
     * Creates a parser.
     *
     * @param typeInfoProvider provides the semantics (type info) for element ids
     * @param callbacks receives start/content/end callbacks as elements are detected
     * @param maxContentBytesInOnePass maximum number of content bytes processed in one pass
     */
    public EBMLParser(EBMLTypeInfoProvider typeInfoProvider,
            EBMLParserCallbacks callbacks,
            int maxContentBytesInOnePass) {
        this.typeInfoProvider = typeInfoProvider;
        this.callbacks = callbacks;
        // The replay buffer only ever needs to hold one element's id and size bytes.
        this.replayIdAndSizeBuffer = new ReplayIdAndSizeBuffer(EBMLUtils.EBML_ID_MAX_BYTES + EBMLUtils.EBML_SIZE_MAX_BYTES);
        createNewCurrentElementInfo();
        this.masterElements = new Stack<>();
        this.maxContentBytesInOnePass = maxContentBytesInOnePass;
        this.skipBuffer = ByteBuffer.allocate(maxContentBytesInOnePass);
        log.debug("Creating EBMLParser with maxContentBytesInOnePass {}", this.maxContentBytesInOnePass);
    }

    /**
     * Consumes all currently available data from the byte source, invoking callbacks for each
     * element detected. Non-blocking: returns when the source has no more data buffered.
     * If end of stream is detected, the parser is closed.
     *
     * @param byteSource source of EBML bytes for this invocation
     */
    public void parse(ParserByteSource byteSource) {
        try (CallState callState = new CallState(byteSource)) {
            while (callState.shouldContinueParsing()) {
                if (log.isDebugEnabled()) {
                    log.debug("Current element read state {}", currentElement.currentElementReadState);
                }
                switch (currentElement.currentElementReadState) {
                    case NEW:
                        //check if any master elements are done because their end offset has been reached.
                        removeMasterElementsBasedOnSizeEnd();
                        currentElement.readId(callState);
                        break;
                    case ID_DONE:
                        currentElement.readSize(callState);
                        break;
                    case SIZE_DONE:
                        currentElement.updateTypeInfo(typeInfoProvider);
                        //check if any master elements are done because an equal or higher level
                        //element is reached.
                        removeMasterElementsBasedOnLevel();
                        //Invoke the onStartElement callback for elements with a known type.
                        if (currentElement.isKnownType()) {
                            log.debug("Invoking onStartElement for current element {}", currentElement);
                            callbacks.onStartElement(currentElement.getMetadata(),
                                    currentElement.getDataSize(),
                                    replayIdAndSizeBuffer.getByteBuffer(),
                                    this::currentElementPath);
                        }
                        startReadingContentBasedOnType();
                        break;
                    case CONTENT_READING:
                        Validate.isTrue(currentElement.isKnownType(), "We should read only from elements with known types");
                        currentElement.readContent(callState, callState, callbacks, maxContentBytesInOnePass);
                        break;
                    case CONTENT_SKIPPING:
                        Validate.isTrue(!currentElement.isKnownType(), "We should skip data for unknown elements only");
                        skipBuffer.rewind();
                        currentElement.skipContent(callState, callState, skipBuffer);
                        break;
                    case FINISHED:
                        invokeOnEndElementCallback(currentElement);
                        //check if any master elements are done because their end offset has been reached.
                        removeMasterElementsBasedOnSizeEnd();
                        createNewCurrentElementInfo();
                        break;
                    default:
                        throw new IllegalArgumentException("Unexpected ElementReadState");
                }
            }
            log.debug("Stopping parsing");
            if (endOfStream) {
                closeParser();
            }
        }
    }

    /**
     * Shuts down the parser, invoking the end-element callback for the current element (if any)
     * and for every master element still open on the stack. Idempotent.
     */
    public void closeParser() {
        if (!closed) {
            log.debug("Closing EBMLParser");
            //close current element
            if (currentElement != null && currentElement.isKnownType()) {
                log.debug("Closing with currentElement {} still set, invoking end element callback on it",
                        currentElement);
                invokeOnEndElementCallback(currentElement);
                currentElement = null;
            }
            log.debug("Closing with {} master elements on stack, invoking end element callback on them",
                    masterElements.size());
            while (!masterElements.isEmpty()) {
                EBMLParserInternalElement top = masterElements.pop();
                //TODO: see if we need to add a flag to indicate unclean close
                invokeOnEndElementCallback(top);
            }
        }
        closed = true;
    }

    // Transitions the current element into content reading (known types) or content skipping
    // (unknown types). Master elements are pushed onto the stack and a fresh current element begins.
    private void startReadingContentBasedOnType() {
        if (!currentElement.isKnownType()) {
            Validate.isTrue(!currentElement.isUnknownLength(), "Cannot skip element of unknown length");
            currentElement.startSkippingContent();
            log.warn("Will skip content for element number {} with unknown id {} datasize {}",
                    currentElement.getElementCount(),
                    currentElement.getId(),
                    currentElement.getDataSize());
        } else {
            if (currentElement.getTypeInfo().getType() == EBMLTypeInfo.TYPE.MASTER) {
                //Mark the master element as started although it will consist of
                //child elements. So, push it into the stack of master elements whose
                //contents are currently being read.
                currentElement.startReadingContent();
                masterElements.push(currentElement);
                createNewCurrentElementInfo();
            } else {
                //A non-master element should not have unknown or infinite length
                //as that prevents the parser finding the end of the element.
                Validate.isTrue(!currentElement.isUnknownLength(),
                        "A non-master element should not have unknown length");
                //start reading contents.
                currentElement.startReadingContent();
            }
        }
    }

    // Closes any open master elements whose level is >= the current element's level.
    // This handles master elements that declared the wrong size (such as segments).
    private void removeMasterElementsBasedOnLevel() {
        if (!currentElement.isKnownType()) {
            return;
        }
        // Global elements (level -1) may appear anywhere and never close a master element.
        if (!currentElement.getTypeInfo().isGlobal()) {
            while (!masterElements.isEmpty()) {
                EBMLParserInternalElement top = masterElements.peek();
                //For handling master elements with the wrong size (such as segments)
                //we should finish master elements of known size if another element of the same or
                //lower level is found.
                Validate.isTrue(currentElement.getElementCount() != top.getElementCount());
                if (currentElement.getTypeInfo().getLevel() <= top.getTypeInfo().getLevel()) {
                    log.debug("Removing master element {} based on level of current element {}", top, currentElement);
                    masterElements.pop();
                    invokeOnEndElementCallback(top);
                } else {
                    break;
                }
            }
        }
    }

    // Closes any open master elements of known size whose declared end offset has been passed.
    private void removeMasterElementsBasedOnSizeEnd() {
        if (!currentElement.isKnownType()) {
            return;
        }
        while (!masterElements.isEmpty()) {
            EBMLParserInternalElement top = masterElements.peek();
            if (!top.isUnknownLength()) {
                if (top.endOffSet() <= totalBytesRead) {
                    log.debug("Removing master element {} based on size end {}", top, totalBytesRead);
                    masterElements.pop();
                    invokeOnEndElementCallback(top);
                } else {
                    break;
                }
            } else {
                break;
            }
        }
    }

    // Snapshot of the metadata of the currently open master elements, outermost first.
    private List<EBMLElementMetaData> currentElementPath() {
        return masterElements.stream().map(EBMLParserInternalElement::getMetadata).collect(Collectors.toList());
    }

    private void invokeOnEndElementCallback(EBMLParserInternalElement finishedElement) {
        if (finishedElement.isKnownType()) {
            // Fixed log message: this is the end-element callback, not onStartElement.
            log.debug("Invoking onEndElement for element {}", finishedElement);
            callbacks.onEndElement(finishedElement.getMetadata(), this::currentElementPath);
        }
    }

    // Starts tracking a new element at the current stream offset and resets the replay buffer.
    private void createNewCurrentElementInfo() {
        currentElement = new EBMLParserInternalElement(totalBytesRead, elementCount);
        elementCount++;
        replayIdAndSizeBuffer.init(totalBytesRead);
    }

    /**
     * This internal class maintains state for each parse call.
     * It adapts the caller's {@link ParserByteSource} into the byte-source views the element
     * reader needs, funnelling id/size bytes through the replay buffer and tracking totals.
     */
    @RequiredArgsConstructor
    private class CallState implements Closeable, TrackingReplayableIdAndSizeByteSource, ParserBulkByteSource {
        // False once the source has run dry for this pass (or end of stream was reached).
        private boolean parseMore = true;
        private final ParserByteSource byteSource;
        @Setter
        private long readOffsetForReplayBuffer;

        @Override
        public long getTotalBytesRead() {
            return totalBytesRead;
        }

        @Override
        public void close() {
        }

        boolean shouldContinueParsing() {
            return !endOfStream && parseMore && callbacks.continueParsing();
        }

        @Override
        public boolean checkAndReadIntoReplayBuffer(int len) {
            if (parseMore) {
                int availableInReplayBuffer = replayIdAndSizeBuffer.availableAfter(readOffsetForReplayBuffer);
                Validate.isTrue(availableInReplayBuffer >= 0);
                if (availableInReplayBuffer >= len) {
                    return true;
                } else {
                    // Top up the replay buffer with as many of the missing bytes as the source has.
                    int numBytesToRead = len - availableInReplayBuffer;
                    parseMore = byteSource.available() >= numBytesToRead;
                    numBytesToRead = Math.min(numBytesToRead, byteSource.available());
                    for (int i = 0; i < numBytesToRead; i++) {
                        readFromByteSourceIntoReplayBuffer();
                    }
                }
            }
            return parseMore;
        }

        @Override
        public int readByte() {
            if (replayIdAndSizeBuffer.inReplayBuffer(readOffsetForReplayBuffer)) {
                // Replay a previously buffered id/size byte.
                byte result = replayIdAndSizeBuffer.getByteFromOffset(readOffsetForReplayBuffer);
                readOffsetForReplayBuffer++;
                return result & BYTE_MASK;
            } else {
                int result = readFromByteSourceIntoReplayBuffer();
                readOffsetForReplayBuffer++;
                return result;
            }
        }

        private int readFromByteSourceIntoReplayBuffer() {
            int result = byteSource.readByte();
            if (result == -1) {
                markAsEndofStream();
                return -1;
            }
            Validate.inclusiveBetween(0, BYTE_MASK, result);
            replayIdAndSizeBuffer.addByte((byte) result);
            totalBytesRead++;
            return result;
        }

        @Override
        public int availableForContent() {
            if (parseMore) {
                int availableBytes = byteSource.available();
                if (availableBytes == 0) {
                    // Nothing buffered; stop this pass rather than block.
                    parseMore = false;
                }
                return availableBytes;
            }
            return 0;
        }

        @Override
        public int readBytes(ByteBuffer dest, int numBytes) {
            int readBytes = byteSource.readBytes(dest, numBytes);
            if (readBytes == -1) {
                markAsEndofStream();
                return readBytes;
            }
            Validate.isTrue(readBytes >= 0);
            totalBytesRead += readBytes;
            return readBytes;
        }

        private void markAsEndofStream() {
            endOfStream = true;
            parseMore = false;
        }
    }
}
5,438
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/ReplayIdAndSizeBuffer.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import org.apache.commons.lang3.Validate; import java.nio.ByteBuffer; /** * Buffer used to replay the id and size of ebml elements in the ebml parser */ class ReplayIdAndSizeBuffer { private int count; private final byte[] buffer; private long startingOffset; ReplayIdAndSizeBuffer(int length) { buffer = new byte[length]; } void init(long startingOffset) { this.startingOffset = startingOffset; count = 0; } void addByte(byte val) { Validate.isTrue(count < buffer.length, "Too many bytes being added to replay buffer " + count); buffer[count] = val; count++; } boolean inReplayBuffer(long readOffset) { return (readOffset - startingOffset) < count; } int availableAfter(long readOffset) { return (int) Math.max(0, startingOffset + count - readOffset); } byte getByteFromOffset(long readOffset) { Validate.isTrue(inReplayBuffer(readOffset), "Attempt to read from replay buffer at " + readOffset + "while buffer starts at" + startingOffset + "and has " + count + "bytes"); return buffer[(int) (readOffset - startingOffset)]; } ByteBuffer getByteBuffer() { return ByteBuffer.wrap(buffer, 0, count); } }
5,439
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLTypeInfoProvider.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import java.util.Optional; /** * An interface that vends type information used by the EBML parser. */ public interface EBMLTypeInfoProvider { Optional<EBMLTypeInfo> getType(int id); }
5,440
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/InputStreamParserByteSource.java
/*
Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://aws.amazon.com/apache2.0/
or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/
package com.amazonaws.kinesisvideo.parser.ebml;

import org.apache.commons.lang3.Validate;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;

/**
 * An implementation of ParserByteSource that wraps an input stream containing the EBML stream.
 * <p>
 * Note: {@link #readBytes(ByteBuffer, int)} requires an array-backed destination buffer.
 */
public class InputStreamParserByteSource implements ParserByteSource {
    private static final int BUFFER_SIZE = 8192;
    // Read-ahead limit used by eof() to peek one byte and reset.
    private static final int MARK_SIZE = 100;

    private final BufferedInputStream bufferedInputStream;

    public InputStreamParserByteSource(final InputStream inputStream) {
        this(inputStream, BUFFER_SIZE);
    }

    InputStreamParserByteSource(final InputStream inputStream, final int bufferSize) {
        bufferedInputStream = new BufferedInputStream(inputStream, bufferSize);
        // eof() relies on mark/reset; BufferedInputStream always supports it.
        Validate.isTrue(bufferedInputStream.markSupported());
    }

    /**
     * Reads one byte, returning it as an int in [0, 255], or -1 at end of stream.
     */
    @Override
    public int readByte() {
        try {
            return bufferedInputStream.read();
        } catch (final IOException e) {
            throw new RuntimeException("Exception while reading byte from input stream!", e);
        }
    }

    /**
     * Returns an estimate of the bytes that can be read without blocking.
     */
    @Override
    public int available() {
        try {
            return bufferedInputStream.available();
        } catch (final IOException e) {
            throw new RuntimeException("Exception while getting available bytes from input stream!", e);
        }
    }

    /**
     * Reads up to numBytes into the destination buffer, advancing its position by the number
     * of bytes actually read. Returns that count, or -1 at end of stream.
     *
     * @param dest array-backed destination buffer with at least numBytes remaining
     * @param numBytes maximum number of bytes to read
     */
    @Override
    public int readBytes(final ByteBuffer dest, final int numBytes) {
        try {
            Validate.isTrue(dest.remaining() >= numBytes);
            // Bug fix: the write offset into the backing array must include arrayOffset().
            // Using dest.position() alone corrupts data for sliced/offset array-backed buffers.
            final int numBytesRead = bufferedInputStream.read(dest.array(),
                    dest.arrayOffset() + dest.position(),
                    numBytes);
            if (numBytesRead > 0) {
                dest.position(dest.position() + numBytesRead);
            }
            return numBytesRead;
        } catch (final IOException e) {
            throw new RuntimeException("Exception while reading bytes from input stream!", e);
        }
    }

    /**
     * Returns true if the stream is at end of file. Peeks one byte using mark/reset so the
     * stream position is unchanged when data remains.
     */
    @Override
    public boolean eof() {
        try {
            bufferedInputStream.mark(MARK_SIZE);
            if (readByte() == -1) {
                return true;
            }
            bufferedInputStream.reset();
            return false;
        } catch (final IOException e) {
            throw new RuntimeException("Exception while resetting input stream!", e);
        }
    }
}
5,441
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/TrackingReplayableIdAndSizeByteSource.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; /** * An interface representing a byte source that can replay the bytes for ebml id and size. * It also keeps track of the total number of bytes read by the parser from the underlying * byte source. * This wraps a parser byte source passed in by the user. */ interface TrackingReplayableIdAndSizeByteSource { boolean checkAndReadIntoReplayBuffer(int len); int readByte(); int availableForContent(); void setReadOffsetForReplayBuffer(long readOffset); long getTotalBytesRead(); }
5,442
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/ParserBulkByteSource.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import java.nio.ByteBuffer; /** * An interface representing a byte source for the parser which allows bulk reads. */ public interface ParserBulkByteSource { int readBytes(ByteBuffer dest, int numBytes); }
5,443
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLElementMetaData.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; /** * Class that represents the metadata of a single EBML element in an EBML stream. * It does not contain the actual data or content of the EBML element. */ @Getter @Builder @ToString @EqualsAndHashCode public class EBMLElementMetaData { private final EBMLTypeInfo typeInfo; private final long elementNumber; public boolean isMaster() { return typeInfo.getType() == EBMLTypeInfo.TYPE.MASTER; } }
5,444
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLParserCallbacks.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import java.nio.ByteBuffer; import java.util.List; /** * The EBMLParser invokes these callbacks when it detects the start, end and contents of elements. */ public interface EBMLParserCallbacks { void onStartElement(EBMLElementMetaData elementMetaData, long elementDataSize, ByteBuffer idAndSizeRawBytes, ElementPathSupplier pathSupplier); void onPartialContent(EBMLElementMetaData elementMetaData, ParserBulkByteSource bulkByteSource, int bytesToRead); void onEndElement(EBMLElementMetaData elementMetaData, ElementPathSupplier pathSupplier); default boolean continueParsing() { return true; } @FunctionalInterface interface ElementPathSupplier { List<EBMLElementMetaData> getAncestors(); } }
5,445
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/MkvTypeInfos.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; /** * Type information for the EBML elements in a Mkv file or stream. * This provides the semantics of the EBML elements in a Mkv file or stream. * This is based on the xml file hosted by the matroska org at * https://github.com/Matroska-Org/foundation-source/blob/master/spectool/specdata.xml (commit e074b5d) */ public class MkvTypeInfos { public static final EBMLTypeInfo EBML = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EBML").id(0x1A45DFA3).level(0).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo EBMLVERSION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EBMLVersion").id(0x4286).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EBMLREADVERSION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EBMLReadVersion").id(0x42F7).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EBMLMAXIDLENGTH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EBMLMaxIDLength").id(0x42F2).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EBMLMAXSIZELENGTH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EBMLMaxSizeLength").id(0x42F3).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DOCTYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DocType").id(0x4282).level(1).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo 
DOCTYPEVERSION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DocTypeVersion").id(0x4287).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DOCTYPEREADVERSION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DocTypeReadVersion").id(0x4285).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo VOID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Void").id(0xEC).level(-1).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CRC_32 = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CRC-32").id(0xBF).level(-1).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SIGNATURESLOT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignatureSlot").id(0x1B538667).level(-1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SIGNATUREALGO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignatureAlgo").id(0x7E8A).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SIGNATUREHASH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignatureHash").id(0x7E9A).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SIGNATUREPUBLICKEY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignaturePublicKey").id(0x7EA5).level(1).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SIGNATURE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Signature").id(0x7EB5).level(1).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SIGNATUREELEMENTS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignatureElements").id(0x7E5B).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SIGNATUREELEMENTLIST = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignatureElementList").id(0x7E7B).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SIGNEDELEMENT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignedElement").id(0x6532).level(3).type( 
EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SEGMENT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Segment").id(0x18538067).level(0).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SEEKHEAD = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SeekHead").id(0x114D9B74).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SEEK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Seek").id(0x4DBB).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SEEKID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SeekID").id(0x53AB).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SEEKPOSITION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SeekPosition").id(0x53AC).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo INFO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Info").id(0x1549A966).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SEGMENTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SegmentUID").id(0x73A4).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SEGMENTFILENAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SegmentFilename").id(0x7384).level(2).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo PREVUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrevUID").id(0x3CB923).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo PREVFILENAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrevFilename").id(0x3C83AB).level(2).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo NEXTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("NextUID").id(0x3EB923).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo NEXTFILENAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("NextFilename").id(0x3E83BB).level(2).type( 
EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo SEGMENTFAMILY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SegmentFamily").id(0x4444).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CHAPTERTRANSLATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTranslate").id(0x6924).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CHAPTERTRANSLATEEDITIONUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTranslateEditionUID").id(0x69FC).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERTRANSLATECODEC = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTranslateCodec").id(0x69BF).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERTRANSLATEID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTranslateID").id(0x69A5).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo TIMECODESCALE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TimecodeScale").id(0x2AD7B1).level(2).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Duration").id(0x4489).level(2).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo DATEUTC = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DateUTC").id(0x4461).level(2).type( EBMLTypeInfo.TYPE.DATE).build(); public static final EBMLTypeInfo TITLE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Title").id(0x7BA9).level(2).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo MUXINGAPP = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MuxingApp").id(0x4D80).level(2).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo WRITINGAPP = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("WritingApp").id(0x5741).level(2).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo CLUSTER = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("Cluster").id(0x1F43B675).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TIMECODE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Timecode").id(0xE7).level(2).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SILENTTRACKS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SilentTracks").id(0x5854).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SILENTTRACKNUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SilentTrackNumber").id(0x58D7).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo POSITION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Position").id(0xA7).level(2).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo PREVSIZE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrevSize").id(0xAB).level(2).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SIMPLEBLOCK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SimpleBlock").id(0xA3).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo BLOCKGROUP = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockGroup").id(0xA0).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo BLOCK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Block").id(0xA1).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo BLOCKVIRTUAL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockVirtual").id(0xA2).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo BLOCKADDITIONS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockAdditions").id(0x75A1).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo BLOCKMORE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockMore").id(0xA6).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo BLOCKADDID = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockAddID").id(0xEE).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo BLOCKADDITIONAL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockAdditional").id(0xA5).level(5).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo BLOCKDURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockDuration").id(0x9B).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo REFERENCEPRIORITY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferencePriority").id(0xFA).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo REFERENCEBLOCK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferenceBlock").id(0xFB).level(3).type( EBMLTypeInfo.TYPE.INTEGER).build(); public static final EBMLTypeInfo REFERENCEVIRTUAL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferenceVirtual").id(0xFD).level(3).type( EBMLTypeInfo.TYPE.INTEGER).build(); public static final EBMLTypeInfo CODECSTATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecState").id(0xA4).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo DISCARDPADDING = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DiscardPadding").id(0x75A2).level(3).type( EBMLTypeInfo.TYPE.INTEGER).build(); public static final EBMLTypeInfo SLICES = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Slices").id(0x8E).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TIMESLICE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TimeSlice").id(0xE8).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo LACENUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("LaceNumber").id(0xCC).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo FRAMENUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FrameNumber").id(0xCD).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final 
EBMLTypeInfo BLOCKADDITIONID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockAdditionID").id(0xCB).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DELAY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Delay").id(0xCE).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SLICEDURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SliceDuration").id(0xCF).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo REFERENCEFRAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferenceFrame").id(0xC8).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo REFERENCEOFFSET = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferenceOffset").id(0xC9).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo REFERENCETIMECODE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferenceTimeCode").id(0xCA).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo ENCRYPTEDBLOCK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EncryptedBlock").id(0xAF).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo TRACKS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Tracks").id(0x1654AE6B).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TRACKENTRY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackEntry").id(0xAE).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TRACKNUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackNumber").id(0xD7).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackUID").id(0x73C5).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKTYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackType").id(0x83).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); 
// TrackEntry flags and caching hints (level 3 under TrackEntry).
// NOTE(review): IDs and levels appear to mirror the Matroska element table —
// confirm against the Matroska specification (RFC 9559) before editing.
public static final EBMLTypeInfo FLAGENABLED = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FlagEnabled").id(0xB9).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo FLAGDEFAULT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FlagDefault").id(0x88).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo FLAGFORCED = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FlagForced").id(0x55AA).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo FLAGLACING = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FlagLacing").id(0x9C).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo MINCACHE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MinCache").id(0x6DE7).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo MAXCACHE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MaxCache").id(0x6DF8).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo DEFAULTDURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DefaultDuration").id(0x23E383).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo DEFAULTDECODEDFIELDDURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DefaultDecodedFieldDuration").id(0x234E7A).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo TRACKTIMECODESCALE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackTimecodeScale").id(0x23314F).level(3).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo TRACKOFFSET = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackOffset").id(0x537F).level(3).type(EBMLTypeInfo.TYPE.INTEGER).build();
public static final EBMLTypeInfo MAXBLOCKADDITIONID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MaxBlockAdditionID").id(0x55EE).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
// Track identification and codec description.
public static final EBMLTypeInfo NAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Name").id(0x536E).level(3).type(EBMLTypeInfo.TYPE.UTF_8).build();
public static final EBMLTypeInfo LANGUAGE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Language").id(0x22B59C).level(3).type(EBMLTypeInfo.TYPE.STRING).build();
public static final EBMLTypeInfo CODECID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecID").id(0x86).level(3).type(EBMLTypeInfo.TYPE.STRING).build();
public static final EBMLTypeInfo CODECPRIVATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecPrivate").id(0x63A2).level(3).type(EBMLTypeInfo.TYPE.BINARY).build();
public static final EBMLTypeInfo CODECNAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecName").id(0x258688).level(3).type(EBMLTypeInfo.TYPE.UTF_8).build();
public static final EBMLTypeInfo ATTACHMENTLINK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("AttachmentLink").id(0x7446).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CODECSETTINGS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecSettings").id(0x3A9697).level(3).type(EBMLTypeInfo.TYPE.UTF_8).build();
public static final EBMLTypeInfo CODECINFOURL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecInfoURL").id(0x3B4040).level(3).type(EBMLTypeInfo.TYPE.STRING).build();
public static final EBMLTypeInfo CODECDOWNLOADURL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecDownloadURL").id(0x26B240).level(3).type(EBMLTypeInfo.TYPE.STRING).build();
public static final EBMLTypeInfo CODECDECODEALL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecDecodeAll").id(0xAA).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo TRACKOVERLAY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackOverlay").id(0x6FAB).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CODECDELAY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecDelay").id(0x56AA).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo SEEKPREROLL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SeekPreRoll").id(0x56BB).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
// TrackTranslate mapping (level 3 master with level 4 children).
public static final EBMLTypeInfo TRACKTRANSLATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackTranslate").id(0x6624).level(3).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo TRACKTRANSLATEEDITIONUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackTranslateEditionUID").id(0x66FC).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo TRACKTRANSLATECODEC = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackTranslateCodec").id(0x66BF).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo TRACKTRANSLATETRACKID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackTranslateTrackID").id(0x66A5).level(4).type(EBMLTypeInfo.TYPE.BINARY).build();
// Video settings (level 3 master with level 4 children).
public static final EBMLTypeInfo VIDEO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Video").id(0xE0).level(3).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo FLAGINTERLACED = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FlagInterlaced").id(0x9A).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo FIELDORDER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FieldOrder").id(0x9D).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo STEREOMODE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("StereoMode").id(0x53B8).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo ALPHAMODE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("AlphaMode").id(0x53C0).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo OLDSTEREOMODE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("OldStereoMode").id(0x53B9).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo PIXELWIDTH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelWidth").id(0xB0).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo PIXELHEIGHT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelHeight").id(0xBA).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo PIXELCROPBOTTOM = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelCropBottom").id(0x54AA).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo PIXELCROPTOP = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelCropTop").id(0x54BB).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo PIXELCROPLEFT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelCropLeft").id(0x54CC).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo PIXELCROPRIGHT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelCropRight").id(0x54DD).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo DISPLAYWIDTH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DisplayWidth").id(0x54B0).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo DISPLAYHEIGHT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DisplayHeight").id(0x54BA).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo DISPLAYUNIT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DisplayUnit").id(0x54B2).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo ASPECTRATIOTYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("AspectRatioType").id(0x54B3).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo COLOURSPACE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ColourSpace").id(0x2EB524).level(4).type(EBMLTypeInfo.TYPE.BINARY).build();
public static final EBMLTypeInfo GAMMAVALUE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("GammaValue").id(0x2FB523).level(4).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo FRAMERATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FrameRate").id(0x2383E3).level(4).type(EBMLTypeInfo.TYPE.FLOAT).build();
// Colour metadata (level 4 master with level 5 children).
public static final EBMLTypeInfo COLOUR = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Colour").id(0x55B0).level(4).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo MATRIXCOEFFICIENTS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MatrixCoefficients").id(0x55B1).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo BITSPERCHANNEL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BitsPerChannel").id(0x55B2).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CHROMASUBSAMPLINGHORZ = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChromaSubsamplingHorz").id(0x55B3).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CHROMASUBSAMPLINGVERT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChromaSubsamplingVert").id(0x55B4).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CBSUBSAMPLINGHORZ = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CbSubsamplingHorz").id(0x55B5).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CBSUBSAMPLINGVERT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CbSubsamplingVert").id(0x55B6).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CHROMASITINGHORZ = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChromaSitingHorz").id(0x55B7).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CHROMASITINGVERT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChromaSitingVert").id(0x55B8).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo RANGE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Range").id(0x55B9).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo TRANSFERCHARACTERISTICS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TransferCharacteristics").id(0x55BA).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo PRIMARIES = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Primaries").id(0x55BB).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo MAXCLL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MaxCLL").id(0x55BC).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo MAXFALL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MaxFALL").id(0x55BD).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
// HDR mastering metadata (level 5 master with level 6 children).
public static final EBMLTypeInfo MASTERINGMETADATA = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MasteringMetadata").id(0x55D0).level(5).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo PRIMARYRCHROMATICITYX = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryRChromaticityX").id(0x55D1).level(6).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo PRIMARYRCHROMATICITYY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryRChromaticityY").id(0x55D2).level(6).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo PRIMARYGCHROMATICITYX = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryGChromaticityX").id(0x55D3).level(6).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo PRIMARYGCHROMATICITYY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryGChromaticityY").id(0x55D4).level(6).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo PRIMARYBCHROMATICITYX = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryBChromaticityX").id(0x55D5).level(6).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo PRIMARYBCHROMATICITYY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryBChromaticityY").id(0x55D6).level(6).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo WHITEPOINTCHROMATICITYX = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("WhitePointChromaticityX").id(0x55D7).level(6).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo WHITEPOINTCHROMATICITYY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("WhitePointChromaticityY").id(0x55D8).level(6).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo LUMINANCEMAX = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("LuminanceMax").id(0x55D9).level(6).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo LUMINANCEMIN = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("LuminanceMin").id(0x55DA).level(6).type(EBMLTypeInfo.TYPE.FLOAT).build();
// Audio settings (level 3 master with level 4 children).
public static final EBMLTypeInfo AUDIO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Audio").id(0xE1).level(3).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo SAMPLINGFREQUENCY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SamplingFrequency").id(0xB5).level(4).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo OUTPUTSAMPLINGFREQUENCY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("OutputSamplingFrequency").id(0x78B5).level(4).type(EBMLTypeInfo.TYPE.FLOAT).build();
public static final EBMLTypeInfo CHANNELS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Channels").id(0x9F).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CHANNELPOSITIONS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChannelPositions").id(0x7D7B).level(4).type(EBMLTypeInfo.TYPE.BINARY).build();
public static final EBMLTypeInfo BITDEPTH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BitDepth").id(0x6264).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
// TrackOperation (combined/joined virtual tracks).
public static final EBMLTypeInfo TRACKOPERATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackOperation").id(0xE2).level(3).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo TRACKCOMBINEPLANES = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackCombinePlanes").id(0xE3).level(4).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo TRACKPLANE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackPlane").id(0xE4).level(5).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo TRACKPLANEUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackPlaneUID").id(0xE5).level(6).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo TRACKPLANETYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackPlaneType").id(0xE6).level(6).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo TRACKJOINBLOCKS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackJoinBlocks").id(0xE9).level(4).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo TRACKJOINUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackJoinUID").id(0xED).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
// Trick-track elements (smooth fast-forward/rewind support).
public static final EBMLTypeInfo TRICKTRACKUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrickTrackUID").id(0xC0).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo TRICKTRACKSEGMENTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrickTrackSegmentUID").id(0xC1).level(3).type(EBMLTypeInfo.TYPE.BINARY).build();
public static final EBMLTypeInfo TRICKTRACKFLAG = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrickTrackFlag").id(0xC6).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo TRICKMASTERTRACKUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrickMasterTrackUID").id(0xC7).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo TRICKMASTERTRACKSEGMENTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrickMasterTrackSegmentUID").id(0xC4).level(3).type(EBMLTypeInfo.TYPE.BINARY).build();
// ContentEncodings: per-track compression and encryption description.
public static final EBMLTypeInfo CONTENTENCODINGS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncodings").id(0x6D80).level(3).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo CONTENTENCODING = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncoding").id(0x6240).level(4).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo CONTENTENCODINGORDER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncodingOrder").id(0x5031).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CONTENTENCODINGSCOPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncodingScope").id(0x5032).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CONTENTENCODINGTYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncodingType").id(0x5033).level(5).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CONTENTCOMPRESSION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentCompression").id(0x5034).level(5).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo CONTENTCOMPALGO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentCompAlgo").id(0x4254).level(6).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CONTENTCOMPSETTINGS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentCompSettings").id(0x4255).level(6).type(EBMLTypeInfo.TYPE.BINARY).build();
public static final EBMLTypeInfo CONTENTENCRYPTION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncryption").id(0x5035).level(5).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo CONTENTENCALGO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncAlgo").id(0x47E1).level(6).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CONTENTENCKEYID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncKeyID").id(0x47E2).level(6).type(EBMLTypeInfo.TYPE.BINARY).build();
public static final EBMLTypeInfo CONTENTSIGNATURE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentSignature").id(0x47E3).level(6).type(EBMLTypeInfo.TYPE.BINARY).build();
public static final EBMLTypeInfo CONTENTSIGKEYID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentSigKeyID").id(0x47E4).level(6).type(EBMLTypeInfo.TYPE.BINARY).build();
public static final EBMLTypeInfo CONTENTSIGALGO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentSigAlgo").id(0x47E5).level(6).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CONTENTSIGHASHALGO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentSigHashAlgo").id(0x47E6).level(6).type(EBMLTypeInfo.TYPE.UINTEGER).build();
// Cues (level 1 under Segment): seeking index.
public static final EBMLTypeInfo CUES = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Cues").id(0x1C53BB6B).level(1).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo CUEPOINT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CuePoint").id(0xBB).level(2).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo CUETIME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueTime").id(0xB3).level(3).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CUETRACKPOSITIONS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueTrackPositions").id(0xB7).level(3).type(EBMLTypeInfo.TYPE.MASTER).build();
public static final EBMLTypeInfo CUETRACK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueTrack").id(0xF7).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CUECLUSTERPOSITION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueClusterPosition").id(0xF1).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CUERELATIVEPOSITION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueRelativePosition").id(0xF0).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CUEDURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueDuration").id(0xB2).level(4).type(EBMLTypeInfo.TYPE.UINTEGER).build();
public static final EBMLTypeInfo CUEBLOCKNUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueBlockNumber").id(0x5378).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUECODECSTATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueCodecState").id(0xEA).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUEREFERENCE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueReference").id(0xDB).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CUEREFTIME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueRefTime").id(0x96).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUEREFCLUSTER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueRefCluster").id(0x97).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUEREFNUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueRefNumber").id(0x535F).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUEREFCODECSTATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueRefCodecState").id(0xEB).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo ATTACHMENTS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Attachments").id(0x1941A469).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo ATTACHEDFILE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("AttachedFile").id(0x61A7).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo FILEDESCRIPTION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileDescription").id(0x467E).level(3).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo FILENAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileName").id(0x466E).level(3).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo FILEMIMETYPE = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileMimeType").id(0x4660).level(3).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo FILEDATA = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileData").id(0x465C).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo FILEUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileUID").id(0x46AE).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo FILEREFERRAL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileReferral").id(0x4675).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo FILEUSEDSTARTTIME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileUsedStartTime").id(0x4661).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo FILEUSEDENDTIME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileUsedEndTime").id(0x4662).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Chapters").id(0x1043A770).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo EDITIONENTRY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EditionEntry").id(0x45B9).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo EDITIONUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EditionUID").id(0x45BC).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EDITIONFLAGHIDDEN = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EditionFlagHidden").id(0x45BD).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EDITIONFLAGDEFAULT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EditionFlagDefault").id(0x45DB).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EDITIONFLAGORDERED = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EditionFlagOrdered").id(0x45DD).level(3).type( 
EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERATOM = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterAtom").id(0xB6).level(3).type( EBMLTypeInfo.TYPE.MASTER).isRecursive(true).build(); public static final EBMLTypeInfo CHAPTERUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterUID").id(0x73C4).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERSTRINGUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterStringUID").id(0x5654).level(4).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo CHAPTERTIMESTART = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTimeStart").id(0x91).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERTIMEEND = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTimeEnd").id(0x92).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERFLAGHIDDEN = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterFlagHidden").id(0x98).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERFLAGENABLED = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterFlagEnabled").id(0x4598).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERSEGMENTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterSegmentUID").id(0x6E67).level(4).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CHAPTERSEGMENTEDITIONUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterSegmentEditionUID").id(0x6EBC).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERPHYSICALEQUIV = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterPhysicalEquiv").id(0x63C3).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERTRACK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTrack").id(0x8F).level(4).type( 
EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CHAPTERTRACKNUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTrackNumber").id(0x89).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERDISPLAY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterDisplay").id(0x80).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CHAPSTRING = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapString").id(0x85).level(5).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo CHAPLANGUAGE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapLanguage").id(0x437C).level(5).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo CHAPCOUNTRY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapCountry").id(0x437E).level(5).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo CHAPPROCESS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcess").id(0x6944).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CHAPPROCESSCODECID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcessCodecID").id(0x6955).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPPROCESSPRIVATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcessPrivate").id(0x450D).level(5).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CHAPPROCESSCOMMAND = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcessCommand").id(0x6911).level(5).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CHAPPROCESSTIME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcessTime").id(0x6922).level(6).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPPROCESSDATA = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcessData").id(0x6933).level(6).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo TAGS = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("Tags").id(0x1254C367).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TAG = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Tag").id(0x7373).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TARGETS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Targets").id(0x63C0).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TARGETTYPEVALUE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TargetTypeValue").id(0x68CA).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TARGETTYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TargetType").id(0x63CA).level(4).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo TAGTRACKUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagTrackUID").id(0x63C5).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TAGEDITIONUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagEditionUID").id(0x63C9).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TAGCHAPTERUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagChapterUID").id(0x63C4).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TAGATTACHMENTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagAttachmentUID").id(0x63C6).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SIMPLETAG = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SimpleTag").id(0x67C8).level(3).type( EBMLTypeInfo.TYPE.MASTER).isRecursive(true).build(); public static final EBMLTypeInfo TAGNAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagName").id(0x45A3).level(4).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo TAGLANGUAGE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagLanguage").id(0x447A).level(4).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo 
TAGDEFAULT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagDefault").id(0x4484).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TAGSTRING = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagString").id(0x4487).level(4).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo TAGBINARY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagBinary").id(0x4485).level(4).type( EBMLTypeInfo.TYPE.BINARY).build(); }
5,446
0
Create_ds/dgs-examples-java/src/test/java/com/example
Create_ds/dgs-examples-java/src/test/java/com/example/demo/ReviewSubscriptionIntegrationTest.java
package com.example.demo;

import com.example.demo.generated.client.AddReviewGraphQLQuery;
import com.example.demo.generated.client.AddReviewProjectionRoot;
import com.example.demo.generated.client.ReviewAddedGraphQLQuery;
import com.example.demo.generated.client.ReviewAddedProjectionRoot;
import com.example.demo.generated.types.SubmittedReview;
import com.netflix.graphql.dgs.client.MonoGraphQLClient;
import com.netflix.graphql.dgs.client.WebSocketGraphQLClient;
import com.netflix.graphql.dgs.client.codegen.GraphQLQueryRequest;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.web.server.LocalServerPort;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.reactive.socket.client.ReactorNettyWebSocketClient;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;

import java.time.Duration;
import java.util.Collections;

/**
 * End-to-end test of the reviewAdded subscription: subscribes over a real
 * WebSocket, fires two addReview mutations over HTTP, and verifies the star
 * scores arrive on the subscription stream in order.
 */
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
public class ReviewSubscriptionIntegrationTest {

    @LocalServerPort
    private Integer port;

    private WebSocketGraphQLClient webSocketGraphQLClient;
    private MonoGraphQLClient graphQLClient;

    /** Point both clients at the randomly assigned server port before each test. */
    @BeforeEach
    public void setup() {
        String wsUrl = "ws://localhost:" + port + "/subscriptions";
        String httpUrl = "http://localhost:" + port + "/graphql";
        webSocketGraphQLClient = new WebSocketGraphQLClient(wsUrl, new ReactorNettyWebSocketClient());
        graphQLClient = MonoGraphQLClient.createWithWebClient(WebClient.create(httpUrl));
    }

    @Test
    public void testWebSocketSubscription() {
        GraphQLQueryRequest subscription = new GraphQLQueryRequest(
                ReviewAddedGraphQLQuery.newRequest().showId(1).build(),
                new ReviewAddedProjectionRoot<>().starScore());

        // Each subscription event carries the new review; extract just its star score.
        Flux<Integer> starScores = webSocketGraphQLClient
                .reactiveExecuteQuery(subscription.serialize(), Collections.emptyMap())
                .map(response -> response.extractValue("reviewAdded.starScore"));

        StepVerifier.create(starScores)
                // Give the WebSocket subscription a moment to establish before mutating.
                .thenAwait(Duration.ofSeconds(1))
                .then(() -> submitReview(5))
                .then(() -> submitReview(3))
                .expectNext(5)
                .expectNext(3)
                .thenCancel()
                .verify();
    }

    /** Fires an addReview mutation over HTTP and blocks until it completes. */
    private void submitReview(int starScore) {
        GraphQLQueryRequest mutation = new GraphQLQueryRequest(
                AddReviewGraphQLQuery.newRequest()
                        .review(SubmittedReview.newBuilder()
                                .showId(1)
                                .starScore(starScore)
                                .username("DGS User")
                                .build())
                        .build(),
                new AddReviewProjectionRoot<>().starScore());
        graphQLClient.reactiveExecuteQuery(mutation.serialize(), Collections.emptyMap()).block();
    }
}
5,447
0
Create_ds/dgs-examples-java/src/test/java/com/example
Create_ds/dgs-examples-java/src/test/java/com/example/demo/ArtworkUploadDataFetcherTest.java
package com.example.demo; import com.example.demo.generated.types.Image; import com.jayway.jsonpath.TypeRef; import com.netflix.graphql.dgs.DgsQueryExecutor; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.mock.web.MockMultipartFile; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import static org.assertj.core.api.AssertionsForClassTypes.assertThat; @SpringBootTest class ArtworkUploadDataFetcherTest { @Autowired DgsQueryExecutor dgsQueryExecutor; @Test void addArtwork() { int showId = new Random().nextInt(); Map<String, Object> map = new HashMap<String, Object>() {{ put("showId", showId); put("upload", new MockMultipartFile("test", "test.file", "text/plain", "test".getBytes())); }}; String mutation = "mutation addArtwork($showId:Int!, $upload:Upload!) { addArtwork(showId:$showId, upload:$upload) {url} }"; List<Image> result = dgsQueryExecutor.executeAndExtractJsonPathAsObject( mutation, "data.addArtwork", map, new TypeRef<List<Image>>() { } ); assertThat(result.size()).isNotZero(); assertThat(result.get(0).getUrl()).contains(String.valueOf(showId)); } }
5,448
0
Create_ds/dgs-examples-java/src/test/java/com/example
Create_ds/dgs-examples-java/src/test/java/com/example/demo/SecurityExampleFetchersTest.java
package com.example.demo; import com.netflix.graphql.dgs.DgsQueryExecutor; import com.netflix.graphql.dgs.exceptions.QueryException; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.security.test.context.support.WithMockUser; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertThrows; @SpringBootTest class SecurityExampleFetchersTest { @Autowired DgsQueryExecutor dgsQueryExecutor; @Test void secureNone() { String result = dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureNone }", "data.secureNone", String.class); assertThat(result).isEqualTo("Hello to everyone"); } @Test @WithMockUser(username = "user", password = "user") void secureUserWithUser() { String result = dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureUser }", "data.secureUser", String.class ); assertThat(result).isEqualTo("Hello to users or admins"); } @Test @WithMockUser(username = "admin", password = "admin", roles = {"ADMIN"}) void secureUserWithAdmin() { String result = dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureUser }", "data.secureUser", String.class ); assertThat(result).isEqualTo("Hello to users or admins"); } @Test void secureUserWithNone() { assertThrows(QueryException.class, () -> { dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureUser }", "data.secureUser", String.class ); }); } @Test @WithMockUser(username = "admin", password = "admin", roles = {"ADMIN"}) void secureAdminWithAdmin() { String result = dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureAdmin }", "data.secureAdmin", String.class ); assertThat(result).isEqualTo("Hello to admins only"); } @Test @WithMockUser(username = "user", password = "user") void secureAdminWithUser() { assertThrows(QueryException.class, () -> { dgsQueryExecutor.executeAndExtractJsonPathAsObject( " 
{ secureAdmin }", "data.secureAdmin", String.class ); }); } @Test void secureAdminWithNone() { assertThrows(QueryException.class, () -> { dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureAdmin }", "data.secureAdmin", String.class ); }); } }
5,449
0
Create_ds/dgs-examples-java/src/test/java/com/example
Create_ds/dgs-examples-java/src/test/java/com/example/demo/ReviewSubscriptionTest.java
package com.example.demo;

import com.example.demo.datafetchers.ReviewsDataFetcher;
import com.example.demo.generated.client.AddReviewGraphQLQuery;
import com.example.demo.generated.client.AddReviewProjectionRoot;
import com.example.demo.generated.types.Review;
import com.example.demo.generated.types.SubmittedReview;
import com.example.demo.scalars.DateTimeScalar;
import com.example.demo.services.DefaultReviewsService;
import com.example.demo.services.ShowsService;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.graphql.dgs.DgsQueryExecutor;
import com.netflix.graphql.dgs.autoconfig.DgsAutoConfiguration;
import com.netflix.graphql.dgs.client.codegen.GraphQLQueryRequest;
import graphql.ExecutionResult;
import org.junit.jupiter.api.Test;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;

import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;

import static org.assertj.core.api.AssertionsForClassTypes.assertThat;

/**
 * Test the review added subscription.
 * The subscription query returns a Publisher&lt;ExecutionResult&gt;.
 * Each time a review is added, a new ExecutionResult is given to the subscriber.
 * Normally, this publisher is consumed by the Websocket/SSE subscription handler and you don't
 * deal with this code directly, but for testing purposes it's useful to use the stream directly.
 */
@SpringBootTest(classes = {DefaultReviewsService.class, ReviewsDataFetcher.class, DgsAutoConfiguration.class, DateTimeScalar.class})
public class ReviewSubscriptionTest {

    @Autowired
    DgsQueryExecutor dgsQueryExecutor;

    // Mocked out: only the reviews flow is under test here.
    @MockBean
    ShowsService showsService;

    @Test
    void reviewSubscription() {
        // For a subscription, getData() yields the event stream rather than a result map.
        ExecutionResult executionResult = dgsQueryExecutor.execute("subscription { reviewAdded(showId: 1) {starScore} }");
        Publisher<ExecutionResult> reviewPublisher = executionResult.getData();

        // CopyOnWriteArrayList in case onNext is delivered on a different thread
        // than the test thread — TODO confirm delivery threading for this executor.
        List<Review> reviews = new CopyOnWriteArrayList<>();

        reviewPublisher.subscribe(new Subscriber<ExecutionResult>() {
            @Override
            public void onSubscribe(Subscription s) {
                // Request exactly the two events the test expects to observe.
                s.request(2);
            }

            @Override
            public void onNext(ExecutionResult executionResult) {
                if (executionResult.getErrors().size() > 0) {
                    System.out.println(executionResult.getErrors());
                }
                // Each event's data is a map keyed by the subscription field name.
                Map<String, Object> review = executionResult.getData();
                reviews.add(new ObjectMapper().convertValue(review.get("reviewAdded"), Review.class));
            }

            @Override
            public void onError(Throwable t) {
            }

            @Override
            public void onComplete() {
            }
        });

        // Two mutations should produce exactly two subscription events.
        addReview();
        addReview();

        assertThat(reviews.size()).isEqualTo(2);
    }

    /** Fires one addReview mutation for show 1 through the query executor. */
    private void addReview() {
        GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(
                AddReviewGraphQLQuery.newRequest()
                        .review(
                                SubmittedReview.newBuilder()
                                        .showId(1)
                                        .username("testuser")
                                        .starScore(5).build())
                        .build(),
                new AddReviewProjectionRoot<>()
                        .username()
                        .starScore());

        dgsQueryExecutor.execute(graphQLQueryRequest.serialize());
    }
}
5,450
0
Create_ds/dgs-examples-java/src/test/java/com/example
Create_ds/dgs-examples-java/src/test/java/com/example/demo/ShowsDatafetcherTest.java
package com.example.demo;

import com.example.demo.datafetchers.ReviewsDataFetcher;
import com.example.demo.datafetchers.ShowsDatafetcher;
import com.example.demo.dataloaders.ReviewsDataLoader;
import com.example.demo.dataloaders.ReviewsDataLoaderWithContext;
import com.example.demo.generated.client.*;
import com.example.demo.generated.types.Review;
import com.example.demo.generated.types.Show;
import com.example.demo.generated.types.SubmittedReview;
import com.example.demo.scalars.DateTimeScalar;
import com.example.demo.services.DefaultReviewsService;
import com.example.demo.services.ShowsService;
import com.jayway.jsonpath.TypeRef;
import com.netflix.graphql.dgs.DgsQueryExecutor;
import com.netflix.graphql.dgs.autoconfig.DgsAutoConfiguration;
import com.netflix.graphql.dgs.client.codegen.GraphQLQueryRequest;
import graphql.ExecutionResult;
import org.assertj.core.util.Maps;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;

import java.time.OffsetDateTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;

/**
 * Slice test for the shows/reviews datafetchers: boots only the DGS framework
 * plus the fetchers, data loaders and scalar under test, with the backing
 * services replaced by Mockito mocks.
 */
@SpringBootTest(classes = {DgsAutoConfiguration.class, ReviewsDataLoaderWithContext.class, ShowsDatafetcher.class, ReviewsDataFetcher.class, ReviewsDataLoader.class, DateTimeScalar.class})
class ShowsDatafetcherTest {

    @Autowired
    DgsQueryExecutor dgsQueryExecutor;

    @MockBean
    ShowsService showsService;

    @MockBean
    DefaultReviewsService reviewsService;

    /**
     * Stubs one show (id 1) and two reviews for it, so every test below runs
     * against the same canned data.
     */
    @BeforeEach
    public void before() {
        Mockito.when(showsService.shows())
                .thenAnswer(invocation -> Collections.singletonList(Show.newBuilder().id(1).title("mock title").releaseYear(2020).build()));
        // Stub is keyed on the exact argument singletonList(1): the batch
        // loader is expected to request reviews for show 1 only.
        Mockito.when(reviewsService.reviewsForShows(Collections.singletonList(1)))
                .thenAnswer(invocation -> Maps.newHashMap(1, Arrays.asList(
                        Review.newBuilder().username("DGS User").starScore(5).submittedDate(OffsetDateTime.now()).build(),
                        Review.newBuilder().username("DGS User 2").starScore(3).submittedDate(OffsetDateTime.now()).build())
                ));
    }

    @Test
    void shows() {
        List<String> titles = dgsQueryExecutor.executeAndExtractJsonPath(
                " { shows { title releaseYear }}",
                "data.shows[*].title");

        assertThat(titles).contains("mock title");
    }

    @Test
    void showsWithException() {
        // A service exception should surface as a GraphQL error, not a thrown exception.
        Mockito.when(showsService.shows()).thenThrow(new RuntimeException("nothing to see here"));

        ExecutionResult result = dgsQueryExecutor.execute(
                " { shows { title releaseYear }}");

        assertThat(result.getErrors()).isNotEmpty();
        assertThat(result.getErrors().get(0).getMessage()).isEqualTo("java.lang.RuntimeException: nothing to see here");
    }

    @Test
    void showsWithQueryApi() {
        // Same query as shows(), but built with the generated type-safe client API.
        GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(ShowsGraphQLQuery.newRequest().titleFilter("").build(), new ShowsProjectionRoot<>().title());
        List<String> titles = dgsQueryExecutor.executeAndExtractJsonPath(graphQLQueryRequest.serialize(), "data.shows[*].title");
        assertThat(titles).contains("mock title");
    }

    @Test
    void showWithReviews() {
        // Nested projection: show title plus each review's username and star score.
        GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(ShowsGraphQLQuery.newRequest().titleFilter("").build(),
                new ShowsProjectionRoot<>()
                        .title()
                        .reviews()
                        .username()
                        .starScore());

        List<Show> shows = dgsQueryExecutor.executeAndExtractJsonPathAsObject(
                graphQLQueryRequest.serialize(),
                "data.shows[*]",
                new TypeRef<List<Show>>() {
                });

        assertThat(shows.size()).isEqualTo(1);
        assertThat(shows.get(0).getReviews().size()).isEqualTo(2);
    }

    @Test
    void addReviewMutation() {
        GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(
                AddReviewGraphQLQuery.newRequest()
                        .review(SubmittedReview.newBuilder()
                                .showId(1)
                                .username("testuser")
                                .starScore(5).build())
                        .build(),
                new AddReviewProjectionRoot<>().username().starScore());

        ExecutionResult executionResult = dgsQueryExecutor.execute(graphQLQueryRequest.serialize());
        assertThat(executionResult.getErrors()).isEmpty();

        // The mutation should trigger a re-fetch of reviews for the affected show.
        verify(reviewsService).reviewsForShow(1);
    }

    @Test
    void addReviewsMutation() {
        List<SubmittedReview> reviews = Collections.singletonList(
                SubmittedReview.newBuilder().showId(1).username("testuser1").starScore(5).build());
        GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(
                AddReviewsGraphQLQuery.newRequest()
                        .reviews(reviews)
                        .build(),
                new AddReviewsProjectionRoot<>().username().starScore());

        ExecutionResult executionResult = dgsQueryExecutor.execute(graphQLQueryRequest.serialize());
        assertThat(executionResult.getErrors()).isEmpty();

        // The bulk mutation uses the batch API with the list of affected show ids.
        verify(reviewsService).reviewsForShows(Collections.singletonList(1));
    }
}
5,451
0
Create_ds/dgs-examples-java/src/main/java/com/example
Create_ds/dgs-examples-java/src/main/java/com/example/demo/DemoApplication.java
package com.example.demo; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication public class DemoApplication { public static void main(String[] args) { SpringApplication.run(DemoApplication.class, args); } /** * Below is an example of using a PreparsedDocumentProvider. * Uncomment to enable */ // @Configuration // static class PreparsedDocumentProviderConfig { // // private final Cache<String, PreparsedDocumentEntry> cache = Caffeine.newBuilder().maximumSize(250) // .expireAfterAccess(5, TimeUnit.MINUTES).recordStats().build(); // // // @Bean // public PreparsedDocumentProvider preparsedDocumentProvider() { // return (executionInput, parseAndValidateFunction) -> { // Function<String, PreparsedDocumentEntry> mapCompute = key -> parseAndValidateFunction.apply(executionInput); // return cache.get(executionInput.getQuery(), mapCompute); // }; // } // } }
5,452
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/directives/UppercaseDirective.java
package com.example.demo.directives; import com.netflix.graphql.dgs.DgsDirective; import graphql.schema.DataFetcher; import graphql.schema.DataFetcherFactories; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLFieldsContainer; import graphql.schema.idl.SchemaDirectiveWiring; import graphql.schema.idl.SchemaDirectiveWiringEnvironment; @DgsDirective(name = "uppercase") public class UppercaseDirective implements SchemaDirectiveWiring { @Override public GraphQLFieldDefinition onField(SchemaDirectiveWiringEnvironment<GraphQLFieldDefinition> env) { GraphQLFieldsContainer fieldsContainer = env.getFieldsContainer(); GraphQLFieldDefinition fieldDefinition = env.getFieldDefinition(); DataFetcher<?> originalDataFetcher = env.getCodeRegistry().getDataFetcher(fieldsContainer, fieldDefinition); DataFetcher<?> dataFetcher = DataFetcherFactories.wrapDataFetcher( originalDataFetcher, (dataFetchingEnvironment, value) -> { if (value instanceof String) { return ((String) value).toUpperCase(); } return value; } ); env.getCodeRegistry().dataFetcher(fieldsContainer, fieldDefinition, dataFetcher); return fieldDefinition; } }
5,453
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/config/SecurityConfig.java
package com.example.demo.config; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer; import org.springframework.security.core.userdetails.User; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.provisioning.InMemoryUserDetailsManager; import org.springframework.security.web.DefaultSecurityFilterChain; import static org.springframework.security.config.Customizer.withDefaults; @Configuration @EnableWebSecurity @EnableGlobalMethodSecurity(prePostEnabled = true, securedEnabled = true) public class SecurityConfig { @Bean DefaultSecurityFilterChain springWebFilterChain(HttpSecurity http) throws Exception { return http .csrf(AbstractHttpConfigurer::disable) .authorizeRequests(requests -> requests .anyRequest().permitAll() ) .httpBasic(withDefaults()) .build(); } @Bean public static InMemoryUserDetailsManager userDetailsService() { User.UserBuilder userBuilder = User.withDefaultPasswordEncoder(); UserDetails user = userBuilder.username("user").password("user").roles("USER").build(); UserDetails admin = userBuilder.username("admin").password("admin").roles("USER", "ADMIN").build(); return new InMemoryUserDetailsManager(user, admin); } }
5,454
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/config/MetricsConfig.java
package com.example.demo.config; import io.micrometer.core.instrument.MeterRegistry; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import io.micrometer.core.instrument.logging.LoggingMeterRegistry; @Configuration public class MetricsConfig { @Bean public MeterRegistry loggingMeterRegistry() { return new LoggingMeterRegistry(); } }
5,455
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/scalars/DateTimeScalar.java
package com.example.demo.scalars; import com.netflix.graphql.dgs.DgsComponent; import com.netflix.graphql.dgs.DgsRuntimeWiring; import graphql.scalars.ExtendedScalars; import graphql.schema.idl.RuntimeWiring; /** * graphql-java provides optional scalars in the graphql-java-extended-scalars library. * We can wire a scalar from this library by adding the scalar to the RuntimeWiring. */ @DgsComponent public class DateTimeScalar { @DgsRuntimeWiring public RuntimeWiring.Builder addScalar(RuntimeWiring.Builder builder) { return builder.scalar(ExtendedScalars.DateTime); } }
5,456
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/dataloaders/ReviewsDataLoader.java
package com.example.demo.dataloaders; import com.example.demo.generated.types.Review; import com.example.demo.services.DefaultReviewsService; import com.netflix.graphql.dgs.DgsDataLoader; import org.dataloader.MappedBatchLoader; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; @DgsDataLoader(name = "reviews") public class ReviewsDataLoader implements MappedBatchLoader<Integer, List<Review>> { private final DefaultReviewsService reviewsService; public ReviewsDataLoader(DefaultReviewsService reviewsService) { this.reviewsService = reviewsService; } /** * This method will be called once, even if multiple datafetchers use the load() method on the DataLoader. * This way reviews can be loaded for all the Shows in a single call instead of per individual Show. */ @Override public CompletionStage<Map<Integer, List<Review>>> load(Set<Integer> keys) { return CompletableFuture.supplyAsync(() -> reviewsService.reviewsForShows(new ArrayList<>(keys))); } }
5,457
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/dataloaders/ReviewsDataLoaderWithContext.java
package com.example.demo.dataloaders; import com.example.demo.generated.types.Review; import com.example.demo.services.DefaultReviewsService; import com.netflix.graphql.dgs.DgsDataLoader; import org.dataloader.BatchLoaderEnvironment; import org.dataloader.MappedBatchLoader; import org.dataloader.MappedBatchLoaderWithContext; import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage;

/**
 * Context-aware variant of the reviews DataLoader, registered under the name
 * "reviewsWithContext". The BatchLoaderEnvironment parameter is currently
 * unused; reviews are loaded for all requested show ids in one batched call.
 */
@DgsDataLoader(name = "reviewsWithContext")
public class ReviewsDataLoaderWithContext implements MappedBatchLoaderWithContext<Integer, List<Review>> {

    private final DefaultReviewsService reviewsService;

    @Autowired
    public ReviewsDataLoaderWithContext(DefaultReviewsService reviewsService) {
        this.reviewsService = reviewsService;
    }

    /** Batch-loads reviews for every requested show id in a single service call. */
    @Override
    public CompletionStage<Map<Integer, List<Review>>> load(Set<Integer> keys, BatchLoaderEnvironment environment) {
        List<Integer> showIds = new ArrayList<>(keys);
        return CompletableFuture.supplyAsync(() -> reviewsService.reviewsForShows(showIds));
    }
}
5,458
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/instrumentation/ExampleTracingInstrumentation.java
package com.example.demo.instrumentation; import graphql.ExecutionResult; import graphql.execution.instrumentation.InstrumentationContext; import graphql.execution.instrumentation.InstrumentationState; import graphql.execution.instrumentation.SimpleInstrumentation; import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters; import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters; import graphql.schema.DataFetcher; import graphql.schema.GraphQLNonNull; import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLOutputType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component;

import java.util.concurrent.CompletableFuture;

/**
 * Instrumentation that logs how long each non-trivial datafetcher takes, plus
 * the total execution time of every query.
 */
@Component
public class ExampleTracingInstrumentation extends SimpleInstrumentation {

    private final static Logger LOGGER = LoggerFactory.getLogger(ExampleTracingInstrumentation.class);

    @Override
    public InstrumentationState createState() {
        return new TracingState();
    }

    /** Remembers the wall-clock start time of the whole execution in the per-request state. */
    @Override
    public InstrumentationContext<ExecutionResult> beginExecution(InstrumentationExecutionParameters parameters) {
        TracingState tracingState = parameters.getInstrumentationState();
        tracingState.startTime = System.currentTimeMillis();
        return super.beginExecution(parameters);
    }

    /**
     * Wraps each user-supplied datafetcher so its elapsed time is logged.
     * Trivial (property) datafetchers are returned unwrapped.
     */
    @Override
    public DataFetcher<?> instrumentDataFetcher(DataFetcher<?> dataFetcher, InstrumentationFieldFetchParameters parameters) {
        if (parameters.isTrivialDataFetcher()) {
            return dataFetcher;
        }

        return environment -> {
            final long fetchStart = System.currentTimeMillis();
            final Object value = dataFetcher.get(environment);
            if (value instanceof CompletableFuture) {
                // Async fetcher: log only once the future actually completes.
                ((CompletableFuture<?>) value).whenComplete((ignoredResult, ignoredError) ->
                        LOGGER.info("Async datafetcher {} took {}ms",
                                findDatafetcherTag(parameters), System.currentTimeMillis() - fetchStart));
            } else {
                LOGGER.info("Datafetcher {} took {}ms",
                        findDatafetcherTag(parameters), System.currentTimeMillis() - fetchStart);
            }
            return value;
        };
    }

    /** Logs the total execution time recorded by {@link #beginExecution}. */
    @Override
    public CompletableFuture<ExecutionResult> instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters) {
        TracingState tracingState = parameters.getInstrumentationState();
        LOGGER.info("Total execution time: {}ms", System.currentTimeMillis() - tracingState.startTime);
        return super.instrumentExecutionResult(executionResult, parameters);
    }

    /** Builds a "ParentType.fieldName" tag for the field being fetched. */
    private String findDatafetcherTag(InstrumentationFieldFetchParameters parameters) {
        GraphQLOutputType type = parameters.getExecutionStepInfo().getParent().getType();
        GraphQLObjectType parent = (type instanceof GraphQLNonNull)
                ? (GraphQLObjectType) ((GraphQLNonNull) type).getWrappedType()
                : (GraphQLObjectType) type;
        return parent.getName() + "." + parameters.getExecutionStepInfo().getPath().getSegmentName();
    }

    /** Per-request mutable state: when the execution started, in epoch millis. */
    static class TracingState implements InstrumentationState {
        long startTime;
    }
}
5,459
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/datafetchers/ArtworkUploadDataFetcher.java
package com.example.demo.datafetchers; import com.example.demo.generated.types.Image; import com.netflix.graphql.dgs.DgsComponent; import com.netflix.graphql.dgs.DgsMutation; import com.netflix.graphql.dgs.InputArgument; import org.springframework.web.multipart.MultipartFile; import java.io.IOException; import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream;

/**
 * Mutation that stores an uploaded artwork image on disk and returns all
 * artwork currently stored for the given show.
 */
@DgsComponent
public class ArtworkUploadDataFetcher {

    /**
     * Saves the uploaded file under "uploaded-images" with a unique name and
     * lists every image previously uploaded for the show.
     *
     * @param showId the show the artwork belongs to
     * @param upload the multipart file payload
     * @return one {@link Image} per stored file for this show (url = file name)
     * @throws IOException if the upload directory or file cannot be written
     */
    @DgsMutation
    public List<Image> addArtwork(@InputArgument Integer showId, @InputArgument MultipartFile upload) throws IOException {
        Path uploadDir = Paths.get("uploaded-images");
        if (!Files.exists(uploadDir)) {
            Files.createDirectories(uploadDir);
        }

        // Fix: the original blindly called substring(lastIndexOf('.')), which throws
        // when the client-supplied filename is null or has no extension.
        String filePrefix = "show-" + showId + "-";
        Path newFile = uploadDir.resolve(filePrefix + UUID.randomUUID() + extensionOf(upload.getOriginalFilename()));
        try (OutputStream outputStream = Files.newOutputStream(newFile)) {
            outputStream.write(upload.getBytes());
        }

        // Fix: Files.list returns a Stream backed by an open directory handle that
        // must be closed, otherwise every upload leaks a file descriptor.
        try (Stream<Path> files = Files.list(uploadDir)) {
            return files
                    // Fix: match "show-<id>-" (with trailing dash) so show 1 does not
                    // also pick up files belonging to show 12, 13, ...
                    .filter(f -> f.getFileName().toString().startsWith(filePrefix))
                    .map(f -> f.getFileName().toString())
                    .map(fileName -> Image.newBuilder().url(fileName).build())
                    .collect(Collectors.toList());
        }
    }

    /** Returns the extension of the given filename including the dot, or "" if absent. */
    private static String extensionOf(String originalFilename) {
        if (originalFilename == null) {
            return "";
        }
        int dot = originalFilename.lastIndexOf('.');
        return dot < 0 ? "" : originalFilename.substring(dot);
    }
}
5,460
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/datafetchers/ReviewsDataFetcher.java
package com.example.demo.datafetchers; import com.example.demo.dataloaders.ReviewsDataLoader; import com.example.demo.dataloaders.ReviewsDataLoaderWithContext; import com.example.demo.generated.DgsConstants; import com.example.demo.generated.types.Review; import com.example.demo.generated.types.Show; import com.example.demo.generated.types.SubmittedReview; import com.example.demo.services.DefaultReviewsService; import com.netflix.graphql.dgs.*; import org.dataloader.BatchLoaderEnvironment; import org.dataloader.DataLoader; import org.reactivestreams.Publisher; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors;

/**
 * Datafetchers for the Review type: resolving Show.reviews via a DataLoader,
 * mutations to add reviews, and a subscription for newly added reviews.
 */
@DgsComponent
public class ReviewsDataFetcher {

    private final DefaultReviewsService reviewsService;

    public ReviewsDataFetcher(DefaultReviewsService reviewsService) {
        this.reviewsService = reviewsService;
    }

    /**
     * Resolves the "reviews" field on Show. Invoked once per Show instance, so
     * resolving 10 shows calls this 10 times — the DataLoader batches those
     * per-show load() calls into a single backend call, avoiding the N+1
     * problem. Returning a CompletableFuture is what enables the batching.
     */
    @DgsData(parentType = DgsConstants.SHOW.TYPE_NAME, field = DgsConstants.SHOW.Reviews)
    public CompletableFuture<List<Review>> reviews(DgsDataFetchingEnvironment dfe) {
        // The DataLoader can be looked up by class instead of by name.
        DataLoader<Integer, List<Review>> loader = dfe.getDataLoader(ReviewsDataLoaderWithContext.class);

        // Because "reviews" lives on Show, getSource() yields the enclosing Show.
        Show parentShow = dfe.getSource();

        // Async load; the DataLoader mechanism batches these per-show calls.
        return loader.load(parentShow.getId());
    }

    /** Persists one review and returns all reviews for that show (never null). */
    @DgsMutation
    public List<Review> addReview(@InputArgument SubmittedReview review) {
        reviewsService.saveReview(review);
        return Optional.ofNullable(reviewsService.reviewsForShow(review.getShowId()))
                .orElse(Collections.emptyList());
    }

    /** Persists a batch of reviews and returns the reviews of every affected show, flattened. */
    @DgsMutation
    public List<Review> addReviews(@InputArgument(value = "reviews", collectionType = SubmittedReview.class) List<SubmittedReview> reviewsInput) {
        reviewsService.saveReviews(reviewsInput);

        List<Integer> affectedShowIds = reviewsInput.stream()
                .map(SubmittedReview::getShowId)
                .collect(Collectors.toList());

        return reviewsService.reviewsForShows(affectedShowIds).values().stream()
                .flatMap(List::stream)
                .collect(Collectors.toList());
    }

    /** Subscription stream of reviews as they are added (showId currently unused). */
    @DgsSubscription
    public Publisher<Review> reviewAdded(@InputArgument Integer showId) {
        return reviewsService.getReviewsPublisher();
    }
}
5,461
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/datafetchers/ShowsDatafetcher.java
package com.example.demo.datafetchers; import com.example.demo.generated.types.Show; import com.example.demo.services.ShowsService; import com.netflix.graphql.dgs.DgsComponent; import com.netflix.graphql.dgs.DgsQuery; import com.netflix.graphql.dgs.InputArgument; import java.util.List; import java.util.stream.Collectors;

/** Datafetcher for the top-level "shows" query. */
@DgsComponent
public class ShowsDatafetcher {

    private final ShowsService showsService;

    public ShowsDatafetcher(ShowsService showsService) {
        this.showsService = showsService;
    }

    /**
     * Resolves Query.shows, optionally narrowing by a title substring.
     *
     * @param titleFilter substring a show title must contain; null means no filtering
     * @return all shows, or only those whose title contains the filter
     */
    @DgsQuery
    public List<Show> shows(@InputArgument("titleFilter") String titleFilter) {
        List<Show> allShows = showsService.shows();
        if (titleFilter == null) {
            return allShows;
        }
        return allShows.stream()
                .filter(show -> show.getTitle().contains(titleFilter))
                .collect(Collectors.toList());
    }
}
5,462
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/datafetchers/SecurityExampleFetchers.java
package com.example.demo.datafetchers; import com.netflix.graphql.dgs.DgsComponent; import com.netflix.graphql.dgs.DgsQuery; import org.springframework.security.access.annotation.Secured;

/**
 * Demonstrates method-level security on datafetchers: one open query, one
 * restricted to users/admins, and one restricted to admins only.
 */
@DgsComponent
public class SecurityExampleFetchers {

    /** Open to any caller, authenticated or not. */
    @DgsQuery
    public String secureNone() {
        return "Hello to everyone";
    }

    /** Requires ROLE_USER or ROLE_ADMIN. */
    @Secured({"ROLE_USER", "ROLE_ADMIN"})
    @DgsQuery
    public String secureUser() {
        return "Hello to users or admins";
    }

    /** Requires ROLE_ADMIN. */
    @Secured({"ROLE_ADMIN"})
    @DgsQuery
    public String secureAdmin() {
        return "Hello to admins only";
    }
}
5,463
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/services/ShowsServiceImpl.java
package com.example.demo.services; import com.example.demo.generated.types.Show; import org.springframework.stereotype.Service; import java.util.Arrays; import java.util.List;

/** In-memory ShowsService returning a fixed catalogue of five shows. */
@Service
public class ShowsServiceImpl implements ShowsService {

    @Override
    public List<Show> shows() {
        return Arrays.asList(
                show(1, "Stranger Things", 2016),
                show(2, "Ozark", 2017),
                show(3, "The Crown", 2016),
                show(4, "Dead to Me", 2019),
                show(5, "Orange is the New Black", 2013)
        );
    }

    /** Small builder shorthand for one catalogue entry. */
    private static Show show(int id, String title, int releaseYear) {
        return Show.newBuilder().id(id).title(title).releaseYear(releaseYear).build();
    }
}
5,464
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/services/ReviewsService.java
package com.example.demo.services;

/**
 * Marker interface for the reviews service.
 * Currently declares no methods; DefaultReviewsService implements it and
 * exposes the concrete review operations directly.
 */
public interface ReviewsService {
}
5,465
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/services/ShowsService.java
package com.example.demo.services;

import com.example.demo.generated.types.Show;

import java.util.List;

/**
 * Provides the catalogue of shows.
 */
public interface ShowsService {
    /** Returns all available shows. */
    List<Show> shows();
}
5,466
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/services/DefaultReviewsService.java
package com.example.demo.services;

import com.example.demo.generated.types.Review;
import com.example.demo.generated.types.SubmittedReview;
import net.datafaker.Faker;
import org.reactivestreams.Publisher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import reactor.core.publisher.ConnectableFlux;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;

import jakarta.annotation.PostConstruct;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * This service emulates a data store.
 * For convenience in the demo we just generate Reviews in memory, but imagine this would be backed by for example a database.
 * If this was indeed backed by a database, it would be very important to avoid the N+1 problem, which means we need to use a DataLoader to call this class.
 */
@Service
public class DefaultReviewsService implements ReviewsService {

    private final static Logger logger = LoggerFactory.getLogger(DefaultReviewsService.class);

    private final ShowsService showsService;

    // Keyed by show id; concurrent because mutations and reads may overlap.
    private final Map<Integer, List<Review>> reviews = new ConcurrentHashMap<>();

    // Sink into which newly saved reviews are pushed for the subscription stream.
    // NOTE(review): assigned from inside Flux.create when the ConnectableFlux is
    // subscribed via connect() below; presumably @PostConstruct completes before
    // any saveReview call, otherwise reviewsStream would still be null — confirm.
    private FluxSink<Review> reviewsStream;
    private ConnectableFlux<Review> reviewsPublisher;

    public DefaultReviewsService(ShowsService showsService) {
        this.showsService = showsService;
    }

    // Seeds 1-19 random reviews per show and wires up the hot publisher used by
    // the reviewAdded subscription.
    @PostConstruct
    private void createReviews() {
        Faker faker = new Faker();

        //For each show we generate a random set of reviews.
        showsService.shows().forEach(show -> {
            List<Review> generatedReviews = IntStream.range(0, faker.number().numberBetween(1, 20)).mapToObj(number -> {
                // Random submission date within the last 300 days, normalized to UTC.
                LocalDateTime date = faker.date().past(300, TimeUnit.DAYS).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
                return Review.newBuilder().submittedDate(OffsetDateTime.of(date, ZoneOffset.UTC)).username(faker.name().username()).starScore(faker.number().numberBetween(0, 6)).build();
            }).collect(Collectors.toList());

            reviews.put(show.getId(), generatedReviews);
        });

        // Capture the sink on subscription; connect() subscribes immediately so
        // the stream is "hot" and reviewsStream gets assigned here.
        Flux<Review> publisher = Flux.create(emitter -> {
            reviewsStream = emitter;
        });

        reviewsPublisher = publisher.publish();
        reviewsPublisher.connect();
    }

    /**
     * Hopefully nobody calls this for multiple shows within a single query, that would indicate the N+1 problem!
     */
    // NOTE(review): returns null for an unknown show id (plain Map.get) — callers
    // such as ReviewsDataFetcher.addReview wrap the result in Optional for this reason.
    public List<Review> reviewsForShow(Integer showId) {
        return reviews.get(showId);
    }

    /**
     * This is the method we want to call when loading reviews for multiple shows.
     * If this code was backed by a relational database, it would select reviews for all requested shows in a single SQL query.
     */
    public Map<Integer, List<Review>> reviewsForShows(List<Integer> showIds) {
        logger.info("Loading reviews for shows {}", showIds.stream().map(String::valueOf).collect(Collectors.joining(", ")));

        // Shows with no stored reviews are simply absent from the returned map.
        return reviews
                .entrySet()
                .stream()
                .filter(entry -> showIds.contains(entry.getKey())).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    // Stores one review and emits it on the subscription stream.
    public void saveReview(SubmittedReview reviewInput) {
        List<Review> reviewsForShow = reviews.computeIfAbsent(reviewInput.getShowId(), (key) -> new ArrayList<>());
        Review review = Review.newBuilder()
                .username(reviewInput.getUsername())
                .starScore(reviewInput.getStarScore())
                .submittedDate(OffsetDateTime.now()).build();
        reviewsForShow.add(review);
        reviewsStream.next(review);

        logger.info("Review added {}", review);
    }

    // Batch variant of saveReview; each review is stored and emitted individually.
    public void saveReviews(List<SubmittedReview> reviewsInput) {
        reviewsInput.forEach(reviewInput -> {
            List<Review> reviewsForShow = reviews.computeIfAbsent(reviewInput.getShowId(), (key) -> new ArrayList<>());
            Review review = Review.newBuilder()
                    .username(reviewInput.getUsername())
                    .starScore(reviewInput.getStarScore())
                    .submittedDate(OffsetDateTime.now()).build();
            reviewsForShow.add(review);
            reviewsStream.next(review);

            logger.info("Review added {}", review);
        });
    }

    /** Hot publisher backing the reviewAdded subscription. */
    public Publisher<Review> getReviewsPublisher() {
        return reviewsPublisher;
    }
}
5,467
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/SuggestionServiceMain.java
package com.airbnb.suggest;

import com.airbnb.suggest.rest.RestModule;
import com.google.inject.Module;
import com.google.inject.servlet.GuiceFilter;
import com.twitter.common.application.AbstractApplication;
import com.twitter.common.application.Lifecycle;
import com.twitter.common.application.modules.HttpModule;
import com.twitter.common.application.modules.LogModule;
import com.twitter.common.application.modules.StatsModule;
import com.twitter.common.args.Arg;
import com.twitter.common.args.CmdLine;
import com.twitter.common.args.constraints.NotNull;
import com.twitter.common.net.http.GuiceServletConfig;
import com.twitter.common.net.http.HttpServerDispatch;
import org.mortbay.jetty.servlet.Context;

import javax.inject.Inject;
import java.util.Arrays;
import java.util.logging.Logger;

/**
 * Application entry point for the suggestion service, built on twitter-commons.
 * Wires Guice modules, mounts the REST servlet filter on the embedded Jetty
 * server, and then blocks until shutdown.
 *
 * @author Tobi Knaup
 */
public final class SuggestionServiceMain extends AbstractApplication {

    // Command-line args parsed by the twitter-commons @CmdLine framework.
    // NOTE(review): neither arg is referenced in this class; presumably the
    // framework (or ZK registration code elsewhere) consumes them — confirm.
    @CmdLine(name = "server_set_path", help = "Joins the set of nodes located under this path in ZK")
    public static final Arg<String> SERVER_SET_PATH = Arg.create("/airbnb/service/suggest");

    @NotNull
    @CmdLine(name = "register_service", help = "Whether this instance should register itself in ZK")
    public static final Arg<Boolean> REGISTER_SERVICE = Arg.create(true);

    @Inject
    private Logger logger;

    @Inject
    private Lifecycle lifecycle;

    @Inject
    private HttpServerDispatch httpServer;

    @Inject
    private GuiceServletConfig servletConfig;

    // Mounts the REST endpoints, then blocks the main thread until shutdown.
    @Override
    public void run() {
        logger.info("Service started");
        addRestSupport();
        lifecycle.awaitShutdown();
    }

    // Modules installed at startup; RestModule holds the service's own bindings.
    @Override
    public Iterable<? extends Module> getModules() {
        return Arrays.asList(
                new HttpModule(),
                new LogModule(),
                new RestModule(),
                new StatsModule()
        );
    }

    // Routes /suggest/* through the GuiceFilter so Jersey resources are served,
    // and registers the servlet config listener on the root Jetty context.
    private void addRestSupport() {
        Context context = httpServer.getRootContext();
        context.addFilter(GuiceFilter.class, "/suggest/*", 0);
        context.addEventListener(servletConfig);
    }
}
5,468
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/model/Place.java
package com.airbnb.suggest.model; /** * A POJO for a place * * @author Tobi Knaup */ public class Place { private String name; public String getName() { return name; } public void setName(String name) { this.name = name; } @Override public String toString() { return String.format("Place{name='%s'}", name); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Place place = (Place) o; if (name != null ? !name.equals(place.name) : place.name != null) return false; return true; } @Override public int hashCode() { return name != null ? name.hashCode() : 0; } }
5,469
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest/SuggestionResource.java
package com.airbnb.suggest.rest; import com.airbnb.suggest.model.Place; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.logging.Logger;

/**
 * REST endpoints for liking places and getting a random suggestion.
 *
 * NOTE(review): this resource is bound as a singleton and the places list is a
 * plain ArrayList mutated by request threads; concurrent like/suggest calls are
 * not thread-safe — confirm whether a synchronized/concurrent collection is needed.
 */
@Path("/suggest/v1")
@Produces(MediaType.APPLICATION_JSON)
public class SuggestionResource {

    private final Logger logger;
    private final List<Place> places;

    @Inject
    public SuggestionResource(Logger logger) {
        this.logger = logger;
        this.places = new ArrayList<Place>();
    }

    /** Records a liked place, ignoring duplicates. */
    @POST
    @Path("like")
    public void like(Place place) {
        logger.info(place.toString());
        if (!places.contains(place)) {
            places.add(place);
        }
    }

    /**
     * Returns a random previously liked place.
     *
     * @throws WebApplicationException with 404 when no place has been liked yet
     */
    @GET
    @Path("suggest")
    public Place suggest() {
        // Fix: the original called places.get(0) unconditionally, which threw
        // IndexOutOfBoundsException (surfacing as HTTP 500) before any "like".
        if (places.isEmpty()) {
            throw new WebApplicationException(Response.Status.NOT_FOUND);
        }
        Collections.shuffle(places);
        Place place = places.get(0);
        logger.info(place.toString());
        return place;
    }
}
5,470
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest/RestModule.java
package com.airbnb.suggest.rest;

import com.airbnb.suggest.rest.util.JsonExceptionMapper;
import com.airbnb.suggest.rest.util.RequestStatsFilter;
import com.google.inject.Singleton;
import com.google.inject.name.Names;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.codehaus.jackson.jaxrs.JacksonJsonProvider;

import java.util.HashSet;
import java.util.Set;

/**
 * Configures our REST service: JSON (de)serialization, exception-to-JSON
 * mapping, Jersey-via-Guice serving, the resource classes, and per-endpoint
 * request stats filters. Registration order of filters matters in the
 * ServletModule DSL.
 *
 * @author Tobi Knaup
 */
public class RestModule extends ServletModule {

    @Override
    protected void configureServlets() {
        super.configureServlets();

        // JSON mapper, maps JSON to/from POJOs
        bind(JacksonJsonProvider.class).in(Singleton.class);

        // Turns exceptions into JSON responses
        bind(JsonExceptionMapper.class).in(Singleton.class);

        // Serve all URLs through Guice
        serve("/*").with(GuiceContainer.class);

        // The actual REST Endpoints
        bind(SuggestionResource.class).in(Singleton.class);

        // Stats
        filter("/suggest/v1/like").through(new RequestStatsFilter("suggest_v1_like"));
        // NOTE(review): SuggestionResource exposes "suggest", not "all_places" —
        // this filter path looks stale and would never match; confirm intent.
        filter("/suggest/v1/all_places").through(new RequestStatsFilter("suggest_v1_all_places"));
    }
}
5,471
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest/util/JsonExceptionMapper.java
package com.airbnb.suggest.rest.util; import com.google.common.collect.Maps; import javax.inject.Inject; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.Provider; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger;

/**
 * Exception mapper that turns any uncaught exception into a 500 response whose
 * JSON body carries the exception's simple type name and message.
 *
 * @author Tobi Knaup
 */
@Provider
public class JsonExceptionMapper implements ExceptionMapper<Exception> {

    private Logger logger;

    @Inject
    public JsonExceptionMapper(Logger logger) {
        this.logger = logger;
    }

    @Override
    public Response toResponse(final Exception e) {
        final String type = e.getClass().getSimpleName();

        // Response body: {"type": ..., "message": ...}
        final Map<String, String> entity = Maps.newHashMap();
        entity.put("type", type);
        entity.put("message", e.getMessage());

        // Log it too, with the full stack trace.
        logger.log(Level.WARNING, type, e);

        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
                .entity(entity)
                .type(MediaType.APPLICATION_JSON_TYPE)
                .build();
    }
}
5,472
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest/util/RequestStatsFilter.java
package com.airbnb.suggest.rest.util; import com.twitter.common.stats.RequestStats; import javax.servlet.*; import java.io.IOException;

/**
 * A servlet filter that uses Twitter Commons request stats to keep tabs on requests.
 * Records the latency of every request passing through it, in microseconds.
 *
 * @author Tobi Knaup
 */
public final class RequestStatsFilter implements Filter {

    final RequestStats requestStats;

    /**
     * @param statName name under which this endpoint's stats are exported
     */
    public RequestStatsFilter(String statName) {
        this.requestStats = new RequestStats(statName);
    }

    @Override
    public void init(FilterConfig filterConfig) throws ServletException {
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
            throws IOException, ServletException {
        // Fix: the original declared this as boxed Long, autoboxing a timestamp
        // on every request for no benefit; a primitive long is the right type.
        long start = System.nanoTime();
        chain.doFilter(request, response);
        // requestComplete takes microseconds, so convert from nanos.
        requestStats.requestComplete((System.nanoTime() - start) / 1000);
    }

    @Override
    public void destroy() {
    }
}
5,473
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/UnitTestModule.java
package com.netflix.raigad.configuration; import com.google.inject.AbstractModule; import com.google.inject.Scopes; import com.google.inject.name.Names; import com.netflix.raigad.backup.AbstractRepository; import com.netflix.raigad.backup.S3Repository; import org.junit.Ignore; import org.quartz.SchedulerFactory; import org.quartz.impl.StdSchedulerFactory; @Ignore public class UnitTestModule extends AbstractModule { @Override protected void configure() { bind(IConfiguration.class).toInstance(new FakeConfiguration(FakeConfiguration.FAKE_REGION, "fake-app", "az1", "fakeInstance1")); bind(SchedulerFactory.class).to(StdSchedulerFactory.class).in(Scopes.SINGLETON); bind(AbstractRepository.class).annotatedWith(Names.named("s3")).to(S3Repository.class); } }
5,474
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/TestAbstractConfigSource.java
package com.netflix.raigad.configuration; import com.google.common.collect.ImmutableList; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; import static org.junit.Assert.assertEquals;

/**
 * Tests that AbstractConfigSource.getList splits comma-separated values and
 * trims surrounding whitespace.
 */
public class TestAbstractConfigSource {

    private static final Logger LOGGER = LoggerFactory.getLogger(TestAbstractConfigSource.class.getName());

    /** Stores the raw value under "foo" and returns what getList parses out of it. */
    private static List<String> parse(String rawValue) {
        AbstractConfigSource source = new MemoryConfigSource();
        source.set("foo", rawValue);
        final List<String> values = source.getList("foo");
        LOGGER.info("Values {}", values);
        return values;
    }

    @Test
    public void lists() {
        assertEquals(ImmutableList.of("bar", "baz", "qux"), parse("bar,baz, qux "));
    }

    @Test
    public void oneItem() {
        assertEquals(ImmutableList.of("bar"), parse("bar"));
    }

    @Test
    public void oneItemWithSpace() {
        assertEquals(ImmutableList.of("bar"), parse("\tbar "));
    }
}
5,475
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/FakeConfiguration.java
package com.netflix.raigad.configuration;

import java.util.List;

/**
 * Stub IConfiguration used by unit tests. Almost every getter returns a fixed
 * neutral value (null/0/false); only a handful return meaningful test fixtures
 * (backup location, index metadata, process name, restore source cluster, DC).
 *
 * NOTE(review): the constructor stores region/zone/instanceId but getRac(),
 * getInstanceId() and friends still return null, and getDC() returns a
 * hard-coded "us-east-1" rather than the region field — presumably deliberate
 * for the current tests, but confirm before relying on those fields.
 */
public class FakeConfiguration implements IConfiguration {

    public static final String FAKE_REGION = "us-east-1";
    // Canned index metadata JSON consumed by index-management tests.
    public static final String INDEX_METADATA = "[{\"retentionType\":\"daily\",\"retentionPeriod\":5,\"indexName\":\"test_index\",\"preCreate\":\"true\"}]";
    public static final String ES_PROCESS_NAME = "org.elasticsearch.bootstrap.Elasticsearch";

    public String region;
    public String appName;
    public String zone;
    public String instanceId;

    public FakeConfiguration() {
        this(FAKE_REGION, "my_fake_cluster", "my_zone", "i-01234567890123456");
    }

    public FakeConfiguration(String region, String appName, String zone, String instanceId) {
        this.region = region;
        this.appName = appName;
        this.zone = zone;
        this.instanceId = instanceId;
    }

    @Override public void initialize() { }

    // --- Elasticsearch install / process settings: all stubbed to neutral values ---
    @Override public String getElasticsearchHome() { return null; }
    @Override public String getYamlLocation() { return null; }
    @Override public String getBackupLocation() { return "es-backup-test"; }
    @Override public String getElasticsearchStartupScript() { return null; }
    @Override public String getElasticsearchStopScript() { return null; }
    @Override public int getTransportTcpPort() { return 0; }
    @Override public int getHttpPort() { return 0; }
    @Override public int getNumOfShards() { return 0; }
    @Override public int getNumOfReplicas() { return 0; }
    @Override public int getTotalShardsPerNode() { return 0; }
    @Override public String getRefreshInterval() { return null; }
    @Override public boolean isMasterQuorumEnabled() { return false; }
    @Override public int getMinimumMasterNodes() { return 0; }
    @Override public String getPingTimeout() { return null; }
    @Override public boolean isPingMulticastEnabled() { return false; }
    @Override public String getFdPingInterval() { return null; }
    @Override public String getFdPingTimeout() { return null; }
    @Override public String getDataFileLocation() { return null; }
    @Override public String getLogFileLocation() { return null; }
    @Override public boolean doesElasticsearchStartManually() { return false; }

    // --- Instance / cluster identity ---
    @Override public String getAppName() { return appName; }
    @Override public String getRac() { return null; }
    @Override public List<String> getRacs() { return null; }
    @Override public String getHostname() { return null; }
    @Override public String getInstanceName() { return null; }
    // NOTE(review): ignores the stored instanceId field — TODO confirm intentional.
    @Override public String getInstanceId() { return null; }
    @Override public String getDC() { return "us-east-1"; }
    @Override public void setDC(String dc) { }
    @Override public String getASGName() { return null; }
    @Override public String getStackName() { return null; }
    @Override public String getACLGroupName() { return null; }
    @Override public String getHostIP() { return null; }
    @Override public String getHostLocalIP() { return null; }
    @Override public String getBootClusterName() { return null; }
    @Override public String getElasticsearchProcessName() { return ES_PROCESS_NAME; }
    @Override public String getElasticsearchDiscoveryType() { return null; }
    @Override public boolean isMultiDC() { return false; }
    @Override public String getIndexRefreshInterval() { return null; }
    @Override public String getClusterRoutingAttributes() { return null; }
    @Override public boolean isAsgBasedDedicatedDeployment() { return false; }
    @Override public boolean isCustomShardAllocationPolicyEnabled() { return false; }
    @Override public String getClusterShardAllocationAttribute() { return null; }
    @Override public String getExtraConfigParams() { return null; }
    @Override public String getEsKeyName(String escarKey) { return null; }
    @Override public boolean isDebugEnabled() { return false; }
    @Override public boolean isShardPerNodeEnabled() { return false; }

    // --- Index auto-creation fixtures ---
    @Override public boolean isIndexAutoCreationEnabled() { return false; }
    @Override public String getIndexMetadata() { return INDEX_METADATA; }
    @Override public int getAutoCreateIndexTimeout() { return 3000; }
    @Override public int getAutoCreateIndexInitialStartDelaySeconds() { return 0; }
    @Override public int getAutoCreateIndexScheduleMinutes() { return 0; }

    // --- Snapshot backup settings ---
    @Override public boolean isSnapshotBackupEnabled() { return false; }
    @Override public String getCommaSeparatedIndicesToBackup() { return "_all"; }
    @Override public boolean partiallyBackupIndices() { return false; }
    @Override public boolean includeGlobalStateDuringBackup() { return false; }
    @Override public boolean waitForCompletionOfBackup() { return true; }
    @Override public boolean includeIndexNameInSnapshot() { return false; }
    @Override public boolean isHourlySnapshotEnabled() { return false; }
    @Override public long getBackupCronTimerInSeconds() { return 0; }
    @Override public int getBackupHour() { return 0; }

    // --- Restore settings ---
    @Override public boolean isRestoreEnabled() { return false; }
    @Override public String getRestoreRepositoryName() { return null; }
    @Override public String getRestoreSourceClusterName() { return "fake-app"; }
    @Override public String getRestoreSourceRepositoryRegion() { return null; }
    @Override public String getRestoreLocation() { return null; }
    @Override public String getRestoreRepositoryType() { return null; }
    @Override public String getRestoreSnapshotName() { return null; }
    @Override public String getCommaSeparatedIndicesToRestore() { return null; }
    @Override public int getRestoreTaskInitialDelayInSeconds() { return 0; }

    // --- Tribe-node settings ---
    @Override public boolean amITribeNode() { return false; }
    @Override public boolean amIWriteEnabledTribeNode() { return false; }
    @Override public boolean amIMetadataEnabledTribeNode() { return false; }
    @Override public String getCommaSeparatedSourceClustersForTribeNode() { return null; }
    @Override public boolean amISourceClusterForTribeNode() { return false; }
    @Override public String getCommaSeparatedTribeClusterNames() { return null; }
    @Override public boolean isNodeMismatchWithDiscoveryEnabled() { return false; }
    @Override public int getDesiredNumberOfNodesInCluster() { return 0; }
    @Override public boolean isEurekaHealthCheckEnabled() { return false; }
    @Override public boolean isLocalModeEnabled() { return false; }

    // --- Cassandra / discovery / misc ---
    @Override public String getCassandraKeyspaceName() { return null; }
    @Override public int getCassandraThriftPortForAstyanax() { return 0; }
    @Override public boolean isEurekaHostSupplierEnabled() { return false; }
    @Override public String getCommaSeparatedCassandraHostNames() { return null; }
    @Override public boolean isSecurityGroupInMultiDC() { return false; }
    @Override public boolean isKibanaSetupRequired() { return false; }
    @Override public int getKibanaPort() { return 0; }
    // Not annotated @Override in the original — kept as-is.
    public boolean amISourceClusterForTribeNodeInMultiDC() { return false; }
    @Override public boolean reportMetricsFromMasterOnly() { return false; }
    @Override public String getTribePreferredClusterIdOnConflict() { return null; }
    @Override public String getEsNodeName() { return null; }

    // --- VPC settings ---
    @Override public boolean isDeployedInVPC() { return false; }
    @Override public boolean isVPCExternal() { return false; }
    @Override public String getACLGroupNameForVPC() { return null; }
    @Override public String getACLGroupIdForVPC() { return null; }
    @Override public void setACLGroupIdForVPC(String aclGroupIdForVPC) { }
    @Override public String getMacIdForInstance() { return null; }
}
5,476
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/TestSystemPropertiesConfigSource.java
package com.netflix.raigad.configuration; import org.junit.Test; import static org.junit.Assert.assertEquals; public class TestSystemPropertiesConfigSource { @Test public void read() { final String key = "java.version"; SystemPropertiesConfigSource configSource = new SystemPropertiesConfigSource(); configSource.initialize("asgName", "region"); // sys props are filtered to starting with escar, so this should be missing. assertEquals(null, configSource.get(key)); assertEquals(0, configSource.size()); } }
5,477
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/TestCompositeConfigSource.java
package com.netflix.raigad.configuration; import org.junit.Test; import static org.junit.Assert.assertEquals; public class TestCompositeConfigSource { @Test public void read() { MemoryConfigSource memoryConfigSource = new MemoryConfigSource(); IConfigSource configSource = new CompositeConfigSource(memoryConfigSource); configSource.initialize("foo", "bar"); assertEquals(0, configSource.size()); configSource.set("foo", "bar"); assertEquals(1, configSource.size()); assertEquals("bar", configSource.get("foo")); // verify that the writes went to mem source. assertEquals(1, memoryConfigSource.size()); assertEquals("bar", memoryConfigSource.get("foo")); } @Test public void readMultiple() { MemoryConfigSource m1 = new MemoryConfigSource(); m1.set("foo", "foo"); MemoryConfigSource m2 = new MemoryConfigSource(); m2.set("bar", "bar"); MemoryConfigSource m3 = new MemoryConfigSource(); m3.set("baz", "baz"); IConfigSource configSource = new CompositeConfigSource(m1, m2, m3); assertEquals(3, configSource.size()); assertEquals("foo", configSource.get("foo")); assertEquals("bar", configSource.get("bar")); assertEquals("baz", configSource.get("baz")); // read default assertEquals("test", configSource.get("doesnotexist", "test")); } }
5,478
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/TestPropertiesConfigSource.java
package com.netflix.raigad.configuration; import org.junit.Test; import static org.junit.Assert.assertEquals; public class TestPropertiesConfigSource { @Test public void readFile() { PropertiesConfigSource configSource = new PropertiesConfigSource("conf/raigad.properties"); configSource.initialize("asgName", "region"); assertEquals("\"/tmp/data\"", configSource.get("Raigad.path.data")); assertEquals(9001, configSource.get("Raigad.transport.tcp.port", 0)); // File has 5 lines, but line 6 is "Raigad.http.port9002", so it gets filtered out with empty string check. assertEquals(4, configSource.size()); } @Test public void updateKey() { PropertiesConfigSource configSource = new PropertiesConfigSource("conf/raigad.properties"); configSource.initialize("asgName", "region"); // File has 5 lines, but line 2 is "escar.http.port9002", so it gets filtered out with empty string check. assertEquals(4, configSource.size()); configSource.set("foo", "bar"); assertEquals(5, configSource.size()); assertEquals("bar", configSource.get("foo")); assertEquals(9001, configSource.get("Raigad.transport.tcp.port", 0)); configSource.set("Raigad.transport.tcp.port", Integer.toString(10)); assertEquals(10, configSource.get("Raigad.transport.tcp.port", 0)); } }
5,479
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/resources/TestElasticsearchConfig.java
package com.netflix.raigad.resources; import com.netflix.raigad.configuration.CustomConfigSource; import com.netflix.raigad.configuration.IConfiguration; import com.netflix.raigad.identity.InstanceManager; import com.netflix.raigad.identity.RaigadInstance; import com.netflix.raigad.startup.RaigadServer; import com.netflix.raigad.utils.TribeUtils; import org.junit.Before; import org.junit.Test; import javax.ws.rs.core.Response; import java.util.Collections; import java.util.List; import static java.util.Arrays.asList; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.*; public class TestElasticsearchConfig { private TribeUtils tribeUtils; private IConfiguration config; @Before public void setUp() { tribeUtils = mock(TribeUtils.class); config = mock(IConfiguration.class); } @Test public void getNodes() { RaigadInstance raigadInstance1 = new RaigadInstance(); raigadInstance1.setApp("fake-app1"); RaigadInstance raigadInstance2 = new RaigadInstance(); raigadInstance2.setApp("fake-app2"); RaigadInstance raigadInstance3 = new RaigadInstance(); raigadInstance3.setApp("fake-app3"); final List<RaigadInstance> nodes = asList(raigadInstance1, raigadInstance2, raigadInstance3); InstanceManager instanceManager = mock(InstanceManager.class); when(instanceManager.getAllInstances()).thenReturn(nodes); RaigadServer raigadServer = mock(RaigadServer.class); when(raigadServer.getInstanceManager()).thenReturn(instanceManager); ElasticsearchConfig elasticsearchConfig = new ElasticsearchConfig(raigadServer, tribeUtils, new CustomConfigSource(), config); Response response = elasticsearchConfig.getNodes(); assertEquals(200, response.getStatus()); verify(raigadServer, times(1)).getInstanceManager(); verify(instanceManager, times(1)).getAllInstances(); } @Test public void getNodes_notFound() { InstanceManager instanceManager = mock(InstanceManager.class); when(instanceManager.getAllInstances()).thenReturn(Collections.emptyList()); RaigadServer raigadServer = 
mock(RaigadServer.class); when(raigadServer.getInstanceManager()).thenReturn(instanceManager); ElasticsearchConfig elasticsearchConfig = new ElasticsearchConfig(raigadServer, tribeUtils, new CustomConfigSource(), config); Response response = elasticsearchConfig.getNodes(); assertEquals(200, response.getStatus()); verify(raigadServer, times(1)).getInstanceManager(); verify(instanceManager, times(1)).getAllInstances(); } @Test public void getNodes_Error() { InstanceManager instanceManager = mock(InstanceManager.class); when(instanceManager.getAllInstances()).thenReturn(null); RaigadServer raigadServer = mock(RaigadServer.class); when(raigadServer.getInstanceManager()).thenReturn(instanceManager); ElasticsearchConfig elasticsearchConfig = new ElasticsearchConfig(raigadServer, tribeUtils, new CustomConfigSource(), config); Response response = elasticsearchConfig.getNodes(); assertEquals(500, response.getStatus()); verify(raigadServer, times(1)).getInstanceManager(); verify(instanceManager, times(1)).getAllInstances(); } @Test public void getNodes_handlesUnknownHostException() { InstanceManager instanceManager = mock(InstanceManager.class); when(instanceManager.getAllInstances()).thenThrow(new RuntimeException()); RaigadServer raigadServer = mock(RaigadServer.class); when(raigadServer.getInstanceManager()).thenReturn(instanceManager); ElasticsearchConfig elasticsearchConfig = new ElasticsearchConfig(raigadServer, tribeUtils, new CustomConfigSource(), config); Response response = elasticsearchConfig.getNodes(); assertEquals(500, response.getStatus()); verify(raigadServer, times(1)).getInstanceManager(); verify(instanceManager, times(1)).getAllInstances(); } }
5,480
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/scheduler/TestGuiceSingleton.java
package com.netflix.raigad.scheduler; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Singleton; import org.junit.Test; public class TestGuiceSingleton { public static class GModules extends AbstractModule { @Override protected void configure() { bind(EmptyInterface.class).to(GuiceSingleton.class).asEagerSingleton(); } } public interface EmptyInterface { public String print(); } @Singleton public static class GuiceSingleton implements EmptyInterface { public String print() { System.out.println(this.toString()); return this.toString(); } } @Test public void testSingleton() { Injector injector = Guice.createInjector(new GModules()); injector.getInstance(EmptyInterface.class).print(); injector.getInstance(EmptyInterface.class).print(); injector.getInstance(EmptyInterface.class).print(); printInjected(); printInjected(); printInjected(); printInjected(); } public void printInjected() { Injector injector = Guice.createInjector(new GModules()); injector.getInstance(EmptyInterface.class).print(); } }
5,481
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/scheduler/TestScheduler.java
package com.netflix.raigad.scheduler; import com.google.inject.Guice; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Singleton; import com.netflix.raigad.configuration.IConfiguration; import com.netflix.raigad.configuration.UnitTestModule; import org.junit.Ignore; import org.junit.Test; import javax.management.MBeanServerFactory; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; public class TestScheduler { private static CountDownLatch latch; @Test public void testSchedule() throws Exception { latch = new CountDownLatch(1); Injector inject = Guice.createInjector(new UnitTestModule()); RaigadScheduler scheduler = inject.getInstance(RaigadScheduler.class); scheduler.start(); scheduler.addTask("test", TestTask.class, new SimpleTimer("testtask", 10)); // verify the task has run or fail in 1s latch.await(1000, TimeUnit.MILLISECONDS); scheduler.shutdown(); } @Test public void testSingleInstanceSchedule() throws Exception { latch = new CountDownLatch(3); Injector inject = Guice.createInjector(new UnitTestModule()); RaigadScheduler scheduler = inject.getInstance(RaigadScheduler.class); scheduler.start(); scheduler.addTask("test2", SingleTestTask.class, SingleTestTask.getTimer()); // verify 3 tasks run or fail in 1s latch.await(4000, TimeUnit.MILLISECONDS); scheduler.shutdown(); assertEquals(3, SingleTestTask.count); } @Ignore public static class TestTask extends Task { @Inject public TestTask(IConfiguration config) { // todo: mock the MBeanServer instead, but this will prevent exceptions due to duplicate registrations super(config, MBeanServerFactory.newMBeanServer()); } @Override public void execute() { latch.countDown(); } @Override public String getName() { return "test"; } } @Ignore @Singleton public static class SingleTestTask extends Task { @Inject public SingleTestTask(IConfiguration config) { super(config, MBeanServerFactory.newMBeanServer()); 
} public static int count = 0; @Override public void execute() { ++count; latch.countDown(); try { Thread.sleep(10); } catch (InterruptedException e) { e.printStackTrace(); } } @Override public String getName() { return "test2"; } public static TaskTimer getTimer() { return new SimpleTimer("test2", 11L); } } }
5,482
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/utils/FakeSleeper.java
package com.netflix.raigad.utils; public class FakeSleeper implements Sleeper { @Override public void sleep(long waitTimeMs) throws InterruptedException { // no-op } public void sleepQuietly(long waitTimeMs) { //no-op } }
5,483
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/utils/TestElasticsearchUtils.java
package com.netflix.raigad.utils; import com.netflix.raigad.configuration.IConfiguration; import com.netflix.raigad.identity.RaigadInstance; import mockit.Expectations; import mockit.Mock; import mockit.MockUp; import mockit.Mocked; import org.json.simple.JSONObject; import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; import java.util.List; public class TestElasticsearchUtils { @org.mockito.Mock @Mocked IConfiguration config; @Test public void TestInstanceToJson() { System.out.println("Starting a test..."); List<RaigadInstance> instances = getRaigadInstances(); JSONObject jsonInstances = ElasticsearchUtils.transformRaigadInstanceToJson(instances); System.out.println(jsonInstances); List<RaigadInstance> returnedInstances = ElasticsearchUtils.getRaigadInstancesFromJson(jsonInstances); System.out.println("Number of returned instances = " + returnedInstances.size()); for (RaigadInstance raigadInstance : returnedInstances) { System.out.println("-->" + raigadInstance); } } @Test public void TestAmIMasterNode() throws Exception { String expectedIp = "100.0.0.1"; new Expectations() { { config.getHostIP(); result = expectedIp; times = 1; config.getHostLocalIP(); times = 0; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return expectedIp; } }; Assert.assertTrue(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } @Test public void TestAmIMasterNodeWithWhitespace() throws Exception { String expectedIp = "100.0.0.1"; new Expectations() { { config.getHostIP(); result = expectedIp; times = 1; config.getHostLocalIP(); times = 0; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return expectedIp + " \n "; } }; Assert.assertTrue(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } @Test public void TestAmIMasterNodeExternalIp() throws Exception { String expectedLocalIp = "100.0.0.1"; String expectedExternalIp = "54.0.0.1"; new Expectations() { { config.getHostIP(); 
result = expectedExternalIp; times = 1; config.getHostLocalIP(); result = expectedLocalIp; times = 1; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return expectedLocalIp; } }; Assert.assertTrue(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } @Test public void TestAmIMasterNodeNegative() throws Exception { String expectedIp = "100.0.0.1"; String returnedIp = "100.0.0.2"; new Expectations() { { config.getHostIP(); result = expectedIp; times = 1; config.getHostLocalIP(); result = expectedIp; times = 1; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return returnedIp; } }; Assert.assertFalse(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } @Test public void TestAmIMasterNodeNegativeNull() throws Exception { new Expectations() { { config.getHostIP(); times = 0; config.getHostLocalIP(); times = 0; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return null; } }; Assert.assertFalse(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } @Test public void TestAmIMasterNodeNegativeEmpty() throws Exception { new Expectations() { { config.getHostIP(); times = 0; config.getHostLocalIP(); times = 0; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return ""; } }; Assert.assertFalse(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } public static List<RaigadInstance> getRaigadInstances() { List<RaigadInstance> instances = new ArrayList<RaigadInstance>(); for (int i = 0; i < 3; i++) { RaigadInstance raigadInstance = new RaigadInstance(); raigadInstance.setApp("cluster-" + i); raigadInstance.setAvailabilityZone("1d"); raigadInstance.setDC("us-east1"); raigadInstance.setHostIP("127.0.0." 
+ i); raigadInstance.setHostName("host-" + i); raigadInstance.setId("id-" + i); raigadInstance.setInstanceId("instance-" + i); raigadInstance.setUpdatetime(12345567); instances.add(raigadInstance); } return instances; } }
5,484
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/utils/TestElasticsearchProcessMonitor.java
package com.netflix.raigad.utils; import com.netflix.raigad.configuration.FakeConfiguration; import com.netflix.raigad.configuration.IConfiguration; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import javax.management.ObjectName; import java.io.IOException; import java.io.InputStream; import java.lang.management.ManagementFactory; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.*; public class TestElasticsearchProcessMonitor { private static String ELASTICSEARCH_PROCESS_NAME = FakeConfiguration.ES_PROCESS_NAME; private Process pgrepProcess; private InputStream processInputStream; private ElasticsearchProcessMonitor elasticsearchProcessMonitor; @Before public void setUp() throws IOException { processInputStream = mock(InputStream.class); pgrepProcess = mock(Process.class); when(pgrepProcess.getInputStream()).thenReturn(processInputStream); Runtime runtime = mock(Runtime.class); when(runtime.exec(anyString())).thenReturn(pgrepProcess); elasticsearchProcessMonitor = spy(new ElasticsearchProcessMonitor(mock(IConfiguration.class))); doReturn(runtime).when(elasticsearchProcessMonitor).getRuntime(); } @After public void cleanUp() throws Exception { ManagementFactory.getPlatformMBeanServer().unregisterMBean( new ObjectName("com.netflix.raigad.scheduler:type=" + ElasticsearchProcessMonitor.class.getName())); ElasticsearchProcessMonitor.isElasticsearchRunningNow.set(false); ElasticsearchProcessMonitor.wasElasticsearchStarted.set(false); } @Test public void testNullInputStream() throws Exception { doReturn(null).when(elasticsearchProcessMonitor).getFirstLine(processInputStream); elasticsearchProcessMonitor.checkElasticsearchProcess(ELASTICSEARCH_PROCESS_NAME); verify(processInputStream, times(1)).close(); verify(pgrepProcess, times(1)).destroyForcibly(); Assert.assertFalse(ElasticsearchProcessMonitor.isElasticsearchRunning()); 
Assert.assertFalse(ElasticsearchProcessMonitor.getWasElasticsearchStarted()); } @Test public void testEmptyInputStream() throws Exception { doReturn("").when(elasticsearchProcessMonitor).getFirstLine(processInputStream); elasticsearchProcessMonitor.checkElasticsearchProcess(ELASTICSEARCH_PROCESS_NAME); verify(processInputStream, times(1)).close(); verify(pgrepProcess, times(1)).destroyForcibly(); Assert.assertFalse(ElasticsearchProcessMonitor.isElasticsearchRunning()); Assert.assertFalse(ElasticsearchProcessMonitor.getWasElasticsearchStarted()); } @Test public void testValidInputStream() throws Exception { doReturn("1234").when(elasticsearchProcessMonitor).getFirstLine(processInputStream); elasticsearchProcessMonitor.checkElasticsearchProcess(ELASTICSEARCH_PROCESS_NAME); verify(processInputStream, times(1)).close(); verify(pgrepProcess, times(1)).destroyForcibly(); Assert.assertTrue(ElasticsearchProcessMonitor.isElasticsearchRunning()); Assert.assertTrue(ElasticsearchProcessMonitor.getWasElasticsearchStarted()); } @Test public void testElasticsearchWasStarted() throws Exception { doReturn("").when(elasticsearchProcessMonitor).getFirstLine(processInputStream); ElasticsearchProcessMonitor.isElasticsearchRunningNow.set(true); ElasticsearchProcessMonitor.wasElasticsearchStarted.set(true); elasticsearchProcessMonitor.checkElasticsearchProcess(ELASTICSEARCH_PROCESS_NAME); verify(processInputStream, times(1)).close(); verify(pgrepProcess, times(1)).destroyForcibly(); Assert.assertFalse(ElasticsearchProcessMonitor.isElasticsearchRunning()); Assert.assertTrue(ElasticsearchProcessMonitor.getWasElasticsearchStarted()); } }
5,485
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/defaultimpl/TestStandardTuner.java
package com.netflix.raigad.defaultimpl;

import com.google.common.io.Files;
import com.netflix.raigad.configuration.FakeConfiguration;
import com.netflix.raigad.configuration.IConfiguration;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.IOException;

/**
 * Smoke test for {@link StandardTuner}: copies the fixture elasticsearch.yml
 * to a scratch location and runs the property writer over it.
 */
public class TestStandardTuner {
    private IConfiguration config;
    private StandardTuner tuner;

    @Before
    public void setup() {
        config = new FakeConfiguration();
        tuner = new StandardTuner(config);
    }

    @Test
    public void dump() throws IOException {
        // FIX: the target path literal was duplicated (once in the variable,
        // once inline in the copy call), risking silent divergence on edit;
        // use the single variable for both.
        String target = "/tmp/raigad_test.yaml";
        Files.copy(new File("src/test/resources/elasticsearch.yml"), new File(target));
        tuner.writeAllProperties(target, "your_host");
    }
}
5,486
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/defaultimpl/TestElasticsearchProcessManager.java
package com.netflix.raigad.defaultimpl; import com.netflix.raigad.configuration.FakeConfiguration; import com.netflix.raigad.configuration.IConfiguration; import org.junit.Before; import org.junit.Test; import java.io.IOException; import static junit.framework.TestCase.assertEquals; import static org.junit.Assert.assertTrue; public class TestElasticsearchProcessManager { private ElasticsearchProcessManager elasticSearchProcessManager; @Before public void setup() { IConfiguration config = new FakeConfiguration("us-east-1", "test_cluster", "us-east-1a", "i-1234afd3"); elasticSearchProcessManager = new ElasticsearchProcessManager(config); } @Test public void logProcessOutput_BadApp() throws IOException, InterruptedException { Process p = null; try { p = new ProcessBuilder("ls", "/tmppppp").start(); int exitValue = p.waitFor(); assertTrue(0 != exitValue); elasticSearchProcessManager.logProcessOutput(p); } catch (IOException ioe) { if (p != null) { elasticSearchProcessManager.logProcessOutput(p); } } } /** * Note: this will succeed on a *nix machine, unclear about anything else... */ @Test public void logProcessOutput_GoodApp() throws IOException, InterruptedException { Process p = new ProcessBuilder("true").start(); int exitValue = p.waitFor(); assertEquals(0, exitValue); elasticSearchProcessManager.logProcessOutput(p); } }
5,487
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/dataobjects/TestMasterNodeInfoMapper.java
package com.netflix.raigad.dataobjects; import com.netflix.raigad.objectmapper.DefaultMasterNodeInfoMapper; import org.codehaus.jackson.JsonGenerationException; import org.codehaus.jackson.map.JsonMappingException; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.type.TypeReference; import org.junit.Test; import java.io.IOException; import java.util.ArrayList; import java.util.List; import static org.junit.Assert.assertEquals; /* [ { "id":"8sZZWYmmQaeNUKMq1S1uow", "host":"es-test-useast1d-master-i-9e112345", "ip":"10.111.22.333", "node":"us-east-1d.i-9e112345" } ] */ public class TestMasterNodeInfoMapper { ObjectMapper mapper = new DefaultMasterNodeInfoMapper(); @Test public void testMasterNodeInformationObject() throws IOException { String masterNodeInfo = "[{\"id\":\"8sZZWYmmQaeNUKMq1S1uow\",\"host\":\"es-test-useast1d-master-i-9e112345\",\"ip\":\"10.111.22.333\",\"node\":\"us-east-1d.i-9e112345\"}]"; try { List<MasterNodeInformation> myObjs = mapper.readValue(masterNodeInfo, new TypeReference<ArrayList<MasterNodeInformation>>() { }); assertEquals(1, myObjs.size()); for (MasterNodeInformation key : myObjs) { assertEquals("8sZZWYmmQaeNUKMq1S1uow", key.getId()); assertEquals("es-test-useast1d-master-i-9e112345", key.getHost()); assertEquals("10.111.22.333", key.getIp()); assertEquals("us-east-1d.i-9e112345", key.getNode()); } } catch (JsonGenerationException e) { e.printStackTrace(); } catch (JsonMappingException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } }
5,488
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/backup/TestBackupRestore.java
package com.netflix.raigad.backup;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.raigad.configuration.IConfiguration;
import com.netflix.raigad.configuration.UnitTestModule;
import com.netflix.raigad.utils.ElasticsearchTransportClient;
import mockit.Mock;
import mockit.MockUp;
import mockit.Mocked;
import org.apache.commons.io.FileUtils;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.RepositoryMissingException;
import org.elasticsearch.snapshots.SnapshotState;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.*;

import java.io.File;
import java.io.IOException;
import java.util.List;

/**
 * Reference:https://github.com/elasticsearch/elasticsearch-cloud-aws/blob/es-1.1/src/test/java/org/elasticsearch/repositories/s3/S3SnapshotRestoreTest.java
 * <p>
 * Following tests do not test S3 cloud functionality but uses fs (file system) locally to run Snapshot and Backup
 * TODO: Need to fix for S3 functionality
 */
/*
Example of the repository metadata the production code would register against S3
(kept here for reference; the tests below substitute a local "fs" repository):
{
    "20140331": {
        "type": "s3",
        "settings": {
            "region": "us-east-1",
            "base_path": "es_test/20140331",
            "bucket": "es-backup-test"
        }
    },
    "20140410": {
        "type": "s3",
        "settings": {
            "region": "us-east-1",
            "base_path": "es_test/20140410",
            "bucket": "es-backup-test"
        }
    }
}
*/
// NOTE(review): the whole class is @Ignore'd, so it only runs when the
// annotation is removed manually — presumably because the S3 path is untested.
@Ignore
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 2)
public class TestBackupRestore extends ESIntegTestCase {
    private static final char PATH_SEP = File.separatorChar;

    // Shared mutable state: written in setup()/MockS3Repository, read by the
    // static JMockit MockUp classes below (they cannot see instance fields).
    public static String repositoryName = "";
    public static String repositoryLocation = "";
    public static String LOCAL_DIR = "data";
    private static Injector injector;
    public static Client client0;

    // @Mocked fields are replaced by JMockit; setup() then swaps in the
    // Guice-provided instances so the MockUp subclasses can delegate to them.
    @Mocked
    private static ElasticsearchTransportClient esTransportClient;
    private static IConfiguration configuration;
    private static S3RepositorySettingsParams s3RepositorySettingsParams;
    private static S3Repository s3Repository;
    @Mocked
    private static SnapshotBackupManager snapshotBackupManager;
    @Mocked
    private static RestoreBackupManager restoreBackupManager;

    /**
     * Wires the unit-test Guice module, resolves all collaborators, and starts
     * from a clean slate (no repositories, empty local data directory).
     *
     * @throws IOException if the local data directory cannot be cleaned
     */
    @Before
    public final void setup() throws IOException {
        System.out.println("Running setup now...");
        injector = Guice.createInjector(new UnitTestModule());
        configuration = injector.getInstance(IConfiguration.class);
        s3RepositorySettingsParams = injector.getInstance(S3RepositorySettingsParams.class);
        esTransportClient = injector.getInstance(ElasticsearchTransportClient.class);
        s3Repository = injector.getInstance(S3Repository.class);
        // Guard against re-resolving: JMockit may have already populated these.
        if (snapshotBackupManager == null) {
            snapshotBackupManager = injector.getInstance(SnapshotBackupManager.class);
        }
        if (restoreBackupManager == null) {
            restoreBackupManager = injector.getInstance(RestoreBackupManager.class);
        }
        wipeRepositories();
        cleanupDir(LOCAL_DIR, null);
    }

    /**
     * Tears down all static state and removes on-disk snapshot artifacts so
     * one test cannot leak into the next.
     *
     * @throws IOException if the local data directory cannot be cleaned
     */
    @After
    public final void wipeAfter() throws IOException {
        System.out.println("Running wipeAfter ...");
        wipeRepositories();
        injector = null;
        configuration = null;
        s3RepositorySettingsParams = null;
        s3Repository = null;
        esTransportClient = null;
        client0 = null;
        cleanupDir(LOCAL_DIR, null);
    }

    /**
     * End-to-end snapshot/restore round trip against a local fs repository:
     * index 100 docs per index, snapshot, delete half the docs, close the
     * indices, restore, and verify all 100 docs per index are back.
     */
    @Test
    public void testSimpleWorkflow() throws Exception {
        client0 = client();
        repositoryName = s3Repository.getRemoteRepositoryName();
        //Create S3 Repository
        Assert.assertFalse(s3Repository.createOrGetSnapshotRepository() == null);
        createIndex("test-idx-1", "test-idx-3");
        ensureGreen();
        logger.info("--> indexing some data");
        for (int i = 0; i < 100; i++) {
            index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i);
            index("test-idx-3", "doc", Integer.toString(i), "foo", "baz" + i);
        }
        refresh();
        Assert.assertEquals(client0.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), 100L);
        Assert.assertEquals(client0.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), 100L);
        //Run backup
        snapshotBackupManager.runSnapshotBackup();
        Assert.assertEquals(
                client0.admin().cluster().prepareGetSnapshots(repositoryName).setSnapshots(
                        snapshotBackupManager.getSnapshotName("_all", false))
                        .get().getSnapshots().get(0).state(),
                SnapshotState.SUCCESS);
        logger.info("--> delete some data");
        // Delete the first 50 docs from idx-1 and every even-numbered doc from idx-3.
        for (int i = 0; i < 50; i++) {
            client0.prepareDelete("test-idx-1", "doc", Integer.toString(i)).get();
        }
        for (int i = 0; i < 100; i += 2) {
            client0.prepareDelete("test-idx-3", "doc", Integer.toString(i)).get();
        }
        refresh();
        Assert.assertEquals(client0.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), 50L);
        Assert.assertEquals(client0.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), 50L);
        logger.info("--> close indices");
        client0.admin().indices().prepareClose("test-idx-1", "test-idx-3").get();
        logger.info("--> restore all indices from the snapshot");
        restoreBackupManager.runRestore(repositoryName, "fs",
                snapshotBackupManager.getSnapshotName("_all", false), null, null, null);
        ensureGreen();
        // Restore should bring back the deleted documents in both indices.
        Assert.assertEquals(client0.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), 100L);
        Assert.assertEquals(client0.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), 100L);
    }

    /** JMockit fake that routes transport-client lookups to the test's static instances. */
    @Ignore
    public static class MockElasticsearchTransportClient extends MockUp<ElasticsearchTransportClient> {
        @Mock
        public static ElasticsearchTransportClient instance(IConfiguration config) {
            return esTransportClient;
        }

        @Mock
        public Client getTransportClient() {
            return client0;
        }
    }

    /**
     * JMockit fake that registers a local "fs" repository instead of an S3 one,
     * recording the chosen location in {@link #repositoryLocation}.
     */
    @Ignore
    public static class MockS3Repository extends MockUp<S3Repository> {
        @Mock
        public PutRepositoryResponse getPutRepositoryResponse(Client esTransportClient, String s3RepoName) {
            String localRepositoryLocation = LOCAL_DIR + PATH_SEP + s3RepositorySettingsParams.getBase_path();
            PutRepositoryResponse putRepositoryResponse =
                    client0.admin().cluster()
                            .preparePutRepository(repositoryName)
                            .setType(AbstractRepository.RepositoryType.fs.name())
                            .setSettings(Settings.builder().put("location", localRepositoryLocation))
                            .get();
            //Setting local repository location
            repositoryLocation = localRepositoryLocation;
            return putRepositoryResponse;
        }
    }

    /** JMockit fake that creates the snapshot through the in-test cluster client. */
    @Ignore
    public static class MockSnapshotBackupManager extends MockUp<SnapshotBackupManager> {
        @Mock
        public CreateSnapshotResponse getCreateSnapshotResponse(Client esTransportClient,
                                                                String repositoryName, String snapshotName) {
            return client0.admin().cluster().prepareCreateSnapshot(repositoryName, snapshotName)
                    .setWaitForCompletion(configuration.waitForCompletionOfBackup())
                    .setIndices(configuration.getCommaSeparatedIndicesToBackup())
                    .setIncludeGlobalState(configuration.includeGlobalStateDuringBackup())
                    .setPartial(configuration.partiallyBackupIndices()).get();
        }
    }

    /**
     * JMockit fake that restores "test-idx-*" from the latest "_all" snapshot,
     * ignoring the snapshot name the caller passed in.
     */
    @Ignore
    public static class MockRestoreBackupManager extends MockUp<RestoreBackupManager> {
        @Mock
        public RestoreSnapshotResponse getRestoreSnapshotResponse(
                Client esTransportClient, String commaSeparatedIndices,
                String restoreRepositoryName, String snapshotN) {
            // Overwrites the requested snapshot name with the canonical one.
            snapshotN = snapshotBackupManager.getSnapshotName("_all", false);
            return client0.admin().cluster().prepareRestoreSnapshot(repositoryName, snapshotN)
                    .setIndices("test-idx-*")
                    .setWaitForCompletion(true)
                    .execute()
                    .actionGet();
        }
    }

    /**
     * Empties {@code dirPath}, or each listed child directory when
     * {@code childDirs} is non-empty.
     *
     * @param dirPath   directory whose contents are removed
     * @param childDirs optional child directory names; null/empty means clean the root
     * @throws IOException if a directory cannot be cleaned
     */
    public static void cleanupDir(String dirPath, List<String> childDirs) throws IOException {
        if (childDirs == null || childDirs.size() == 0) {
            FileUtils.cleanDirectory(new File(dirPath));
        } else {
            for (String childDir : childDirs) {
                FileUtils.cleanDirectory(new File(dirPath + "/" + childDir));
            }
        }
    }

    /**
     * Deletes repositories, supports wildcard notation.
     */
    public static void wipeRepositories(String... repositories) {
        // if nothing is provided, delete all
        if (repositories.length == 0) {
            repositories = new String[]{"*"};
        }
        for (String repository : repositories) {
            try {
                client().admin().cluster().prepareDeleteRepository(repository).execute().actionGet();
            } catch (RepositoryMissingException ex) {
                // ignore
            }
        }
    }
}
5,489
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/indexmanagement/TestIndexNameFilter.java
package com.netflix.raigad.indexmanagement; import com.netflix.raigad.indexmanagement.indexfilters.DatePatternIndexNameFilter; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.junit.Test; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public class TestIndexNameFilter { @Test public void testWrongPrefix() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("foo2018")); } @Test public void testYearlyPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2018")); } @Test public void testYearlyPatternWithYYYYMM() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("abcd201802")); } @Test public void testMonthlyPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMM"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd201802")); } @Test public void testMonthlyPatternWithSingleDigitMonth() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMM"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("abcd20182")); } @Test public void testMonthlyPatternWithYYYYMMdd() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMM"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("abcd20180203")); } @Test public void testWeeklyPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'-YYYY-ww"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd-2018-51")); } @Test public void 
testWeeklyPatternInvalid() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'-YYYY-ww"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("abcd-2018-53")); } @Test public void testDailyPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMMdd"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd20180203")); } @Test public void testHalfDayPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'-YYYY-MM-dd-aa"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd-2018-02-03-AM")); assertTrue(filter.filter("abcd-2018-02-03-PM")); assertFalse(filter.filter("abcd-2018-02-03-BC")); } @Test public void testHourlyPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMMddHH"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2018020323")); } @Test public void testHourlyPatternInvalidHour() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMMddHH"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("abcd2018020328")); } @Test public void testPatternWithDashes() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY-MM-dd"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2018-02-27")); } @Test public void testPatternWithDots() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY.MM.dd"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2018.02.27")); } @Test public void testPatternWithSuffix() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY-MM-dd'ghi'"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2018-02-27ghi")); } @Test public 
void testHourlyIndexNameFilter() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMMddHH"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2013120300")); assertTrue(filter.filter("abcd2013120301")); assertTrue(filter.filter("abcd2013120312")); assertTrue(filter.filter("abcd2013120323")); assertFalse(filter.filter("abcd12013120323")); assertFalse(filter.filter("abcd2013120324")); assertFalse(filter.filter("abcd2013120345")); assertFalse(filter.filter("abcd20231248")); assertFalse(filter.filter("_abc")); } @Test public void testDailyIndexNameFilter() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMMdd"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd20131203")); assertFalse(filter.filter("abcd120131203")); assertFalse(filter.filter("abcd20231248")); assertFalse(filter.filter("abcd202312")); assertFalse(filter.filter("_abc")); } @Test public void testMonthlyIndexNameFilter() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMM"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd202312")); assertFalse(filter.filter("abcd1202312")); assertFalse(filter.filter("abcd20131203")); assertFalse(filter.filter("_abc")); System.out.println(formatter.parseDateTime("abcd20231")); assertFalse(filter.filter("abcd20231")); assertFalse(filter.filter("abcd202313")); assertFalse(filter.filter("abcd20231248")); } @Test public void testYearlyIndexNameFilter() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2023")); assertFalse(filter.filter("abcd20131203")); assertFalse(filter.filter("_abc")); assertFalse(filter.filter("abcd202")); } }
5,490
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/indexmanagement/TestElasticsearchIndexManager.java
package com.netflix.raigad.indexmanagement;

import com.netflix.raigad.configuration.IConfiguration;
import com.netflix.raigad.indexmanagement.exception.UnsupportedAutoIndexException;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.client.Client;
import org.joda.time.DateTime;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import javax.management.ObjectName;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.util.*;

import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link ElasticsearchIndexManager} retention/pre-creation
 * decisions, using a Mockito spy so the real decision logic runs while the
 * cluster-facing calls (transport client, stats, deletes) are stubbed.
 */
public class TestElasticsearchIndexManager {
    private static final int AUTO_CREATE_INDEX_TIMEOUT = 300000;

    private Client elasticsearchClient;
    private IConfiguration config;
    private ElasticsearchIndexManager elasticsearchIndexManager;

    /**
     * Builds the spy under test: real ElasticsearchIndexManager with the
     * transport client, index stats, and destructive deleteIndices stubbed out.
     */
    @Before
    public void setUp() throws Exception {
        config = mock(IConfiguration.class);
        when(config.getAutoCreateIndexTimeout()).thenReturn(AUTO_CREATE_INDEX_TIMEOUT);

        elasticsearchClient = mock(Client.class);

        elasticsearchIndexManager = spy(new ElasticsearchIndexManager(config, null));
        doReturn(elasticsearchClient).when(elasticsearchIndexManager).getTransportClient();
        // Never actually delete anything; invocations are checked via verify().
        doNothing().when(elasticsearchIndexManager).deleteIndices(eq(elasticsearchClient), anyString(), anyInt());
    }

    /** Metadata without an index name is not actionable: no retention, no pre-create. */
    @Test
    public void testRunIndexManagement_NotActionable_NoIndex() throws Exception {
        String serializedIndexMetadata = "[{\"retentionType\": \"yearly\", \"retentionPeriod\": 20}]";
        when(config.getIndexMetadata()).thenReturn(serializedIndexMetadata);

        Map<String, IndexStats> indexStats = new HashMap<>();
        indexStats.put("nf_errors_log2018", new IndexStats("nf_errors_log2018", new ShardStats[]{}));

        IndicesStatsResponse indicesStatsResponse = mock(IndicesStatsResponse.class);
        when(indicesStatsResponse.getIndices()).thenReturn(indexStats);
        doReturn(indicesStatsResponse).when(elasticsearchIndexManager).getIndicesStatsResponse(elasticsearchClient);

        elasticsearchIndexManager.runIndexManagement();

        verify(elasticsearchIndexManager, times(0)).checkIndexRetention(any(Client.class), anySet(), any(IndexMetadata.class), any(DateTime.class));
        verify(elasticsearchIndexManager, times(0)).preCreateIndex(any(Client.class), any(IndexMetadata.class), any(DateTime.class));
    }

    /** Metadata without a retention period is likewise not actionable. */
    @Test
    public void testRunIndexManagement_NotActionable_NoRetentionPeriod() throws Exception {
        String serializedIndexMetadata = "[{\"retentionType\": \"yearly\", \"indexName\": \"nf_errors_log\"}]";
        when(config.getIndexMetadata()).thenReturn(serializedIndexMetadata);

        Map<String, IndexStats> indexStats = new HashMap<>();
        indexStats.put("nf_errors_log2018", new IndexStats("nf_errors_log2018", new ShardStats[]{}));

        IndicesStatsResponse indicesStatsResponse = mock(IndicesStatsResponse.class);
        when(indicesStatsResponse.getIndices()).thenReturn(indexStats);
        doReturn(indicesStatsResponse).when(elasticsearchIndexManager).getIndicesStatsResponse(elasticsearchClient);

        elasticsearchIndexManager.runIndexManagement();

        verify(elasticsearchIndexManager, times(0)).checkIndexRetention(any(Client.class), anySet(), any(IndexMetadata.class), any(DateTime.class));
        verify(elasticsearchIndexManager, times(0)).preCreateIndex(any(Client.class), any(IndexMetadata.class), any(DateTime.class));
    }

    /**
     * Yearly retention of 3: with indices for 2012-2018 present, the two
     * oldest (2012, 2013) must be deleted; pre-create is off by default.
     */
    @Test
    public void testRunIndexManagement() throws Exception {
        String serializedIndexMetadata = "[{\"retentionType\": \"yearly\", \"retentionPeriod\": 3, \"indexName\": \"nf_errors_log\"}]";
        when(config.getIndexMetadata()).thenReturn(serializedIndexMetadata);

        Map<String, IndexStats> indexStats = new HashMap<>();
        indexStats.put("nf_errors_log2018", new IndexStats("nf_errors_log2018", new ShardStats[]{}));
        indexStats.put("nf_errors_log2017", new IndexStats("nf_errors_log2017", new ShardStats[]{}));
        indexStats.put("nf_errors_log2016", new IndexStats("nf_errors_log2016", new ShardStats[]{}));
        indexStats.put("nf_errors_log2015", new IndexStats("nf_errors_log2015", new ShardStats[]{}));
        indexStats.put("nf_errors_log2014", new IndexStats("nf_errors_log2014", new ShardStats[]{}));
        indexStats.put("nf_errors_log2013", new IndexStats("nf_errors_log2013", new ShardStats[]{}));
        indexStats.put("nf_errors_log2012", new IndexStats("nf_errors_log2012", new ShardStats[]{}));

        IndicesStatsResponse indicesStatsResponse = mock(IndicesStatsResponse.class);
        when(indicesStatsResponse.getIndices()).thenReturn(indexStats);
        doReturn(indicesStatsResponse).when(elasticsearchIndexManager).getIndicesStatsResponse(elasticsearchClient);

        elasticsearchIndexManager.runIndexManagement();

        verify(elasticsearchIndexManager, times(1)).checkIndexRetention(any(Client.class), anySet(), any(IndexMetadata.class), any(DateTime.class));
        verify(elasticsearchIndexManager, times(1)).deleteIndices(any(Client.class), eq("nf_errors_log2012"), eq(AUTO_CREATE_INDEX_TIMEOUT));
        verify(elasticsearchIndexManager, times(1)).deleteIndices(any(Client.class), eq("nf_errors_log2013"), eq(AUTO_CREATE_INDEX_TIMEOUT));
        verify(elasticsearchIndexManager, times(0)).preCreateIndex(any(Client.class), any(IndexMetadata.class), any(DateTime.class));
    }

    /**
     * Hourly retention of 2 at 13:30: only the 10:00 index falls outside the
     * window and must be deleted.
     */
    @Test
    public void testCheckIndexRetention_Hourly() throws IOException, UnsupportedAutoIndexException {
        String serializedIndexMetadata = "[{\"preCreate\": false, \"retentionType\": \"hourly\", \"retentionPeriod\": 2, \"indexName\": \"nf_errors_log\"}]";
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(serializedIndexMetadata);
        IndexMetadata indexMetadata = indexMetadataList.get(0);

        Set<String> indices = new HashSet<>(
                Arrays.asList("nf_errors_log2017062210", "nf_errors_log2017062211", "nf_errors_log2017062212",
                        "nf_errors_log2017062213", "nf_errors_log2017062214"));

        elasticsearchIndexManager.checkIndexRetention(elasticsearchClient, indices, indexMetadata, new DateTime("2017-06-22T13:30Z"));

        verify(elasticsearchIndexManager, times(1)).deleteIndices(any(Client.class), eq("nf_errors_log2017062210"), eq(AUTO_CREATE_INDEX_TIMEOUT));
    }

    /**
     * Two metadata entries whose name patterns both match the same indices:
     * retention must be checked once per entry (twice total).
     */
    @Test
    public void testCheckIndexRetention_Overlapping() throws Exception {
        String serializedIndexMetadata = "[{\"preCreate\": false, \"retentionType\": \"hourly\", \"retentionPeriod\": 2, \"indexName\": \"nf_errors_log\"}," +
                "{\"preCreate\": false, \"retentionType\": \"yearly\", \"retentionPeriod\": 3, \"indexName\": \"nf_errors_log201712\"}]";
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(serializedIndexMetadata);

        Map<String, IndexStats> indexStats = new HashMap<>();
        indexStats.put("nf_errors_log2017121110", new IndexStats("nf_errors_log2017121110", new ShardStats[]{}));
        indexStats.put("nf_errors_log2017121111", new IndexStats("nf_errors_log2017121111", new ShardStats[]{}));
        indexStats.put("nf_errors_log2017121112", new IndexStats("nf_errors_log2017121112", new ShardStats[]{}));
        indexStats.put("nf_errors_log2017121113", new IndexStats("nf_errors_log2017121113", new ShardStats[]{}));
        indexStats.put("nf_errors_log2017121114", new IndexStats("nf_errors_log2017121114", new ShardStats[]{}));

        IndicesStatsResponse indicesStatsResponse = mock(IndicesStatsResponse.class);
        when(indicesStatsResponse.getIndices()).thenReturn(indexStats);
        doReturn(indicesStatsResponse).when(elasticsearchIndexManager).getIndicesStatsResponse(elasticsearchClient);

        elasticsearchIndexManager.runIndexManagement(elasticsearchClient, indexMetadataList, new DateTime("2017-12-11T13:30Z"));

        verify(elasticsearchIndexManager, times(2)).checkIndexRetention(any(Client.class), anySet(), any(IndexMetadata.class), any(DateTime.class));
    }

    /**
     * Unregisters the MBean the manager registers under the scheduler domain,
     * so the next test's registration does not collide.
     */
    @After
    public void cleanUp() throws Exception {
        ManagementFactory.getPlatformMBeanServer().unregisterMBean(
                new ObjectName("com.netflix.raigad.scheduler:type=" + ElasticsearchIndexManager.class.getName()));
    }
}
5,491
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/indexmanagement/TestIndexMetadata.java
package com.netflix.raigad.indexmanagement;

import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.joda.time.Period;
import org.junit.Test;

import java.io.IOException;
import java.util.List;

import static org.junit.Assert.*;

/**
 * Tests for {@link IndexUtils#parseIndexMetadata} / {@link IndexMetadata}:
 * name-filter derivation, retention-period parsing, precedence of
 * {@code indexNamePattern} over {@code retentionType}/{@code indexName}, and
 * handling of malformed input.
 *
 * Fix over the previous revision: every {@code assertEquals} now passes the
 * EXPECTED value first and the ACTUAL value second (the JUnit contract), and
 * {@code assertEquals(x, null)} is replaced with {@code assertNull(x)} —
 * otherwise failure messages report expected/actual swapped.
 */
public class TestIndexMetadata {

    /** Missing indexName: filter rejects everything and the entry is not actionable. */
    @Test
    public void testBadInputNoIndexName() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"retentionType\": \"monthly\",\"retentionPeriod\": 20}]");
        assertEquals(1, indexMetadataList.size());
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("index201312"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("index20131212"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("a20141233"));
        assertFalse(indexMetadataList.get(0).isPreCreate());
        assertEquals("P20M", indexMetadataList.get(0).getRetentionPeriod().toString());
        assertNull(indexMetadataList.get(0).getIndexNamePattern());
        assertFalse(indexMetadataList.get(0).isActionable());
    }

    /** Missing retentionPeriod: filter still works, but the entry is not actionable. */
    @Test
    public void testBadInputNoRetention() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"retentionType\": \"monthly\", \"indexName\": \"nf_errors_log\"}]");
        assertEquals(1, indexMetadataList.size());
        assertTrue(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log201312"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log20131212"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log20141233"));
        assertFalse(indexMetadataList.get(0).isPreCreate());
        assertNull(indexMetadataList.get(0).getRetentionPeriod());
        assertFalse(indexMetadataList.get(0).isActionable());
    }

    /** Corrupted key names: the unknown keys are ignored, leaving no retention. */
    @Test
    public void testBadInputInvalidSymbols() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"retentionType\":\"monthly\",\"indexName\":\"nf_errors_log\",\"retentionPeriod?:6,?preCreate\":false}]");
        assertEquals(1, indexMetadataList.size());
        assertTrue(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log201312"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log20131212"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log20141233"));
        assertFalse(indexMetadataList.get(0).isPreCreate());
        assertNull(indexMetadataList.get(0).getRetentionPeriod());
        assertFalse(indexMetadataList.get(0).isActionable());
    }

    @Test(expected = JsonMappingException.class)
    public void testBadInputInvalidRetention() throws IOException {
        IndexUtils.parseIndexMetadata(
                "[{\"retentionType\": \"monthly\", \"indexName\": \"nf_errors_log\",\"retentionPeriod\":\"A\"}]");
    }

    @Test(expected = JsonMappingException.class)
    public void testBadInputInvalidNamePattern() throws IOException {
        // Pattern without quoted literal prefix is rejected at parse time.
        IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"nf_errors_logYYYY\",\"retentionPeriod\":\"P1M\"}]");
    }

    @Test(expected = JsonParseException.class)
    public void testBadInputBadJson() throws IOException {
        IndexUtils.parseIndexMetadata("[{\"retentionType\": \"monthly\", \"indexName\": \"nf_errors_log\",");
    }

    @Test
    public void testFiveMinuteRetention() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"'nf_errors_log'YYYY\",\"retentionPeriod\":\"PT5M\"}]");
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertEquals(Period.minutes(5), indexMetadata.getRetentionPeriod());
    }

    @Test
    public void testOneHourRetention() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"'nf_errors_log'YYYY\",\"retentionPeriod\":\"PT1H\"}]");
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertEquals(Period.hours(1), indexMetadata.getRetentionPeriod());
    }

    @Test
    public void test18MonthRetention() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"'nf_errors_log'YYYY\",\"retentionPeriod\":\"P18M\"}]");
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertEquals(Period.months(18), indexMetadata.getRetentionPeriod());
    }

    /** An explicit indexNamePattern wins over retentionType. */
    @Test
    public void testNamePatternOverridesRetentionType() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"'nf_errors_log'YYYY\",\"retentionType\":\"daily\",\"retentionPeriod\":\"P18M\"}]");
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertEquals("'nf_errors_log'YYYY", indexMetadata.getIndexNamePattern());
    }

    /** An explicit indexNamePattern wins over indexName. */
    @Test
    public void testNamePatternOverridesIndexName() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"'nf_errors_log'YYYY\",\"indexName\":\"errors\",\"retentionPeriod\":\"P18M\"}]");
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertEquals("'nf_errors_log'YYYY", indexMetadata.getIndexNamePattern());
    }

    /**
     * Four entries (yearly, monthly, hourly pre-created, daily): each must
     * derive the right name pattern, retention period, and filter behavior.
     */
    @Test
    public void testMixedRetention() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[ { \"retentionType\": \"yearly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\" }," +
                        "{ \"retentionType\": \"monthly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\" }," +
                        "{ \"retentionType\": \"hourly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\", \"preCreate\": \"true\" }," +
                        "{ \"retentionType\": \"daily\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\", \"preCreate\": \"false\" }]");
        assertEquals(4, indexMetadataList.size());

        // Yearly entry.
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log2013"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_log2013"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log201312"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20131212"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20141233"));
        assertFalse(indexMetadata.isPreCreate());
        assertEquals("'nf_errors_log'YYYY", indexMetadata.getIndexNamePattern());
        assertEquals("P20Y", indexMetadata.getRetentionPeriod().toString());
        assertTrue(indexMetadata.isActionable());

        // Monthly entry.
        indexMetadata = indexMetadataList.get(1);
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log201312"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_lgg201312"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20131212"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20141233"));
        assertFalse(indexMetadata.isPreCreate());
        assertEquals("'nf_errors_log'YYYYMM", indexMetadata.getIndexNamePattern());
        assertEquals("P20M", indexMetadata.getRetentionPeriod().toString());
        assertTrue(indexMetadata.isActionable());

        // Hourly entry with preCreate enabled.
        indexMetadata = indexMetadataList.get(2);
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log2013121201"));
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log2013121200"));
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log2013121223"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_lgg2013121223"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log2013121224"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20141233"));
        assertTrue(indexMetadata.isPreCreate());
        assertEquals("'nf_errors_log'YYYYMMddHH", indexMetadata.getIndexNamePattern());
        assertEquals("PT20H", indexMetadata.getRetentionPeriod().toString());
        assertTrue(indexMetadata.isActionable());

        // Daily entry with preCreate disabled.
        indexMetadata = indexMetadataList.get(3);
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log20131212"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_lgg20141230"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20141233"));
        assertFalse(indexMetadata.isPreCreate());
        assertEquals("'nf_errors_log'YYYYMMdd", indexMetadata.getIndexNamePattern());
        assertEquals("P20D", indexMetadata.getRetentionPeriod().toString());
        assertTrue(indexMetadata.isActionable());
    }
}
5,492
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/indexmanagement/TestIndexUtils.java
package com.netflix.raigad.indexmanagement;

import com.netflix.raigad.indexmanagement.exception.UnsupportedAutoIndexException;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.junit.Test;

import java.io.IOException;
import java.util.List;

import static org.junit.Assert.assertEquals;

/**
 * Tests for {@link IndexMetadata} date arithmetic: the past-retention cutoff
 * and the name of the next index to pre-create, both evaluated against a
 * fixed reference instant.
 */
public class TestIndexUtils {

    /** Parses {@code value} as a UTC instant using the given Joda-Time pattern. */
    private static DateTime utcInstant(String pattern, int value) {
        return DateTimeFormat.forPattern(pattern).withZoneUTC().parseDateTime(String.valueOf(value));
    }

    @Test
    public void testPastRetentionCutoffDate() throws IOException, UnsupportedAutoIndexException {
        String metadataJson =
                "[ { \"retentionType\": \"yearly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\" }," +
                        "{ \"retentionType\": \"monthly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\" }," +
                        "{ \"retentionType\": \"hourly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\", \"preCreate\": \"true\" }," +
                        "{ \"retentionType\": \"hourly\", \"retentionPeriod\": 40, \"indexName\": \"nf_errors_log\", \"preCreate\": \"true\" }," +
                        "{ \"retentionType\": \"daily\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\", \"preCreate\": \"false\" }]";
        List<IndexMetadata> entries = IndexUtils.parseIndexMetadata(metadataJson);

        // Entry order matches the JSON: yearly, monthly, hourly(20), hourly(40), daily.
        IndexMetadata yearly = entries.get(0);
        IndexMetadata monthly = entries.get(1);
        IndexMetadata hourlyTwenty = entries.get(2);
        IndexMetadata hourlyForty = entries.get(3);
        IndexMetadata daily = entries.get(4);

        DateTime referenceTime = new DateTime("2017-11-15T12:34:56Z");

        // Cutoff = reference minus the retention period, truncated to the unit.
        assertEquals(utcInstant("yyyy", 1997), yearly.getPastRetentionCutoffDate(referenceTime));
        assertEquals(utcInstant("yyyyMM", 201603), monthly.getPastRetentionCutoffDate(referenceTime));
        assertEquals(utcInstant("yyyyMMdd", 20171026), daily.getPastRetentionCutoffDate(referenceTime));
        assertEquals(utcInstant("yyyyMMddHH", 2017111416), hourlyTwenty.getPastRetentionCutoffDate(referenceTime));
        assertEquals(utcInstant("yyyyMMddHH", 2017111320), hourlyForty.getPastRetentionCutoffDate(referenceTime));
    }

    @Test
    public void testIndexNameToPreCreate() throws IOException, UnsupportedAutoIndexException {
        String metadataJson =
                "[ { \"retentionType\": \"yearly\", \"retentionPeriod\": 20, \"indexName\": \"index\" }," +
                        "{ \"retentionType\": \"monthly\", \"retentionPeriod\": 20, \"indexName\": \"0\" }," +
                        "{ \"retentionType\": \"hourly\", \"retentionPeriod\": 20, \"indexName\": \"index1\", \"preCreate\": \"true\" }," +
                        "{ \"retentionType\": \"daily\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log_useast1\", \"preCreate\": \"false\" }]";
        List<IndexMetadata> entries = IndexUtils.parseIndexMetadata(metadataJson);

        IndexMetadata yearly = entries.get(0);
        IndexMetadata monthly = entries.get(1);
        IndexMetadata hourly = entries.get(2);
        IndexMetadata daily = entries.get(3);

        DateTime referenceTime = new DateTime("2017-11-15T12:34:56Z");

        // Pre-created index = name + next period's timestamp suffix.
        assertEquals("index2018", yearly.getIndexNameToPreCreate(referenceTime));
        assertEquals("0201712", monthly.getIndexNameToPreCreate(referenceTime));
        assertEquals("nf_errors_log_useast120171116", daily.getIndexNameToPreCreate(referenceTime));
        assertEquals("index12017111513", hourly.getIndexNameToPreCreate(referenceTime));
    }
}
5,493
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/RaigadConfiguration.java
/** * Copyright 2017 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.raigad.configuration; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.AmazonEC2Client; import com.amazonaws.services.ec2.model.*; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.inject.Inject; import com.google.inject.Singleton; import com.netflix.config.*; import com.netflix.raigad.aws.ICredential; import com.netflix.raigad.utils.RetriableCallable; import com.netflix.raigad.utils.SystemUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; @Singleton public class RaigadConfiguration implements IConfiguration { private static final Logger logger = LoggerFactory.getLogger(RaigadConfiguration.class); public static final String MY_WEBAPP_NAME = "Raigad"; private static final String CONFIG_CLUSTER_NAME = MY_WEBAPP_NAME + ".es.clustername"; private static final String CONFIG_AVAILABILITY_ZONES = MY_WEBAPP_NAME + ".zones.available"; private static final String CONFIG_DATA_LOCATION = MY_WEBAPP_NAME + ".es.data.location"; private static final String CONFIG_LOG_LOCATION = MY_WEBAPP_NAME + ".es.log.location"; private static final String CONFIG_ES_START_SCRIPT = MY_WEBAPP_NAME + ".es.startscript"; private static final String CONFIG_ES_STOP_SCRIPT = MY_WEBAPP_NAME + ".es.stopscript"; private static final String 
CONFIG_ES_HOME = MY_WEBAPP_NAME + ".es.home"; private static final String CONFIG_FD_PING_INTERVAL = MY_WEBAPP_NAME + ".es.fd.pinginterval"; private static final String CONFIG_FD_PING_TIMEOUT = MY_WEBAPP_NAME + ".es.fd.pingtimeout"; private static final String CONFIG_HTTP_PORT = MY_WEBAPP_NAME + ".es.http.port"; private static final String CONFIG_TRANSPORT_TCP_PORT = MY_WEBAPP_NAME + ".es.transport.tcp.port"; private static final String CONFIG_MIN_MASTER_NODES = MY_WEBAPP_NAME + ".es.min.master.nodes"; private static final String CONFIG_NUM_REPLICAS = MY_WEBAPP_NAME + ".es.num.replicas"; private static final String CONFIG_NUM_SHARDS = MY_WEBAPP_NAME + ".es.num.shards"; private static final String CONFIG_PING_TIMEOUT = MY_WEBAPP_NAME + ".es.pingtimeout"; private static final String CONFIG_INDEX_REFRESH_INTERVAL = MY_WEBAPP_NAME + ".es.index.refresh.interval"; private static final String CONFIG_IS_MASTER_QUORUM_ENABLED = MY_WEBAPP_NAME + ".es.master.quorum.enabled"; private static final String CONFIG_IS_PING_MULTICAST_ENABLED = MY_WEBAPP_NAME + ".es.ping.multicast.enabled"; private static final String CONFIG_ES_DISCOVERY_TYPE = MY_WEBAPP_NAME + ".es.discovery.type"; private static final String CONFIG_BOOTCLUSTER_NAME = MY_WEBAPP_NAME + ".bootcluster"; private static final String CONFIG_INSTANCE_DATA_RETRIEVER = MY_WEBAPP_NAME + ".instanceDataRetriever"; private static final String CONFIG_CREDENTIAL_PROVIDER = MY_WEBAPP_NAME + ".credentialProvider"; private static final String CONFIG_SECURITY_GROUP_NAME = MY_WEBAPP_NAME + ".security.group.name"; private static final String CONFIG_IS_MULTI_DC_ENABLED = MY_WEBAPP_NAME + ".es.multi.dc.enabled"; private static final String CONFIG_IS_ASG_BASED_DEPLOYMENT_ENABLED = MY_WEBAPP_NAME + ".es.asg.based.deployment.enabled"; private static final String CONFIG_ES_CLUSTER_ROUTING_ATTRIBUTES = MY_WEBAPP_NAME + ".es.cluster.routing.attributes"; private static final String CONFIG_ES_PROCESS_NAME = MY_WEBAPP_NAME + ".es.processname"; 
private static final String CONFIG_ES_SHARD_ALLOCATION_ATTRIBUTE = MY_WEBAPP_NAME + ".es.shard.allocation.attribute"; private static final String CONFIG_IS_SHARD_ALLOCATION_POLICY_ENABLED = MY_WEBAPP_NAME + ".shard.allocation.policy.enabled"; private static final String CONFIG_EXTRA_PARAMS = MY_WEBAPP_NAME + ".extra.params"; private static final String CONFIG_IS_DEBUG_ENABLED = MY_WEBAPP_NAME + ".debug.enabled"; private static final String CONFIG_IS_SHARDS_PER_NODE_ENABLED = MY_WEBAPP_NAME + ".shards.per.node.enabled"; private static final String CONFIG_SHARDS_PER_NODE = MY_WEBAPP_NAME + ".shards.per.node"; private static final String CONFIG_INDEX_METADATA = MY_WEBAPP_NAME + ".index.metadata"; private static final String CONFIG_IS_INDEX_AUTOCREATION_ENABLED = MY_WEBAPP_NAME + ".index.autocreation.enabled"; private static final String CONFIG_AUTOCREATE_INDEX_TIMEOUT = MY_WEBAPP_NAME + ".autocreate.index.timeout"; private static final String CONFIG_AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS = MY_WEBAPP_NAME + ".autocreate.index.initial.start.delay.seconds"; private static final String CONFIG_AUTOCREATE_INDEX_SCHEDULE_MINUTES = MY_WEBAPP_NAME + ".autocreate.index.schedule.minutes"; private static final String CONFIG_BACKUP_LOCATION = MY_WEBAPP_NAME + ".backup.location"; private static final String CONFIG_BACKUP_HOUR = MY_WEBAPP_NAME + ".backup.hour"; private static final String CONFIG_BACKUP_IS_SNAPSHOT_ENABLED = MY_WEBAPP_NAME + ".snapshot.enabled"; private static final String CONFIG_BACKUP_IS_HOURLY_SNAPSHOT_ENABLED = MY_WEBAPP_NAME + ".hourly.snapshot.enabled"; private static final String CONFIG_BACKUP_COMMA_SEPARATED_INDICES = MY_WEBAPP_NAME + ".backup.comma.separated.indices"; private static final String CONFIG_BACKUP_PARTIAL_INDICES = MY_WEBAPP_NAME + ".backup.partial.indices"; private static final String CONFIG_BACKUP_INCLUDE_GLOBAL_STATE = MY_WEBAPP_NAME + ".backup.include.global.state"; private static final String CONFIG_BACKUP_WAIT_FOR_COMPLETION = 
MY_WEBAPP_NAME + ".backup.wait.for.completion"; private static final String CONFIG_BACKUP_INCLUDE_INDEX_NAME = MY_WEBAPP_NAME + ".backup.include.index.name"; private static final String CONFIG_BACKUP_CRON_TIMER_SECONDS = MY_WEBAPP_NAME + ".backup.cron.timer.seconds"; private static final String CONFIG_IS_RESTORE_ENABLED = MY_WEBAPP_NAME + ".restore.enabled"; private static final String CONFIG_RESTORE_REPOSITORY_NAME = MY_WEBAPP_NAME + ".restore.repository.name"; private static final String CONFIG_RESTORE_REPOSITORY_TYPE = MY_WEBAPP_NAME + ".restore.repository.type"; private static final String CONFIG_RESTORE_SNAPSHOT_NAME = MY_WEBAPP_NAME + ".restore.snapshot.name"; private static final String CONFIG_RESTORE_COMMA_SEPARATED_INDICES = MY_WEBAPP_NAME + ".restore.comma.separated.indices"; private static final String CONFIG_RESTORE_TASK_INITIAL_START_DELAY_SECONDS = MY_WEBAPP_NAME + ".restore.task.initial.start.delay.seconds"; private static final String CONFIG_RESTORE_SOURCE_CLUSTER_NAME = MY_WEBAPP_NAME + ".restore.source.cluster.name"; private static final String CONFIG_RESTORE_SOURCE_REPO_REGION = MY_WEBAPP_NAME + ".restore.source.repo.region"; private static final String CONFIG_RESTORE_LOCATION = MY_WEBAPP_NAME + ".restore.location"; private static final String CONFIG_AM_I_TRIBE_NODE = MY_WEBAPP_NAME + ".tribe.node.enabled"; private static final String CONFIG_AM_I_WRITE_ENABLED_TRIBE_NODE = MY_WEBAPP_NAME + ".tribe.node.write.enabled"; private static final String CONFIG_AM_I_METADATA_ENABLED_TRIBE_NODE = MY_WEBAPP_NAME + ".tribe.node.metadata.enabled"; private static final String CONFIG_TRIBE_COMMA_SEPARATED_SOURCE_CLUSTERS = MY_WEBAPP_NAME + ".tribe.comma.separated.source.clusters"; private static final String CONFIG_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE = MY_WEBAPP_NAME + ".tribe.node.source.cluster.enabled"; private static final String CONFIG_TRIBE_COMMA_SEPARATED_TRIBE_CLUSTERS = MY_WEBAPP_NAME + ".tribe.comma.separated.tribe.clusters"; private static final 
String CONFIG_IS_NODEMISMATCH_WITH_DISCOVERY_ENABLED = MY_WEBAPP_NAME + ".nodemismatch.health.metrics.enabled"; private static final String CONFIG_DESIRED_NUM_NODES_IN_CLUSTER = MY_WEBAPP_NAME + ".desired.num.nodes.in.cluster"; private static final String CONFIG_IS_EUREKA_HEALTH_CHECK_ENABLED = MY_WEBAPP_NAME + ".eureka.health.check.enabled"; private static final String CONFIG_IS_LOCAL_MODE_ENABLED = MY_WEBAPP_NAME + ".local.mode.enabled"; private static final String CONFIG_CASSANDRA_KEYSPACE_NAME = MY_WEBAPP_NAME + ".cassandra.keyspace.name"; private static final String CONFIG_CASSANDRA_THRIFT_PORT = MY_WEBAPP_NAME + ".cassandra.thrift.port"; private static final String CONFIG_IS_EUREKA_HOST_SUPPLIER_ENABLED = MY_WEBAPP_NAME + ".eureka.host.supplier.enabled"; private static final String CONFIG_COMMA_SEPARATED_CASSANDRA_HOSTNAMES = MY_WEBAPP_NAME + ".comma.separated.cassandra.hostnames"; private static final String CONFIG_IS_SECURITY_GROUP_IN_MULTI_DC = MY_WEBAPP_NAME + ".security.group.in.multi.dc.enabled"; private static final String CONFIG_IS_KIBANA_SETUP_REQUIRED = MY_WEBAPP_NAME + ".kibana.setup.required"; private static final String CONFIG_KIBANA_PORT = MY_WEBAPP_NAME + ".kibana.port"; private static final String CONFIG_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC = MY_WEBAPP_NAME + ".tribe.node.source.cluster.enabled.in.multi.dc"; private static final String CONFIG_REPORT_METRICS_FROM_MASTER_ONLY = MY_WEBAPP_NAME + ".report.metrics.from.master.only"; private static final String CONFIG_TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT = MY_WEBAPP_NAME + ".tribe.preferred.cluster.id.on.conflict"; // Amazon specific private static final String CONFIG_ASG_NAME = MY_WEBAPP_NAME + ".az.asgname"; private static final String CONFIG_STACK_NAME = MY_WEBAPP_NAME + ".az.stack"; private static final String CONFIG_REGION_NAME = MY_WEBAPP_NAME + ".az.region"; private static final String CONFIG_ACL_GROUP_NAME = MY_WEBAPP_NAME + ".acl.groupname"; private static final String 
CONFIG_ACL_GROUP_NAME_FOR_VPC = MY_WEBAPP_NAME + ".acl.groupname.vpc"; private static Boolean IS_DEPLOYED_IN_VPC = false; private static Boolean IS_VPC_EXTERNAL = false; private static final String MAC_ID = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/mac"); private static String VPC_ID = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/network/interfaces/macs/" + MAC_ID + "/vpc-id").trim(); private static String PUBLIC_HOSTNAME, PUBLIC_IP, ACL_GROUP_ID_FOR_VPC; { if (StringUtils.equals(VPC_ID, SystemUtils.NOT_FOUND_STR)) { PUBLIC_HOSTNAME = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/public-hostname").trim(); PUBLIC_IP = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/public-ipv4").trim(); } else { IS_DEPLOYED_IN_VPC = true; IS_VPC_EXTERNAL = true; PUBLIC_HOSTNAME = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/public-hostname").trim(); if (StringUtils.equals(PUBLIC_HOSTNAME, SystemUtils.NOT_FOUND_STR)) { // Looks like this is VPC internal, trying local hostname PUBLIC_HOSTNAME = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/local-hostname").trim(); IS_VPC_EXTERNAL = false; } logger.info("Node host name initialized with {}", PUBLIC_HOSTNAME); PUBLIC_IP = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/public-ipv4").trim(); if (StringUtils.equals(PUBLIC_IP, SystemUtils.NOT_FOUND_STR)) { // Looks like this is VPC internal, trying local IP PUBLIC_IP = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/local-ipv4").trim(); IS_VPC_EXTERNAL = false; } logger.info("Node IP initialized with {}", PUBLIC_IP); } } private static final String RAC = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/placement/availability-zone"); private static final String LOCAL_HOSTNAME = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/local-hostname").trim(); private static final String 
LOCAL_IP = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/local-ipv4").trim(); private static final String INSTANCE_ID = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/instance-id").trim(); private static final String INSTANCE_TYPE = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/instance-type").trim(); private static final String ES_NODE_NAME = RAC + "." + INSTANCE_ID; private static String ASG_NAME = System.getenv("ASG_NAME"); private static String STACK_NAME = System.getenv("STACK_NAME"); private static String REGION = System.getenv("EC2_REGION"); // Defaults private final String DEFAULT_CLUSTER_NAME = "es_samplecluster"; private List<String> DEFAULT_AVAILABILITY_ZONES = ImmutableList.of(); private static final String DEFAULT_DATA_LOCATION = "/mnt/data/es"; private static final String DEFAULT_LOG_LOCATION = "/logs/es"; private static final String DEFAULT_YAML_LOCATION = "/apps/elasticsearch/config/elasticsearch.yml"; private static final String DEFAULT_ES_START_SCRIPT = "/etc/init.d/elasticsearch start"; private static final String DEFAULT_ES_STOP_SCRIPT = "/etc/init.d/elasticsearch stop"; private static final String DEFAULT_ES_HOME = "/apps/elasticsearch"; private static final String DEFAULT_FD_PING_INTERVAL = "30s"; private static final String DEFAULT_FD_PING_TIMEOUT = "30s"; private static final int DEFAULT_HTTP_PORT = 7104; private static final int DEFAULT_TRANSPORT_TCP_PORT = 7102; private static final int DEFAULT_MIN_MASTER_NODES = 1; private static final int DEFAULT_NUM_REPLICAS = 2; private static final int DEFAULT_NUM_SHARDS = 5; private static final String DEFAULT_PING_TIMEOUT = "60s"; private static final String DEFAULT_INDEX_REFRESH_INTERVAL = "1m"; private static final boolean DEFAULT_IS_MASTER_QUORUM_ENABLED = true; private static final boolean DEFAULT_IS_PING_MULTICAST_ENABLED = false; private static final String DEFAULT_CONFIG_BOOTCLUSTER_NAME = "cass_metadata"; private static final 
String DEFAULT_CREDENTIAL_PROVIDER = "com.netflix.raigad.aws.IAMCredential"; private static final String DEFAULT_ES_DISCOVERY_TYPE = "raigad"; private static final boolean DEFAULT_IS_MULTI_DC_ENABLED = false; private static final boolean DEFAULT_IS_ASG_BASED_DEPLOYMENT_ENABLED = false; private static final String DEFAULT_ES_CLUSTER_ROUTING_ATTRIBUTES = "rack_id"; private static final String DEFAULT_ES_PROCESS_NAME = "org.elasticsearch.bootstrap.Elasticsearch"; private static final boolean DEFAULT_IS_SHARD_ALLOCATION_POLICY_ENABLED = false; private static final String DEFAULT_ES_SHARD_ALLOCATION_ATTRIBUTE = "all"; private static final String DEFAULT_CONFIG_EXTRA_PARAMS = null; private static final boolean DEFAULT_IS_DEBUG_ENABLED = false; private static final boolean DEFAULT_IS_SHARDS_PER_NODE_ENABLED = false; private static final int DEFAULT_SHARDS_PER_NODE = 5; private static final boolean DEFAULT_IS_INDEX_AUTOCREATION_ENABLED = false; private static final int DEFAULT_AUTOCREATE_INDEX_TIMEOUT = 300000; private static final int DEFAULT_AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS = 300; private static final int DEFAULT_AUTOCREATE_INDEX_SCHEDULE_MINUTES = 10; private static final String DEFAULT_INDEX_METADATA = null; private static final String DEFAULT_BACKUP_LOCATION = "elasticsearch-us-east-1-backup"; private static final int DEFAULT_BACKUP_HOUR = 1; private static final String DEFAULT_BACKUP_COMMA_SEPARATED_INDICES = "_all"; private static final boolean DEFAULT_BACKUP_PARTIAL_INDICES = false; private static final boolean DEFAULT_BACKUP_INCLUDE_GLOBAL_STATE = false; private static final boolean DEFAULT_BACKUP_WAIT_FOR_COMPLETION = true; private static final boolean DEFAULT_BACKUP_INCLUDE_INDEX_NAME = false; private static final boolean DEFAULT_IS_RESTORE_ENABLED = false; private static final String DEFAULT_RESTORE_REPOSITORY_NAME = "testrepo"; private static final String DEFAULT_RESTORE_REPOSITORY_TYPE = "s3"; private static final String 
DEFAULT_RESTORE_SNAPSHOT_NAME = ""; private static final String DEFAULT_RESTORE_COMMA_SEPARATED_INDICES = "_all"; private static final int DEFAULT_RESTORE_TASK_INITIAL_START_DELAY_SECONDS = 600; private static final String DEFAULT_RESTORE_SOURCE_CLUSTER_NAME = ""; private static final String DEFAULT_RESTORE_SOURCE_REPO_REGION = "us-east-1"; private static final String DEFAULT_RESTORE_LOCATION = "elasticsearch-us-east-1-backup"; private static final boolean DEFAULT_BACKUP_IS_SNAPSHOT_ENABLED = false; private static final boolean DEFAULT_BACKUP_IS_HOURLY_SNAPSHOT_ENABLED = false; private static final long DEFAULT_BACKUP_CRON_TIMER_SECONDS = 3600; private static final boolean DEFAULT_AM_I_TRIBE_NODE = false; private static final boolean DEFAULT_AM_I_WRITE_ENABLED_TRIBE_NODE = false; private static final boolean DEFAULT_AM_I_METADATA_ENABLED_TRIBE_NODE = false; private static final String DEFAULT_TRIBE_COMMA_SEPARATED_SOURCE_CLUSTERS = ""; private static final boolean DEFAULT_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE = false; private static final String DEFAULT_TRIBE_COMMA_SEPARATED_TRIBE_CLUSTERS = ""; private static final boolean DEFAULT_IS_NODEMISMATCH_WITH_DISCOVERY_ENABLED = false; private static final int DEFAULT_DESIRED_NUM_NODES_IN_CLUSTER = 6; private static final boolean DEFAULT_IS_EUREKA_HEALTH_CHECK_ENABLED = true; private static final boolean DEFAULT_IS_LOCAL_MODE_ENABLED = false; private static final String DEFAULT_CASSANDRA_KEYSPACE_NAME = "escarbootstrap"; private static final int DEFAULT_CASSANDRA_THRIFT_PORT = 7102; private static final boolean DEFAULT_IS_EUREKA_HOST_SUPPLIER_ENABLED = true; private static final String DEFAULT_COMMA_SEPARATED_CASSANDRA_HOSTNAMES = ""; private static final boolean DEFAULT_IS_SECURITY_GROUP_IN_MULTI_DC = false; private static final boolean DEFAULT_IS_KIBANA_SETUP_REQUIRED = false; private static final int DEFAULT_KIBANA_PORT = 8001; private static final boolean DEFAULT_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC = false; 
private static final boolean DEFAULT_REPORT_METRICS_FROM_MASTER_ONLY = false; private static final String DEFAULT_TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT = "t0"; private static final String DEFAULT_ACL_GROUP_NAME_FOR_VPC = "es_samplecluster"; private final IConfigSource config; private final ICredential provider; private final DynamicStringProperty CREDENTIAL_PROVIDER = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_CREDENTIAL_PROVIDER, DEFAULT_CREDENTIAL_PROVIDER); private final DynamicStringProperty ES_STARTUP_SCRIPT_LOCATION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_START_SCRIPT, DEFAULT_ES_START_SCRIPT); private final DynamicStringProperty ES_STOP_SCRIPT_LOCATION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_STOP_SCRIPT, DEFAULT_ES_STOP_SCRIPT); private final DynamicStringProperty DATA_LOCATION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_DATA_LOCATION, DEFAULT_DATA_LOCATION); private final DynamicStringProperty LOG_LOCATION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_LOG_LOCATION, DEFAULT_LOG_LOCATION); private final DynamicStringProperty ES_HOME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_HOME, DEFAULT_ES_HOME); private final DynamicStringProperty FD_PING_INTERVAL = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_FD_PING_INTERVAL, DEFAULT_FD_PING_INTERVAL); private final DynamicStringProperty FD_PING_TIMEOUT = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_FD_PING_TIMEOUT, DEFAULT_FD_PING_TIMEOUT); private final DynamicIntProperty ES_HTTP_PORT = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_HTTP_PORT, DEFAULT_HTTP_PORT); private final DynamicIntProperty ES_TRANSPORT_TCP_PORT = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_TRANSPORT_TCP_PORT, DEFAULT_TRANSPORT_TCP_PORT); private final DynamicIntProperty MINIMUM_MASTER_NODES = 
DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_MIN_MASTER_NODES, DEFAULT_MIN_MASTER_NODES); private final DynamicIntProperty NUM_REPLICAS = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_NUM_REPLICAS, DEFAULT_NUM_REPLICAS); private final DynamicIntProperty NUM_SHARDS = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_NUM_SHARDS, DEFAULT_NUM_SHARDS); private final DynamicStringProperty PING_TIMEOUT = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_PING_TIMEOUT, DEFAULT_PING_TIMEOUT); private final DynamicStringProperty INDEX_REFRESH_INTERVAL = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_INDEX_REFRESH_INTERVAL, DEFAULT_INDEX_REFRESH_INTERVAL); private final DynamicBooleanProperty IS_MASTER_QUORUM_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_MASTER_QUORUM_ENABLED, DEFAULT_IS_MASTER_QUORUM_ENABLED); private final DynamicBooleanProperty IS_PING_MULTICAST_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_PING_MULTICAST_ENABLED, DEFAULT_IS_PING_MULTICAST_ENABLED); private final DynamicStringProperty BOOTCLUSTER_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_BOOTCLUSTER_NAME, DEFAULT_CONFIG_BOOTCLUSTER_NAME); private final DynamicStringProperty ES_DISCOVERY_TYPE = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_DISCOVERY_TYPE, DEFAULT_ES_DISCOVERY_TYPE); private final DynamicStringProperty SECURITY_GROUP_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_SECURITY_GROUP_NAME, DEFAULT_CLUSTER_NAME); private final DynamicBooleanProperty IS_MULTI_DC_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_MULTI_DC_ENABLED, DEFAULT_IS_MULTI_DC_ENABLED); private final DynamicBooleanProperty IS_ASG_BASED_DEPLOYMENT_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_ASG_BASED_DEPLOYMENT_ENABLED, DEFAULT_IS_ASG_BASED_DEPLOYMENT_ENABLED); private final 
DynamicStringProperty ES_CLUSTER_ROUTING_ATTRIBUTES = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_CLUSTER_ROUTING_ATTRIBUTES, DEFAULT_ES_CLUSTER_ROUTING_ATTRIBUTES); private final DynamicBooleanProperty IS_SHARD_ALLOCATION_POLICY_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_SHARD_ALLOCATION_POLICY_ENABLED, DEFAULT_IS_SHARD_ALLOCATION_POLICY_ENABLED); private final DynamicStringProperty ES_SHARD_ALLOCATION_ATTRIBUTE = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_SHARD_ALLOCATION_ATTRIBUTE, DEFAULT_ES_SHARD_ALLOCATION_ATTRIBUTE); private final DynamicStringProperty EXTRA_PARAMS = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_EXTRA_PARAMS, DEFAULT_CONFIG_EXTRA_PARAMS); private final DynamicBooleanProperty IS_DEBUG_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_DEBUG_ENABLED, DEFAULT_IS_DEBUG_ENABLED); private final DynamicBooleanProperty IS_SHARDS_PER_NODE_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_SHARDS_PER_NODE_ENABLED, DEFAULT_IS_SHARDS_PER_NODE_ENABLED); private final DynamicIntProperty TOTAL_SHARDS_PER_NODES = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_SHARDS_PER_NODE, DEFAULT_SHARDS_PER_NODE); private final DynamicStringProperty INDEX_METADATA = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_INDEX_METADATA, DEFAULT_INDEX_METADATA); private final DynamicBooleanProperty IS_INDEX_AUTOCREATION_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_INDEX_AUTOCREATION_ENABLED, DEFAULT_IS_INDEX_AUTOCREATION_ENABLED); private final DynamicIntProperty AUTOCREATE_INDEX_TIMEOUT = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_AUTOCREATE_INDEX_TIMEOUT, DEFAULT_AUTOCREATE_INDEX_TIMEOUT); private final DynamicIntProperty AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS = 
DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS, DEFAULT_AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS); private final DynamicIntProperty AUTOCREATE_INDEX_SCHEDULE_MINUTES = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_AUTOCREATE_INDEX_SCHEDULE_MINUTES, DEFAULT_AUTOCREATE_INDEX_SCHEDULE_MINUTES); private final DynamicStringProperty ES_PROCESS_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_PROCESS_NAME, DEFAULT_ES_PROCESS_NAME); private final DynamicStringProperty BUCKET_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_BACKUP_LOCATION, DEFAULT_BACKUP_LOCATION); private final DynamicIntProperty BACKUP_HOUR = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_BACKUP_HOUR, DEFAULT_BACKUP_HOUR); private final DynamicStringProperty COMMA_SEPARATED_INDICES_TO_BACKUP = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_BACKUP_COMMA_SEPARATED_INDICES, DEFAULT_BACKUP_COMMA_SEPARATED_INDICES); private final DynamicBooleanProperty PARTIALLY_BACKUP_INDICES = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_PARTIAL_INDICES, DEFAULT_BACKUP_PARTIAL_INDICES); private final DynamicBooleanProperty INCLUDE_GLOBAL_STATE_DURING_BACKUP = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_INCLUDE_GLOBAL_STATE, DEFAULT_BACKUP_INCLUDE_GLOBAL_STATE); private final DynamicBooleanProperty WAIT_FOR_COMPLETION_OF_BACKUP = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_WAIT_FOR_COMPLETION, DEFAULT_BACKUP_WAIT_FOR_COMPLETION); private final DynamicBooleanProperty INCLUDE_INDEX_NAME_IN_SNAPSHOT_BACKUP = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_INCLUDE_INDEX_NAME, DEFAULT_BACKUP_INCLUDE_INDEX_NAME); private final DynamicBooleanProperty IS_RESTORE_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_RESTORE_ENABLED, DEFAULT_IS_RESTORE_ENABLED); private 
final DynamicStringProperty RESTORE_REPOSITORY_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_REPOSITORY_NAME, DEFAULT_RESTORE_REPOSITORY_NAME); private final DynamicStringProperty RESTORE_REPOSITORY_TYPE = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_REPOSITORY_TYPE, DEFAULT_RESTORE_REPOSITORY_TYPE); private final DynamicStringProperty RESTORE_SNAPSHOT_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_SNAPSHOT_NAME, DEFAULT_RESTORE_SNAPSHOT_NAME); private final DynamicStringProperty COMMA_SEPARATED_INDICES_TO_RESTORE = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_COMMA_SEPARATED_INDICES, DEFAULT_RESTORE_COMMA_SEPARATED_INDICES); private final DynamicIntProperty RESTORE_TASK_INITIAL_START_DELAY_SECONDS = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_RESTORE_TASK_INITIAL_START_DELAY_SECONDS, DEFAULT_RESTORE_TASK_INITIAL_START_DELAY_SECONDS); private final DynamicStringProperty RESTORE_SOURCE_CLUSTER_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_SOURCE_CLUSTER_NAME, DEFAULT_RESTORE_SOURCE_CLUSTER_NAME); private final DynamicStringProperty RESTORE_SOURCE_REPO_REGION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_SOURCE_REPO_REGION, DEFAULT_RESTORE_SOURCE_REPO_REGION); private final DynamicStringProperty RESTORE_LOCATION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_LOCATION, DEFAULT_RESTORE_LOCATION); private final DynamicBooleanProperty IS_SNAPSHOT_BACKUP_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_IS_SNAPSHOT_ENABLED, DEFAULT_BACKUP_IS_SNAPSHOT_ENABLED); private final DynamicBooleanProperty IS_HOURLY_SNAPSHOT_BACKUP_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_IS_HOURLY_SNAPSHOT_ENABLED, DEFAULT_BACKUP_IS_HOURLY_SNAPSHOT_ENABLED); private final DynamicLongProperty BACKUP_CRON_TIMER_SECONDS = 
DynamicPropertyFactory.getInstance().getLongProperty(CONFIG_BACKUP_CRON_TIMER_SECONDS, DEFAULT_BACKUP_CRON_TIMER_SECONDS); private final DynamicBooleanProperty AM_I_TRIBE_NODE = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_AM_I_TRIBE_NODE, DEFAULT_AM_I_TRIBE_NODE); private final DynamicBooleanProperty AM_I_WRITE_ENABLED_TRIBE_NODE = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_AM_I_WRITE_ENABLED_TRIBE_NODE, DEFAULT_AM_I_WRITE_ENABLED_TRIBE_NODE); private final DynamicBooleanProperty AM_I_METADATA_ENABLED_TRIBE_NODE = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_AM_I_METADATA_ENABLED_TRIBE_NODE, DEFAULT_AM_I_METADATA_ENABLED_TRIBE_NODE); private final DynamicStringProperty COMMA_SEPARATED_SOURCE_CLUSTERS_IN_TRIBE = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_TRIBE_COMMA_SEPARATED_SOURCE_CLUSTERS, DEFAULT_TRIBE_COMMA_SEPARATED_SOURCE_CLUSTERS); private final DynamicBooleanProperty AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE, DEFAULT_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE); private final DynamicStringProperty COMMA_SEPARATED_TRIBE_CLUSTERS = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_TRIBE_COMMA_SEPARATED_TRIBE_CLUSTERS, DEFAULT_TRIBE_COMMA_SEPARATED_TRIBE_CLUSTERS); private final DynamicBooleanProperty IS_NODE_MISMATCH_WITH_DISCOVERY_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_NODEMISMATCH_WITH_DISCOVERY_ENABLED, DEFAULT_IS_NODEMISMATCH_WITH_DISCOVERY_ENABLED); private final DynamicIntProperty DESIRED_NUM_NODES_IN_CLUSTER = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_DESIRED_NUM_NODES_IN_CLUSTER, DEFAULT_DESIRED_NUM_NODES_IN_CLUSTER); private final DynamicBooleanProperty IS_EUREKA_HEALTH_CHECK_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_EUREKA_HEALTH_CHECK_ENABLED, DEFAULT_IS_EUREKA_HEALTH_CHECK_ENABLED); private 
final DynamicBooleanProperty IS_LOCAL_MODE_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_LOCAL_MODE_ENABLED, DEFAULT_IS_LOCAL_MODE_ENABLED); private final DynamicStringProperty CASSANDRA_KEYSPACE_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_CASSANDRA_KEYSPACE_NAME, DEFAULT_CASSANDRA_KEYSPACE_NAME); private final DynamicIntProperty CASSANDRA_THRIFT_PORT = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_CASSANDRA_THRIFT_PORT, DEFAULT_CASSANDRA_THRIFT_PORT); private final DynamicBooleanProperty IS_EUREKA_HOST_SUPPLIER_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_EUREKA_HOST_SUPPLIER_ENABLED, DEFAULT_IS_EUREKA_HOST_SUPPLIER_ENABLED); private final DynamicStringProperty COMMA_SEPARATED_CASSANDRA_HOSTNAMES = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_COMMA_SEPARATED_CASSANDRA_HOSTNAMES, DEFAULT_COMMA_SEPARATED_CASSANDRA_HOSTNAMES); private final DynamicBooleanProperty IS_SECURITY_GROUP_IN_MULTI_DC = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_SECURITY_GROUP_IN_MULTI_DC, DEFAULT_IS_SECURITY_GROUP_IN_MULTI_DC); private final DynamicBooleanProperty IS_KIBANA_SETUP_REQUIRED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_KIBANA_SETUP_REQUIRED, DEFAULT_IS_KIBANA_SETUP_REQUIRED); private final DynamicIntProperty KIBANA_PORT = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_KIBANA_PORT, DEFAULT_KIBANA_PORT); private final DynamicBooleanProperty AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC, DEFAULT_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC); private final DynamicBooleanProperty REPORT_METRICS_FROM_MASTER_ONLY = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_REPORT_METRICS_FROM_MASTER_ONLY, DEFAULT_REPORT_METRICS_FROM_MASTER_ONLY); private final DynamicStringProperty 
TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT, DEFAULT_TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT); private final DynamicStringProperty ACL_GROUP_NAME_FOR_VPC = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ACL_GROUP_NAME_FOR_VPC, DEFAULT_ACL_GROUP_NAME_FOR_VPC); @Inject public RaigadConfiguration(ICredential provider, IConfigSource config) { this.provider = provider; this.config = config; } @Override public void initialize() { setupEnvVars(); this.config.initialize(ASG_NAME, REGION); setDefaultRACList(REGION); populateProps(); SystemUtils.createDirs(getDataFileLocation()); } private void setupEnvVars() { REGION = StringUtils.isBlank(REGION) ? System.getProperty("EC2_REGION") : REGION; if (StringUtils.isBlank(REGION)) { REGION = RAC.substring(0, RAC.length() - 1); } ASG_NAME = StringUtils.isBlank(ASG_NAME) ? System.getProperty("ASG_NAME") : ASG_NAME; if (StringUtils.isBlank(ASG_NAME)) { ASG_NAME = populateASGName(REGION, INSTANCE_ID); } STACK_NAME = StringUtils.isBlank(STACK_NAME) ? System.getProperty("STACK_NAME") : STACK_NAME; logger.info(String.format("REGION set to [%s], ASG Name set to [%s]", REGION, ASG_NAME)); } /** * Query amazon to get ASG name. Currently not available as part of instance * info api. 
*/ private String populateASGName(String region, String instanceId) { GetASGName getASGName = new GetASGName(region, instanceId); try { return getASGName.call(); } catch (Exception e) { logger.error("Failed to determine ASG name", e); return null; } } private class GetASGName extends RetriableCallable<String> { private static final int NUMBER_OF_RETRIES = 15; private static final long WAIT_TIME = 30000; private final String region; private final String instanceId; private final AmazonEC2 client; public GetASGName(String region, String instanceId) { super(NUMBER_OF_RETRIES, WAIT_TIME); this.region = region; this.instanceId = instanceId; client = new AmazonEC2Client(provider.getAwsCredentialProvider()); client.setEndpoint("ec2." + region + ".amazonaws.com"); } @Override public String retriableCall() throws IllegalStateException { DescribeInstancesRequest desc = new DescribeInstancesRequest().withInstanceIds(instanceId); DescribeInstancesResult res = client.describeInstances(desc); for (Reservation resr : res.getReservations()) { for (Instance ins : resr.getInstances()) { for (com.amazonaws.services.ec2.model.Tag tag : ins.getTags()) { if (tag.getKey().equals("aws:autoscaling:groupName")) return tag.getValue(); } } } logger.warn("Couldn't determine ASG name"); throw new IllegalStateException("Couldn't determine ASG name"); } } /** * Get the fist 3 available zones in the region */ public void setDefaultRACList(String region) { AmazonEC2 client = new AmazonEC2Client(provider.getAwsCredentialProvider()); client.setEndpoint("ec2." 
+ region + ".amazonaws.com"); DescribeAvailabilityZonesResult res = client.describeAvailabilityZones(); List<String> zone = Lists.newArrayList(); for (AvailabilityZone reg : res.getAvailabilityZones()) { if (reg.getState().equals("available")) { zone.add(reg.getZoneName()); } if (zone.size() == 3) { break; } } DEFAULT_AVAILABILITY_ZONES = ImmutableList.copyOf(zone); } private void populateProps() { config.set(CONFIG_ASG_NAME, ASG_NAME); config.set(CONFIG_REGION_NAME, REGION); } @Override public List<String> getRacs() { return config.getList(CONFIG_AVAILABILITY_ZONES, DEFAULT_AVAILABILITY_ZONES); } @Override public String getDC() { return config.get(CONFIG_REGION_NAME, ""); } @Override public void setDC(String region) { config.set(CONFIG_REGION_NAME, region); } @Override public String getASGName() { return config.get(CONFIG_ASG_NAME, ASG_NAME); } @Override public String getStackName() { return config.get(CONFIG_STACK_NAME, STACK_NAME); } @Override public String getACLGroupName() { return config.get(CONFIG_ACL_GROUP_NAME, this.getAppName()); } @Override public String getDataFileLocation() { return DATA_LOCATION.get(); } @Override public String getLogFileLocation() { return LOG_LOCATION.get(); } @Override public String getElasticsearchStartupScript() { return ES_STARTUP_SCRIPT_LOCATION.get(); } @Override public String getYamlLocation() { return DEFAULT_YAML_LOCATION; } @Override public String getBackupLocation() { return BUCKET_NAME.get(); } @Override public String getElasticsearchHome() { return ES_HOME.get(); } @Override public String getElasticsearchStopScript() { return ES_STOP_SCRIPT_LOCATION.get(); } @Override public String getFdPingInterval() { return FD_PING_INTERVAL.get(); } @Override public String getFdPingTimeout() { return FD_PING_TIMEOUT.get(); } @Override public int getHttpPort() { return ES_HTTP_PORT.get(); } @Override public int getTransportTcpPort() { return ES_TRANSPORT_TCP_PORT.get(); } @Override public int getMinimumMasterNodes() { return 
MINIMUM_MASTER_NODES.get(); } @Override public int getNumOfReplicas() { return NUM_REPLICAS.get(); } @Override public int getTotalShardsPerNode() { return TOTAL_SHARDS_PER_NODES.get(); } @Override public int getNumOfShards() { return NUM_SHARDS.get(); } @Override public String getPingTimeout() { return PING_TIMEOUT.get(); } @Override public String getRefreshInterval() { return INDEX_REFRESH_INTERVAL.get(); } @Override public boolean isMasterQuorumEnabled() { return IS_MASTER_QUORUM_ENABLED.get(); } @Override public boolean isPingMulticastEnabled() { return IS_PING_MULTICAST_ENABLED.get(); } @Override public String getHostIP() { return PUBLIC_IP; } @Override public String getHostname() { return PUBLIC_HOSTNAME; } @Override public String getInstanceName() { return INSTANCE_ID; } @Override public String getInstanceId() { return INSTANCE_ID; } @Override public String getHostLocalIP() { return LOCAL_IP; } @Override public String getRac() { return RAC; } @Override public String getAppName() { return config.get(CONFIG_CLUSTER_NAME, DEFAULT_CLUSTER_NAME); } @Override public String getBootClusterName() { return BOOTCLUSTER_NAME.get(); } @Override public String getElasticsearchDiscoveryType() { return ES_DISCOVERY_TYPE.get(); } @Override public boolean isMultiDC() { return IS_MULTI_DC_ENABLED.get(); } @Override public String getClusterRoutingAttributes() { return ES_CLUSTER_ROUTING_ATTRIBUTES.get(); } @Override public boolean isAsgBasedDedicatedDeployment() { return IS_ASG_BASED_DEPLOYMENT_ENABLED.get(); } @Override public String getElasticsearchProcessName() { return ES_PROCESS_NAME.get(); } /** * @return Elasticsearch Index Refresh Interval */ public String getIndexRefreshInterval() { return INDEX_REFRESH_INTERVAL.get(); } @Override public boolean doesElasticsearchStartManually() { return false; } @Override public String getClusterShardAllocationAttribute() { return ES_SHARD_ALLOCATION_ATTRIBUTE.get(); } @Override public boolean isCustomShardAllocationPolicyEnabled() { 
return IS_SHARD_ALLOCATION_POLICY_ENABLED.get(); } @Override public String getEsKeyName(String escarKey) { return config.get(escarKey); } @Override public boolean isDebugEnabled() { return IS_DEBUG_ENABLED.get(); } @Override public boolean isShardPerNodeEnabled() { return IS_SHARDS_PER_NODE_ENABLED.get(); } @Override public boolean isIndexAutoCreationEnabled() { return IS_INDEX_AUTOCREATION_ENABLED.get(); } @Override public String getIndexMetadata() { return INDEX_METADATA.get(); } @Override public int getAutoCreateIndexTimeout() { return AUTOCREATE_INDEX_TIMEOUT.get(); } @Override public int getAutoCreateIndexInitialStartDelaySeconds() { return AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS.get(); } @Override public int getAutoCreateIndexScheduleMinutes() { return AUTOCREATE_INDEX_SCHEDULE_MINUTES.get(); } @Override public String getExtraConfigParams() { return EXTRA_PARAMS.get(); } @Override public int getBackupHour() { return BACKUP_HOUR.get(); } public boolean isSnapshotBackupEnabled() { return IS_SNAPSHOT_BACKUP_ENABLED.get(); } @Override public String getCommaSeparatedIndicesToBackup() { return COMMA_SEPARATED_INDICES_TO_BACKUP.get(); } @Override public boolean partiallyBackupIndices() { return PARTIALLY_BACKUP_INDICES.get(); } @Override public boolean includeGlobalStateDuringBackup() { return INCLUDE_GLOBAL_STATE_DURING_BACKUP.get(); } @Override public boolean waitForCompletionOfBackup() { return WAIT_FOR_COMPLETION_OF_BACKUP.get(); } @Override public boolean includeIndexNameInSnapshot() { return INCLUDE_INDEX_NAME_IN_SNAPSHOT_BACKUP.get(); } @Override public boolean isHourlySnapshotEnabled() { return IS_HOURLY_SNAPSHOT_BACKUP_ENABLED.get(); } @Override public long getBackupCronTimerInSeconds() { return BACKUP_CRON_TIMER_SECONDS.get(); } @Override public boolean isRestoreEnabled() { return IS_RESTORE_ENABLED.get(); } @Override public String getRestoreRepositoryName() { return RESTORE_REPOSITORY_NAME.get(); } @Override public String 
getRestoreSourceClusterName() { return RESTORE_SOURCE_CLUSTER_NAME.get(); } @Override public String getRestoreSourceRepositoryRegion() { return RESTORE_SOURCE_REPO_REGION.get(); } @Override public String getRestoreLocation() { return RESTORE_LOCATION.get(); } @Override public String getRestoreRepositoryType() { return RESTORE_REPOSITORY_TYPE.get(); } @Override public String getRestoreSnapshotName() { return RESTORE_SNAPSHOT_NAME.get(); } @Override public String getCommaSeparatedIndicesToRestore() { return COMMA_SEPARATED_INDICES_TO_RESTORE.get(); } @Override public int getRestoreTaskInitialDelayInSeconds() { return RESTORE_TASK_INITIAL_START_DELAY_SECONDS.get(); } @Override public boolean amITribeNode() { return AM_I_TRIBE_NODE.get(); } @Override public boolean amIWriteEnabledTribeNode() { return AM_I_WRITE_ENABLED_TRIBE_NODE.get(); } @Override public boolean amIMetadataEnabledTribeNode() { return AM_I_METADATA_ENABLED_TRIBE_NODE.get(); } @Override public String getCommaSeparatedSourceClustersForTribeNode() { return COMMA_SEPARATED_SOURCE_CLUSTERS_IN_TRIBE.get(); } @Override public boolean amISourceClusterForTribeNode() { return AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE.get(); } @Override public String getCommaSeparatedTribeClusterNames() { return COMMA_SEPARATED_TRIBE_CLUSTERS.get(); } @Override public boolean isNodeMismatchWithDiscoveryEnabled() { return IS_NODE_MISMATCH_WITH_DISCOVERY_ENABLED.get(); } @Override public int getDesiredNumberOfNodesInCluster() { return DESIRED_NUM_NODES_IN_CLUSTER.get(); } @Override public boolean isEurekaHealthCheckEnabled() { return IS_EUREKA_HEALTH_CHECK_ENABLED.get(); } @Override public boolean isLocalModeEnabled() { return IS_LOCAL_MODE_ENABLED.get(); } @Override public String getCassandraKeyspaceName() { return CASSANDRA_KEYSPACE_NAME.get(); } @Override public int getCassandraThriftPortForAstyanax() { return CASSANDRA_THRIFT_PORT.get(); } @Override public boolean isEurekaHostSupplierEnabled() { return 
IS_EUREKA_HOST_SUPPLIER_ENABLED.get(); } @Override public String getCommaSeparatedCassandraHostNames() { return COMMA_SEPARATED_CASSANDRA_HOSTNAMES.get(); } @Override public boolean isSecurityGroupInMultiDC() { return IS_SECURITY_GROUP_IN_MULTI_DC.get(); } @Override public boolean isKibanaSetupRequired() { return IS_KIBANA_SETUP_REQUIRED.get(); } @Override public int getKibanaPort() { return KIBANA_PORT.get(); } @Override public boolean amISourceClusterForTribeNodeInMultiDC() { return AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC.get(); } @Override public boolean reportMetricsFromMasterOnly() { return REPORT_METRICS_FROM_MASTER_ONLY.get(); } @Override public String getTribePreferredClusterIdOnConflict() { return TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT.get(); } @Override public String getEsNodeName() { return ES_NODE_NAME; } @Override public boolean isDeployedInVPC() { return IS_DEPLOYED_IN_VPC; } @Override public boolean isVPCExternal() { return IS_VPC_EXTERNAL; } @Override public String getACLGroupNameForVPC() { return ACL_GROUP_NAME_FOR_VPC.get(); } @Override public String getACLGroupIdForVPC() { return ACL_GROUP_ID_FOR_VPC; } @Override public void setACLGroupIdForVPC(String aclGroupIdForVPC) { ACL_GROUP_ID_FOR_VPC = aclGroupIdForVPC; } @Override public String getMacIdForInstance() { return MAC_ID; } }
5,494
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/RaigadConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import javax.inject.Inject;

/**
 * Default configuration source for Raigad: a composite that layers a
 * properties-file source with a system-properties source.
 * <p>
 * Lookup order is determined by {@code CompositeConfigSource} given the
 * constructor argument order: the properties file source is passed first,
 * followed by the system-properties source.
 */
public class RaigadConfigSource extends CompositeConfigSource {

    /**
     * Injected by Guice with the two concrete sources to compose.
     *
     * @param propertiesConfigSource       source backed by the Raigad properties file
     * @param systemPropertiesConfigSource source backed by JVM system properties
     */
    @Inject
    public RaigadConfigSource(final PropertiesConfigSource propertiesConfigSource,
                              final SystemPropertiesConfigSource systemPropertiesConfigSource) {
        super(propertiesConfigSource, systemPropertiesConfigSource);
    }
}
5,495
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/AbstractConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.commons.lang.StringUtils;

import java.util.List;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Base implementations for most methods on {@link IConfigSource}.
 * <p>
 * Every typed {@code get} overload follows the same contract: look up the raw
 * string via {@link #get(String)}, attempt to convert it to the requested type,
 * and fall back to the supplied default when the key is absent or the value
 * cannot be converted.
 */
public abstract class AbstractConfigSource implements IConfigSource {

    private String asgName;
    private String region;

    /**
     * Records the deployment context used by subclasses when resolving
     * environment-specific property files.
     *
     * @param asgName ASG name; must not be null
     * @param region  AWS region; must not be null
     * @throws NullPointerException if either argument is null
     */
    @Override
    public void initialize(final String asgName, final String region) {
        this.asgName = checkNotNull(asgName, "ASG name is not defined");
        this.region = checkNotNull(region, "Region is not defined");
    }

    /** @return true if the key resolves to a non-null raw value. */
    @Override
    public boolean contains(final String key) {
        return get(key) != null;
    }

    /** @return true when the source holds no entries. */
    @Override
    public boolean isEmpty() {
        return size() == 0;
    }

    @Override
    public String get(final String key, final String defaultValue) {
        final String value = get(key);
        return (value != null) ? value : defaultValue;
    }

    /**
     * Returns the boolean value for {@code key}, or {@code defaultValue} when
     * the key is absent or the value is not a recognizable boolean.
     * <p>
     * Fix: the original wrapped {@link Boolean#parseBoolean(String)} in a
     * try/catch, but that method never throws — the catch was dead code, and
     * any unrecognized value (e.g. "yes", a typo) silently became {@code false}
     * instead of the default. Only explicit "true"/"false" (case-insensitive,
     * trimmed) are accepted now; anything else falls back to the default,
     * matching the behavior of every other typed overload in this class.
     */
    @Override
    public boolean get(final String key, final boolean defaultValue) {
        final String value = get(key);
        if (value != null) {
            final String normalized = value.trim();
            if ("true".equalsIgnoreCase(normalized)) {
                return true;
            }
            if ("false".equalsIgnoreCase(normalized)) {
                return false;
            }
        }
        return defaultValue;
    }

    @Override
    public Class<?> get(final String key, final Class<?> defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Class.forName(value);
            } catch (ClassNotFoundException ignored) {
                // Unknown class name: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public <T extends Enum<T>> T get(final String key, final T defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Enum.valueOf(defaultValue.getDeclaringClass(), value);
            } catch (Exception ignored) {
                // Not a constant of the enum: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public int get(final String key, final int defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Integer.parseInt(value);
            } catch (Exception ignored) {
                // Not a parsable int: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public long get(final String key, final long defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Long.parseLong(value);
            } catch (Exception ignored) {
                // Not a parsable long: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public float get(final String key, final float defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Float.parseFloat(value);
            } catch (Exception ignored) {
                // Not a parsable float: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public double get(final String key, final double defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Double.parseDouble(value);
            } catch (Exception ignored) {
                // Not a parsable double: fall through to the default.
            }
        }
        return defaultValue;
    }

    /** @return the comma-separated list for {@code prop}, or an empty list when absent. */
    @Override
    public List<String> getList(String prop) {
        return getList(prop, ImmutableList.<String>of());
    }

    /**
     * Splits the raw value on commas and trims each element.
     * NOTE(review): an empty-string value yields a one-element list containing
     * "" (inherited from {@code String.split}); preserved as existing callers
     * may depend on it.
     */
    @Override
    public List<String> getList(String prop, List<String> defaultValue) {
        final String value = get(prop);
        if (value != null) {
            return getTrimmedStringList(value.split(","));
        }
        return defaultValue;
    }

    /** @return ASG name captured by {@link #initialize(String, String)}. */
    protected String getAsgName() {
        return asgName;
    }

    /** @return region captured by {@link #initialize(String, String)}. */
    protected String getRegion() {
        return region;
    }

    /** Trims whitespace from each element and returns a fresh mutable list. */
    private List<String> getTrimmedStringList(String[] strings) {
        List<String> list = Lists.newArrayList();
        for (String s : strings) {
            list.add(StringUtils.strip(s));
        }
        return list;
    }
}
5,496
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/MemoryConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.common.collect.Maps;

import java.util.Map;

/**
 * In-memory {@link IConfigSource} backed by a concurrent map; primarily
 * useful for tests and programmatic overrides. Thread-safe for concurrent
 * reads and writes via the underlying concurrent map.
 */
public final class MemoryConfigSource extends AbstractConfigSource {

    // Concurrent map so get/set may be called from multiple threads.
    private final Map<String, String> entries = Maps.newConcurrentMap();

    /**
     * No-op: this source is initialized through the inherited
     * {@code initialize(asgName, region)} overload, not via IConfiguration.
     */
    @Override
    public void initialize(IConfiguration config) {
        // Intentionally empty.
    }

    /** @return the raw value for {@code key}, or null when absent. */
    @Override
    public String get(final String key) {
        return entries.get(key);
    }

    /** Stores (or replaces) the value for {@code key}. */
    @Override
    public void set(final String key, final String value) {
        entries.put(key, value);
    }

    /** @return the number of entries currently held. */
    @Override
    public int size() {
        return entries.size();
    }
}
5,497
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/IConfiguration.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.inject.ImplementedBy;

import java.util.List;

/**
 * Central application configuration contract for Raigad: Elasticsearch
 * process/paths, cluster topology, AWS placement, backup/restore, tribe-node
 * and VPC settings. Bound to {@code RaigadConfiguration} by default via Guice.
 */
@ImplementedBy(RaigadConfiguration.class)
public interface IConfiguration {

    void initialize();

    /**
     * @return Path to the home dir of Elasticsearch
     */
    String getElasticsearchHome();

    String getYamlLocation();

    String getBackupLocation();

    /**
     * @return Path to Elasticsearch startup script
     */
    String getElasticsearchStartupScript();

    /**
     * @return Path to Elasticsearch stop script
     */
    String getElasticsearchStopScript();

    int getTransportTcpPort();

    int getHttpPort();

    int getNumOfShards();

    int getNumOfReplicas();

    int getTotalShardsPerNode();

    String getRefreshInterval();

    boolean isMasterQuorumEnabled();

    int getMinimumMasterNodes();

    String getPingTimeout();

    boolean isPingMulticastEnabled();

    String getFdPingInterval();

    String getFdPingTimeout();

    /**
     * @return Location of the local data dir
     */
    String getDataFileLocation();

    /**
     * @return Location of the local log dir
     */
    String getLogFileLocation();

    boolean doesElasticsearchStartManually();

    /**
     * @return Cluster name
     */
    String getAppName();

    /**
     * @return RAC (or zone for AWS)
     */
    String getRac();

    /**
     * @return List of all RAC used for the cluster
     */
    List<String> getRacs();

    /**
     * @return Local hostname
     */
    String getHostname();

    /**
     * @return Get instance name (for AWS)
     */
    String getInstanceName();

    /**
     * @return Get instance id (for AWS)
     */
    String getInstanceId();

    /**
     * @return Get the Data Center name (or region for AWS)
     */
    String getDC();

    /**
     * @param dc Set the current data center
     */
    void setDC(String dc);

    /**
     * Amazon specific setting to query ASG Membership
     */
    String getASGName();

    /**
     * Amazon specific setting to query ASG Membership
     */
    String getStackName();

    /**
     * Get the security group associated with nodes in this cluster
     */
    String getACLGroupName();

    /**
     * @return Get host IP
     */
    String getHostIP();

    /**
     * @return Get host Local IP
     */
    String getHostLocalIP();

    /**
     * @return Bootstrap cluster name (depends on another Cassandra cluster)
     */
    String getBootClusterName();

    /**
     * @return Elasticsearch Process Name
     */
    String getElasticsearchProcessName();

    /**
     * @return Elasticsearch Discovery Type
     */
    String getElasticsearchDiscoveryType();

    /**
     * @return Whether it's a Multi-Region Setup
     */
    boolean isMultiDC();

    /**
     * @return Elasticsearch Index Refresh Interval
     */
    String getIndexRefreshInterval();

    String getClusterRoutingAttributes();

    boolean isAsgBasedDedicatedDeployment();

    boolean isCustomShardAllocationPolicyEnabled();

    String getClusterShardAllocationAttribute();

    /**
     * Providing a way to add New Config Params without any code change
     */
    String getExtraConfigParams();

    String getEsKeyName(String escarKey);

    boolean isDebugEnabled();

    boolean isShardPerNodeEnabled();

    boolean isIndexAutoCreationEnabled();

    String getIndexMetadata();

    int getAutoCreateIndexTimeout();

    int getAutoCreateIndexInitialStartDelaySeconds();

    int getAutoCreateIndexScheduleMinutes();

    /* Backup related Config properties */

    boolean isSnapshotBackupEnabled();

    String getCommaSeparatedIndicesToBackup();

    boolean partiallyBackupIndices();

    boolean includeGlobalStateDuringBackup();

    boolean waitForCompletionOfBackup();

    boolean includeIndexNameInSnapshot();

    boolean isHourlySnapshotEnabled();

    long getBackupCronTimerInSeconds();

    /**
     * @return Backup hour for snapshot backups (0 - 23)
     */
    int getBackupHour();

    /* Restore related Config properties */

    boolean isRestoreEnabled();

    String getRestoreRepositoryName();

    String getRestoreSourceClusterName();

    String getRestoreSourceRepositoryRegion();

    String getRestoreLocation();

    String getRestoreRepositoryType();

    String getRestoreSnapshotName();

    String getCommaSeparatedIndicesToRestore();

    int getRestoreTaskInitialDelayInSeconds();

    /* Tribe node related properties */

    boolean amITribeNode();

    boolean amIWriteEnabledTribeNode();

    boolean amIMetadataEnabledTribeNode();

    String getCommaSeparatedSourceClustersForTribeNode();

    boolean amISourceClusterForTribeNode();

    String getCommaSeparatedTribeClusterNames();

    boolean isNodeMismatchWithDiscoveryEnabled();

    int getDesiredNumberOfNodesInCluster();

    boolean isEurekaHealthCheckEnabled();

    boolean isLocalModeEnabled();

    String getCassandraKeyspaceName();

    int getCassandraThriftPortForAstyanax();

    boolean isEurekaHostSupplierEnabled();

    String getCommaSeparatedCassandraHostNames();

    boolean isSecurityGroupInMultiDC();

    boolean isKibanaSetupRequired();

    int getKibanaPort();

    /**
     * @return Whether current cluster is Single Region cluster but is a Source Cluster in Multi-Region Tribe Node Setup
     */
    boolean amISourceClusterForTribeNodeInMultiDC();

    boolean reportMetricsFromMasterOnly();

    /**
     * To prefer the index from a specific tribe
     *
     * @return tribe id
     */
    String getTribePreferredClusterIdOnConflict();

    String getEsNodeName();

    /**
     * Check if instance is deployed in VPC
     *
     * @return true or false
     */
    boolean isDeployedInVPC();

    /**
     * Check if instance is deployed in VPC external
     *
     * @return true or false
     */
    boolean isVPCExternal();

    /**
     * Get the security group associated with nodes in this cluster in VPC
     */
    String getACLGroupNameForVPC();

    /**
     * Get the security group id for given Security Group in VPC
     */
    String getACLGroupIdForVPC();

    /**
     * Set the security group id for given Security Group in VPC
     */
    void setACLGroupIdForVPC(String aclGroupIdForVPC);

    /**
     * Get the MAC id for an instance
     */
    String getMacIdForInstance();
}
5,498
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/IConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.inject.ImplementedBy;

import java.util.List;

/**
 * Defines the configurations for an application.
 */
@ImplementedBy(RaigadConfigSource.class)
public interface IConfigSource {

    /**
     * Must be called before any other method. This method will allow implementations to do any setup that they require
     * before being called.
     */
    void initialize(String asgName, String region);

    /*
     * An alternative means of initialization, allowing implementations to do setup using configuration.
     */
    void initialize(IConfiguration config);

    /**
     * A non-negative integer indicating a count of elements.
     *
     * @return non-negative integer indicating a count of elements.
     */
    int size();

    /**
     * Returns {@code true} if the size is zero. May be more efficient than calculating size.
     *
     * @return {@code true} if the size is zero otherwise {@code false}.
     */
    boolean isEmpty();

    /**
     * Check if the given key can be found in the config.
     *
     * @param key to look up value.
     * @return if the key is present
     */
    boolean contains(String key);

    /**
     * Get a String associated with the given configuration key.
     *
     * @param key to look up value.
     * @return value from config or null if not present.
     */
    String get(String key);

    /**
     * Get a String associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    String get(String key, String defaultValue);

    /**
     * Get a boolean associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    boolean get(String key, boolean defaultValue);

    /**
     * Get a Class associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    Class<?> get(String key, Class<?> defaultValue);

    /**
     * Get an Enum associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @param <T> enum type.
     * @return value from config or defaultValue if not present.
     */
    <T extends Enum<T>> T get(String key, T defaultValue);

    /**
     * Get an int associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    int get(String key, int defaultValue);

    /**
     * Get a long associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    long get(String key, long defaultValue);

    /**
     * Get a float associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    float get(String key, float defaultValue);

    /**
     * Get a double associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    double get(String key, double defaultValue);

    /**
     * Get a list of strings associated with the given configuration key.
     *
     * @param key to look up value.
     * @return value from config or an immutable list if not present.
     */
    List<String> getList(String key);

    /**
     * Get a list of strings associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    List<String> getList(String key, List<String> defaultValue);

    /**
     * Set the value for the given key.
     *
     * @param key to set value for.
     * @param value to set.
     */
    void set(String key, String value);
}
5,499