index
int64
0
0
repo_id
stringlengths
26
205
file_path
stringlengths
51
246
content
stringlengths
8
433k
__index_level_0__
int64
0
10k
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/StreamProcessorInformation.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for the "StreamProcessorInformation" section of a Rekognition output
 * record. Unrecognized JSON properties are retained via the
 * {@code additionalProperties} map instead of being dropped.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class StreamProcessorInformation implements Serializable {

    private static final long serialVersionUID = -4043725115310892727L;

    /** Stream processor status string (JSON property "Status"). */
    @JsonProperty("Status")
    private String status;

    /** Catch-all for JSON properties with no dedicated field. */
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("Status")
    public String getStatus() {
        return status;
    }

    @JsonProperty("Status")
    public void setStatus(String status) {
        this.status = status;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("status", status)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order preserved from the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(status)
                .append(additionalProperties)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Idiomatic negated instanceof (was "(other instanceof X) == false").
        if (!(other instanceof StreamProcessorInformation)) {
            return false;
        }
        StreamProcessorInformation rhs = (StreamProcessorInformation) other;
        return new EqualsBuilder()
                .append(status, rhs.status)
                .append(additionalProperties, rhs.additionalProperties)
                .isEquals();
    }
}
5,000
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/RekognitionInput.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import lombok.Builder;
import lombok.Value;

/**
 * Input for Rekognition stream processor.
 *
 * <p>Immutable value object (Lombok {@code @Value}) constructed via the
 * generated builder ({@code RekognitionInput.builder()}).
 */
@Value
@Builder
public class RekognitionInput {
    // ARN of the Kinesis Video stream the processor reads frames from.
    private String kinesisVideoStreamArn;
    // ARN of the Kinesis Data stream the processor writes results to.
    private String kinesisDataStreamArn;
    // ARN of the IAM role the stream processor assumes.
    private String iamRoleArn;
    // Rekognition face collection to match faces against.
    private String faceCollectionId;
    // Name to assign to the stream processor.
    private String streamingProcessorName;
    // Minimum face-match confidence threshold — presumably 0–100 as in the
    // Rekognition API; TODO confirm against the caller.
    private Float matchThreshold;
}
5,001
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/InputInformation.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for the "InputInformation" section of a Rekognition output record.
 * Unrecognized JSON properties are retained via the
 * {@code additionalProperties} map instead of being dropped.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class InputInformation implements Serializable {

    private static final long serialVersionUID = 4448679967188698414L;

    /** Source Kinesis Video stream details (JSON property "KinesisVideo"). */
    @JsonProperty("KinesisVideo")
    private KinesisVideo kinesisVideo;

    /** Catch-all for JSON properties with no dedicated field. */
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("KinesisVideo")
    public KinesisVideo getKinesisVideo() {
        return kinesisVideo;
    }

    @JsonProperty("KinesisVideo")
    public void setKinesisVideo(KinesisVideo kinesisVideo) {
        this.kinesisVideo = kinesisVideo;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("kinesisVideo", kinesisVideo)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order preserved from the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(kinesisVideo)
                .append(additionalProperties)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Idiomatic negated instanceof (was "(other instanceof X) == false").
        if (!(other instanceof InputInformation)) {
            return false;
        }
        InputInformation rhs = (InputInformation) other;
        return new EqualsBuilder()
                .append(kinesisVideo, rhs.kinesisVideo)
                .append(additionalProperties, rhs.additionalProperties)
                .isEquals();
    }
}
5,002
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/RekognizedOutput.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.util.ArrayList;
import java.util.List;

import lombok.Builder;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;

/**
 * A single Rekognition result correlated back to the video fragment it was
 * detected in. Built via the Lombok-generated builder; face-search results
 * are accumulated after construction through
 * {@link #addFaceSearchOutput(FaceSearchOutput)}.
 */
@Builder
@Getter
@ToString
public class RekognizedOutput {

    // Kinesis Video fragment number this output belongs to.
    private String fragmentNumber;
    // Offset of the frame within the fragment, in seconds.
    private Double frameOffsetInSeconds;
    // Server-side timestamp of the fragment — units not visible here;
    // presumably epoch seconds, TODO confirm against the producer.
    private Double serverTimestamp;
    // Producer-side timestamp of the fragment (same caveat as above).
    private Double producerTimestamp;
    // Mutable: the only field with a setter, assigned after matching.
    @Setter
    private String faceId;
    private double detectedTime;
    // @Builder.Default keeps the empty-list initializer when built via builder().
    @Builder.Default
    private List<FaceSearchOutput> faceSearchOutputs = new ArrayList<>();

    /** Appends one face-search result to this output. */
    public void addFaceSearchOutput(FaceSearchOutput faceSearchOutput) {
        this.faceSearchOutputs.add(faceSearchOutput);
    }

    /**
     * One detected face together with the collection faces it matched.
     */
    @Getter
    @Builder
    @ToString
    public static class FaceSearchOutput {
        private DetectedFace detectedFace;
        @Builder.Default
        private List<MatchedFace> matchedFaceList = new ArrayList<>();
    }
}
5,003
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/Face.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for a "Face" object in a Rekognition output record (a face stored in
 * a collection). Unrecognized JSON properties are retained via the
 * {@code additionalProperties} map instead of being dropped.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Face implements Serializable {

    private static final long serialVersionUID = 4320869723686571816L;

    /** Bounding box of the face within the frame. */
    @JsonProperty("BoundingBox")
    private BoundingBox boundingBox;

    /** Identifier of the face in the collection. */
    @JsonProperty("FaceId")
    private String faceId;

    /** Detection confidence reported by Rekognition. */
    @JsonProperty("Confidence")
    private Double confidence;

    /** Identifier of the source image in the collection. */
    @JsonProperty("ImageId")
    private String imageId;

    /** Caller-assigned identifier for the source image. */
    @JsonProperty("ExternalImageId")
    private String externalImageId;

    /** Catch-all for JSON properties with no dedicated field. */
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("BoundingBox")
    public BoundingBox getBoundingBox() {
        return boundingBox;
    }

    @JsonProperty("BoundingBox")
    public void setBoundingBox(BoundingBox boundingBox) {
        this.boundingBox = boundingBox;
    }

    @JsonProperty("FaceId")
    public String getFaceId() {
        return faceId;
    }

    @JsonProperty("FaceId")
    public void setFaceId(String faceId) {
        this.faceId = faceId;
    }

    @JsonProperty("Confidence")
    public Double getConfidence() {
        return confidence;
    }

    @JsonProperty("Confidence")
    public void setConfidence(Double confidence) {
        this.confidence = confidence;
    }

    @JsonProperty("ImageId")
    public String getImageId() {
        return imageId;
    }

    @JsonProperty("ImageId")
    public void setImageId(String imageId) {
        this.imageId = imageId;
    }

    @JsonProperty("ExternalImageId")
    public String getExternalImageId() {
        return externalImageId;
    }

    @JsonProperty("ExternalImageId")
    public void setExternalImageId(String externalImageId) {
        this.externalImageId = externalImageId;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("boundingBox", boundingBox)
                .append("faceId", faceId)
                .append("confidence", confidence)
                .append("imageId", imageId)
                .append("externalImageId", externalImageId)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order preserved from the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(boundingBox)
                .append(imageId)
                .append(externalImageId)
                .append(faceId)
                .append(additionalProperties)
                .append(confidence)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Idiomatic negated instanceof (was "(other instanceof X) == false").
        if (!(other instanceof Face)) {
            return false;
        }
        Face rhs = (Face) other;
        return new EqualsBuilder()
                .append(boundingBox, rhs.boundingBox)
                .append(imageId, rhs.imageId)
                .append(externalImageId, rhs.externalImageId)
                .append(faceId, rhs.faceId)
                .append(additionalProperties, rhs.additionalProperties)
                .append(confidence, rhs.confidence)
                .isEquals();
    }
}
5,004
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/RekognitionOutput.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * Top-level POJO for a Rekognition stream processor output record.
 * Unrecognized JSON properties are retained via the
 * {@code additionalProperties} map instead of being dropped.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class RekognitionOutput implements Serializable {

    private static final long serialVersionUID = -4243167512470204665L;

    /** Details of the source video stream. */
    @JsonProperty("InputInformation")
    private InputInformation inputInformation;

    /** Details of the stream processor that produced this record. */
    @JsonProperty("StreamProcessorInformation")
    private StreamProcessorInformation streamProcessorInformation;

    /** Face-search results; null when absent from the JSON. */
    @JsonProperty("FaceSearchResponse")
    private List<FaceSearchResponse> faceSearchResponse = null;

    /** Catch-all for JSON properties with no dedicated field. */
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("InputInformation")
    public InputInformation getInputInformation() {
        return inputInformation;
    }

    @JsonProperty("InputInformation")
    public void setInputInformation(InputInformation inputInformation) {
        this.inputInformation = inputInformation;
    }

    @JsonProperty("StreamProcessorInformation")
    public StreamProcessorInformation getStreamProcessorInformation() {
        return streamProcessorInformation;
    }

    @JsonProperty("StreamProcessorInformation")
    public void setStreamProcessorInformation(StreamProcessorInformation streamProcessorInformation) {
        this.streamProcessorInformation = streamProcessorInformation;
    }

    @JsonProperty("FaceSearchResponse")
    public List<FaceSearchResponse> getFaceSearchResponse() {
        return faceSearchResponse;
    }

    @JsonProperty("FaceSearchResponse")
    public void setFaceSearchResponse(List<FaceSearchResponse> faceSearchResponse) {
        this.faceSearchResponse = faceSearchResponse;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("inputInformation", inputInformation)
                .append("streamProcessorInformation", streamProcessorInformation)
                .append("faceSearchResponse", faceSearchResponse)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order preserved from the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(inputInformation)
                .append(additionalProperties)
                .append(faceSearchResponse)
                .append(streamProcessorInformation)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Idiomatic negated instanceof (was "(other instanceof X) == false").
        if (!(other instanceof RekognitionOutput)) {
            return false;
        }
        RekognitionOutput rhs = (RekognitionOutput) other;
        return new EqualsBuilder()
                .append(inputInformation, rhs.inputInformation)
                .append(additionalProperties, rhs.additionalProperties)
                .append(faceSearchResponse, rhs.faceSearchResponse)
                .append(streamProcessorInformation, rhs.streamProcessorInformation)
                .isEquals();
    }
}
5,005
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/Landmark.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for a facial "Landmark" (a named point on a detected face) in a
 * Rekognition output record. Unrecognized JSON properties are retained via
 * the {@code additionalProperties} map instead of being dropped.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Landmark implements Serializable {

    private static final long serialVersionUID = 8108892948615651543L;

    /** Horizontal coordinate of the landmark (JSON property "X"). */
    @JsonProperty("X")
    private Double x;

    /** Vertical coordinate of the landmark (JSON property "Y"). */
    @JsonProperty("Y")
    private Double y;

    /** Landmark type name, e.g. which facial feature this point marks. */
    @JsonProperty("Type")
    private String type;

    /** Catch-all for JSON properties with no dedicated field. */
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("X")
    public Double getX() {
        return x;
    }

    @JsonProperty("X")
    public void setX(Double x) {
        this.x = x;
    }

    @JsonProperty("Y")
    public Double getY() {
        return y;
    }

    @JsonProperty("Y")
    public void setY(Double y) {
        this.y = y;
    }

    @JsonProperty("Type")
    public String getType() {
        return type;
    }

    @JsonProperty("Type")
    public void setType(String type) {
        this.type = type;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("x", x)
                .append("y", y)
                .append("type", type)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order preserved from the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(additionalProperties)
                .append(type)
                .append(y)
                .append(x)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Idiomatic negated instanceof (was "(other instanceof X) == false").
        if (!(other instanceof Landmark)) {
            return false;
        }
        Landmark rhs = (Landmark) other;
        return new EqualsBuilder()
                .append(additionalProperties, rhs.additionalProperties)
                .append(type, rhs.type)
                .append(y, rhs.y)
                .append(x, rhs.x)
                .isEquals();
    }
}
5,006
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/Pose.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for the head "Pose" of a detected face in a Rekognition output
 * record. Unrecognized JSON properties are retained via the
 * {@code additionalProperties} map instead of being dropped.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Pose implements Serializable {

    private static final long serialVersionUID = 5134659150043632590L;

    /** Pitch rotation of the head (JSON property "Pitch"). */
    @JsonProperty("Pitch")
    private Double pitch;

    /** Roll rotation of the head (JSON property "Roll"). */
    @JsonProperty("Roll")
    private Double roll;

    /** Yaw rotation of the head (JSON property "Yaw"). */
    @JsonProperty("Yaw")
    private Double yaw;

    /** Catch-all for JSON properties with no dedicated field. */
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("Pitch")
    public Double getPitch() {
        return pitch;
    }

    @JsonProperty("Pitch")
    public void setPitch(Double pitch) {
        this.pitch = pitch;
    }

    @JsonProperty("Roll")
    public Double getRoll() {
        return roll;
    }

    @JsonProperty("Roll")
    public void setRoll(Double roll) {
        this.roll = roll;
    }

    @JsonProperty("Yaw")
    public Double getYaw() {
        return yaw;
    }

    @JsonProperty("Yaw")
    public void setYaw(Double yaw) {
        this.yaw = yaw;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("pitch", pitch)
                .append("roll", roll)
                .append("yaw", yaw)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order preserved from the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(yaw)
                .append(roll)
                .append(additionalProperties)
                .append(pitch)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Idiomatic negated instanceof (was "(other instanceof X) == false").
        if (!(other instanceof Pose)) {
            return false;
        }
        Pose rhs = (Pose) other;
        return new EqualsBuilder()
                .append(yaw, rhs.yaw)
                .append(roll, rhs.roll)
                .append(additionalProperties, rhs.additionalProperties)
                .append(pitch, rhs.pitch)
                .isEquals();
    }
}
5,007
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/Quality.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for the image "Quality" of a detected face in a Rekognition output
 * record. Unrecognized JSON properties are retained via the
 * {@code additionalProperties} map instead of being dropped.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Quality implements Serializable {

    private static final long serialVersionUID = 2898836203617659983L;

    /** Brightness measure of the face image (JSON property "Brightness"). */
    @JsonProperty("Brightness")
    private Double brightness;

    /** Sharpness measure of the face image (JSON property "Sharpness"). */
    @JsonProperty("Sharpness")
    private Double sharpness;

    /** Catch-all for JSON properties with no dedicated field. */
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("Brightness")
    public Double getBrightness() {
        return brightness;
    }

    @JsonProperty("Brightness")
    public void setBrightness(Double brightness) {
        this.brightness = brightness;
    }

    @JsonProperty("Sharpness")
    public Double getSharpness() {
        return sharpness;
    }

    @JsonProperty("Sharpness")
    public void setSharpness(Double sharpness) {
        this.sharpness = sharpness;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("brightness", brightness)
                .append("sharpness", sharpness)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order preserved from the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(sharpness)
                .append(brightness)
                .append(additionalProperties)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Idiomatic negated instanceof (was "(other instanceof X) == false").
        if (!(other instanceof Quality)) {
            return false;
        }
        Quality rhs = (Quality) other;
        return new EqualsBuilder()
                .append(sharpness, rhs.sharpness)
                .append(brightness, rhs.brightness)
                .append(additionalProperties, rhs.additionalProperties)
                .isEquals();
    }
}
5,008
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/MatchedFace.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for a "MatchedFace" entry — a collection face matched against a
 * detected face, with its similarity score. Unrecognized JSON properties are
 * retained via the {@code additionalProperties} map instead of being dropped.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class MatchedFace implements Serializable {

    private static final long serialVersionUID = -5269363379216197335L;

    /** Similarity score between the detected and matched face. */
    @JsonProperty("Similarity")
    private Double similarity;

    /** The matched face stored in the Rekognition collection. */
    @JsonProperty("Face")
    private Face face;

    /** Catch-all for JSON properties with no dedicated field. */
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("Similarity")
    public Double getSimilarity() {
        return similarity;
    }

    @JsonProperty("Similarity")
    public void setSimilarity(Double similarity) {
        this.similarity = similarity;
    }

    @JsonProperty("Face")
    public Face getFace() {
        return face;
    }

    @JsonProperty("Face")
    public void setFace(Face face) {
        this.face = face;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("similarity", similarity)
                .append("face", face)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order preserved from the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(face)
                .append(additionalProperties)
                .append(similarity)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Idiomatic negated instanceof (was "(other instanceof X) == false").
        if (!(other instanceof MatchedFace)) {
            return false;
        }
        MatchedFace rhs = (MatchedFace) other;
        return new EqualsBuilder()
                .append(face, rhs.face)
                .append(additionalProperties, rhs.additionalProperties)
                .append(similarity, rhs.similarity)
                .isEquals();
    }
}
5,009
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/DetectedFace.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * POJO for a "DetectedFace" entry — a face found in the video frame, with
 * its geometry and image-quality details. Unrecognized JSON properties are
 * retained via the {@code additionalProperties} map instead of being dropped.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class DetectedFace implements Serializable {

    private static final long serialVersionUID = 4389260550207592384L;

    /** Bounding box of the detected face within the frame. */
    @JsonProperty("BoundingBox")
    private BoundingBox boundingBox;

    /** Detection confidence reported by Rekognition. */
    @JsonProperty("Confidence")
    private Double confidence;

    /** Facial landmarks; null when absent from the JSON. */
    @JsonProperty("Landmarks")
    private List<Landmark> landmarks = null;

    /** Head pose (pitch/roll/yaw) of the detected face. */
    @JsonProperty("Pose")
    private Pose pose;

    /** Image quality (brightness/sharpness) of the detected face. */
    @JsonProperty("Quality")
    private Quality quality;

    /** Catch-all for JSON properties with no dedicated field. */
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("BoundingBox")
    public BoundingBox getBoundingBox() {
        return boundingBox;
    }

    @JsonProperty("BoundingBox")
    public void setBoundingBox(BoundingBox boundingBox) {
        this.boundingBox = boundingBox;
    }

    @JsonProperty("Confidence")
    public Double getConfidence() {
        return confidence;
    }

    @JsonProperty("Confidence")
    public void setConfidence(Double confidence) {
        this.confidence = confidence;
    }

    @JsonProperty("Landmarks")
    public List<Landmark> getLandmarks() {
        return landmarks;
    }

    @JsonProperty("Landmarks")
    public void setLandmarks(List<Landmark> landmarks) {
        this.landmarks = landmarks;
    }

    @JsonProperty("Pose")
    public Pose getPose() {
        return pose;
    }

    @JsonProperty("Pose")
    public void setPose(Pose pose) {
        this.pose = pose;
    }

    @JsonProperty("Quality")
    public Quality getQuality() {
        return quality;
    }

    @JsonProperty("Quality")
    public void setQuality(Quality quality) {
        this.quality = quality;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("boundingBox", boundingBox)
                .append("confidence", confidence)
                .append("landmarks", landmarks)
                .append("pose", pose)
                .append("quality", quality)
                .append("additionalProperties", additionalProperties)
                .toString();
    }

    @Override
    public int hashCode() {
        // Append order preserved from the original so hash values are unchanged.
        return new HashCodeBuilder()
                .append(pose)
                .append(boundingBox)
                .append(landmarks)
                .append(additionalProperties)
                .append(quality)
                .append(confidence)
                .toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Idiomatic negated instanceof (was "(other instanceof X) == false").
        if (!(other instanceof DetectedFace)) {
            return false;
        }
        DetectedFace rhs = (DetectedFace) other;
        return new EqualsBuilder()
                .append(pose, rhs.pose)
                .append(boundingBox, rhs.boundingBox)
                .append(landmarks, rhs.landmarks)
                .append(additionalProperties, rhs.additionalProperties)
                .append(quality, rhs.quality)
                .append(confidence, rhs.confidence)
                .isEquals();
    }
}
5,010
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/RekognizedFragmentsIndex.java
/*
 * Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 * http://aws.amazon.com/apache2.0/
 * or in the "license" file accompanying this file.
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;

import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;

/**
 * Index which stores results for each fragment number from Rekognition output i.e Kinesis Data Streams.
 * It normalizes each kinesis event record (which is for every sampled frame of a fragment) and stores
 * per fragment number in memory. Rekognition output can be mapped to the KVS fragments using either real-time
 * GetMedia or archived GetMediaForFragmentList call. So internally RekognizedFragmentsIndex uses two different data
 * structures for this reason.
 *
 * 1. ConcurrentLinkedQueue: Rekognition output is stored in LinkedQueue as soon as it's retrieved from Kinesis
 * Data Streams. This can be used to integrate with KVS GetMediaForFragmentList API, as the caller gets the fragment
 * number from Kinesis Data Streams. So the caller needs Rekognition output and fragment number in FIFO order which
 * is achieved by this Queue.
 * 2. ConcurrentHashMap: Rekognition output is stored as the value with the corresponding fragment number as the key
 * for the hash map. This is used while integrating with KVS GetMedia API, as the caller gets the fragment number
 * from real-time fragments retrieved. So the index needs an efficient search mechanism to search the Rekognition
 * outputs for a given fragment number. Searching in the above LinkedDeque becomes expensive as the number of items
 * stored in it increases if none of the items are processed. So this hash map serves as the index for the queue for
 * fast retrieval O(1) compared to linear search O(N).
 *
 * All mutating accessors are synchronized so the map and the queue are always updated atomically
 * with respect to each other.
 */
@Slf4j
@ToString
public class RekognizedFragmentsIndex {

    // Fragment-number -> fragment lookup; mirrors the queue's contents exactly.
    private final ConcurrentHashMap<String, RekognizedFragment> rekognizedOutputMap = new ConcurrentHashMap<>();
    // FIFO view of the same fragments, in arrival order.
    private final ConcurrentLinkedQueue<RekognizedFragment> rekognizedOutputQueue = new ConcurrentLinkedQueue<>();

    /**
     * Add Rekognized output to the index for a fragment number and its other attributes like producer time,
     * server time etc. If the fragment number is already indexed, the output is appended to the existing
     * entry; otherwise a new entry is created in both the queue and the map.
     *
     * @param fragmentNumber Fragment Number of the fragment
     * @param producerTime Producer time of the fragment
     * @param serverTime Server time of the fragment
     * @param rekognizedOutput Rekognition output of the fragment
     */
    public synchronized void add(final String fragmentNumber,
                                 final Long producerTime,
                                 final Long serverTime,
                                 final RekognizedOutput rekognizedOutput) {
        if (rekognizedOutputMap.containsKey(fragmentNumber)) {
            final RekognizedFragment rekognizedFragment = rekognizedOutputMap.get(fragmentNumber);
            rekognizedFragment.addRekognizedOutput(rekognizedOutput);
        } else {
            final RekognizedFragment rekognizedFragment =
                    new RekognizedFragment(fragmentNumber, producerTime, serverTime);
            rekognizedFragment.addRekognizedOutput(rekognizedOutput);
            rekognizedOutputQueue.add(rekognizedFragment);
            rekognizedOutputMap.put(fragmentNumber, rekognizedFragment);
        }
        log.debug("Added rekognized fragment number {} to the index.", fragmentNumber);
    }

    /**
     * Polls the index for the first available rekognized fragment, removing it from both the
     * queue and the map.
     *
     * @return RekognizedFragment if one exists; null if the index is empty.
     */
    public synchronized RekognizedFragment poll() {
        final RekognizedFragment rekognizedFragment = rekognizedOutputQueue.poll();
        // FIX: ConcurrentLinkedQueue.poll() returns null when the queue is empty; the previous
        // implementation dereferenced the result unconditionally and threw an NPE.
        if (rekognizedFragment != null) {
            rekognizedOutputMap.remove(rekognizedFragment.getFragmentNumber());
        }
        return rekognizedFragment;
    }

    /**
     * Returns the number of fragments currently held by the index.
     *
     * FIX: synchronized like every other accessor. An unsynchronized read can observe the
     * map and queue mid-update (between the two writes in add/poll/remove) and spuriously
     * trip the consistency check below.
     *
     * @return number of indexed fragments.
     * @throws IllegalStateException if the map and queue have diverged (internal invariant violation).
     */
    public synchronized int size() {
        log.debug("Rekognized index Map size : {} queue size : {}",
                rekognizedOutputMap.size(), rekognizedOutputQueue.size());
        if (rekognizedOutputMap.size() != rekognizedOutputQueue.size()) {
            throw new IllegalStateException("RekognizedFragmentsIndex map and queue size doesn't match");
        }
        return this.rekognizedOutputQueue.size();
    }

    /**
     * Checks the index for any available rekognized fragment.
     *
     * @return true if empty. false otherwise.
     */
    public synchronized boolean isEmpty() {
        return rekognizedOutputQueue.isEmpty();
    }

    /**
     * Gets the list of Rekognized Output for the given fragment number.
     *
     * @param fragmentNumber Input fragment number.
     * @return List of rekognized outputs if exists. null otherwise.
     */
    public synchronized List<RekognizedOutput> getRekognizedOutputList(final String fragmentNumber) {
        return (rekognizedOutputMap.containsKey(fragmentNumber))
                ? rekognizedOutputMap.get(fragmentNumber).getRekognizedOutputs() : null;
    }

    /**
     * Removes the rekognized fragment from the index for the given fragment number.
     * No-op if the fragment number is not indexed.
     *
     * @param fragmentNumber Input fragment number.
     */
    public synchronized void remove(final String fragmentNumber) {
        if (rekognizedOutputMap.containsKey(fragmentNumber)) {
            final RekognizedFragment rekognizedFragment = rekognizedOutputMap.remove(fragmentNumber);
            rekognizedOutputQueue.remove(rekognizedFragment);
        }
    }

    /**
     * Value object grouping all Rekognition outputs for a single KVS fragment together with
     * the fragment's producer-side and server-side timestamps.
     */
    @Getter
    @ToString
    @EqualsAndHashCode
    @RequiredArgsConstructor
    public static class RekognizedFragment {
        private final String fragmentNumber;
        private final Long producerTime;
        private final Long serverTime;
        private final List<RekognizedOutput> rekognizedOutputs = new ArrayList<>();

        /** Appends one Rekognition output (one sampled frame's result) to this fragment. */
        public void addRekognizedOutput(final RekognizedOutput rekognizedOutput) {
            this.rekognizedOutputs.add(rekognizedOutput);
        }
    }
}
5,011
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/FaceType.java
/*
 * Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 * http://aws.amazon.com/apache2.0/
 * or in the "license" file accompanying this file.
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.awt.Color;

import lombok.Getter;
import lombok.RequiredArgsConstructor;

/**
 * Enum which lists down the sample types of the faces detected in given frame. This list can be expanded
 * based on the face type given in external image id while creating face collection.
 *
 * Each type carries the overlay {@link Color} used when rendering it and the string prefix used in
 * the external image id.
 *
 * For more information please refer
 * https://docs.aws.amazon.com/rekognition/latest/dg/add-faces-to-collection-procedure.html
 */
@Getter
@RequiredArgsConstructor
public enum FaceType {
    TRUSTED(Color.GREEN, "Trusted"),
    CRIMINAL(Color.RED, "Criminal"),
    UNKNOWN(Color.YELLOW, "Unknown"),
    NOT_RECOGNIZED(Color.PINK, "NotRecognized"),
    ALL(Color.BLACK, "All");

    private final Color color;
    private final String prefix;

    /**
     * Resolves a {@link FaceType} from its prefix, ignoring case.
     *
     * @param value prefix to look up (e.g. "Trusted"); may be null.
     * @return the matching FaceType, or {@link #UNKNOWN} when value is null or matches no prefix.
     */
    public static FaceType fromString(final String value) {
        if (value != null) {
            // FIX: single values() call instead of three array clones per iteration, and
            // equalsIgnoreCase instead of locale-sensitive toUpperCase().equals(...) —
            // the old form misbehaves under locales such as Turkish (dotless-i) and
            // threw NPE for a null input.
            for (final FaceType faceType : values()) {
                if (faceType.prefix.equalsIgnoreCase(value)) {
                    return faceType;
                }
            }
        }
        return UNKNOWN;
    }
}
5,012
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/rekognition/pojo/FaceSearchResponse.java
/*
 * Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 * http://aws.amazon.com/apache2.0/
 * or in the "license" file accompanying this file.
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.kinesisvideo.parser.rekognition.pojo;

import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * Jackson POJO for one face-search result in a Rekognition stream-processor record:
 * the face detected in the frame plus the collection faces it matched against.
 * Unknown JSON fields are captured in {@code additionalProperties} via the
 * {@code @JsonAnySetter}/{@code @JsonAnyGetter} pair so deserialization never fails on them.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class FaceSearchResponse implements Serializable {

    // The face detected in the analyzed frame.
    @JsonProperty("DetectedFace")
    private DetectedFace detectedFace;
    // Faces from the collection that matched the detected face; null until set by deserialization.
    @JsonProperty("MatchedFaces")
    private List<MatchedFace> matchedFaces = null;
    // Catch-all for JSON fields with no dedicated property above; excluded from serialization by name.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();
    private final static long serialVersionUID = -5645575235038800306L;

    /** @return the detected face, or null if absent in the JSON. */
    @JsonProperty("DetectedFace")
    public DetectedFace getDetectedFace() {
        return detectedFace;
    }

    /** @param detectedFace the detected face to set. */
    @JsonProperty("DetectedFace")
    public void setDetectedFace(DetectedFace detectedFace) {
        this.detectedFace = detectedFace;
    }

    /** @return the list of matched collection faces, or null if absent in the JSON. */
    @JsonProperty("MatchedFaces")
    public List<MatchedFace> getMatchedFaces() {
        return matchedFaces;
    }

    /** @param matchedFaces the matched collection faces to set. */
    @JsonProperty("MatchedFaces")
    public void setMatchedFaces(List<MatchedFace> matchedFaces) {
        this.matchedFaces = matchedFaces;
    }

    /** @return the live map of JSON fields that had no dedicated property (not a copy). */
    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    /** Records a JSON field that has no dedicated property on this class. */
    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("detectedFace", detectedFace)
                .append("matchedFaces", matchedFaces)
                .append("additionalProperties", additionalProperties).toString();
    }

    // NOTE: hashCode/equals append fields in the same (generated) order; keep the two in sync.
    @Override
    public int hashCode() {
        return new HashCodeBuilder()
                .append(matchedFaces)
                .append(detectedFace)
                .append(additionalProperties).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if ((other instanceof FaceSearchResponse) == false) {
            return false;
        }
        FaceSearchResponse rhs = ((FaceSearchResponse) other);
        return new EqualsBuilder()
                .append(matchedFaces, rhs.matchedFaces)
                .append(detectedFace, rhs.detectedFace)
                .append(additionalProperties, rhs.additionalProperties).isEquals();
    }
}
5,013
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/ParserByteSource.java
/*
 * Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 * http://aws.amazon.com/apache2.0/
 * or in the "license" file accompanying this file.
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.kinesisvideo.parser.ebml;

/**
 * Represents a source of bytes that can be parsed by the EBML parser.
 * It could be backed by a ByteBuffer or a netty ByteBuf or an input stream that can support these operations.
 *
 * Extends {@link ParserBulkByteSource} with single-byte and availability queries.
 */
public interface ParserByteSource extends ParserBulkByteSource {

    /** Reads the next byte from the source, returned as an int. */
    int readByte();

    /** Returns the number of bytes currently available to read without blocking. */
    int available();

    /** Returns true when the source has reached end of stream. */
    boolean eof();
}
5,014
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLTypeInfo.java
/*
 * Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 * http://aws.amazon.com/apache2.0/
 * or in the "license" file accompanying this file.
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.kinesisvideo.parser.ebml;

import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;

/**
 * The type information for an EBML element.
 * This specifies the semantics of the EBML elements in an EBML document.
 * For example the TypeInfo for MKV will specify the semantics for the EBML elements that make up a MKV document.
 */
@Builder
@AllArgsConstructor(access = AccessLevel.PUBLIC)
@Getter
@ToString
@EqualsAndHashCode
public class EBMLTypeInfo {
    // EBML element id (the tag bytes interpreted as an int).
    private final int id;
    // Human-readable element name, e.g. from the MKV specification.
    private final String name;
    // Nesting level of the element in the document; negative level marks a global element (see isGlobal).
    private final int level;
    // The EBML data type of the element's content.
    private final TYPE type;
    // Whether the element may contain elements of its own type (e.g. nested master elements).
    @Builder.Default
    private boolean isRecursive = false;

    /** Returns true for global elements, which this model encodes with a negative level. */
    public boolean isGlobal() {
        return level < 0;
    }

    /** The EBML content data types an element can carry. */
    public enum TYPE {
        INTEGER, UINTEGER, FLOAT, STRING, UTF_8, DATE, MASTER, BINARY
    }
}
5,015
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLParserInternalElement.java
/*
 * Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 * http://aws.amazon.com/apache2.0/
 * or in the "license" file accompanying this file.
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.kinesisvideo.parser.ebml;

import lombok.Getter;
import lombok.ToString;
import org.apache.commons.lang3.Validate;

import java.nio.ByteBuffer;
import java.util.Optional;

import static com.amazonaws.kinesisvideo.parser.ebml.EBMLUtils.UNKNOWN_LENGTH_VALUE;

/**
 * This class is used by the parser to represent an EBML Element internally.
 *
 * An element progresses through a strict state machine
 * (NEW -> ID_DONE -> SIZE_DONE -> CONTENT_READING|CONTENT_SKIPPING -> FINISHED);
 * every method Validates the expected state on entry, so out-of-order calls fail fast.
 */
@ToString
class EBMLParserInternalElement {

    /** Read-state of this element; transitions are strictly forward. */
    enum ElementReadState {
        NEW, ID_DONE, SIZE_DONE, CONTENT_READING, CONTENT_SKIPPING, FINISHED
    }

    // Stream offset at which this element's id begins.
    private final long startingOffset;
    // Ordinal of this element within the stream (used to build element metadata).
    @Getter
    private final long elementCount;

    ElementReadState currentElementReadState = ElementReadState.NEW;

    // Decoded EBML id; valid once state >= ID_DONE.
    @Getter
    private int id;
    // Number of bytes the id occupied in the stream.
    private long idNumBytes;
    // Declared content size; UNKNOWN_LENGTH_VALUE (-1) for unknown-length elements. Valid once state >= SIZE_DONE.
    @Getter
    private long dataSize;
    // Number of bytes the size field occupied in the stream.
    private long dataSizeNumBytes;
    // Present only when the id resolved to a known type via the EBMLTypeInfoProvider.
    private Optional<EBMLElementMetaData> elementMetaData = Optional.empty();

    public EBMLParserInternalElement(long startingOffset, long elementCount) {
        this.startingOffset = startingOffset;
        this.elementCount = elementCount;
    }

    /**
     * Decodes the element id, replaying from this element's starting offset.
     * setId (invoked via callback) advances the state to ID_DONE.
     */
    public void readId(TrackingReplayableIdAndSizeByteSource idAndSizeByteSource) {
        Validate.isTrue(currentElementReadState == ElementReadState.NEW);
        idAndSizeByteSource.setReadOffsetForReplayBuffer(startingOffset);
        EBMLUtils.readId(idAndSizeByteSource, this::setId);
    }

    /**
     * Decodes the element size, replaying from just past the id bytes.
     * setSize (invoked via callback) advances the state to SIZE_DONE.
     */
    public void readSize(TrackingReplayableIdAndSizeByteSource idAndSizeByteSource) {
        Validate.isTrue(currentElementReadState == ElementReadState.ID_DONE);
        idAndSizeByteSource.setReadOffsetForReplayBuffer(startingOffset + idNumBytes);
        EBMLUtils.readSize(idAndSizeByteSource, this::setSize);
    }

    /**
     * Looks up the decoded id in the type provider; metadata stays empty for unknown ids,
     * which routes the element to content-skipping.
     */
    public void updateTypeInfo(EBMLTypeInfoProvider typeInfoProvider) {
        Validate.isTrue(currentElementReadState == ElementReadState.SIZE_DONE);
        Optional<EBMLTypeInfo> typeInfo = typeInfoProvider.getType(id);
        if (typeInfo.isPresent()) {
            elementMetaData = Optional.of(new EBMLElementMetaData(typeInfo.get(), elementCount));
        }
    }

    /** True when updateTypeInfo resolved this element's id to a known type. */
    public boolean isKnownType() {
        return elementMetaData.isPresent();
    }

    /** Type info of a known element; only call after isKnownType() returns true. */
    public EBMLTypeInfo getTypeInfo() {
        return elementMetaData.get().getTypeInfo();
    }

    /** Metadata of a known element; validates presence first.
     *  NOTE(review): the validation message below looks truncated ("EBML element metadata "). */
    public EBMLElementMetaData getMetadata() {
        Validate.isTrue(elementMetaData.isPresent(), "EBML element metadata ");
        return elementMetaData.get();
    }

    /** Transition SIZE_DONE -> CONTENT_READING (known-type elements). */
    public void startReadingContent() {
        Validate.isTrue(currentElementReadState == ElementReadState.SIZE_DONE);
        currentElementReadState = ElementReadState.CONTENT_READING;
    }

    /** Transition SIZE_DONE -> CONTENT_SKIPPING (unknown-type elements). */
    public void startSkippingContent() {
        Validate.isTrue(currentElementReadState == ElementReadState.SIZE_DONE);
        currentElementReadState = ElementReadState.CONTENT_SKIPPING;
    }

    /**
     * Delivers up to maxContentBytesInOnePass bytes of content to the callback, then
     * marks the element FINISHED once the declared size has been fully consumed.
     * Unknown-length elements are never auto-finished here.
     */
    public void readContent(TrackingReplayableIdAndSizeByteSource idAndSizeByteSource,
            ParserBulkByteSource bulkByteSource,
            EBMLParserCallbacks callbacks,
            int maxContentBytesInOnePass) {
        Validate.isTrue(currentElementReadState == ElementReadState.CONTENT_READING);
        long bytesToRead = getBytesToRead(idAndSizeByteSource, maxContentBytesInOnePass);
        //Call onPartialContent if bytesToRead > 0.
        if (bytesToRead > 0) {
            callbacks.onPartialContent(elementMetaData.get(), bulkByteSource, (int) bytesToRead);
        }
        if (!isUnknownLength() && idAndSizeByteSource.getTotalBytesRead() >= getContentStartOffset() + dataSize) {
            currentElementReadState = ElementReadState.FINISHED;
        }
    }

    /**
     * Discards up to skipBuffer.remaining() content bytes into the scratch buffer, then
     * marks the element FINISHED once the declared size has been fully consumed.
     * NOTE(review): unlike readContent, the finish check here has no isUnknownLength() guard —
     * with dataSize == -1 the comparison would finish immediately; confirm skipping is only
     * ever used for known-length elements.
     */
    public void skipContent(TrackingReplayableIdAndSizeByteSource idAndSizeByteSource,
            ParserBulkByteSource bulkByteSource,
            ByteBuffer skipBuffer) {
        Validate.isTrue(currentElementReadState == ElementReadState.CONTENT_SKIPPING);
        long bytesToRead = getBytesToRead(idAndSizeByteSource, skipBuffer.remaining());
        if (bytesToRead > 0) {
            bulkByteSource.readBytes(skipBuffer, (int) bytesToRead);
        }
        if (idAndSizeByteSource.getTotalBytesRead() >= getContentStartOffset() + dataSize) {
            currentElementReadState = ElementReadState.FINISHED;
        }
    }

    /** True when the element declared the reserved unknown-length size. */
    public boolean isUnknownLength() {
        return dataSize == UNKNOWN_LENGTH_VALUE;
    }

    /** Stream offset one past this element's content; only valid for known-length elements. */
    public long endOffSet() {
        Validate.isTrue(!isUnknownLength());
        return getContentStartOffset() + dataSize;
    }

    /** Stream offset where content begins: starting offset plus id and size field bytes. */
    private long getContentStartOffset() {
        return startingOffset + idNumBytes + dataSizeNumBytes;
    }

    /** Callback target for EBMLUtils.readId; advances NEW -> ID_DONE. */
    private void setId(int idArg, long idNumBytes) {
        Validate.isTrue(currentElementReadState == ElementReadState.NEW);
        this.id = idArg;
        this.idNumBytes = idNumBytes;
        currentElementReadState = ElementReadState.ID_DONE;
    }

    /** Callback target for EBMLUtils.readSize; advances ID_DONE -> SIZE_DONE. */
    private void setSize(long sizeArg, long sizeNumBytes) {
        Validate.isTrue(currentElementReadState == ElementReadState.ID_DONE);
        this.dataSize = sizeArg;
        this.dataSizeNumBytes = sizeNumBytes;
        currentElementReadState = ElementReadState.SIZE_DONE;
    }

    /**
     * Bytes to consume this pass: the minimum of (remaining declared content),
     * the caller's per-pass cap, and what the source currently has available.
     */
    private long getBytesToRead(TrackingReplayableIdAndSizeByteSource idAndSizeByteSource,
            int maxContentBytesInOnePass) {
        long bytesToRead = dataSize + getContentStartOffset() - (idAndSizeByteSource.getTotalBytesRead());
        bytesToRead = Math.min(bytesToRead, maxContentBytesInOnePass);
        bytesToRead = Math.min(bytesToRead, idAndSizeByteSource.availableForContent());
        return bytesToRead;
    }
}
5,016
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLUtils.java
/*
 * Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 * http://aws.amazon.com/apache2.0/
 * or in the "license" file accompanying this file.
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.kinesisvideo.parser.ebml;

import org.apache.commons.lang3.Validate;

import java.math.BigInteger;
import java.nio.ByteBuffer;

/**
 * Class used to parse EBML Ids and Sizes that make up the EBML element's meta data.
 *
 * Methods taking a {@code TrackingReplayableIdAndSizeByteSource} are incremental: they return
 * without invoking the result callback when not enough bytes have arrived yet, and are re-invoked
 * (replaying buffered bytes) once more data is available.
 */
public class EBMLUtils {
    /** Sentinel returned for EBML elements that declare the reserved "unknown length" size. */
    public static final long UNKNOWN_LENGTH_VALUE = -1;
    /**
     * Max length for a EBML ID
     */
    public static final int EBML_ID_MAX_BYTES = 4;
    /** Max length in bytes of an EBML size field. */
    public static final int EBML_SIZE_MAX_BYTES = 8;
    private static final int BYTE_MASK = 0xFF;

    /** Default constructor to make checkstyle happy */
    private EBMLUtils() {
    }

    /**
     * constant for byte with first bit set.
     */
    private static final int BYTE_WITH_FIRST_BIT_SET = 0b10000000;

    /**
     * Incrementally decodes an EBML id. Returns silently (callback not invoked) when the source
     * does not yet hold enough bytes; otherwise reports the full id (leading-length bits included)
     * and the number of bytes it occupied.
     */
    static void readId(final TrackingReplayableIdAndSizeByteSource source, IdConsumer resultAcceptor) {
        if (!isEnoughBytes(source, 1)) {
            return;
        }
        final int firstByte = readByte(source);
        // NOTE(review): readByte masks with 0xFF, so firstByte is always in [0, 255]; this branch
        // looks unreachable (and lacks a return) — confirm intent before removing.
        if (firstByte == -1) {
            resultAcceptor.accept(firstByte, 1);
        }
        Validate.isTrue(firstByte >= 0, "EBML Id has negative firstByte" + firstByte);
        // Count of leading zero bits = number of additional bytes beyond the first.
        final int numAdditionalBytes = getNumLeadingZeros(firstByte);
        if (!isEnoughBytes(source, numAdditionalBytes)) {
            return;
        }
        Validate.isTrue(numAdditionalBytes <= (EBML_ID_MAX_BYTES - 1),
                "Trying to decode an EBML ID and it wants " + numAdditionalBytes
                        + " more bytes, but IDs max out at 4 bytes. firstByte was " + firstByte);
        final int rest = (int) readEbmlValueNumber(source, numAdditionalBytes);
        // Ids keep their length-marker bits, so just concatenate first byte and tail.
        resultAcceptor.accept(firstByte << (numAdditionalBytes * Byte.SIZE) | rest, numAdditionalBytes + 1);
    }

    /**
     * Read a variable-size integer that encodes its own length.
     * Used to read the size of an ebml element.
     * <p>
     * 2.1. Variable size integer
     * <p>
     * For both element ID and size descriptor EBML uses a variable size
     * integer, coded according to a schema similar to that of UTF-8
     * [UTF-8] encoding. The variable size integer begins with zero or
     * more zero bits to define the width of the integer. Zero zeroes
     * means a width of one byte, one zero a width of two bytes etc. The
     * zeroes are followed by a marker of one set bit and then follows the
     * actual integer data. The integer data consists of alignment data
     * and tail data. The alignment data together with the width
     * descriptor and the marker makes up one ore more complete bytes. The
     * tail data is as many bytes as there were zeroes in the width
     * descriptor, i.e. width-1.
     * <p>
     * VINT = VINT_WIDTH VINT_MARKER VINT_DATA
     * VINT_WIDTH = *%b0
     * VINT_MARKER = %b1
     * VINT_DATA = VINT_ALIGNMENT VINT_TAIL
     * VINT_ALIGNMENT = *BIT
     * VINT_TAIL = *BYTE
     * <p>
     * An alternate way of expressing this is the following definition,
     * where the width is the number of levels of expansion.
     * <p>
     * VINT = ( %b0 VINT 7BIT ) / ( %b1 7BIT )
     * <p>
     * Some examples of the encoding of integers of width 1 to 4. The x:es
     * represent bits where the actual integer value would be stored.
     * <p>
     * Width Size Representation
     * 1 2^7 1xxx xxxx
     * 2 2^14 01xx xxxx xxxx xxxx
     * 3 2^21 001x xxxx xxxx xxxx xxxx xxxx
     * 4 2^28 0001 xxxx xxxx xxxx xxxx xxxx xxxx xxxx
     *
     * @param source buffer containing chunks of data
     * @param resultAcceptor the callback called when the size of an ebml element is identified.
     * @see "http://www.matroska.org/technical/specs/rfc/index.html"
     */
    private static void readEbmlInt(final TrackingReplayableIdAndSizeByteSource source, SizeConsumer resultAcceptor) {
        if (!isEnoughBytes(source, 1)) {
            return;
        }
        final int firstByte = readByte(source);
        Validate.isTrue(firstByte >= 0, "EBML Int has negative firstByte" + firstByte);
        final int size = getNumLeadingZeros(firstByte);
        if (!isEnoughBytes(source, size)) {
            return;
        }
        // Read the rest of the bytes
        final long rest = readEbmlValueNumber(source, size);
        // Strip the length-marker bit from the first byte, then concatenate with the tail.
        long value = (firstByte & ~((byte) BYTE_WITH_FIRST_BIT_SET >> size)) << (size * Byte.SIZE) | rest;
        // The all-ones payload is the reserved "unknown length" encoding; build the all-ones
        // pattern for this width and compare.
        long unknownValue = (0xff >> (size + 1));
        unknownValue <<= size * 8;
        unknownValue |= (1L << (size * 8)) - 1;
        // Special handing for unknown length
        if (value == unknownValue) {
            value = -1;
        }
        // Slap the first byte's value onto the front (with the first one-bit unset)
        resultAcceptor.accept(value, size + 1);
    }

    /**
     * Read an EBML integer value of varying length from the provided buffer.
     * NOTE(review): unlike the source-based overload above, this variant performs no
     * unknown-length substitution — confirm callers do not rely on -1 here.
     * @param byteBuffer The buffer to read from.
     * @return The integer value.
     * @see "http://www.matroska.org/technical/specs/rfc/index.html"
     */
    public static long readEbmlInt(final ByteBuffer byteBuffer) {
        final int firstByte = byteBuffer.get() & BYTE_MASK;
        Validate.isTrue(firstByte >= 0, "EBML Int has negative firstByte" + firstByte);
        final int size = getNumLeadingZeros(firstByte);
        // Read the rest of the bytes
        final long rest = readUnsignedIntegerSevenBytesOrLess(byteBuffer, size);
        // Slap the first byte's value onto the front (with the first one-bit unset)
        return ((firstByte & ~((byte) BYTE_WITH_FIRST_BIT_SET >> size)) << (size * Byte.SIZE) | rest);
    }

    /**
     * An alias for readEbmlInt that makes it clear we're reading a data size value.
     *
     * @return long value
     */
    static void readSize(final TrackingReplayableIdAndSizeByteSource source, SizeConsumer resultAcceptor) {
        readEbmlInt(source, resultAcceptor);
    }

    /** Reads one byte from the source masked to [0, 255]. */
    private static int readByte(final TrackingReplayableIdAndSizeByteSource source) {
        return source.readByte() & BYTE_MASK;
    }

    /** True when the source holds (and has buffered for replay) at least len bytes. */
    private static boolean isEnoughBytes(final TrackingReplayableIdAndSizeByteSource source, final int len) {
        return source.checkAndReadIntoReplayBuffer(len);
    }

    /**
     * Gets the number of leading zero bits in the specified integer as if it were a byte (to avoid a cast).
     * <p>
     * This is the "count leading zeroes" problem: http://en.wikipedia.org/wiki/Find_first_set
     * <p>
     * Intel processors actually have this as a built-in instruction but we
     * can't access that from the JVM.
     *
     * @param b byte for which we need to find the number of leading zeros.
     * This is typed as an int but should only have the lower 8 bits set.A
     * @return number of leading zeros in the byte.
     */
    private static int getNumLeadingZeros(final int b) {
        return Integer.numberOfLeadingZeros(b) - (Integer.SIZE - Byte.SIZE);
    }

    /**
     * Read a variable-length data payload as a number, given its size.
     * <p>
     * EBML uses big endian/network order byte order, i.e. most
     * significant bit first. All of the tokens above are byte aligned.
     * <p>
     * Besides having an element list as data payload an element can have
     * its data typed with any of seven predefined data types. The actual
     * type information isn't stored in EBML but is inferred from the
     * document type definition through the element ID. The defined data
     * types are signed integer, unsigned integer, float, ASCII string,
     * UTF-8 string, date and binary data.
     * <p>
     * VALUE = INT / UINT / FLOAT / STRING / DATE / BINARY
     * <p>
     * INT = *8BYTE
     * <p>
     * Signed integer, represented in two's complement notation, sizes
     * from 0-8 bytes. A zero byte integer represents the integer value 0.
     *
     * @param source buffer containing chunks of data
     * @param size Size of the integer in bytes
     * @return long value
     */
    private static long readEbmlValueNumber(final TrackingReplayableIdAndSizeByteSource source, final long size) {
        Validate.inclusiveBetween(0L, (long) EBML_SIZE_MAX_BYTES, size,
                "Asked for a numeric value of invalid size " + size);
        Validate.isTrue(isEnoughBytes(source, (int) size));
        long value = 0;
        for (int i = 0; i < size; i++) {
            // readByte(buffer) returns a value from 0-255 as an int, already masked with 0xFF
            final int result = readByte(source);
            value = (value << Byte.SIZE) | result;
        }
        return value;
    }

    /**
     * A specialized method used to read a variable length unsigned integer of size 7 bytes or less.
     * @param byteBuffer The byteBuffer to read from.
     * @param size The size of bytes.
     * @return The long containing the integer value.
     */
    public static long readUnsignedIntegerSevenBytesOrLess(final ByteBuffer byteBuffer, long size) {
        Validate.inclusiveBetween(0L, (long) EBML_SIZE_MAX_BYTES - 1, size,
                "Asked for a numeric value of invalid size " + size);
        Validate.isTrue(byteBuffer.remaining() >= size);
        long value = 0;
        for (int i = 0; i < size; i++) {
            final int result = byteBuffer.get() & 0xFF;
            value = (value << Byte.SIZE) | result;
        }
        return value;
    }

    /**
     * Reads a big-endian two's-complement signed integer of up to 8 bytes.
     * The accumulator is seeded with -1 when the first byte's sign bit is set,
     * which sign-extends the shifted-in bytes.
     */
    public static long readDataSignedInteger(final ByteBuffer byteBuffer, long size) {
        Validate.inclusiveBetween(0L, (long) EBML_SIZE_MAX_BYTES, size,
                "Asked for a numeric value of invalid size " + size);
        Validate.isTrue(byteBuffer.remaining() >= size);
        long value = 0;
        for (int i = 0; i < size; i++) {
            final int result = byteBuffer.get() & 0xFF;
            if (i == 0) {
                boolean positive = (result & 0x80) == 0;
                if (!positive) {
                    // Seed with all-ones so subsequent shifts sign-extend the negative value.
                    value = -1;
                }
            }
            value = (value << Byte.SIZE) | result;
        }
        return value;
    }

    /**
     * Reads a big-endian unsigned integer of up to 8 bytes as a non-negative BigInteger
     * (BigInteger's signum-1 constructor treats the bytes as an unsigned magnitude).
     */
    public static BigInteger readDataUnsignedInteger(final ByteBuffer byteBuffer, long size) {
        Validate.inclusiveBetween(0L, (long) EBML_SIZE_MAX_BYTES, size,
                "Asked for a numeric value of invalid size " + size);
        Validate.isTrue(byteBuffer.remaining() >= size);
        byte[] byteArray = new byte[(int) size];
        byteBuffer.get(byteArray);
        return new BigInteger(1, byteArray);
    }

    /** Receives a decoded EBML id together with the number of bytes it occupied. */
    @FunctionalInterface
    interface IdConsumer {
        void accept(int val, long idNumBytes);
    }

    /** Receives a decoded EBML size together with the number of bytes it occupied. */
    @FunctionalInterface
    interface SizeConsumer {
        void accept(long val, long sizeNumBytes);
    }
}
5,017
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLParser.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import lombok.AccessLevel; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.Setter; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.Validate; import java.io.Closeable; import java.nio.ByteBuffer; import java.util.List; import java.util.Stack; import java.util.stream.Collectors; /** * This class is used to parse a stream of EBML. * It is based on the ebml specification published by the Matroska Org * (https://github.com/Matroska-Org/ebml-specification/blob/master/specification.markdown). * * A new instance of this object is created for a new stream of EBML (the response stream for a GetMedia call). * A new instance is configured with a {@link EBMLTypeInfoProvider} that provides the semantics for the EBML document * being parsed and a {@link EBMLParserCallbacks} that receives callbacks as the parser detects different EBML elements. * Once an instance of the EBML parser is created, the parse method is invoked repeatedly. * A stream of EBML is encapsulated by a {@link ParserByteSource} and is an argument to the parse method. * The parse method is non-blocking and consumes all the data passed to it in each invocation. * As the parser detects EBML elements it invokes methods on the {@link EBMLParserCallbacks}. * Once all the data in an EBML stream has being sent to the parser, the method closeParser is called to shutdown * the parser. 
* TODO: add implementation details.
*
*/
@Slf4j
public class EBMLParser {
    private static final int BYTE_MASK = 0xFF;
    //TODO: have it be an argument, either constructor or method
    private static final int DEFAULT_MAX_CONTENT_BYTES_IN_ONE_PASS = 8192;

    // Provides semantics (type, level, name) for element ids encountered in the stream.
    private final EBMLTypeInfoProvider typeInfoProvider;
    // Master elements whose contents are currently being read; children are parsed while these stay on the stack.
    private final Stack<EBMLParserInternalElement> masterElements;
    // User-supplied callbacks invoked as elements are detected.
    private final EBMLParserCallbacks callbacks;
    // Upper bound on content bytes delivered to callbacks in a single pass.
    private final int maxContentBytesInOnePass;
    // Scratch buffer used to consume and discard content of unknown elements.
    private final ByteBuffer skipBuffer;
    // Running count of elements seen; used to uniquely number elements.
    private long elementCount = 0;
    // Total bytes consumed from the underlying byte source so far.
    private long totalBytesRead = 0;
    @Getter(AccessLevel.PACKAGE)
    private boolean endOfStream;
    @Getter(AccessLevel.PUBLIC)
    private boolean closed;
    // The element whose id/size/content is currently being parsed.
    private EBMLParserInternalElement currentElement;
    // Buffers the raw id+size bytes of the current element so they can be replayed to callbacks.
    private ReplayIdAndSizeBuffer replayIdAndSizeBuffer;

    /**
     * Creates a parser with the default per-pass content limit.
     *
     * @param typeInfoProvider provider of EBML element semantics
     * @param callbacks callbacks invoked as elements are parsed
     */
    public EBMLParser(EBMLTypeInfoProvider typeInfoProvider, EBMLParserCallbacks callbacks) {
        this(typeInfoProvider, callbacks, DEFAULT_MAX_CONTENT_BYTES_IN_ONE_PASS);
    }

    /**
     * Creates a parser.
     *
     * @param typeInfoProvider provider of EBML element semantics
     * @param callbacks callbacks invoked as elements are parsed
     * @param maxContentBytesInOnePass maximum content bytes processed in one pass
     */
    public EBMLParser(EBMLTypeInfoProvider typeInfoProvider, EBMLParserCallbacks callbacks, int maxContentBytesInOnePass) {
        this.typeInfoProvider = typeInfoProvider;
        this.callbacks = callbacks;
        // Replay buffer need only hold the largest possible id plus the largest possible size field.
        this.replayIdAndSizeBuffer = new ReplayIdAndSizeBuffer(EBMLUtils.EBML_ID_MAX_BYTES + EBMLUtils.EBML_SIZE_MAX_BYTES);
        createNewCurrentElementInfo();
        this.masterElements = new Stack<>();
        this.maxContentBytesInOnePass = maxContentBytesInOnePass;
        this.skipBuffer = ByteBuffer.allocate(maxContentBytesInOnePass);
        log.debug("Creating EBMLParser with maxContentBytesInOnePass {}", this.maxContentBytesInOnePass);
    }

    /**
     * Non-blocking parse: consumes all currently-available data from the byte source,
     * driving the per-element state machine (read id -> read size -> read/skip content)
     * and invoking callbacks. Call repeatedly as more data arrives.
     *
     * @param byteSource source of EBML bytes
     */
    public void parse(ParserByteSource byteSource) {
        try (CallState callState = new CallState(byteSource)) {
            while (callState.shouldContinueParsing()) {
                if (log.isDebugEnabled()) {
                    log.debug("Current element read state {}", currentElement.currentElementReadState);
                }
                switch (currentElement.currentElementReadState) {
                    case NEW:
                        //check if any master elements are done because their end offset has been reached.
                        removeMasterElementsBasedOnSizeEnd();
                        currentElement.readId(callState);
                        break;
                    case ID_DONE:
                        currentElement.readSize(callState);
                        break;
                    case SIZE_DONE:
                        currentElement.updateTypeInfo(typeInfoProvider);
                        //check if any master elements are done because an equal or higher level
                        //element is reached.
                        removeMasterElementsBasedOnLevel();
                        //Call onstartForElement();
                        if (currentElement.isKnownType()) {
                            log.debug("Invoking onStartElement for current element {}", currentElement);
                            callbacks.onStartElement(currentElement.getMetadata(),
                                    currentElement.getDataSize(),
                                    replayIdAndSizeBuffer.getByteBuffer(),
                                    this::currentElementPath);
                        }
                        startReadingContentBasedOnType();
                        break;
                    case CONTENT_READING:
                        Validate.isTrue(currentElement.isKnownType(), "We should read only from elements with known types");
                        currentElement.readContent(callState, callState, callbacks, maxContentBytesInOnePass);
                        break;
                    case CONTENT_SKIPPING:
                        Validate.isTrue(!currentElement.isKnownType(), "We should skip data for unknown elements only");
                        skipBuffer.rewind();
                        currentElement.skipContent(callState, callState, skipBuffer);
                        break;
                    case FINISHED:
                        invokeOnEndElementCallback(currentElement);
                        //check if any master elements are done because their end offset has been reached.
                        removeMasterElementsBasedOnSizeEnd();
                        createNewCurrentElementInfo();
                        break;
                    default:
                        throw new IllegalArgumentException("Unexpected ElementReadState");
                }
            }
            log.debug("Stopping parsing");
            if (endOfStream) {
                closeParser();
            }
        }
    }

    /**
     * Shuts down the parser: invokes end-element callbacks for the current element (if any)
     * and for every master element still open on the stack. Idempotent.
     */
    public void closeParser() {
        if (!closed) {
            log.debug("Closing EBMLParser");
            //close current element
            if (currentElement != null && currentElement.isKnownType()) {
                log.debug("Closing with currentElement {} still set, invoking end element callback on it", currentElement);
                invokeOnEndElementCallback(currentElement);
                currentElement = null;
            }
            log.debug("Closing with {} master elements on stack, invoking end element callback on them",
                    masterElements.size());
            while (!masterElements.isEmpty()) {
                EBMLParserInternalElement top = masterElements.pop();
                //TODO: see if we need to add a flag to indicate unclean close
                invokeOnEndElementCallback(top);
            }
        }
        closed = true;
    }

    // Decides whether the current element's content is read (known type), skipped (unknown type),
    // or — for master elements — deferred to child parsing by pushing onto the master stack.
    private void startReadingContentBasedOnType() {
        if (!currentElement.isKnownType()) {
            Validate.isTrue(!currentElement.isUnknownLength(), "Cannot skip element of unknown length");
            currentElement.startSkippingContent();
            log.warn("Will skip content for element number {} with unknown id {} datasize {}",
                    currentElement.getElementCount(), currentElement.getId(), currentElement.getDataSize());
        } else {
            if (currentElement.getTypeInfo().getType() == EBMLTypeInfo.TYPE.MASTER) {
                //Mark the master element as started although it will consist of
                //child elements. So, push it into the stack of master elements whose
                //contents are currently being read.
                currentElement.startReadingContent();
                masterElements.push(currentElement);
                createNewCurrentElementInfo();
            } else {
                //A non-master element should not have unknown or infinite length
                //as that prevents the parser finding the end of the element.
                Validate.isTrue(!currentElement.isUnknownLength(), "A non-master element should not have unknown length");
                //start reading contents.
                currentElement.startReadingContent();
            }
        }
    }

    // Pops (and ends) master elements whose level is >= the current element's level.
    // Global elements (level -1 semantics) never close their ancestors.
    private void removeMasterElementsBasedOnLevel() {
        if (!currentElement.isKnownType()) {
            return;
        }
        if (!currentElement.getTypeInfo().isGlobal()) {
            while (!masterElements.isEmpty()) {
                EBMLParserInternalElement top = masterElements.peek();
                //For handling master elements with the wrong size (such as segments)
                //We should finish master elements of known size is another element of the same or
                //lower level is found.
                Validate.isTrue(currentElement.getElementCount() != top.getElementCount());
                if (currentElement.getTypeInfo().getLevel() <= top.getTypeInfo().getLevel()) {
                    log.debug("Removing master element {} based on level of current element {}", top, currentElement);
                    masterElements.pop();
                    invokeOnEndElementCallback(top);
                } else {
                    break;
                }
            }
        }
    }

    // Pops (and ends) master elements of known size whose declared end offset has been consumed.
    private void removeMasterElementsBasedOnSizeEnd() {
        if (!currentElement.isKnownType()) {
            return;
        }
        while (!masterElements.isEmpty()) {
            EBMLParserInternalElement top = masterElements.peek();
            if (!top.isUnknownLength()) {
                if (top.endOffSet() <= totalBytesRead) {
                    log.debug("Removing master element {} based on size end {}", top, totalBytesRead);
                    masterElements.pop();
                    invokeOnEndElementCallback(top);
                } else {
                    break;
                }
            } else {
                break;
            }
        }
    }

    // Snapshot of the open master elements (ancestors of the current element), outermost first.
    private List<EBMLElementMetaData> currentElementPath() {
        return masterElements.stream().map(EBMLParserInternalElement::getMetadata).collect(Collectors.toList());
    }

    // Invokes onEndElement for elements of known type; unknown elements were never started.
    private void invokeOnEndElementCallback(EBMLParserInternalElement finishedElement) {
        if (finishedElement.isKnownType()) {
            log.debug("Invoking onStartElement for current element {}", finishedElement);
            callbacks.onEndElement(finishedElement.getMetadata(), this::currentElementPath);
        }
    }

    // Starts tracking a fresh element at the current stream offset and resets the replay buffer.
    private void createNewCurrentElementInfo() {
        currentElement = new EBMLParserInternalElement(totalBytesRead, elementCount);
        elementCount++;
        replayIdAndSizeBuffer.init(totalBytesRead);
    }

    /**
     * This internal class maintains state for each parse call.
     */
    @RequiredArgsConstructor
    private class CallState implements Closeable, TrackingReplayableIdAndSizeByteSource, ParserBulkByteSource {
        // Becomes false once the byte source cannot satisfy further reads in this pass.
        private boolean parseMore = true;
        private final ParserByteSource byteSource;
        // Read cursor into the replay buffer; bytes before it are replayed, bytes after come from the source.
        @Setter
        private long readOffsetForReplayBuffer;

        @Override
        public long getTotalBytesRead() {
            return totalBytesRead;
        }

        @Override
        public void close() {
        }

        // Parsing continues while there is data, no end-of-stream, and the callbacks have not asked to stop.
        boolean shouldContinueParsing() {
            return !endOfStream && parseMore && callbacks.continueParsing();
        }

        // Ensures at least len bytes past the replay read offset are buffered, pulling from the
        // byte source as needed. Returns false if the source cannot currently supply them.
        @Override
        public boolean checkAndReadIntoReplayBuffer(int len) {
            if (parseMore) {
                int availableInReplayBuffer = replayIdAndSizeBuffer.availableAfter(readOffsetForReplayBuffer);
                Validate.isTrue(availableInReplayBuffer >= 0);
                if (availableInReplayBuffer >= len) {
                    return true;
                } else {
                    int numBytesToRead = len - availableInReplayBuffer;
                    parseMore = byteSource.available() >= numBytesToRead;
                    numBytesToRead = Math.min(numBytesToRead, byteSource.available());
                    for (int i = 0; i < numBytesToRead; i++) {
                        readFromByteSourceIntoReplayBuffer();
                    }
                }
            }
            return parseMore;
        }

        // Reads one byte, replaying from the id/size buffer when the cursor is inside it,
        // otherwise consuming (and recording) a fresh byte from the source.
        @Override
        public int readByte() {
            if (replayIdAndSizeBuffer.inReplayBuffer(readOffsetForReplayBuffer)) {
                byte result = replayIdAndSizeBuffer.getByteFromOffset(readOffsetForReplayBuffer);
                readOffsetForReplayBuffer++;
                return result & BYTE_MASK;
            } else {
                int result = readFromByteSourceIntoReplayBuffer();
                readOffsetForReplayBuffer++;
                return result;
            }
        }

        // Pulls one byte from the source into the replay buffer; -1 marks end of stream.
        private int readFromByteSourceIntoReplayBuffer() {
            int result = byteSource.readByte();
            if (result == -1) {
                markAsEndofStream();
                return -1;
            }
            Validate.inclusiveBetween(0, BYTE_MASK, result);
            replayIdAndSizeBuffer.addByte((byte) result);
            totalBytesRead++;
            return result;
        }

        // Bytes available for content reads; clears parseMore when the source is drained.
        @Override
        public int availableForContent() {
            if (parseMore) {
                int availableBytes = byteSource.available();
                if (availableBytes == 0) {
                    parseMore = false;
                }
                return availableBytes;
            }
            return 0;
        }

        // Bulk content read straight from the source (content bytes bypass the replay buffer).
        @Override
        public int readBytes(ByteBuffer dest, int numBytes) {
            int readBytes = byteSource.readBytes(dest, numBytes);
            if (readBytes == -1) {
                markAsEndofStream();
                return readBytes;
            }
            Validate.isTrue(readBytes >= 0);
            totalBytesRead += readBytes;
            return readBytes;
        }

        private void markAsEndofStream() {
            endOfStream = true;
            parseMore = false;
        }
    }
}
5,018
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/ReplayIdAndSizeBuffer.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import org.apache.commons.lang3.Validate; import java.nio.ByteBuffer; /** * Buffer used to replay the id and size of ebml elements in the ebml parser */ class ReplayIdAndSizeBuffer { private int count; private final byte[] buffer; private long startingOffset; ReplayIdAndSizeBuffer(int length) { buffer = new byte[length]; } void init(long startingOffset) { this.startingOffset = startingOffset; count = 0; } void addByte(byte val) { Validate.isTrue(count < buffer.length, "Too many bytes being added to replay buffer " + count); buffer[count] = val; count++; } boolean inReplayBuffer(long readOffset) { return (readOffset - startingOffset) < count; } int availableAfter(long readOffset) { return (int) Math.max(0, startingOffset + count - readOffset); } byte getByteFromOffset(long readOffset) { Validate.isTrue(inReplayBuffer(readOffset), "Attempt to read from replay buffer at " + readOffset + "while buffer starts at" + startingOffset + "and has " + count + "bytes"); return buffer[(int) (readOffset - startingOffset)]; } ByteBuffer getByteBuffer() { return ByteBuffer.wrap(buffer, 0, count); } }
5,019
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLTypeInfoProvider.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import java.util.Optional; /** * An interface that vends type information used by the EBML parser. */ public interface EBMLTypeInfoProvider { Optional<EBMLTypeInfo> getType(int id); }
5,020
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/InputStreamParserByteSource.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import org.apache.commons.lang3.Validate; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; /** * An implementation of ParserByteSource that wraps an input stream containing the EBML stream. */ public class InputStreamParserByteSource implements ParserByteSource { private static final int BUFFER_SIZE = 8192; private static final int MARK_SIZE = 100; private final BufferedInputStream bufferedInputStream; public InputStreamParserByteSource(final InputStream inputStream) { this(inputStream, BUFFER_SIZE); } InputStreamParserByteSource(final InputStream inputStream, final int bufferSize) { bufferedInputStream = new BufferedInputStream(inputStream, bufferSize); Validate.isTrue(bufferedInputStream.markSupported()); } @Override public int readByte() { try { return bufferedInputStream.read(); } catch (final IOException e) { throw new RuntimeException("Exception while reading byte from input stream!", e); } } @Override public int available() { try { return bufferedInputStream.available(); } catch (final IOException e) { throw new RuntimeException("Exception while getting available bytes from input stream!", e); } } @Override public int readBytes(final ByteBuffer dest, final int numBytes) { try { Validate.isTrue(dest.remaining() >= numBytes); final int numBytesRead = bufferedInputStream.read(dest.array(), dest.position(), 
numBytes); if (numBytesRead > 0) { dest.position(dest.position() + numBytesRead); } return numBytesRead; } catch (final IOException e) { throw new RuntimeException("Exception while reading bytes from input stream!", e); } } @Override public boolean eof() { try { bufferedInputStream.mark(MARK_SIZE); if (readByte() == -1) { return true; } bufferedInputStream.reset(); return false; } catch (final IOException e) { throw new RuntimeException("Exception while resetting input stream!", e); } } }
5,021
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/TrackingReplayableIdAndSizeByteSource.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; /** * An interface representing a byte source that can replay the bytes for ebml id and size. * It also keeps track of the total number of bytes read by the parser from the underlying * byte source. * This wraps a parser byte source passed in by the user. */ interface TrackingReplayableIdAndSizeByteSource { boolean checkAndReadIntoReplayBuffer(int len); int readByte(); int availableForContent(); void setReadOffsetForReplayBuffer(long readOffset); long getTotalBytesRead(); }
5,022
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/ParserBulkByteSource.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import java.nio.ByteBuffer; /** * An interface representing a byte source for the parser which allows bulk reads. */ public interface ParserBulkByteSource { int readBytes(ByteBuffer dest, int numBytes); }
5,023
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLElementMetaData.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; /** * Class that represents the metadata of a single EBML element in an EBML stream. * It does not contain the actual data or content of the EBML element. */ @Getter @Builder @ToString @EqualsAndHashCode public class EBMLElementMetaData { private final EBMLTypeInfo typeInfo; private final long elementNumber; public boolean isMaster() { return typeInfo.getType() == EBMLTypeInfo.TYPE.MASTER; } }
5,024
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/EBMLParserCallbacks.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; import java.nio.ByteBuffer; import java.util.List; /** * The EBMLParser invokes these callbacks when it detects the start, end and contents of elements. */ public interface EBMLParserCallbacks { void onStartElement(EBMLElementMetaData elementMetaData, long elementDataSize, ByteBuffer idAndSizeRawBytes, ElementPathSupplier pathSupplier); void onPartialContent(EBMLElementMetaData elementMetaData, ParserBulkByteSource bulkByteSource, int bytesToRead); void onEndElement(EBMLElementMetaData elementMetaData, ElementPathSupplier pathSupplier); default boolean continueParsing() { return true; } @FunctionalInterface interface ElementPathSupplier { List<EBMLElementMetaData> getAncestors(); } }
5,025
0
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser
Create_ds/amazon-kinesis-video-streams-parser-library/src/main/java/com/amazonaws/kinesisvideo/parser/ebml/MkvTypeInfos.java
/* Copyright 2017-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.kinesisvideo.parser.ebml; /** * Type information for the EBML elements in a Mkv file or stream. * This provides the semantics of the EBML elements in a Mkv file or stream. * This is based on the xml file hosted by the matroska org at * https://github.com/Matroska-Org/foundation-source/blob/master/spectool/specdata.xml (commit e074b5d) */ public class MkvTypeInfos { public static final EBMLTypeInfo EBML = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EBML").id(0x1A45DFA3).level(0).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo EBMLVERSION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EBMLVersion").id(0x4286).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EBMLREADVERSION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EBMLReadVersion").id(0x42F7).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EBMLMAXIDLENGTH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EBMLMaxIDLength").id(0x42F2).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EBMLMAXSIZELENGTH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EBMLMaxSizeLength").id(0x42F3).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DOCTYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DocType").id(0x4282).level(1).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo 
DOCTYPEVERSION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DocTypeVersion").id(0x4287).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DOCTYPEREADVERSION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DocTypeReadVersion").id(0x4285).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo VOID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Void").id(0xEC).level(-1).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CRC_32 = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CRC-32").id(0xBF).level(-1).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SIGNATURESLOT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignatureSlot").id(0x1B538667).level(-1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SIGNATUREALGO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignatureAlgo").id(0x7E8A).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SIGNATUREHASH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignatureHash").id(0x7E9A).level(1).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SIGNATUREPUBLICKEY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignaturePublicKey").id(0x7EA5).level(1).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SIGNATURE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Signature").id(0x7EB5).level(1).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SIGNATUREELEMENTS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignatureElements").id(0x7E5B).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SIGNATUREELEMENTLIST = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignatureElementList").id(0x7E7B).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SIGNEDELEMENT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SignedElement").id(0x6532).level(3).type( 
EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SEGMENT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Segment").id(0x18538067).level(0).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SEEKHEAD = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SeekHead").id(0x114D9B74).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SEEK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Seek").id(0x4DBB).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SEEKID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SeekID").id(0x53AB).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SEEKPOSITION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SeekPosition").id(0x53AC).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo INFO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Info").id(0x1549A966).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SEGMENTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SegmentUID").id(0x73A4).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo SEGMENTFILENAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SegmentFilename").id(0x7384).level(2).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo PREVUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrevUID").id(0x3CB923).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo PREVFILENAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrevFilename").id(0x3C83AB).level(2).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo NEXTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("NextUID").id(0x3EB923).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo NEXTFILENAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("NextFilename").id(0x3E83BB).level(2).type( 
EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo SEGMENTFAMILY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SegmentFamily").id(0x4444).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CHAPTERTRANSLATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTranslate").id(0x6924).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CHAPTERTRANSLATEEDITIONUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTranslateEditionUID").id(0x69FC).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERTRANSLATECODEC = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTranslateCodec").id(0x69BF).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERTRANSLATEID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTranslateID").id(0x69A5).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo TIMECODESCALE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TimecodeScale").id(0x2AD7B1).level(2).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Duration").id(0x4489).level(2).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo DATEUTC = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DateUTC").id(0x4461).level(2).type( EBMLTypeInfo.TYPE.DATE).build(); public static final EBMLTypeInfo TITLE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Title").id(0x7BA9).level(2).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo MUXINGAPP = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MuxingApp").id(0x4D80).level(2).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo WRITINGAPP = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("WritingApp").id(0x5741).level(2).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo CLUSTER = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("Cluster").id(0x1F43B675).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TIMECODE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Timecode").id(0xE7).level(2).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SILENTTRACKS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SilentTracks").id(0x5854).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SILENTTRACKNUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SilentTrackNumber").id(0x58D7).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo POSITION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Position").id(0xA7).level(2).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo PREVSIZE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrevSize").id(0xAB).level(2).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SIMPLEBLOCK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SimpleBlock").id(0xA3).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo BLOCKGROUP = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockGroup").id(0xA0).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo BLOCK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Block").id(0xA1).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo BLOCKVIRTUAL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockVirtual").id(0xA2).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo BLOCKADDITIONS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockAdditions").id(0x75A1).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo BLOCKMORE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockMore").id(0xA6).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo BLOCKADDID = new 
// ---------------------------------------------------------------------------
// EBML/Matroska element metadata table: each constant captures one element's
// name, element ID, nesting level, and value type via EBMLTypeInfoBuilder.
// NOTE(review): the (name, id) pairs appear to mirror the Matroska element
// registry (e.g. BlockAdditional=0xA5, Tracks=0x1654AE6B, Cues=0x1C53BB6B) —
// verify against the Matroska specification before editing any entry.
// NOTE(review): this chunk starts mid-declaration; the "public static final
// EBMLTypeInfo BLOCKADDID = new" prefix lives on an earlier line of the file.
// ---------------------------------------------------------------------------
EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockAddID").id(0xEE).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo BLOCKADDITIONAL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockAdditional").id(0xA5).level(5).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo BLOCKDURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockDuration").id(0x9B).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo REFERENCEPRIORITY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferencePriority").id(0xFA).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo REFERENCEBLOCK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferenceBlock").id(0xFB).level(3).type( EBMLTypeInfo.TYPE.INTEGER).build(); public static final EBMLTypeInfo REFERENCEVIRTUAL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferenceVirtual").id(0xFD).level(3).type( EBMLTypeInfo.TYPE.INTEGER).build(); public static final EBMLTypeInfo CODECSTATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecState").id(0xA4).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo DISCARDPADDING = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DiscardPadding").id(0x75A2).level(3).type( EBMLTypeInfo.TYPE.INTEGER).build(); public static final EBMLTypeInfo SLICES = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Slices").id(0x8E).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TIMESLICE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TimeSlice").id(0xE8).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo LACENUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("LaceNumber").id(0xCC).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo FRAMENUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FrameNumber").id(0xCD).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final 
EBMLTypeInfo BLOCKADDITIONID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BlockAdditionID").id(0xCB).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DELAY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Delay").id(0xCE).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SLICEDURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SliceDuration").id(0xCF).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo REFERENCEFRAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferenceFrame").id(0xC8).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo REFERENCEOFFSET = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferenceOffset").id(0xC9).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo REFERENCETIMECODE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ReferenceTimeCode").id(0xCA).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo ENCRYPTEDBLOCK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EncryptedBlock").id(0xAF).level(2).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo TRACKS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Tracks").id(0x1654AE6B).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TRACKENTRY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackEntry").id(0xAE).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TRACKNUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackNumber").id(0xD7).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackUID").id(0x73C5).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKTYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackType").id(0x83).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); 
// Per-track properties: enable/default/forced/lacing flags, cache hints,
// default durations, codec metadata, and Video/Audio sub-elements follow.
public static final EBMLTypeInfo FLAGENABLED = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FlagEnabled").id(0xB9).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo FLAGDEFAULT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FlagDefault").id(0x88).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo FLAGFORCED = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FlagForced").id(0x55AA).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo FLAGLACING = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FlagLacing").id(0x9C).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo MINCACHE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MinCache").id(0x6DE7).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo MAXCACHE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MaxCache").id(0x6DF8).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DEFAULTDURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DefaultDuration").id(0x23E383).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DEFAULTDECODEDFIELDDURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DefaultDecodedFieldDuration").id(0x234E7A).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKTIMECODESCALE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackTimecodeScale").id(0x23314F).level(3).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo TRACKOFFSET = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackOffset").id(0x537F).level(3).type( EBMLTypeInfo.TYPE.INTEGER).build(); public static final EBMLTypeInfo MAXBLOCKADDITIONID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MaxBlockAdditionID").id(0x55EE).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo NAME = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("Name").id(0x536E).level(3).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo LANGUAGE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Language").id(0x22B59C).level(3).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo CODECID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecID").id(0x86).level(3).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo CODECPRIVATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecPrivate").id(0x63A2).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CODECNAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecName").id(0x258688).level(3).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo ATTACHMENTLINK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("AttachmentLink").id(0x7446).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CODECSETTINGS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecSettings").id(0x3A9697).level(3).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo CODECINFOURL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecInfoURL").id(0x3B4040).level(3).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo CODECDOWNLOADURL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecDownloadURL").id(0x26B240).level(3).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo CODECDECODEALL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecDecodeAll").id(0xAA).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKOVERLAY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackOverlay").id(0x6FAB).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CODECDELAY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CodecDelay").id(0x56AA).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final 
EBMLTypeInfo SEEKPREROLL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SeekPreRoll").id(0x56BB).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKTRANSLATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackTranslate").id(0x6624).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TRACKTRANSLATEEDITIONUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackTranslateEditionUID").id(0x66FC).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKTRANSLATECODEC = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackTranslateCodec").id(0x66BF).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKTRANSLATETRACKID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackTranslateTrackID").id(0x66A5).level(4).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo VIDEO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Video").id(0xE0).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo FLAGINTERLACED = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FlagInterlaced").id(0x9A).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo FIELDORDER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FieldOrder").id(0x9D).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo STEREOMODE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("StereoMode").id(0x53B8).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo ALPHAMODE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("AlphaMode").id(0x53C0).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo OLDSTEREOMODE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("OldStereoMode").id(0x53B9).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo PIXELWIDTH = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelWidth").id(0xB0).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo PIXELHEIGHT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelHeight").id(0xBA).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo PIXELCROPBOTTOM = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelCropBottom").id(0x54AA).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo PIXELCROPTOP = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelCropTop").id(0x54BB).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo PIXELCROPLEFT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelCropLeft").id(0x54CC).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo PIXELCROPRIGHT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PixelCropRight").id(0x54DD).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DISPLAYWIDTH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DisplayWidth").id(0x54B0).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DISPLAYHEIGHT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DisplayHeight").id(0x54BA).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo DISPLAYUNIT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("DisplayUnit").id(0x54B2).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo ASPECTRATIOTYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("AspectRatioType").id(0x54B3).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo COLOURSPACE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ColourSpace").id(0x2EB524).level(4).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo GAMMAVALUE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("GammaValue").id(0x2FB523).level(4).type( 
EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo FRAMERATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FrameRate").id(0x2383E3).level(4).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo COLOUR = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Colour").id(0x55B0).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo MATRIXCOEFFICIENTS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MatrixCoefficients").id(0x55B1).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo BITSPERCHANNEL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BitsPerChannel").id(0x55B2).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHROMASUBSAMPLINGHORZ = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChromaSubsamplingHorz").id(0x55B3).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHROMASUBSAMPLINGVERT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChromaSubsamplingVert").id(0x55B4).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CBSUBSAMPLINGHORZ = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CbSubsamplingHorz").id(0x55B5).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CBSUBSAMPLINGVERT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CbSubsamplingVert").id(0x55B6).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHROMASITINGHORZ = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChromaSitingHorz").id(0x55B7).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHROMASITINGVERT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChromaSitingVert").id(0x55B8).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo RANGE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Range").id(0x55B9).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final 
EBMLTypeInfo TRANSFERCHARACTERISTICS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TransferCharacteristics").id(0x55BA).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo PRIMARIES = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Primaries").id(0x55BB).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo MAXCLL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MaxCLL").id(0x55BC).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo MAXFALL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MaxFALL").id(0x55BD).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo MASTERINGMETADATA = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("MasteringMetadata").id(0x55D0).level(5).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo PRIMARYRCHROMATICITYX = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryRChromaticityX").id(0x55D1).level(6).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo PRIMARYRCHROMATICITYY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryRChromaticityY").id(0x55D2).level(6).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo PRIMARYGCHROMATICITYX = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryGChromaticityX").id(0x55D3).level(6).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo PRIMARYGCHROMATICITYY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryGChromaticityY").id(0x55D4).level(6).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo PRIMARYBCHROMATICITYX = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryBChromaticityX").id(0x55D5).level(6).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo PRIMARYBCHROMATICITYY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("PrimaryBChromaticityY").id(0x55D6).level(6).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo 
WHITEPOINTCHROMATICITYX = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("WhitePointChromaticityX").id(0x55D7).level(6).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo WHITEPOINTCHROMATICITYY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("WhitePointChromaticityY").id(0x55D8).level(6).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo LUMINANCEMAX = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("LuminanceMax").id(0x55D9).level(6).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo LUMINANCEMIN = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("LuminanceMin").id(0x55DA).level(6).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo AUDIO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Audio").id(0xE1).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo SAMPLINGFREQUENCY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SamplingFrequency").id(0xB5).level(4).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo OUTPUTSAMPLINGFREQUENCY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("OutputSamplingFrequency").id(0x78B5).level(4).type( EBMLTypeInfo.TYPE.FLOAT).build(); public static final EBMLTypeInfo CHANNELS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Channels").id(0x9F).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHANNELPOSITIONS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChannelPositions").id(0x7D7B).level(4).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo BITDEPTH = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("BitDepth").id(0x6264).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKOPERATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackOperation").id(0xE2).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TRACKCOMBINEPLANES = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackCombinePlanes").id(0xE3).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TRACKPLANE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackPlane").id(0xE4).level(5).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TRACKPLANEUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackPlaneUID").id(0xE5).level(6).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKPLANETYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackPlaneType").id(0xE6).level(6).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRACKJOINBLOCKS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackJoinBlocks").id(0xE9).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TRACKJOINUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrackJoinUID").id(0xED).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRICKTRACKUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrickTrackUID").id(0xC0).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRICKTRACKSEGMENTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrickTrackSegmentUID").id(0xC1).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo TRICKTRACKFLAG = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrickTrackFlag").id(0xC6).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRICKMASTERTRACKUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrickMasterTrackUID").id(0xC7).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TRICKMASTERTRACKSEGMENTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TrickMasterTrackSegmentUID").id(0xC4).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CONTENTENCODINGS = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncodings").id(0x6D80).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CONTENTENCODING = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncoding").id(0x6240).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CONTENTENCODINGORDER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncodingOrder").id(0x5031).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CONTENTENCODINGSCOPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncodingScope").id(0x5032).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CONTENTENCODINGTYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncodingType").id(0x5033).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CONTENTCOMPRESSION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentCompression").id(0x5034).level(5).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CONTENTCOMPALGO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentCompAlgo").id(0x4254).level(6).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CONTENTCOMPSETTINGS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentCompSettings").id(0x4255).level(6).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CONTENTENCRYPTION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncryption").id(0x5035).level(5).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CONTENTENCALGO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncAlgo").id(0x47E1).level(6).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CONTENTENCKEYID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentEncKeyID").id(0x47E2).level(6).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CONTENTSIGNATURE = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentSignature").id(0x47E3).level(6).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CONTENTSIGKEYID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentSigKeyID").id(0x47E4).level(6).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CONTENTSIGALGO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentSigAlgo").id(0x47E5).level(6).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CONTENTSIGHASHALGO = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ContentSigHashAlgo").id(0x47E6).level(6).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUES = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Cues").id(0x1C53BB6B).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CUEPOINT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CuePoint").id(0xBB).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CUETIME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueTime").id(0xB3).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUETRACKPOSITIONS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueTrackPositions").id(0xB7).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CUETRACK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueTrack").id(0xF7).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUECLUSTERPOSITION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueClusterPosition").id(0xF1).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUERELATIVEPOSITION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueRelativePosition").id(0xF0).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUEDURATION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueDuration").id(0xB2).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); 
// Remaining CuePoint children, then the Attachments (0x1941A469), Chapters
// (0x1043A770) and Tags (0x1254C367) master elements and their nested entries.
public static final EBMLTypeInfo CUEBLOCKNUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueBlockNumber").id(0x5378).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUECODECSTATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueCodecState").id(0xEA).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUEREFERENCE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueReference").id(0xDB).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CUEREFTIME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueRefTime").id(0x96).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUEREFCLUSTER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueRefCluster").id(0x97).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUEREFNUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueRefNumber").id(0x535F).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CUEREFCODECSTATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("CueRefCodecState").id(0xEB).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo ATTACHMENTS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Attachments").id(0x1941A469).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo ATTACHEDFILE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("AttachedFile").id(0x61A7).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo FILEDESCRIPTION = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileDescription").id(0x467E).level(3).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo FILENAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileName").id(0x466E).level(3).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo FILEMIMETYPE = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileMimeType").id(0x4660).level(3).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo FILEDATA = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileData").id(0x465C).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo FILEUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileUID").id(0x46AE).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo FILEREFERRAL = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileReferral").id(0x4675).level(3).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo FILEUSEDSTARTTIME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileUsedStartTime").id(0x4661).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo FILEUSEDENDTIME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("FileUsedEndTime").id(0x4662).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Chapters").id(0x1043A770).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo EDITIONENTRY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EditionEntry").id(0x45B9).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo EDITIONUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EditionUID").id(0x45BC).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EDITIONFLAGHIDDEN = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EditionFlagHidden").id(0x45BD).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EDITIONFLAGDEFAULT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EditionFlagDefault").id(0x45DB).level(3).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo EDITIONFLAGORDERED = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("EditionFlagOrdered").id(0x45DD).level(3).type( 
EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERATOM = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterAtom").id(0xB6).level(3).type( EBMLTypeInfo.TYPE.MASTER).isRecursive(true).build(); public static final EBMLTypeInfo CHAPTERUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterUID").id(0x73C4).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERSTRINGUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterStringUID").id(0x5654).level(4).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo CHAPTERTIMESTART = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTimeStart").id(0x91).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERTIMEEND = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTimeEnd").id(0x92).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERFLAGHIDDEN = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterFlagHidden").id(0x98).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERFLAGENABLED = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterFlagEnabled").id(0x4598).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERSEGMENTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterSegmentUID").id(0x6E67).level(4).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CHAPTERSEGMENTEDITIONUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterSegmentEditionUID").id(0x6EBC).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERPHYSICALEQUIV = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterPhysicalEquiv").id(0x63C3).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERTRACK = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTrack").id(0x8F).level(4).type( 
EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CHAPTERTRACKNUMBER = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterTrackNumber").id(0x89).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPTERDISPLAY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapterDisplay").id(0x80).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CHAPSTRING = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapString").id(0x85).level(5).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo CHAPLANGUAGE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapLanguage").id(0x437C).level(5).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo CHAPCOUNTRY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapCountry").id(0x437E).level(5).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo CHAPPROCESS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcess").id(0x6944).level(4).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CHAPPROCESSCODECID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcessCodecID").id(0x6955).level(5).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPPROCESSPRIVATE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcessPrivate").id(0x450D).level(5).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo CHAPPROCESSCOMMAND = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcessCommand").id(0x6911).level(5).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo CHAPPROCESSTIME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcessTime").id(0x6922).level(6).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo CHAPPROCESSDATA = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("ChapProcessData").id(0x6933).level(6).type( EBMLTypeInfo.TYPE.BINARY).build(); public static final EBMLTypeInfo TAGS = new 
EBMLTypeInfo.EBMLTypeInfoBuilder().name("Tags").id(0x1254C367).level(1).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TAG = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Tag").id(0x7373).level(2).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TARGETS = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("Targets").id(0x63C0).level(3).type( EBMLTypeInfo.TYPE.MASTER).build(); public static final EBMLTypeInfo TARGETTYPEVALUE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TargetTypeValue").id(0x68CA).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TARGETTYPE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TargetType").id(0x63CA).level(4).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo TAGTRACKUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagTrackUID").id(0x63C5).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TAGEDITIONUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagEditionUID").id(0x63C9).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TAGCHAPTERUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagChapterUID").id(0x63C4).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TAGATTACHMENTUID = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagAttachmentUID").id(0x63C6).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo SIMPLETAG = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("SimpleTag").id(0x67C8).level(3).type( EBMLTypeInfo.TYPE.MASTER).isRecursive(true).build(); public static final EBMLTypeInfo TAGNAME = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagName").id(0x45A3).level(4).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo TAGLANGUAGE = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagLanguage").id(0x447A).level(4).type( EBMLTypeInfo.TYPE.STRING).build(); public static final EBMLTypeInfo 
TAGDEFAULT = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagDefault").id(0x4484).level(4).type( EBMLTypeInfo.TYPE.UINTEGER).build(); public static final EBMLTypeInfo TAGSTRING = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagString").id(0x4487).level(4).type( EBMLTypeInfo.TYPE.UTF_8).build(); public static final EBMLTypeInfo TAGBINARY = new EBMLTypeInfo.EBMLTypeInfoBuilder().name("TagBinary").id(0x4485).level(4).type( EBMLTypeInfo.TYPE.BINARY).build(); }
5,026
0
Create_ds/dgs-examples-java/src/test/java/com/example
Create_ds/dgs-examples-java/src/test/java/com/example/demo/ReviewSubscriptionIntegrationTest.java
package com.example.demo; import com.example.demo.generated.client.AddReviewGraphQLQuery; import com.example.demo.generated.client.AddReviewProjectionRoot; import com.example.demo.generated.client.ReviewAddedGraphQLQuery; import com.example.demo.generated.client.ReviewAddedProjectionRoot; import com.example.demo.generated.types.SubmittedReview; import com.netflix.graphql.dgs.client.MonoGraphQLClient; import com.netflix.graphql.dgs.client.WebSocketGraphQLClient; import com.netflix.graphql.dgs.client.codegen.GraphQLQueryRequest; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.web.server.LocalServerPort; import org.springframework.web.reactive.function.client.WebClient; import org.springframework.web.reactive.socket.client.ReactorNettyWebSocketClient; import reactor.core.publisher.Flux; import reactor.test.StepVerifier; import java.time.Duration; import java.util.Collections; @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) public class ReviewSubscriptionIntegrationTest { @LocalServerPort private Integer port; private WebSocketGraphQLClient webSocketGraphQLClient; private MonoGraphQLClient graphQLClient; @BeforeEach public void setup() { webSocketGraphQLClient = new WebSocketGraphQLClient("ws://localhost:" + port + "/subscriptions", new ReactorNettyWebSocketClient()); graphQLClient = MonoGraphQLClient.createWithWebClient(WebClient.create(("http://localhost:" + port + "/graphql"))); } @Test public void testWebSocketSubscription() { GraphQLQueryRequest subscriptionRequest = new GraphQLQueryRequest( ReviewAddedGraphQLQuery.newRequest().showId(1).build(), new ReviewAddedProjectionRoot<>().starScore() ); GraphQLQueryRequest addReviewMutation1 = new GraphQLQueryRequest( AddReviewGraphQLQuery.newRequest().review(SubmittedReview.newBuilder().showId(1).starScore(5).username("DGS User").build()).build(), new 
AddReviewProjectionRoot<>().starScore() ); GraphQLQueryRequest addReviewMutation2 = new GraphQLQueryRequest( AddReviewGraphQLQuery.newRequest().review(SubmittedReview.newBuilder().showId(1).starScore(3).username("DGS User").build()).build(), new AddReviewProjectionRoot<>().starScore() ); Flux<Integer> starScore = webSocketGraphQLClient.reactiveExecuteQuery(subscriptionRequest.serialize(), Collections.emptyMap()).map(r -> r.extractValue("reviewAdded.starScore")); StepVerifier.create(starScore) .thenAwait(Duration.ofSeconds(1)) .then(() -> { graphQLClient.reactiveExecuteQuery(addReviewMutation1.serialize(), Collections.emptyMap()).block(); }) .then(() -> graphQLClient.reactiveExecuteQuery(addReviewMutation2.serialize(), Collections.emptyMap()).block()) .expectNext(5) .expectNext(3) .thenCancel() .verify(); } }
5,027
0
Create_ds/dgs-examples-java/src/test/java/com/example
Create_ds/dgs-examples-java/src/test/java/com/example/demo/ArtworkUploadDataFetcherTest.java
package com.example.demo; import com.example.demo.generated.types.Image; import com.jayway.jsonpath.TypeRef; import com.netflix.graphql.dgs.DgsQueryExecutor; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.mock.web.MockMultipartFile; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import static org.assertj.core.api.AssertionsForClassTypes.assertThat; @SpringBootTest class ArtworkUploadDataFetcherTest { @Autowired DgsQueryExecutor dgsQueryExecutor; @Test void addArtwork() { int showId = new Random().nextInt(); Map<String, Object> map = new HashMap<String, Object>() {{ put("showId", showId); put("upload", new MockMultipartFile("test", "test.file", "text/plain", "test".getBytes())); }}; String mutation = "mutation addArtwork($showId:Int!, $upload:Upload!) { addArtwork(showId:$showId, upload:$upload) {url} }"; List<Image> result = dgsQueryExecutor.executeAndExtractJsonPathAsObject( mutation, "data.addArtwork", map, new TypeRef<List<Image>>() { } ); assertThat(result.size()).isNotZero(); assertThat(result.get(0).getUrl()).contains(String.valueOf(showId)); } }
5,028
0
Create_ds/dgs-examples-java/src/test/java/com/example
Create_ds/dgs-examples-java/src/test/java/com/example/demo/SecurityExampleFetchersTest.java
package com.example.demo; import com.netflix.graphql.dgs.DgsQueryExecutor; import com.netflix.graphql.dgs.exceptions.QueryException; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.security.test.context.support.WithMockUser; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertThrows; @SpringBootTest class SecurityExampleFetchersTest { @Autowired DgsQueryExecutor dgsQueryExecutor; @Test void secureNone() { String result = dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureNone }", "data.secureNone", String.class); assertThat(result).isEqualTo("Hello to everyone"); } @Test @WithMockUser(username = "user", password = "user") void secureUserWithUser() { String result = dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureUser }", "data.secureUser", String.class ); assertThat(result).isEqualTo("Hello to users or admins"); } @Test @WithMockUser(username = "admin", password = "admin", roles = {"ADMIN"}) void secureUserWithAdmin() { String result = dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureUser }", "data.secureUser", String.class ); assertThat(result).isEqualTo("Hello to users or admins"); } @Test void secureUserWithNone() { assertThrows(QueryException.class, () -> { dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureUser }", "data.secureUser", String.class ); }); } @Test @WithMockUser(username = "admin", password = "admin", roles = {"ADMIN"}) void secureAdminWithAdmin() { String result = dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureAdmin }", "data.secureAdmin", String.class ); assertThat(result).isEqualTo("Hello to admins only"); } @Test @WithMockUser(username = "user", password = "user") void secureAdminWithUser() { assertThrows(QueryException.class, () -> { dgsQueryExecutor.executeAndExtractJsonPathAsObject( " 
{ secureAdmin }", "data.secureAdmin", String.class ); }); } @Test void secureAdminWithNone() { assertThrows(QueryException.class, () -> { dgsQueryExecutor.executeAndExtractJsonPathAsObject( " { secureAdmin }", "data.secureAdmin", String.class ); }); } }
5,029
0
Create_ds/dgs-examples-java/src/test/java/com/example
Create_ds/dgs-examples-java/src/test/java/com/example/demo/ReviewSubscriptionTest.java
package com.example.demo; import com.example.demo.datafetchers.ReviewsDataFetcher; import com.example.demo.generated.client.AddReviewGraphQLQuery; import com.example.demo.generated.client.AddReviewProjectionRoot; import com.example.demo.generated.types.Review; import com.example.demo.generated.types.SubmittedReview; import com.example.demo.scalars.DateTimeScalar; import com.example.demo.services.DefaultReviewsService; import com.example.demo.services.ShowsService; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.graphql.dgs.DgsQueryExecutor; import com.netflix.graphql.dgs.autoconfig.DgsAutoConfiguration; import com.netflix.graphql.dgs.client.codegen.GraphQLQueryRequest; import graphql.ExecutionResult; import org.junit.jupiter.api.Test; import org.reactivestreams.Publisher; import org.reactivestreams.Subscriber; import org.reactivestreams.Subscription; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import static org.assertj.core.api.AssertionsForClassTypes.assertThat; /** * Test the review added subscription. * The subscription query returns a Publisher<ExecutionResult>. * Each time a review is added, a new ExecutionResult is given to subscriber. * Normally, this publisher is consumed by the Websocket/SSE subscription handler and you don't deal with this code directly, but for testing purposes it's useful to use the stream directly. 
*/ @SpringBootTest(classes = {DefaultReviewsService.class, ReviewsDataFetcher.class, DgsAutoConfiguration.class, DateTimeScalar.class}) public class ReviewSubscriptionTest { @Autowired DgsQueryExecutor dgsQueryExecutor; @MockBean ShowsService showsService; @Test void reviewSubscription() { ExecutionResult executionResult = dgsQueryExecutor.execute("subscription { reviewAdded(showId: 1) {starScore} }"); Publisher<ExecutionResult> reviewPublisher = executionResult.getData(); List<Review> reviews = new CopyOnWriteArrayList<>(); reviewPublisher.subscribe(new Subscriber<ExecutionResult>() { @Override public void onSubscribe(Subscription s) { s.request(2); } @Override public void onNext(ExecutionResult executionResult) { if (executionResult.getErrors().size() > 0) { System.out.println(executionResult.getErrors()); } Map<String, Object> review = executionResult.getData(); reviews.add(new ObjectMapper().convertValue(review.get("reviewAdded"), Review.class)); } @Override public void onError(Throwable t) { } @Override public void onComplete() { } }); addReview(); addReview(); assertThat(reviews.size()).isEqualTo(2); } private void addReview() { GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest( AddReviewGraphQLQuery.newRequest() .review( SubmittedReview.newBuilder() .showId(1) .username("testuser") .starScore(5).build()) .build(), new AddReviewProjectionRoot<>() .username() .starScore()); dgsQueryExecutor.execute(graphQLQueryRequest.serialize()); } }
5,030
0
Create_ds/dgs-examples-java/src/test/java/com/example
Create_ds/dgs-examples-java/src/test/java/com/example/demo/ShowsDatafetcherTest.java
package com.example.demo; import com.example.demo.datafetchers.ReviewsDataFetcher; import com.example.demo.datafetchers.ShowsDatafetcher; import com.example.demo.dataloaders.ReviewsDataLoader; import com.example.demo.dataloaders.ReviewsDataLoaderWithContext; import com.example.demo.generated.client.*; import com.example.demo.generated.types.Review; import com.example.demo.generated.types.Show; import com.example.demo.generated.types.SubmittedReview; import com.example.demo.scalars.DateTimeScalar; import com.example.demo.services.DefaultReviewsService; import com.example.demo.services.ShowsService; import com.jayway.jsonpath.TypeRef; import com.netflix.graphql.dgs.DgsQueryExecutor; import com.netflix.graphql.dgs.autoconfig.DgsAutoConfiguration; import com.netflix.graphql.dgs.client.codegen.GraphQLQueryRequest; import graphql.ExecutionResult; import org.assertj.core.util.Maps; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; import java.time.OffsetDateTime; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.verify; @SpringBootTest(classes = {DgsAutoConfiguration.class, ReviewsDataLoaderWithContext.class, ShowsDatafetcher.class, ReviewsDataFetcher.class, ReviewsDataLoader.class, DateTimeScalar.class}) class ShowsDatafetcherTest { @Autowired DgsQueryExecutor dgsQueryExecutor; @MockBean ShowsService showsService; @MockBean DefaultReviewsService reviewsService; @BeforeEach public void before() { Mockito.when(showsService.shows()) .thenAnswer(invocation -> Collections.singletonList(Show.newBuilder().id(1).title("mock title").releaseYear(2020).build())); 
Mockito.when(reviewsService.reviewsForShows(Collections.singletonList(1))) .thenAnswer(invocation -> Maps.newHashMap(1, Arrays.asList( Review.newBuilder().username("DGS User").starScore(5).submittedDate(OffsetDateTime.now()).build(), Review.newBuilder().username("DGS User 2").starScore(3).submittedDate(OffsetDateTime.now()).build()) )); } @Test void shows() { List<String> titles = dgsQueryExecutor.executeAndExtractJsonPath( " { shows { title releaseYear }}", "data.shows[*].title"); assertThat(titles).contains("mock title"); } @Test void showsWithException() { Mockito.when(showsService.shows()).thenThrow(new RuntimeException("nothing to see here")); ExecutionResult result = dgsQueryExecutor.execute( " { shows { title releaseYear }}"); assertThat(result.getErrors()).isNotEmpty(); assertThat(result.getErrors().get(0).getMessage()).isEqualTo("java.lang.RuntimeException: nothing to see here"); } @Test void showsWithQueryApi() { GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(ShowsGraphQLQuery.newRequest().titleFilter("").build(), new ShowsProjectionRoot<>().title()); List<String> titles = dgsQueryExecutor.executeAndExtractJsonPath(graphQLQueryRequest.serialize(), "data.shows[*].title"); assertThat(titles).contains("mock title"); } @Test void showWithReviews() { GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(ShowsGraphQLQuery.newRequest().titleFilter("").build(), new ShowsProjectionRoot<>() .title() .reviews() .username() .starScore()); List<Show> shows = dgsQueryExecutor.executeAndExtractJsonPathAsObject( graphQLQueryRequest.serialize(), "data.shows[*]", new TypeRef<List<Show>>() { }); assertThat(shows.size()).isEqualTo(1); assertThat(shows.get(0).getReviews().size()).isEqualTo(2); } @Test void addReviewMutation() { GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest( AddReviewGraphQLQuery.newRequest() .review(SubmittedReview.newBuilder() .showId(1) .username("testuser") .starScore(5).build()) .build(), new 
AddReviewProjectionRoot<>().username().starScore()); ExecutionResult executionResult = dgsQueryExecutor.execute(graphQLQueryRequest.serialize()); assertThat(executionResult.getErrors()).isEmpty(); verify(reviewsService).reviewsForShow(1); } @Test void addReviewsMutation() { List<SubmittedReview> reviews = Collections.singletonList( SubmittedReview.newBuilder().showId(1).username("testuser1").starScore(5).build()); GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest( AddReviewsGraphQLQuery.newRequest() .reviews(reviews) .build(), new AddReviewsProjectionRoot<>().username().starScore()); ExecutionResult executionResult = dgsQueryExecutor.execute(graphQLQueryRequest.serialize()); assertThat(executionResult.getErrors()).isEmpty(); verify(reviewsService).reviewsForShows(Collections.singletonList(1)); } }
5,031
0
Create_ds/dgs-examples-java/src/main/java/com/example
Create_ds/dgs-examples-java/src/main/java/com/example/demo/DemoApplication.java
package com.example.demo; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication public class DemoApplication { public static void main(String[] args) { SpringApplication.run(DemoApplication.class, args); } /** * Below is an example of using a PreparsedDocumentProvider. * Uncomment to enable */ // @Configuration // static class PreparsedDocumentProviderConfig { // // private final Cache<String, PreparsedDocumentEntry> cache = Caffeine.newBuilder().maximumSize(250) // .expireAfterAccess(5, TimeUnit.MINUTES).recordStats().build(); // // // @Bean // public PreparsedDocumentProvider preparsedDocumentProvider() { // return (executionInput, parseAndValidateFunction) -> { // Function<String, PreparsedDocumentEntry> mapCompute = key -> parseAndValidateFunction.apply(executionInput); // return cache.get(executionInput.getQuery(), mapCompute); // }; // } // } }
5,032
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/directives/UppercaseDirective.java
package com.example.demo.directives; import com.netflix.graphql.dgs.DgsDirective; import graphql.schema.DataFetcher; import graphql.schema.DataFetcherFactories; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLFieldsContainer; import graphql.schema.idl.SchemaDirectiveWiring; import graphql.schema.idl.SchemaDirectiveWiringEnvironment; @DgsDirective(name = "uppercase") public class UppercaseDirective implements SchemaDirectiveWiring { @Override public GraphQLFieldDefinition onField(SchemaDirectiveWiringEnvironment<GraphQLFieldDefinition> env) { GraphQLFieldsContainer fieldsContainer = env.getFieldsContainer(); GraphQLFieldDefinition fieldDefinition = env.getFieldDefinition(); DataFetcher<?> originalDataFetcher = env.getCodeRegistry().getDataFetcher(fieldsContainer, fieldDefinition); DataFetcher<?> dataFetcher = DataFetcherFactories.wrapDataFetcher( originalDataFetcher, (dataFetchingEnvironment, value) -> { if (value instanceof String) { return ((String) value).toUpperCase(); } return value; } ); env.getCodeRegistry().dataFetcher(fieldsContainer, fieldDefinition, dataFetcher); return fieldDefinition; } }
5,033
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/config/SecurityConfig.java
package com.example.demo.config; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configurers.AbstractHttpConfigurer; import org.springframework.security.core.userdetails.User; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.provisioning.InMemoryUserDetailsManager; import org.springframework.security.web.DefaultSecurityFilterChain; import static org.springframework.security.config.Customizer.withDefaults; @Configuration @EnableWebSecurity @EnableGlobalMethodSecurity(prePostEnabled = true, securedEnabled = true) public class SecurityConfig { @Bean DefaultSecurityFilterChain springWebFilterChain(HttpSecurity http) throws Exception { return http .csrf(AbstractHttpConfigurer::disable) .authorizeRequests(requests -> requests .anyRequest().permitAll() ) .httpBasic(withDefaults()) .build(); } @Bean public static InMemoryUserDetailsManager userDetailsService() { User.UserBuilder userBuilder = User.withDefaultPasswordEncoder(); UserDetails user = userBuilder.username("user").password("user").roles("USER").build(); UserDetails admin = userBuilder.username("admin").password("admin").roles("USER", "ADMIN").build(); return new InMemoryUserDetailsManager(user, admin); } }
5,034
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/config/MetricsConfig.java
package com.example.demo.config;

import io.micrometer.core.instrument.MeterRegistry;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import io.micrometer.core.instrument.logging.LoggingMeterRegistry;

/**
 * Registers a Micrometer meter registry that periodically writes metrics to
 * the log — convenient for demos where no metrics backend is available.
 */
@Configuration
public class MetricsConfig {

    @Bean
    public MeterRegistry loggingMeterRegistry() {
        return new LoggingMeterRegistry();
    }
}
5,035
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/scalars/DateTimeScalar.java
package com.example.demo.scalars;

import com.netflix.graphql.dgs.DgsComponent;
import com.netflix.graphql.dgs.DgsRuntimeWiring;
import graphql.scalars.ExtendedScalars;
import graphql.schema.idl.RuntimeWiring;

/**
 * Wires the DateTime scalar from graphql-java-extended-scalars into the
 * schema's RuntimeWiring, making it available to the generated types.
 */
@DgsComponent
public class DateTimeScalar {

    @DgsRuntimeWiring
    public RuntimeWiring.Builder addScalar(RuntimeWiring.Builder builder) {
        return builder.scalar(ExtendedScalars.DateTime);
    }
}
5,036
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/dataloaders/ReviewsDataLoader.java
package com.example.demo.dataloaders; import com.example.demo.generated.types.Review; import com.example.demo.services.DefaultReviewsService; import com.netflix.graphql.dgs.DgsDataLoader; import org.dataloader.MappedBatchLoader; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; @DgsDataLoader(name = "reviews") public class ReviewsDataLoader implements MappedBatchLoader<Integer, List<Review>> { private final DefaultReviewsService reviewsService; public ReviewsDataLoader(DefaultReviewsService reviewsService) { this.reviewsService = reviewsService; } /** * This method will be called once, even if multiple datafetchers use the load() method on the DataLoader. * This way reviews can be loaded for all the Shows in a single call instead of per individual Show. */ @Override public CompletionStage<Map<Integer, List<Review>>> load(Set<Integer> keys) { return CompletableFuture.supplyAsync(() -> reviewsService.reviewsForShows(new ArrayList<>(keys))); } }
5,037
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/dataloaders/ReviewsDataLoaderWithContext.java
package com.example.demo.dataloaders; import com.example.demo.generated.types.Review; import com.example.demo.services.DefaultReviewsService; import com.netflix.graphql.dgs.DgsDataLoader; import org.dataloader.BatchLoaderEnvironment; import org.dataloader.MappedBatchLoader; import org.dataloader.MappedBatchLoaderWithContext; import org.springframework.beans.factory.annotation.Autowired; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; @DgsDataLoader(name = "reviewsWithContext") public class ReviewsDataLoaderWithContext implements MappedBatchLoaderWithContext<Integer, List<Review>> { private final DefaultReviewsService reviewsService; @Autowired public ReviewsDataLoaderWithContext(DefaultReviewsService reviewsService) { this.reviewsService = reviewsService; } @Override public CompletionStage<Map<Integer, List<Review>>> load(Set<Integer> keys, BatchLoaderEnvironment environment) { return CompletableFuture.supplyAsync(() -> reviewsService.reviewsForShows(new ArrayList<>(keys))); } }
5,038
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/instrumentation/ExampleTracingInstrumentation.java
package com.example.demo.instrumentation; import graphql.ExecutionResult; import graphql.execution.instrumentation.InstrumentationContext; import graphql.execution.instrumentation.InstrumentationState; import graphql.execution.instrumentation.SimpleInstrumentation; import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters; import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters; import graphql.schema.DataFetcher; import graphql.schema.GraphQLNonNull; import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLOutputType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import java.util.concurrent.CompletableFuture; @Component public class ExampleTracingInstrumentation extends SimpleInstrumentation { private final static Logger LOGGER = LoggerFactory.getLogger(ExampleTracingInstrumentation.class); @Override public InstrumentationState createState() { return new TracingState(); } @Override public InstrumentationContext<ExecutionResult> beginExecution(InstrumentationExecutionParameters parameters) { TracingState tracingState = parameters.getInstrumentationState(); tracingState.startTime = System.currentTimeMillis(); return super.beginExecution(parameters); } @Override public DataFetcher<?> instrumentDataFetcher(DataFetcher<?> dataFetcher, InstrumentationFieldFetchParameters parameters) { // We only care about user code if(parameters.isTrivialDataFetcher()) { return dataFetcher; } return environment -> { long startTime = System.currentTimeMillis(); Object result = dataFetcher.get(environment); if(result instanceof CompletableFuture) { ((CompletableFuture<?>) result).whenComplete((r, ex) -> { long totalTime = System.currentTimeMillis() - startTime; LOGGER.info("Async datafetcher {} took {}ms", findDatafetcherTag(parameters), totalTime); }); } else { long totalTime = System.currentTimeMillis() - startTime; LOGGER.info("Datafetcher {} took {}ms", 
findDatafetcherTag(parameters), totalTime); } return result; }; } @Override public CompletableFuture<ExecutionResult> instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters) { TracingState tracingState = parameters.getInstrumentationState(); long totalTime = System.currentTimeMillis() - tracingState.startTime; LOGGER.info("Total execution time: {}ms", totalTime); return super.instrumentExecutionResult(executionResult, parameters); } private String findDatafetcherTag(InstrumentationFieldFetchParameters parameters) { GraphQLOutputType type = parameters.getExecutionStepInfo().getParent().getType(); GraphQLObjectType parent; if (type instanceof GraphQLNonNull) { parent = (GraphQLObjectType) ((GraphQLNonNull) type).getWrappedType(); } else { parent = (GraphQLObjectType) type; } return parent.getName() + "." + parameters.getExecutionStepInfo().getPath().getSegmentName(); } static class TracingState implements InstrumentationState { long startTime; } }
5,039
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/datafetchers/ArtworkUploadDataFetcher.java
package com.example.demo.datafetchers;

import com.example.demo.generated.types.Image;
import com.netflix.graphql.dgs.DgsComponent;
import com.netflix.graphql.dgs.DgsMutation;
import com.netflix.graphql.dgs.InputArgument;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Mutation datafetcher that stores uploaded artwork files on disk and returns
 * the image URLs known for the given show.
 *
 * Fixes relative to the original:
 * - the Files.list stream is now closed (try-with-resources) — it holds an
 *   open directory handle otherwise;
 * - a filename without an extension (or a null original filename) no longer
 *   throws StringIndexOutOfBoundsException/NullPointerException;
 * - the extension is taken from an untrusted, client-supplied filename, so it
 *   is validated before being used in a path;
 * - the listing filter matches "show-&lt;id&gt;-" exactly, so artwork for show 1
 *   no longer also returns artwork for show 10, 11, etc.
 */
@DgsComponent
public class ArtworkUploadDataFetcher {

    /**
     * Saves the uploaded file under "uploaded-images" with a unique name and
     * returns one Image per file stored for this show.
     *
     * @param showId id of the show the artwork belongs to
     * @param upload the multipart file sent by the client
     * @return images for all files previously uploaded for this show
     * @throws IOException if the upload directory or file cannot be written
     */
    @DgsMutation
    public List<Image> addArtwork(@InputArgument Integer showId, @InputArgument MultipartFile upload) throws IOException {
        Path uploadDir = Paths.get("uploaded-images");
        if (!Files.exists(uploadDir)) {
            Files.createDirectories(uploadDir);
        }

        String prefix = "show-" + showId + "-";
        Path newFile = uploadDir.resolve(prefix + UUID.randomUUID() + extensionOf(upload.getOriginalFilename()));
        try (OutputStream outputStream = Files.newOutputStream(newFile)) {
            outputStream.write(upload.getBytes());
        }

        // Close the directory stream; Files.list keeps a handle open otherwise.
        try (Stream<Path> storedFiles = Files.list(uploadDir)) {
            return storedFiles
                    .filter(f -> f.getFileName().toString().startsWith(prefix))
                    .map(f -> f.getFileName().toString())
                    .map(fileName -> Image.newBuilder().url(fileName).build())
                    .collect(Collectors.toList());
        }
    }

    /**
     * Extracts a safe ".ext" suffix from a client-supplied filename, or ""
     * when the name is null, has no extension, or contains anything other
     * than a simple alphanumeric extension (defends against path characters).
     */
    private static String extensionOf(String originalFilename) {
        if (originalFilename == null) {
            return "";
        }
        int dot = originalFilename.lastIndexOf('.');
        if (dot < 0) {
            return "";
        }
        String extension = originalFilename.substring(dot);
        return extension.matches("\\.[A-Za-z0-9]+") ? extension : "";
    }
}
5,040
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/datafetchers/ReviewsDataFetcher.java
package com.example.demo.datafetchers; import com.example.demo.dataloaders.ReviewsDataLoader; import com.example.demo.dataloaders.ReviewsDataLoaderWithContext; import com.example.demo.generated.DgsConstants; import com.example.demo.generated.types.Review; import com.example.demo.generated.types.Show; import com.example.demo.generated.types.SubmittedReview; import com.example.demo.services.DefaultReviewsService; import com.netflix.graphql.dgs.*; import org.dataloader.BatchLoaderEnvironment; import org.dataloader.DataLoader; import org.reactivestreams.Publisher; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; @DgsComponent public class ReviewsDataFetcher { private final DefaultReviewsService reviewsService; public ReviewsDataFetcher(DefaultReviewsService reviewsService) { this.reviewsService = reviewsService; } /** * This datafetcher will be called to resolve the "reviews" field on a Show. * It's invoked for each individual Show, so if we would load 10 shows, this method gets called 10 times. * To avoid the N+1 problem this datafetcher uses a DataLoader. * Although the DataLoader is called for each individual show ID, it will batch up the actual loading to a single method call to the "load" method in the ReviewsDataLoader. * For this to work correctly, the datafetcher needs to return a CompletableFuture. */ @DgsData(parentType = DgsConstants.SHOW.TYPE_NAME, field = DgsConstants.SHOW.Reviews) public CompletableFuture<List<Review>> reviews(DgsDataFetchingEnvironment dfe) { //Instead of loading a DataLoader by name, we can use the DgsDataFetchingEnvironment and pass in the DataLoader classname. 
DataLoader<Integer, List<Review>> reviewsDataLoader = dfe.getDataLoader(ReviewsDataLoaderWithContext.class); //Because the reviews field is on Show, the getSource() method will return the Show instance. Show show = dfe.getSource(); //Load the reviews from the DataLoader. This call is async and will be batched by the DataLoader mechanism. return reviewsDataLoader.load(show.getId()); } @DgsMutation public List<Review> addReview(@InputArgument SubmittedReview review) { reviewsService.saveReview(review); List<Review> reviews = reviewsService.reviewsForShow(review.getShowId()); return Optional.ofNullable(reviews).orElse(Collections.emptyList()); } @DgsMutation public List<Review> addReviews(@InputArgument(value = "reviews", collectionType = SubmittedReview.class) List<SubmittedReview> reviewsInput) { reviewsService.saveReviews(reviewsInput); List<Integer> showIds = reviewsInput.stream().map(SubmittedReview::getShowId).collect(Collectors.toList()); Map<Integer, List<Review>> reviews = reviewsService.reviewsForShows(showIds); return reviews.values().stream().flatMap(List::stream).collect(Collectors.toList()); } @DgsSubscription public Publisher<Review> reviewAdded(@InputArgument Integer showId) { return reviewsService.getReviewsPublisher(); } }
5,041
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/datafetchers/ShowsDatafetcher.java
package com.example.demo.datafetchers;

import com.example.demo.generated.types.Show;
import com.example.demo.services.ShowsService;
import com.netflix.graphql.dgs.DgsComponent;
import com.netflix.graphql.dgs.DgsQuery;
import com.netflix.graphql.dgs.InputArgument;

import java.util.List;
import java.util.stream.Collectors;

/** Datafetcher for the shows field on Query. */
@DgsComponent
public class ShowsDatafetcher {

    private final ShowsService showsService;

    public ShowsDatafetcher(ShowsService showsService) {
        this.showsService = showsService;
    }

    /**
     * Resolves the shows field on Query, optionally narrowed by the
     * titleFilter input argument (substring match on the title).
     */
    @DgsQuery
    public List<Show> shows(@InputArgument("titleFilter") String titleFilter) {
        List<Show> allShows = showsService.shows();
        if (titleFilter == null) {
            return allShows;
        }
        return allShows.stream()
                .filter(show -> show.getTitle().contains(titleFilter))
                .collect(Collectors.toList());
    }
}
5,042
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/datafetchers/SecurityExampleFetchers.java
package com.example.demo.datafetchers; import com.netflix.graphql.dgs.DgsComponent; import com.netflix.graphql.dgs.DgsQuery; import org.springframework.security.access.annotation.Secured; @DgsComponent public class SecurityExampleFetchers { @DgsQuery public String secureNone() { return "Hello to everyone"; } @DgsQuery @Secured({"ROLE_USER", "ROLE_ADMIN"}) public String secureUser() { return "Hello to users or admins"; } @DgsQuery @Secured({"ROLE_ADMIN"}) public String secureAdmin() { return "Hello to admins only"; } }
5,043
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/services/ShowsServiceImpl.java
package com.example.demo.services; import com.example.demo.generated.types.Show; import org.springframework.stereotype.Service; import java.util.Arrays; import java.util.List; @Service public class ShowsServiceImpl implements ShowsService { @Override public List<Show> shows() { return Arrays.asList( Show.newBuilder().id(1).title("Stranger Things").releaseYear(2016).build(), Show.newBuilder().id(2).title("Ozark").releaseYear(2017).build(), Show.newBuilder().id(3).title("The Crown").releaseYear(2016).build(), Show.newBuilder().id(4).title("Dead to Me").releaseYear(2019).build(), Show.newBuilder().id(5).title("Orange is the New Black").releaseYear(2013).build() ); } }
5,044
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/services/ReviewsService.java
package com.example.demo.services; public interface ReviewsService { }
5,045
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/services/ShowsService.java
package com.example.demo.services; import com.example.demo.generated.types.Show; import java.util.List; public interface ShowsService { List<Show> shows(); }
5,046
0
Create_ds/dgs-examples-java/src/main/java/com/example/demo
Create_ds/dgs-examples-java/src/main/java/com/example/demo/services/DefaultReviewsService.java
package com.example.demo.services; import com.example.demo.generated.types.Review; import com.example.demo.generated.types.SubmittedReview; import net.datafaker.Faker; import org.reactivestreams.Publisher; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; import reactor.core.publisher.ConnectableFlux; import reactor.core.publisher.Flux; import reactor.core.publisher.FluxSink; import jakarta.annotation.PostConstruct; import java.time.LocalDateTime; import java.time.OffsetDateTime; import java.time.ZoneId; import java.time.ZoneOffset; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.IntStream; /** * This service emulates a data store. * For convenience in the demo we just generate Reviews in memory, but imagine this would be backed by for example a database. * If this was indeed backed by a database, it would be very important to avoid the N+1 problem, which means we need to use a DataLoader to call this class. */ @Service public class DefaultReviewsService implements ReviewsService { private final static Logger logger = LoggerFactory.getLogger(DefaultReviewsService.class); private final ShowsService showsService; private final Map<Integer, List<Review>> reviews = new ConcurrentHashMap<>(); private FluxSink<Review> reviewsStream; private ConnectableFlux<Review> reviewsPublisher; public DefaultReviewsService(ShowsService showsService) { this.showsService = showsService; } @PostConstruct private void createReviews() { Faker faker = new Faker(); //For each show we generate a random set of reviews. 
showsService.shows().forEach(show -> { List<Review> generatedReviews = IntStream.range(0, faker.number().numberBetween(1, 20)).mapToObj(number -> { LocalDateTime date = faker.date().past(300, TimeUnit.DAYS).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime(); return Review.newBuilder().submittedDate(OffsetDateTime.of(date, ZoneOffset.UTC)).username(faker.name().username()).starScore(faker.number().numberBetween(0, 6)).build(); }).collect(Collectors.toList()); reviews.put(show.getId(), generatedReviews); }); Flux<Review> publisher = Flux.create(emitter -> { reviewsStream = emitter; }); reviewsPublisher = publisher.publish(); reviewsPublisher.connect(); } /** * Hopefully nobody calls this for multiple shows within a single query, that would indicate the N+1 problem! */ public List<Review> reviewsForShow(Integer showId) { return reviews.get(showId); } /** * This is the method we want to call when loading reviews for multiple shows. * If this code was backed by a relational database, it would select reviews for all requested shows in a single SQL query. 
*/ public Map<Integer, List<Review>> reviewsForShows(List<Integer> showIds) { logger.info("Loading reviews for shows {}", showIds.stream().map(String::valueOf).collect(Collectors.joining(", "))); return reviews .entrySet() .stream() .filter(entry -> showIds.contains(entry.getKey())).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } public void saveReview(SubmittedReview reviewInput) { List<Review> reviewsForShow = reviews.computeIfAbsent(reviewInput.getShowId(), (key) -> new ArrayList<>()); Review review = Review.newBuilder() .username(reviewInput.getUsername()) .starScore(reviewInput.getStarScore()) .submittedDate(OffsetDateTime.now()).build(); reviewsForShow.add(review); reviewsStream.next(review); logger.info("Review added {}", review); } public void saveReviews(List<SubmittedReview> reviewsInput) { reviewsInput.forEach(reviewInput -> { List<Review> reviewsForShow = reviews.computeIfAbsent(reviewInput.getShowId(), (key) -> new ArrayList<>()); Review review = Review.newBuilder() .username(reviewInput.getUsername()) .starScore(reviewInput.getStarScore()) .submittedDate(OffsetDateTime.now()).build(); reviewsForShow.add(review); reviewsStream.next(review); logger.info("Review added {}", review); }); } public Publisher<Review> getReviewsPublisher() { return reviewsPublisher; } }
5,047
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/SuggestionServiceMain.java
package com.airbnb.suggest; import com.airbnb.suggest.rest.RestModule; import com.google.inject.Module; import com.google.inject.servlet.GuiceFilter; import com.twitter.common.application.AbstractApplication; import com.twitter.common.application.Lifecycle; import com.twitter.common.application.modules.HttpModule; import com.twitter.common.application.modules.LogModule; import com.twitter.common.application.modules.StatsModule; import com.twitter.common.args.Arg; import com.twitter.common.args.CmdLine; import com.twitter.common.args.constraints.NotNull; import com.twitter.common.net.http.GuiceServletConfig; import com.twitter.common.net.http.HttpServerDispatch; import org.mortbay.jetty.servlet.Context; import javax.inject.Inject; import java.util.Arrays; import java.util.logging.Logger; /** * @author Tobi Knaup */ public final class SuggestionServiceMain extends AbstractApplication { @CmdLine(name = "server_set_path", help = "Joins the set of nodes located under this path in ZK") public static final Arg<String> SERVER_SET_PATH = Arg.create("/airbnb/service/suggest"); @NotNull @CmdLine(name = "register_service", help = "Whether this instance should register itself in ZK") public static final Arg<Boolean> REGISTER_SERVICE = Arg.create(true); @Inject private Logger logger; @Inject private Lifecycle lifecycle; @Inject private HttpServerDispatch httpServer; @Inject private GuiceServletConfig servletConfig; @Override public void run() { logger.info("Service started"); addRestSupport(); lifecycle.awaitShutdown(); } @Override public Iterable<? extends Module> getModules() { return Arrays.asList( new HttpModule(), new LogModule(), new RestModule(), new StatsModule() ); } private void addRestSupport() { Context context = httpServer.getRootContext(); context.addFilter(GuiceFilter.class, "/suggest/*", 0); context.addEventListener(servletConfig); } }
5,048
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/model/Place.java
package com.airbnb.suggest.model; /** * A POJO for a place * * @author Tobi Knaup */ public class Place { private String name; public String getName() { return name; } public void setName(String name) { this.name = name; } @Override public String toString() { return String.format("Place{name='%s'}", name); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Place place = (Place) o; if (name != null ? !name.equals(place.name) : place.name != null) return false; return true; } @Override public int hashCode() { return name != null ? name.hashCode() : 0; } }
5,049
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest/SuggestionResource.java
package com.airbnb.suggest.rest; import com.airbnb.suggest.model.Place; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.logging.Logger; @Path("/suggest/v1") @Produces(MediaType.APPLICATION_JSON) public class SuggestionResource { private final Logger logger; private final List<Place> places; @Inject public SuggestionResource(Logger logger) { this.logger = logger; this.places = new ArrayList<Place>(); } @POST @Path("like") public void like(Place place) { logger.info(place.toString()); if (!places.contains(place)) { places.add(place); } } @GET @Path("suggest") public Place suggest() { Collections.shuffle(places); Place place = places.get(0); logger.info(place.toString()); return place; } }
5,050
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest/RestModule.java
package com.airbnb.suggest.rest; import com.airbnb.suggest.rest.util.JsonExceptionMapper; import com.airbnb.suggest.rest.util.RequestStatsFilter; import com.google.inject.Singleton; import com.google.inject.name.Names; import com.google.inject.servlet.ServletModule; import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; import org.codehaus.jackson.jaxrs.JacksonJsonProvider; import java.util.HashSet; import java.util.Set; /** * Configures our REST service * * @author Tobi Knaup */ public class RestModule extends ServletModule { @Override protected void configureServlets() { super.configureServlets(); // JSON mapper, maps JSON to/from POJOs bind(JacksonJsonProvider.class).in(Singleton.class); // Turns exceptions into JSON responses bind(JsonExceptionMapper.class).in(Singleton.class); // Serve all URLs through Guice serve("/*").with(GuiceContainer.class); // The actual REST Endpoints bind(SuggestionResource.class).in(Singleton.class); // Stats filter("/suggest/v1/like").through(new RequestStatsFilter("suggest_v1_like")); filter("/suggest/v1/all_places").through(new RequestStatsFilter("suggest_v1_all_places")); } }
5,051
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest/util/JsonExceptionMapper.java
package com.airbnb.suggest.rest.util; import com.google.common.collect.Maps; import javax.inject.Inject; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.Provider; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; /** * Exception mapper that returns the exception as JSON * * @author Tobi Knaup */ @Provider public class JsonExceptionMapper implements ExceptionMapper<Exception> { private Logger logger; @Inject public JsonExceptionMapper(Logger logger) { this.logger = logger; } public Response toResponse(final Exception e) { String message = e.getMessage(); String type = e.getClass().getSimpleName(); Map<String, String> entity = Maps.newHashMap(); entity.put("type", type); entity.put("message", message); // Log it too logger.log(Level.WARNING, type, e); return Response .status(Response.Status.INTERNAL_SERVER_ERROR) .entity(entity) .type(MediaType.APPLICATION_JSON_TYPE) .build(); } }
5,052
0
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest
Create_ds/twitter-commons-sample/src/main/java/com/airbnb/suggest/rest/util/RequestStatsFilter.java
package com.airbnb.suggest.rest.util; import com.twitter.common.stats.RequestStats; import javax.servlet.*; import java.io.IOException; /** * A servlet filter that uses Twitter Commons request stats to keep tabs on requests * * @author Tobi Knaup */ public final class RequestStatsFilter implements Filter { final RequestStats requestStats; public RequestStatsFilter(String statName) { this.requestStats = new RequestStats(statName); } @Override public void init(FilterConfig filterConfig) throws ServletException { } @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { Long start = System.nanoTime(); chain.doFilter(request, response); // Takes micros requestStats.requestComplete((System.nanoTime() - start) / 1000); } @Override public void destroy() { } }
5,053
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/UnitTestModule.java
package com.netflix.raigad.configuration; import com.google.inject.AbstractModule; import com.google.inject.Scopes; import com.google.inject.name.Names; import com.netflix.raigad.backup.AbstractRepository; import com.netflix.raigad.backup.S3Repository; import org.junit.Ignore; import org.quartz.SchedulerFactory; import org.quartz.impl.StdSchedulerFactory; @Ignore public class UnitTestModule extends AbstractModule { @Override protected void configure() { bind(IConfiguration.class).toInstance(new FakeConfiguration(FakeConfiguration.FAKE_REGION, "fake-app", "az1", "fakeInstance1")); bind(SchedulerFactory.class).to(StdSchedulerFactory.class).in(Scopes.SINGLETON); bind(AbstractRepository.class).annotatedWith(Names.named("s3")).to(S3Repository.class); } }
5,054
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/TestAbstractConfigSource.java
package com.netflix.raigad.configuration; import com.google.common.collect.ImmutableList; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; import static org.junit.Assert.assertEquals; public class TestAbstractConfigSource { private static final Logger LOGGER = LoggerFactory.getLogger(TestAbstractConfigSource.class.getName()); @Test public void lists() { AbstractConfigSource source = new MemoryConfigSource(); source.set("foo", "bar,baz, qux "); final List<String> values = source.getList("foo"); LOGGER.info("Values {}", values); assertEquals(ImmutableList.of("bar", "baz", "qux"), values); } @Test public void oneItem() { AbstractConfigSource source = new MemoryConfigSource(); source.set("foo", "bar"); final List<String> values = source.getList("foo"); LOGGER.info("Values {}", values); assertEquals(ImmutableList.of("bar"), values); } @Test public void oneItemWithSpace() { AbstractConfigSource source = new MemoryConfigSource(); source.set("foo", "\tbar "); final List<String> values = source.getList("foo"); LOGGER.info("Values {}", values); assertEquals(ImmutableList.of("bar"), values); } }
5,055
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/FakeConfiguration.java
package com.netflix.raigad.configuration; import java.util.List; public class FakeConfiguration implements IConfiguration { public static final String FAKE_REGION = "us-east-1"; public static final String INDEX_METADATA = "[{\"retentionType\":\"daily\",\"retentionPeriod\":5,\"indexName\":\"test_index\",\"preCreate\":\"true\"}]"; public static final String ES_PROCESS_NAME = "org.elasticsearch.bootstrap.Elasticsearch"; public String region; public String appName; public String zone; public String instanceId; public FakeConfiguration() { this(FAKE_REGION, "my_fake_cluster", "my_zone", "i-01234567890123456"); } public FakeConfiguration(String region, String appName, String zone, String instanceId) { this.region = region; this.appName = appName; this.zone = zone; this.instanceId = instanceId; } @Override public void initialize() { } @Override public String getElasticsearchHome() { return null; } @Override public String getYamlLocation() { return null; } @Override public String getBackupLocation() { return "es-backup-test"; } @Override public String getElasticsearchStartupScript() { return null; } @Override public String getElasticsearchStopScript() { return null; } @Override public int getTransportTcpPort() { return 0; } @Override public int getHttpPort() { return 0; } @Override public int getNumOfShards() { return 0; } @Override public int getNumOfReplicas() { return 0; } @Override public int getTotalShardsPerNode() { return 0; } @Override public String getRefreshInterval() { return null; } @Override public boolean isMasterQuorumEnabled() { return false; } @Override public int getMinimumMasterNodes() { return 0; } @Override public String getPingTimeout() { return null; } @Override public boolean isPingMulticastEnabled() { return false; } @Override public String getFdPingInterval() { return null; } @Override public String getFdPingTimeout() { return null; } @Override public String getDataFileLocation() { return null; } @Override public String getLogFileLocation() { 
return null; } @Override public boolean doesElasticsearchStartManually() { return false; } @Override public String getAppName() { return appName; } @Override public String getRac() { return null; } @Override public List<String> getRacs() { return null; } @Override public String getHostname() { return null; } @Override public String getInstanceName() { return null; } @Override public String getInstanceId() { return null; } @Override public String getDC() { return "us-east-1"; } @Override public void setDC(String dc) { } @Override public String getASGName() { return null; } @Override public String getStackName() { return null; } @Override public String getACLGroupName() { return null; } @Override public String getHostIP() { return null; } @Override public String getHostLocalIP() { return null; } @Override public String getBootClusterName() { return null; } @Override public String getElasticsearchProcessName() { return ES_PROCESS_NAME; } @Override public String getElasticsearchDiscoveryType() { return null; } @Override public boolean isMultiDC() { return false; } @Override public String getIndexRefreshInterval() { return null; } @Override public String getClusterRoutingAttributes() { return null; } @Override public boolean isAsgBasedDedicatedDeployment() { return false; } @Override public boolean isCustomShardAllocationPolicyEnabled() { return false; } @Override public String getClusterShardAllocationAttribute() { return null; } @Override public String getExtraConfigParams() { return null; } @Override public String getEsKeyName(String escarKey) { return null; } @Override public boolean isDebugEnabled() { return false; } @Override public boolean isShardPerNodeEnabled() { return false; } @Override public boolean isIndexAutoCreationEnabled() { return false; } @Override public String getIndexMetadata() { return INDEX_METADATA; } @Override public int getAutoCreateIndexTimeout() { return 3000; } @Override public int getAutoCreateIndexInitialStartDelaySeconds() { return 0; } 
@Override public int getAutoCreateIndexScheduleMinutes() { return 0; } @Override public boolean isSnapshotBackupEnabled() { return false; } @Override public String getCommaSeparatedIndicesToBackup() { return "_all"; } @Override public boolean partiallyBackupIndices() { return false; } @Override public boolean includeGlobalStateDuringBackup() { return false; } @Override public boolean waitForCompletionOfBackup() { return true; } @Override public boolean includeIndexNameInSnapshot() { return false; } @Override public boolean isHourlySnapshotEnabled() { return false; } @Override public long getBackupCronTimerInSeconds() { return 0; } @Override public int getBackupHour() { return 0; } @Override public boolean isRestoreEnabled() { return false; } @Override public String getRestoreRepositoryName() { return null; } @Override public String getRestoreSourceClusterName() { return "fake-app"; } @Override public String getRestoreSourceRepositoryRegion() { return null; } @Override public String getRestoreLocation() { return null; } @Override public String getRestoreRepositoryType() { return null; } @Override public String getRestoreSnapshotName() { return null; } @Override public String getCommaSeparatedIndicesToRestore() { return null; } @Override public int getRestoreTaskInitialDelayInSeconds() { return 0; } @Override public boolean amITribeNode() { return false; } @Override public boolean amIWriteEnabledTribeNode() { return false; } @Override public boolean amIMetadataEnabledTribeNode() { return false; } @Override public String getCommaSeparatedSourceClustersForTribeNode() { return null; } @Override public boolean amISourceClusterForTribeNode() { return false; } @Override public String getCommaSeparatedTribeClusterNames() { return null; } @Override public boolean isNodeMismatchWithDiscoveryEnabled() { return false; } @Override public int getDesiredNumberOfNodesInCluster() { return 0; } @Override public boolean isEurekaHealthCheckEnabled() { return false; } @Override public 
boolean isLocalModeEnabled() { return false; } @Override public String getCassandraKeyspaceName() { return null; } @Override public int getCassandraThriftPortForAstyanax() { return 0; } @Override public boolean isEurekaHostSupplierEnabled() { return false; } @Override public String getCommaSeparatedCassandraHostNames() { return null; } @Override public boolean isSecurityGroupInMultiDC() { return false; } @Override public boolean isKibanaSetupRequired() { return false; } @Override public int getKibanaPort() { return 0; } public boolean amISourceClusterForTribeNodeInMultiDC() { return false; } @Override public boolean reportMetricsFromMasterOnly() { return false; } @Override public String getTribePreferredClusterIdOnConflict() { return null; } @Override public String getEsNodeName() { return null; } @Override public boolean isDeployedInVPC() { return false; } @Override public boolean isVPCExternal() { return false; } @Override public String getACLGroupNameForVPC() { return null; } @Override public String getACLGroupIdForVPC() { return null; } @Override public void setACLGroupIdForVPC(String aclGroupIdForVPC) { } @Override public String getMacIdForInstance() { return null; } }
5,056
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/TestSystemPropertiesConfigSource.java
package com.netflix.raigad.configuration; import org.junit.Test; import static org.junit.Assert.assertEquals; public class TestSystemPropertiesConfigSource { @Test public void read() { final String key = "java.version"; SystemPropertiesConfigSource configSource = new SystemPropertiesConfigSource(); configSource.initialize("asgName", "region"); // sys props are filtered to starting with escar, so this should be missing. assertEquals(null, configSource.get(key)); assertEquals(0, configSource.size()); } }
5,057
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/TestCompositeConfigSource.java
package com.netflix.raigad.configuration; import org.junit.Test; import static org.junit.Assert.assertEquals; public class TestCompositeConfigSource { @Test public void read() { MemoryConfigSource memoryConfigSource = new MemoryConfigSource(); IConfigSource configSource = new CompositeConfigSource(memoryConfigSource); configSource.initialize("foo", "bar"); assertEquals(0, configSource.size()); configSource.set("foo", "bar"); assertEquals(1, configSource.size()); assertEquals("bar", configSource.get("foo")); // verify that the writes went to mem source. assertEquals(1, memoryConfigSource.size()); assertEquals("bar", memoryConfigSource.get("foo")); } @Test public void readMultiple() { MemoryConfigSource m1 = new MemoryConfigSource(); m1.set("foo", "foo"); MemoryConfigSource m2 = new MemoryConfigSource(); m2.set("bar", "bar"); MemoryConfigSource m3 = new MemoryConfigSource(); m3.set("baz", "baz"); IConfigSource configSource = new CompositeConfigSource(m1, m2, m3); assertEquals(3, configSource.size()); assertEquals("foo", configSource.get("foo")); assertEquals("bar", configSource.get("bar")); assertEquals("baz", configSource.get("baz")); // read default assertEquals("test", configSource.get("doesnotexist", "test")); } }
5,058
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/configuration/TestPropertiesConfigSource.java
package com.netflix.raigad.configuration; import org.junit.Test; import static org.junit.Assert.assertEquals; public class TestPropertiesConfigSource { @Test public void readFile() { PropertiesConfigSource configSource = new PropertiesConfigSource("conf/raigad.properties"); configSource.initialize("asgName", "region"); assertEquals("\"/tmp/data\"", configSource.get("Raigad.path.data")); assertEquals(9001, configSource.get("Raigad.transport.tcp.port", 0)); // File has 5 lines, but line 6 is "Raigad.http.port9002", so it gets filtered out with empty string check. assertEquals(4, configSource.size()); } @Test public void updateKey() { PropertiesConfigSource configSource = new PropertiesConfigSource("conf/raigad.properties"); configSource.initialize("asgName", "region"); // File has 5 lines, but line 2 is "escar.http.port9002", so it gets filtered out with empty string check. assertEquals(4, configSource.size()); configSource.set("foo", "bar"); assertEquals(5, configSource.size()); assertEquals("bar", configSource.get("foo")); assertEquals(9001, configSource.get("Raigad.transport.tcp.port", 0)); configSource.set("Raigad.transport.tcp.port", Integer.toString(10)); assertEquals(10, configSource.get("Raigad.transport.tcp.port", 0)); } }
5,059
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/resources/TestElasticsearchConfig.java
package com.netflix.raigad.resources; import com.netflix.raigad.configuration.CustomConfigSource; import com.netflix.raigad.configuration.IConfiguration; import com.netflix.raigad.identity.InstanceManager; import com.netflix.raigad.identity.RaigadInstance; import com.netflix.raigad.startup.RaigadServer; import com.netflix.raigad.utils.TribeUtils; import org.junit.Before; import org.junit.Test; import javax.ws.rs.core.Response; import java.util.Collections; import java.util.List; import static java.util.Arrays.asList; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.*; public class TestElasticsearchConfig { private TribeUtils tribeUtils; private IConfiguration config; @Before public void setUp() { tribeUtils = mock(TribeUtils.class); config = mock(IConfiguration.class); } @Test public void getNodes() { RaigadInstance raigadInstance1 = new RaigadInstance(); raigadInstance1.setApp("fake-app1"); RaigadInstance raigadInstance2 = new RaigadInstance(); raigadInstance2.setApp("fake-app2"); RaigadInstance raigadInstance3 = new RaigadInstance(); raigadInstance3.setApp("fake-app3"); final List<RaigadInstance> nodes = asList(raigadInstance1, raigadInstance2, raigadInstance3); InstanceManager instanceManager = mock(InstanceManager.class); when(instanceManager.getAllInstances()).thenReturn(nodes); RaigadServer raigadServer = mock(RaigadServer.class); when(raigadServer.getInstanceManager()).thenReturn(instanceManager); ElasticsearchConfig elasticsearchConfig = new ElasticsearchConfig(raigadServer, tribeUtils, new CustomConfigSource(), config); Response response = elasticsearchConfig.getNodes(); assertEquals(200, response.getStatus()); verify(raigadServer, times(1)).getInstanceManager(); verify(instanceManager, times(1)).getAllInstances(); } @Test public void getNodes_notFound() { InstanceManager instanceManager = mock(InstanceManager.class); when(instanceManager.getAllInstances()).thenReturn(Collections.emptyList()); RaigadServer raigadServer = 
mock(RaigadServer.class); when(raigadServer.getInstanceManager()).thenReturn(instanceManager); ElasticsearchConfig elasticsearchConfig = new ElasticsearchConfig(raigadServer, tribeUtils, new CustomConfigSource(), config); Response response = elasticsearchConfig.getNodes(); assertEquals(200, response.getStatus()); verify(raigadServer, times(1)).getInstanceManager(); verify(instanceManager, times(1)).getAllInstances(); } @Test public void getNodes_Error() { InstanceManager instanceManager = mock(InstanceManager.class); when(instanceManager.getAllInstances()).thenReturn(null); RaigadServer raigadServer = mock(RaigadServer.class); when(raigadServer.getInstanceManager()).thenReturn(instanceManager); ElasticsearchConfig elasticsearchConfig = new ElasticsearchConfig(raigadServer, tribeUtils, new CustomConfigSource(), config); Response response = elasticsearchConfig.getNodes(); assertEquals(500, response.getStatus()); verify(raigadServer, times(1)).getInstanceManager(); verify(instanceManager, times(1)).getAllInstances(); } @Test public void getNodes_handlesUnknownHostException() { InstanceManager instanceManager = mock(InstanceManager.class); when(instanceManager.getAllInstances()).thenThrow(new RuntimeException()); RaigadServer raigadServer = mock(RaigadServer.class); when(raigadServer.getInstanceManager()).thenReturn(instanceManager); ElasticsearchConfig elasticsearchConfig = new ElasticsearchConfig(raigadServer, tribeUtils, new CustomConfigSource(), config); Response response = elasticsearchConfig.getNodes(); assertEquals(500, response.getStatus()); verify(raigadServer, times(1)).getInstanceManager(); verify(instanceManager, times(1)).getAllInstances(); } }
5,060
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/scheduler/TestGuiceSingleton.java
package com.netflix.raigad.scheduler; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Singleton; import org.junit.Test; public class TestGuiceSingleton { public static class GModules extends AbstractModule { @Override protected void configure() { bind(EmptyInterface.class).to(GuiceSingleton.class).asEagerSingleton(); } } public interface EmptyInterface { public String print(); } @Singleton public static class GuiceSingleton implements EmptyInterface { public String print() { System.out.println(this.toString()); return this.toString(); } } @Test public void testSingleton() { Injector injector = Guice.createInjector(new GModules()); injector.getInstance(EmptyInterface.class).print(); injector.getInstance(EmptyInterface.class).print(); injector.getInstance(EmptyInterface.class).print(); printInjected(); printInjected(); printInjected(); printInjected(); } public void printInjected() { Injector injector = Guice.createInjector(new GModules()); injector.getInstance(EmptyInterface.class).print(); } }
5,061
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/scheduler/TestScheduler.java
package com.netflix.raigad.scheduler; import com.google.inject.Guice; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Singleton; import com.netflix.raigad.configuration.IConfiguration; import com.netflix.raigad.configuration.UnitTestModule; import org.junit.Ignore; import org.junit.Test; import javax.management.MBeanServerFactory; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; public class TestScheduler { private static CountDownLatch latch; @Test public void testSchedule() throws Exception { latch = new CountDownLatch(1); Injector inject = Guice.createInjector(new UnitTestModule()); RaigadScheduler scheduler = inject.getInstance(RaigadScheduler.class); scheduler.start(); scheduler.addTask("test", TestTask.class, new SimpleTimer("testtask", 10)); // verify the task has run or fail in 1s latch.await(1000, TimeUnit.MILLISECONDS); scheduler.shutdown(); } @Test public void testSingleInstanceSchedule() throws Exception { latch = new CountDownLatch(3); Injector inject = Guice.createInjector(new UnitTestModule()); RaigadScheduler scheduler = inject.getInstance(RaigadScheduler.class); scheduler.start(); scheduler.addTask("test2", SingleTestTask.class, SingleTestTask.getTimer()); // verify 3 tasks run or fail in 1s latch.await(4000, TimeUnit.MILLISECONDS); scheduler.shutdown(); assertEquals(3, SingleTestTask.count); } @Ignore public static class TestTask extends Task { @Inject public TestTask(IConfiguration config) { // todo: mock the MBeanServer instead, but this will prevent exceptions due to duplicate registrations super(config, MBeanServerFactory.newMBeanServer()); } @Override public void execute() { latch.countDown(); } @Override public String getName() { return "test"; } } @Ignore @Singleton public static class SingleTestTask extends Task { @Inject public SingleTestTask(IConfiguration config) { super(config, MBeanServerFactory.newMBeanServer()); 
} public static int count = 0; @Override public void execute() { ++count; latch.countDown(); try { Thread.sleep(10); } catch (InterruptedException e) { e.printStackTrace(); } } @Override public String getName() { return "test2"; } public static TaskTimer getTimer() { return new SimpleTimer("test2", 11L); } } }
5,062
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/utils/FakeSleeper.java
package com.netflix.raigad.utils; public class FakeSleeper implements Sleeper { @Override public void sleep(long waitTimeMs) throws InterruptedException { // no-op } public void sleepQuietly(long waitTimeMs) { //no-op } }
5,063
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/utils/TestElasticsearchUtils.java
package com.netflix.raigad.utils; import com.netflix.raigad.configuration.IConfiguration; import com.netflix.raigad.identity.RaigadInstance; import mockit.Expectations; import mockit.Mock; import mockit.MockUp; import mockit.Mocked; import org.json.simple.JSONObject; import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; import java.util.List; public class TestElasticsearchUtils { @org.mockito.Mock @Mocked IConfiguration config; @Test public void TestInstanceToJson() { System.out.println("Starting a test..."); List<RaigadInstance> instances = getRaigadInstances(); JSONObject jsonInstances = ElasticsearchUtils.transformRaigadInstanceToJson(instances); System.out.println(jsonInstances); List<RaigadInstance> returnedInstances = ElasticsearchUtils.getRaigadInstancesFromJson(jsonInstances); System.out.println("Number of returned instances = " + returnedInstances.size()); for (RaigadInstance raigadInstance : returnedInstances) { System.out.println("-->" + raigadInstance); } } @Test public void TestAmIMasterNode() throws Exception { String expectedIp = "100.0.0.1"; new Expectations() { { config.getHostIP(); result = expectedIp; times = 1; config.getHostLocalIP(); times = 0; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return expectedIp; } }; Assert.assertTrue(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } @Test public void TestAmIMasterNodeWithWhitespace() throws Exception { String expectedIp = "100.0.0.1"; new Expectations() { { config.getHostIP(); result = expectedIp; times = 1; config.getHostLocalIP(); times = 0; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return expectedIp + " \n "; } }; Assert.assertTrue(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } @Test public void TestAmIMasterNodeExternalIp() throws Exception { String expectedLocalIp = "100.0.0.1"; String expectedExternalIp = "54.0.0.1"; new Expectations() { { config.getHostIP(); 
result = expectedExternalIp; times = 1; config.getHostLocalIP(); result = expectedLocalIp; times = 1; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return expectedLocalIp; } }; Assert.assertTrue(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } @Test public void TestAmIMasterNodeNegative() throws Exception { String expectedIp = "100.0.0.1"; String returnedIp = "100.0.0.2"; new Expectations() { { config.getHostIP(); result = expectedIp; times = 1; config.getHostLocalIP(); result = expectedIp; times = 1; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return returnedIp; } }; Assert.assertFalse(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } @Test public void TestAmIMasterNodeNegativeNull() throws Exception { new Expectations() { { config.getHostIP(); times = 0; config.getHostLocalIP(); times = 0; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return null; } }; Assert.assertFalse(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } @Test public void TestAmIMasterNodeNegativeEmpty() throws Exception { new Expectations() { { config.getHostIP(); times = 0; config.getHostLocalIP(); times = 0; } }; new MockUp<SystemUtils>() { @Mock String runHttpGetCommand(String url) { return ""; } }; Assert.assertFalse(ElasticsearchUtils.amIMasterNode(config, new HttpModule(config))); } public static List<RaigadInstance> getRaigadInstances() { List<RaigadInstance> instances = new ArrayList<RaigadInstance>(); for (int i = 0; i < 3; i++) { RaigadInstance raigadInstance = new RaigadInstance(); raigadInstance.setApp("cluster-" + i); raigadInstance.setAvailabilityZone("1d"); raigadInstance.setDC("us-east1"); raigadInstance.setHostIP("127.0.0." 
+ i); raigadInstance.setHostName("host-" + i); raigadInstance.setId("id-" + i); raigadInstance.setInstanceId("instance-" + i); raigadInstance.setUpdatetime(12345567); instances.add(raigadInstance); } return instances; } }
5,064
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/utils/TestElasticsearchProcessMonitor.java
package com.netflix.raigad.utils;

import com.netflix.raigad.configuration.FakeConfiguration;
import com.netflix.raigad.configuration.IConfiguration;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import javax.management.ObjectName;
import java.io.IOException;
import java.io.InputStream;
import java.lang.management.ManagementFactory;

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.*;

/**
 * Tests for {@link ElasticsearchProcessMonitor}, which detects a running
 * Elasticsearch process by reading the first line of a pgrep-style command's
 * output. The Runtime/Process/InputStream chain is fully mocked so no real
 * process is ever spawned.
 */
public class TestElasticsearchProcessMonitor {
    private static String ELASTICSEARCH_PROCESS_NAME = FakeConfiguration.ES_PROCESS_NAME;

    // Mocked process-lookup pipeline: runtime.exec(...) -> pgrepProcess -> processInputStream.
    private Process pgrepProcess;
    private InputStream processInputStream;
    private ElasticsearchProcessMonitor elasticsearchProcessMonitor;

    @Before
    public void setUp() throws IOException {
        processInputStream = mock(InputStream.class);
        pgrepProcess = mock(Process.class);
        when(pgrepProcess.getInputStream()).thenReturn(processInputStream);
        Runtime runtime = mock(Runtime.class);
        when(runtime.exec(anyString())).thenReturn(pgrepProcess);
        // Spy so getRuntime() can be stubbed to return the mocked Runtime above.
        elasticsearchProcessMonitor = spy(new ElasticsearchProcessMonitor(mock(IConfiguration.class)));
        doReturn(runtime).when(elasticsearchProcessMonitor).getRuntime();
    }

    @After
    public void cleanUp() throws Exception {
        // The monitor registers itself as an MBean (presumably in the Task base
        // class constructor — not visible here); unregister so the next test's
        // construction does not fail on a duplicate registration.
        ManagementFactory.getPlatformMBeanServer().unregisterMBean(
                new ObjectName("com.netflix.raigad.scheduler:type=" + ElasticsearchProcessMonitor.class.getName()));
        // Reset the monitor's static state so tests stay independent.
        ElasticsearchProcessMonitor.isElasticsearchRunningNow.set(false);
        ElasticsearchProcessMonitor.wasElasticsearchStarted.set(false);
    }

    @Test
    public void testNullInputStream() throws Exception {
        // No output line at all -> process not detected; stream and process are cleaned up.
        doReturn(null).when(elasticsearchProcessMonitor).getFirstLine(processInputStream);
        elasticsearchProcessMonitor.checkElasticsearchProcess(ELASTICSEARCH_PROCESS_NAME);
        verify(processInputStream, times(1)).close();
        verify(pgrepProcess, times(1)).destroyForcibly();
        Assert.assertFalse(ElasticsearchProcessMonitor.isElasticsearchRunning());
        Assert.assertFalse(ElasticsearchProcessMonitor.getWasElasticsearchStarted());
    }

    @Test
    public void testEmptyInputStream() throws Exception {
        // Empty output line is treated the same as no output: not running.
        doReturn("").when(elasticsearchProcessMonitor).getFirstLine(processInputStream);
        elasticsearchProcessMonitor.checkElasticsearchProcess(ELASTICSEARCH_PROCESS_NAME);
        verify(processInputStream, times(1)).close();
        verify(pgrepProcess, times(1)).destroyForcibly();
        Assert.assertFalse(ElasticsearchProcessMonitor.isElasticsearchRunning());
        Assert.assertFalse(ElasticsearchProcessMonitor.getWasElasticsearchStarted());
    }

    @Test
    public void testValidInputStream() throws Exception {
        // A PID on the first line means Elasticsearch is running, and the
        // "was started" latch flips on as well.
        doReturn("1234").when(elasticsearchProcessMonitor).getFirstLine(processInputStream);
        elasticsearchProcessMonitor.checkElasticsearchProcess(ELASTICSEARCH_PROCESS_NAME);
        verify(processInputStream, times(1)).close();
        verify(pgrepProcess, times(1)).destroyForcibly();
        Assert.assertTrue(ElasticsearchProcessMonitor.isElasticsearchRunning());
        Assert.assertTrue(ElasticsearchProcessMonitor.getWasElasticsearchStarted());
    }

    @Test
    public void testElasticsearchWasStarted() throws Exception {
        // Once started, "wasElasticsearchStarted" stays true even after the
        // process disappears, while "isElasticsearchRunning" goes back to false.
        doReturn("").when(elasticsearchProcessMonitor).getFirstLine(processInputStream);
        ElasticsearchProcessMonitor.isElasticsearchRunningNow.set(true);
        ElasticsearchProcessMonitor.wasElasticsearchStarted.set(true);
        elasticsearchProcessMonitor.checkElasticsearchProcess(ELASTICSEARCH_PROCESS_NAME);
        verify(processInputStream, times(1)).close();
        verify(pgrepProcess, times(1)).destroyForcibly();
        Assert.assertFalse(ElasticsearchProcessMonitor.isElasticsearchRunning());
        Assert.assertTrue(ElasticsearchProcessMonitor.getWasElasticsearchStarted());
    }
}
5,065
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/defaultimpl/TestStandardTuner.java
package com.netflix.raigad.defaultimpl;

import com.google.common.io.Files;
import com.netflix.raigad.configuration.FakeConfiguration;
import com.netflix.raigad.configuration.IConfiguration;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.IOException;

/**
 * Smoke test for {@link StandardTuner}: copies the sample elasticsearch.yml to
 * a scratch path and checks that writeAllProperties completes without throwing.
 */
public class TestStandardTuner {
    private IConfiguration config;
    private StandardTuner tuner;

    @Before
    public void setup() {
        config = new FakeConfiguration();
        tuner = new StandardTuner(config);
    }

    @Test
    public void dump() throws IOException {
        String target = "/tmp/raigad_test.yaml";
        // Reuse the target variable instead of repeating the literal path
        // (the original duplicated "/tmp/raigad_test.yaml", inviting drift).
        Files.copy(new File("src/test/resources/elasticsearch.yml"), new File(target));
        tuner.writeAllProperties(target, "your_host");
    }
}
5,066
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/defaultimpl/TestElasticsearchProcessManager.java
package com.netflix.raigad.defaultimpl; import com.netflix.raigad.configuration.FakeConfiguration; import com.netflix.raigad.configuration.IConfiguration; import org.junit.Before; import org.junit.Test; import java.io.IOException; import static junit.framework.TestCase.assertEquals; import static org.junit.Assert.assertTrue; public class TestElasticsearchProcessManager { private ElasticsearchProcessManager elasticSearchProcessManager; @Before public void setup() { IConfiguration config = new FakeConfiguration("us-east-1", "test_cluster", "us-east-1a", "i-1234afd3"); elasticSearchProcessManager = new ElasticsearchProcessManager(config); } @Test public void logProcessOutput_BadApp() throws IOException, InterruptedException { Process p = null; try { p = new ProcessBuilder("ls", "/tmppppp").start(); int exitValue = p.waitFor(); assertTrue(0 != exitValue); elasticSearchProcessManager.logProcessOutput(p); } catch (IOException ioe) { if (p != null) { elasticSearchProcessManager.logProcessOutput(p); } } } /** * Note: this will succeed on a *nix machine, unclear about anything else... */ @Test public void logProcessOutput_GoodApp() throws IOException, InterruptedException { Process p = new ProcessBuilder("true").start(); int exitValue = p.waitFor(); assertEquals(0, exitValue); elasticSearchProcessManager.logProcessOutput(p); } }
5,067
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/dataobjects/TestMasterNodeInfoMapper.java
package com.netflix.raigad.dataobjects; import com.netflix.raigad.objectmapper.DefaultMasterNodeInfoMapper; import org.codehaus.jackson.JsonGenerationException; import org.codehaus.jackson.map.JsonMappingException; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.type.TypeReference; import org.junit.Test; import java.io.IOException; import java.util.ArrayList; import java.util.List; import static org.junit.Assert.assertEquals; /* [ { "id":"8sZZWYmmQaeNUKMq1S1uow", "host":"es-test-useast1d-master-i-9e112345", "ip":"10.111.22.333", "node":"us-east-1d.i-9e112345" } ] */ public class TestMasterNodeInfoMapper { ObjectMapper mapper = new DefaultMasterNodeInfoMapper(); @Test public void testMasterNodeInformationObject() throws IOException { String masterNodeInfo = "[{\"id\":\"8sZZWYmmQaeNUKMq1S1uow\",\"host\":\"es-test-useast1d-master-i-9e112345\",\"ip\":\"10.111.22.333\",\"node\":\"us-east-1d.i-9e112345\"}]"; try { List<MasterNodeInformation> myObjs = mapper.readValue(masterNodeInfo, new TypeReference<ArrayList<MasterNodeInformation>>() { }); assertEquals(1, myObjs.size()); for (MasterNodeInformation key : myObjs) { assertEquals("8sZZWYmmQaeNUKMq1S1uow", key.getId()); assertEquals("es-test-useast1d-master-i-9e112345", key.getHost()); assertEquals("10.111.22.333", key.getIp()); assertEquals("us-east-1d.i-9e112345", key.getNode()); } } catch (JsonGenerationException e) { e.printStackTrace(); } catch (JsonMappingException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } } }
5,068
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/backup/TestBackupRestore.java
package com.netflix.raigad.backup;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.netflix.raigad.configuration.IConfiguration;
import com.netflix.raigad.configuration.UnitTestModule;
import com.netflix.raigad.utils.ElasticsearchTransportClient;
import mockit.Mock;
import mockit.MockUp;
import mockit.Mocked;
import org.apache.commons.io.FileUtils;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.RepositoryMissingException;
import org.elasticsearch.snapshots.SnapshotState;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.*;

import java.io.File;
import java.io.IOException;
import java.util.List;

/**
 * Reference:https://github.com/elasticsearch/elasticsearch-cloud-aws/blob/es-1.1/src/test/java/org/elasticsearch/repositories/s3/S3SnapshotRestoreTest.java
 * <p>
 * Following tests do not test S3 cloud functionality but uses fs (file system) locally to run Snapshot and Backup
 * TODO: Need to fix for S3 functionality
 */
/*
{
    "20140331": {
        "type": "s3",
        "settings": {
            "region": "us-east-1",
            "base_path": "es_test/20140331",
            "bucket": "es-backup-test"
        }
    },
    "20140410": {
        "type": "s3",
        "settings": {
            "region": "us-east-1",
            "base_path": "es_test/20140410",
            "bucket": "es-backup-test"
        }
    }
}
*/
// NOTE(review): @Ignore'd integration test — backup/restore round trip against an
// embedded 2-node ES cluster, with the S3 repository swapped for a local "fs" one
// via the JMockit MockUp classes at the bottom. The MockUps read the mutable
// static fields (client0, repositoryName, configuration, ...) that setup() and
// testSimpleWorkflow() populate, so execution order matters throughout.
@Ignore
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 2)
public class TestBackupRestore extends ESIntegTestCase {
    private static final char PATH_SEP = File.separatorChar;

    // Shared mutable state consumed by the MockUp classes below.
    public static String repositoryName = "";
    public static String repositoryLocation = "";
    public static String LOCAL_DIR = "data";
    private static Injector injector;
    public static Client client0;

    @Mocked
    private static ElasticsearchTransportClient esTransportClient;
    private static IConfiguration configuration;
    private static S3RepositorySettingsParams s3RepositorySettingsParams;
    private static S3Repository s3Repository;

    @Mocked
    private static SnapshotBackupManager snapshotBackupManager;
    @Mocked
    private static RestoreBackupManager restoreBackupManager;

    @Before
    public final void setup() throws IOException {
        System.out.println("Running setup now...");
        injector = Guice.createInjector(new UnitTestModule());
        configuration = injector.getInstance(IConfiguration.class);
        s3RepositorySettingsParams = injector.getInstance(S3RepositorySettingsParams.class);
        esTransportClient = injector.getInstance(ElasticsearchTransportClient.class);
        s3Repository = injector.getInstance(S3Repository.class);
        // Only inject the managers once; @Mocked fields may already hold instances.
        if (snapshotBackupManager == null) {
            snapshotBackupManager = injector.getInstance(SnapshotBackupManager.class);
        }
        if (restoreBackupManager == null) {
            restoreBackupManager = injector.getInstance(RestoreBackupManager.class);
        }
        wipeRepositories();
        cleanupDir(LOCAL_DIR, null);
    }

    @After
    public final void wipeAfter() throws IOException {
        System.out.println("Running wipeAfter ...");
        wipeRepositories();
        // Null out the statics so state cannot leak between test runs.
        injector = null;
        configuration = null;
        s3RepositorySettingsParams = null;
        s3Repository = null;
        esTransportClient = null;
        client0 = null;
        cleanupDir(LOCAL_DIR, null);
    }

    // Full round trip: index 100 docs per index, snapshot, delete half,
    // close the indices, restore the snapshot, verify all 100 docs are back.
    @Test
    public void testSimpleWorkflow() throws Exception {
        client0 = client();
        repositoryName = s3Repository.getRemoteRepositoryName();
        //Create S3 Repository
        Assert.assertFalse(s3Repository.createOrGetSnapshotRepository() == null);
        createIndex("test-idx-1", "test-idx-3");
        ensureGreen();
        logger.info("--> indexing some data");
        for (int i = 0; i < 100; i++) {
            index("test-idx-1", "doc", Integer.toString(i), "foo", "bar" + i);
            index("test-idx-3", "doc", Integer.toString(i), "foo", "baz" + i);
        }
        refresh();
        Assert.assertEquals(client0.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), 100L);
        Assert.assertEquals(client0.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), 100L);
        //Run backup
        snapshotBackupManager.runSnapshotBackup();
        // Snapshot must have completed successfully before we mutate the data.
        Assert.assertEquals(
                client0.admin().cluster().prepareGetSnapshots(repositoryName).setSnapshots(
                        snapshotBackupManager.getSnapshotName("_all", false))
                        .get().getSnapshots().get(0).state(), SnapshotState.SUCCESS);
        logger.info("--> delete some data");
        for (int i = 0; i < 50; i++) {
            client0.prepareDelete("test-idx-1", "doc", Integer.toString(i)).get();
        }
        for (int i = 0; i < 100; i += 2) {
            client0.prepareDelete("test-idx-3", "doc", Integer.toString(i)).get();
        }
        refresh();
        Assert.assertEquals(client0.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), 50L);
        Assert.assertEquals(client0.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), 50L);
        logger.info("--> close indices");
        client0.admin().indices().prepareClose("test-idx-1", "test-idx-3").get();
        logger.info("--> restore all indices from the snapshot");
        restoreBackupManager.runRestore(repositoryName, "fs",
                snapshotBackupManager.getSnapshotName("_all", false), null, null, null);
        ensureGreen();
        Assert.assertEquals(client0.prepareSearch("test-idx-1").setSize(0).get().getHits().getTotalHits(), 100L);
        Assert.assertEquals(client0.prepareSearch("test-idx-3").setSize(0).get().getHits().getTotalHits(), 100L);
    }

    // Redirects ElasticsearchTransportClient to the embedded test cluster client.
    @Ignore
    public static class MockElasticsearchTransportClient extends MockUp<ElasticsearchTransportClient> {
        @Mock
        public static ElasticsearchTransportClient instance(IConfiguration config) {
            return esTransportClient;
        }

        @Mock
        public Client getTransportClient() {
            return client0;
        }
    }

    // Replaces the S3 repository registration with a local "fs" repository.
    @Ignore
    public static class MockS3Repository extends MockUp<S3Repository> {
        @Mock
        public PutRepositoryResponse getPutRepositoryResponse(Client esTransportClient, String s3RepoName) {
            String localRepositoryLocation = LOCAL_DIR + PATH_SEP + s3RepositorySettingsParams.getBase_path();
            PutRepositoryResponse putRepositoryResponse = client0.admin().cluster()
                    .preparePutRepository(repositoryName)
                    .setType(AbstractRepository.RepositoryType.fs.name())
                    .setSettings(Settings.builder().put("location", localRepositoryLocation))
                    .get();
            //Setting local repository location
            repositoryLocation = localRepositoryLocation;
            return putRepositoryResponse;
        }
    }

    // Routes snapshot creation through the embedded cluster client.
    @Ignore
    public static class MockSnapshotBackupManager extends MockUp<SnapshotBackupManager> {
        @Mock
        public CreateSnapshotResponse getCreateSnapshotResponse(Client esTransportClient, String repositoryName, String snapshotName) {
            return client0.admin().cluster().prepareCreateSnapshot(repositoryName, snapshotName)
                    .setWaitForCompletion(configuration.waitForCompletionOfBackup())
                    .setIndices(configuration.getCommaSeparatedIndicesToBackup())
                    .setIncludeGlobalState(configuration.includeGlobalStateDuringBackup())
                    .setPartial(configuration.partiallyBackupIndices()).get();
        }
    }

    // Routes restore through the embedded cluster client, ignoring the passed
    // snapshot name in favor of the one the backup manager generated.
    @Ignore
    public static class MockRestoreBackupManager extends MockUp<RestoreBackupManager> {
        @Mock
        public RestoreSnapshotResponse getRestoreSnapshotResponse(
                Client esTransportClient, String commaSeparatedIndices, String restoreRepositoryName, String snapshotN) {
            snapshotN = snapshotBackupManager.getSnapshotName("_all", false);
            return client0.admin().cluster().prepareRestoreSnapshot(repositoryName, snapshotN)
                    .setIndices("test-idx-*")
                    .setWaitForCompletion(true)
                    .execute()
                    .actionGet();
        }
    }

    // Cleans the whole dirPath when childDirs is null/empty, otherwise only the
    // listed child directories.
    public static void cleanupDir(String dirPath, List<String> childDirs) throws IOException {
        if (childDirs == null || childDirs.size() == 0) {
            FileUtils.cleanDirectory(new File(dirPath));
        } else {
            for (String childDir : childDirs) {
                FileUtils.cleanDirectory(new File(dirPath + "/" + childDir));
            }
        }
    }

    /**
     * Deletes repositories, supports wildcard notation.
     */
    public static void wipeRepositories(String... repositories) {
        // if nothing is provided, delete all
        if (repositories.length == 0) {
            repositories = new String[]{"*"};
        }
        for (String repository : repositories) {
            try {
                client().admin().cluster().prepareDeleteRepository(repository).execute().actionGet();
            } catch (RepositoryMissingException ex) {
                // ignore
            }
        }
    }
}
5,069
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/indexmanagement/TestIndexNameFilter.java
package com.netflix.raigad.indexmanagement; import com.netflix.raigad.indexmanagement.indexfilters.DatePatternIndexNameFilter; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.junit.Test; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public class TestIndexNameFilter { @Test public void testWrongPrefix() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("foo2018")); } @Test public void testYearlyPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2018")); } @Test public void testYearlyPatternWithYYYYMM() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("abcd201802")); } @Test public void testMonthlyPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMM"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd201802")); } @Test public void testMonthlyPatternWithSingleDigitMonth() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMM"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("abcd20182")); } @Test public void testMonthlyPatternWithYYYYMMdd() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMM"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("abcd20180203")); } @Test public void testWeeklyPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'-YYYY-ww"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd-2018-51")); } @Test public void 
testWeeklyPatternInvalid() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'-YYYY-ww"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("abcd-2018-53")); } @Test public void testDailyPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMMdd"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd20180203")); } @Test public void testHalfDayPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'-YYYY-MM-dd-aa"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd-2018-02-03-AM")); assertTrue(filter.filter("abcd-2018-02-03-PM")); assertFalse(filter.filter("abcd-2018-02-03-BC")); } @Test public void testHourlyPattern() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMMddHH"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2018020323")); } @Test public void testHourlyPatternInvalidHour() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMMddHH"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertFalse(filter.filter("abcd2018020328")); } @Test public void testPatternWithDashes() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY-MM-dd"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2018-02-27")); } @Test public void testPatternWithDots() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY.MM.dd"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2018.02.27")); } @Test public void testPatternWithSuffix() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY-MM-dd'ghi'"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2018-02-27ghi")); } @Test public 
void testHourlyIndexNameFilter() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMMddHH"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2013120300")); assertTrue(filter.filter("abcd2013120301")); assertTrue(filter.filter("abcd2013120312")); assertTrue(filter.filter("abcd2013120323")); assertFalse(filter.filter("abcd12013120323")); assertFalse(filter.filter("abcd2013120324")); assertFalse(filter.filter("abcd2013120345")); assertFalse(filter.filter("abcd20231248")); assertFalse(filter.filter("_abc")); } @Test public void testDailyIndexNameFilter() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMMdd"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd20131203")); assertFalse(filter.filter("abcd120131203")); assertFalse(filter.filter("abcd20231248")); assertFalse(filter.filter("abcd202312")); assertFalse(filter.filter("_abc")); } @Test public void testMonthlyIndexNameFilter() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYYMM"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd202312")); assertFalse(filter.filter("abcd1202312")); assertFalse(filter.filter("abcd20131203")); assertFalse(filter.filter("_abc")); System.out.println(formatter.parseDateTime("abcd20231")); assertFalse(filter.filter("abcd20231")); assertFalse(filter.filter("abcd202313")); assertFalse(filter.filter("abcd20231248")); } @Test public void testYearlyIndexNameFilter() { DateTimeFormatter formatter = DateTimeFormat.forPattern("'abcd'YYYY"); IIndexNameFilter filter = new DatePatternIndexNameFilter(formatter); assertTrue(filter.filter("abcd2023")); assertFalse(filter.filter("abcd20131203")); assertFalse(filter.filter("_abc")); assertFalse(filter.filter("abcd202")); } }
5,070
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/indexmanagement/TestElasticsearchIndexManager.java
package com.netflix.raigad.indexmanagement;

import com.netflix.raigad.configuration.IConfiguration;
import com.netflix.raigad.indexmanagement.exception.UnsupportedAutoIndexException;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.client.Client;
import org.joda.time.DateTime;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import javax.management.ObjectName;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.util.*;

import static org.mockito.Mockito.*;

/**
 * Tests for {@link ElasticsearchIndexManager}: verifies that index retention /
 * pre-creation is skipped for non-actionable metadata, that indices older than
 * the retention window are deleted, and that overlapping metadata entries are
 * each evaluated. The manager is spied so that all cluster interaction
 * (transport client, stats, deletes) is stubbed while the scheduling logic
 * under test runs for real.
 */
public class TestElasticsearchIndexManager {
    private static final int AUTO_CREATE_INDEX_TIMEOUT = 300000;

    private Client elasticsearchClient;
    private IConfiguration config;
    private ElasticsearchIndexManager elasticsearchIndexManager;

    @Before
    public void setUp() throws Exception {
        config = mock(IConfiguration.class);
        when(config.getAutoCreateIndexTimeout()).thenReturn(AUTO_CREATE_INDEX_TIMEOUT);

        elasticsearchClient = mock(Client.class);

        // Spy the real manager so scheduling logic executes, but stub out
        // every call that would touch an actual Elasticsearch cluster.
        elasticsearchIndexManager = spy(new ElasticsearchIndexManager(config, null));
        doReturn(elasticsearchClient).when(elasticsearchIndexManager).getTransportClient();
        doNothing().when(elasticsearchIndexManager)
                .deleteIndices(eq(elasticsearchClient), anyString(), anyInt());
    }

    /** Builds a mocked stats response reporting exactly the given index names. */
    private IndicesStatsResponse statsResponseFor(String... indexNames) {
        Map<String, IndexStats> statsByIndex = new HashMap<>();
        for (String indexName : indexNames) {
            statsByIndex.put(indexName, new IndexStats(indexName, new ShardStats[]{}));
        }
        IndicesStatsResponse statsResponse = mock(IndicesStatsResponse.class);
        when(statsResponse.getIndices()).thenReturn(statsByIndex);
        return statsResponse;
    }

    @Test
    public void testRunIndexManagement_NotActionable_NoIndex() throws Exception {
        // Metadata without an index name is not actionable: nothing is checked or pre-created.
        String serializedIndexMetadata = "[{\"retentionType\": \"yearly\", \"retentionPeriod\": 20}]";
        when(config.getIndexMetadata()).thenReturn(serializedIndexMetadata);

        doReturn(statsResponseFor("nf_errors_log2018"))
                .when(elasticsearchIndexManager).getIndicesStatsResponse(elasticsearchClient);

        elasticsearchIndexManager.runIndexManagement();

        verify(elasticsearchIndexManager, times(0)).checkIndexRetention(
                any(Client.class), anySet(), any(IndexMetadata.class), any(DateTime.class));
        verify(elasticsearchIndexManager, times(0)).preCreateIndex(
                any(Client.class), any(IndexMetadata.class), any(DateTime.class));
    }

    @Test
    public void testRunIndexManagement_NotActionable_NoRetentionPeriod() throws Exception {
        // Metadata without a retention period is likewise not actionable.
        String serializedIndexMetadata = "[{\"retentionType\": \"yearly\", \"indexName\": \"nf_errors_log\"}]";
        when(config.getIndexMetadata()).thenReturn(serializedIndexMetadata);

        doReturn(statsResponseFor("nf_errors_log2018"))
                .when(elasticsearchIndexManager).getIndicesStatsResponse(elasticsearchClient);

        elasticsearchIndexManager.runIndexManagement();

        verify(elasticsearchIndexManager, times(0)).checkIndexRetention(
                any(Client.class), anySet(), any(IndexMetadata.class), any(DateTime.class));
        verify(elasticsearchIndexManager, times(0)).preCreateIndex(
                any(Client.class), any(IndexMetadata.class), any(DateTime.class));
    }

    @Test
    public void testRunIndexManagement() throws Exception {
        // Yearly retention of 3: with indices 2012..2018 present, only the two
        // oldest (2012, 2013) fall outside the window and must be deleted.
        String serializedIndexMetadata =
                "[{\"retentionType\": \"yearly\", \"retentionPeriod\": 3, \"indexName\": \"nf_errors_log\"}]";
        when(config.getIndexMetadata()).thenReturn(serializedIndexMetadata);

        doReturn(statsResponseFor(
                "nf_errors_log2018", "nf_errors_log2017", "nf_errors_log2016",
                "nf_errors_log2015", "nf_errors_log2014", "nf_errors_log2013",
                "nf_errors_log2012"))
                .when(elasticsearchIndexManager).getIndicesStatsResponse(elasticsearchClient);

        elasticsearchIndexManager.runIndexManagement();

        verify(elasticsearchIndexManager, times(1)).checkIndexRetention(
                any(Client.class), anySet(), any(IndexMetadata.class), any(DateTime.class));
        verify(elasticsearchIndexManager, times(1)).deleteIndices(
                any(Client.class), eq("nf_errors_log2012"), eq(AUTO_CREATE_INDEX_TIMEOUT));
        verify(elasticsearchIndexManager, times(1)).deleteIndices(
                any(Client.class), eq("nf_errors_log2013"), eq(AUTO_CREATE_INDEX_TIMEOUT));
        verify(elasticsearchIndexManager, times(0)).preCreateIndex(
                any(Client.class), any(IndexMetadata.class), any(DateTime.class));
    }

    @Test
    public void testCheckIndexRetention_Hourly() throws IOException, UnsupportedAutoIndexException {
        // Hourly retention of 2, "now" = 13:30: only the 10:00 index is stale.
        String serializedIndexMetadata =
                "[{\"preCreate\": false, \"retentionType\": \"hourly\", \"retentionPeriod\": 2, \"indexName\": \"nf_errors_log\"}]";
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(serializedIndexMetadata);
        IndexMetadata indexMetadata = indexMetadataList.get(0);

        Set<String> indices = new HashSet<>(Arrays.asList(
                "nf_errors_log2017062210",
                "nf_errors_log2017062211",
                "nf_errors_log2017062212",
                "nf_errors_log2017062213",
                "nf_errors_log2017062214"));

        elasticsearchIndexManager.checkIndexRetention(
                elasticsearchClient, indices, indexMetadata, new DateTime("2017-06-22T13:30Z"));

        verify(elasticsearchIndexManager, times(1)).deleteIndices(
                any(Client.class), eq("nf_errors_log2017062210"), eq(AUTO_CREATE_INDEX_TIMEOUT));
    }

    @Test
    public void testCheckIndexRetention_Overlapping() throws Exception {
        // Two metadata entries whose name patterns overlap: retention must be
        // evaluated once per entry.
        String serializedIndexMetadata =
                "[{\"preCreate\": false, \"retentionType\": \"hourly\", \"retentionPeriod\": 2, \"indexName\": \"nf_errors_log\"},"
                        + "{\"preCreate\": false, \"retentionType\": \"yearly\", \"retentionPeriod\": 3, \"indexName\": \"nf_errors_log201712\"}]";
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(serializedIndexMetadata);

        doReturn(statsResponseFor(
                "nf_errors_log2017121110", "nf_errors_log2017121111",
                "nf_errors_log2017121112", "nf_errors_log2017121113",
                "nf_errors_log2017121114"))
                .when(elasticsearchIndexManager).getIndicesStatsResponse(elasticsearchClient);

        elasticsearchIndexManager.runIndexManagement(
                elasticsearchClient, indexMetadataList, new DateTime("2017-12-11T13:30Z"));

        verify(elasticsearchIndexManager, times(2)).checkIndexRetention(
                any(Client.class), anySet(), any(IndexMetadata.class), any(DateTime.class));
    }

    @After
    public void cleanUp() throws Exception {
        // The manager registers itself as an MBean; unregister so repeated
        // test runs in the same JVM do not collide.
        ManagementFactory.getPlatformMBeanServer().unregisterMBean(
                new ObjectName("com.netflix.raigad.scheduler:type=" + ElasticsearchIndexManager.class.getName()));
    }
}
5,071
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/indexmanagement/TestIndexMetadata.java
package com.netflix.raigad.indexmanagement;

import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.joda.time.Period;
import org.junit.Test;

import java.io.IOException;
import java.util.List;

import static org.junit.Assert.*;

/**
 * Tests for parsing serialized {@link IndexMetadata}: malformed or incomplete
 * entries must parse but be non-actionable, invalid JSON must raise parse /
 * mapping exceptions, ISO-8601 retention periods must be honored, and an
 * explicit indexNamePattern must override both retentionType and indexName.
 *
 * NOTE(review): assertEquals arguments were swapped (actual, expected)
 * throughout this class, producing misleading failure messages; they are now
 * in JUnit's (expected, actual) order, and null checks use assertNull.
 */
public class TestIndexMetadata {

    @Test
    public void testBadInputNoIndexName() throws IOException {
        // No index name: entry parses but is not actionable and matches nothing.
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"retentionType\": \"monthly\",\"retentionPeriod\": 20}]");

        assertEquals(1, indexMetadataList.size());
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("index201312"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("index20131212"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("a20141233"));
        assertFalse(indexMetadataList.get(0).isPreCreate());
        assertEquals("P20M", indexMetadataList.get(0).getRetentionPeriod().toString());
        assertNull(indexMetadataList.get(0).getIndexNamePattern());
        assertFalse(indexMetadataList.get(0).isActionable());
    }

    @Test
    public void testBadInputNoRetention() throws IOException {
        // No retention period: name filter works but the entry is not actionable.
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"retentionType\": \"monthly\", \"indexName\": \"nf_errors_log\"}]");

        assertEquals(1, indexMetadataList.size());
        assertTrue(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log201312"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log20131212"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log20141233"));
        assertFalse(indexMetadataList.get(0).isPreCreate());
        assertNull(indexMetadataList.get(0).getRetentionPeriod());
        assertFalse(indexMetadataList.get(0).isActionable());
    }

    @Test
    public void testBadInputInvalidSymbols() throws IOException {
        // Mangled key names ("retentionPeriod?", "?preCreate") are treated as
        // unknown fields: retention stays unset, entry is not actionable.
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"retentionType\":\"monthly\",\"indexName\":\"nf_errors_log\",\"retentionPeriod?:6,?preCreate\":false}]");

        assertEquals(1, indexMetadataList.size());
        assertTrue(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log201312"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log20131212"));
        assertFalse(indexMetadataList.get(0).getIndexNameFilter().filter("nf_errors_log20141233"));
        assertFalse(indexMetadataList.get(0).isPreCreate());
        assertNull(indexMetadataList.get(0).getRetentionPeriod());
        assertFalse(indexMetadataList.get(0).isActionable());
    }

    @Test(expected = JsonMappingException.class)
    public void testBadInputInvalidRetention() throws IOException {
        // Non-numeric retention with a numeric retentionType must fail mapping.
        IndexUtils.parseIndexMetadata(
                "[{\"retentionType\": \"monthly\", \"indexName\": \"nf_errors_log\",\"retentionPeriod\":\"A\"}]");
    }

    @Test(expected = JsonMappingException.class)
    public void testBadInputInvalidNamePattern() throws IOException {
        // A name pattern whose literal prefix is not quoted must fail mapping.
        IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"nf_errors_logYYYY\",\"retentionPeriod\":\"P1M\"}]");
    }

    @Test(expected = JsonParseException.class)
    public void testBadInputBadJson() throws IOException {
        // Truncated JSON must fail at the parser level.
        IndexUtils.parseIndexMetadata("[{\"retentionType\": \"monthly\", \"indexName\": \"nf_errors_log\",");
    }

    @Test
    public void testFiveMinuteRetention() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"'nf_errors_log'YYYY\",\"retentionPeriod\":\"PT5M\"}]");
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertEquals(Period.minutes(5), indexMetadata.getRetentionPeriod());
    }

    @Test
    public void testOneHourRetention() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"'nf_errors_log'YYYY\",\"retentionPeriod\":\"PT1H\"}]");
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertEquals(Period.hours(1), indexMetadata.getRetentionPeriod());
    }

    @Test
    public void test18MonthRetention() throws IOException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"'nf_errors_log'YYYY\",\"retentionPeriod\":\"P18M\"}]");
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertEquals(Period.months(18), indexMetadata.getRetentionPeriod());
    }

    @Test
    public void testNamePatternOverridesRetentionType() throws IOException {
        // An explicit indexNamePattern wins over the retentionType-derived pattern.
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"'nf_errors_log'YYYY\",\"retentionType\":\"daily\",\"retentionPeriod\":\"P18M\"}]");
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertEquals("'nf_errors_log'YYYY", indexMetadata.getIndexNamePattern());
    }

    @Test
    public void testNamePatternOverridesIndexName() throws IOException {
        // An explicit indexNamePattern wins over the indexName-derived pattern.
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[{\"indexNamePattern\": \"'nf_errors_log'YYYY\",\"indexName\":\"errors\",\"retentionPeriod\":\"P18M\"}]");
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertEquals("'nf_errors_log'YYYY", indexMetadata.getIndexNamePattern());
    }

    @Test
    public void testMixedRetention() throws IOException {
        // One entry per retention type; each derives its own name pattern,
        // retention period unit, and preCreate flag.
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[ { \"retentionType\": \"yearly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\" },"
                        + "{ \"retentionType\": \"monthly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\" },"
                        + "{ \"retentionType\": \"hourly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\", \"preCreate\": \"true\" },"
                        + "{ \"retentionType\": \"daily\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\", \"preCreate\": \"false\" }]");

        assertEquals(4, indexMetadataList.size());

        // Yearly entry
        IndexMetadata indexMetadata = indexMetadataList.get(0);
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log2013"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_log2013"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log201312"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20131212"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20141233"));
        assertFalse(indexMetadata.isPreCreate());
        assertEquals("'nf_errors_log'YYYY", indexMetadata.getIndexNamePattern());
        assertEquals("P20Y", indexMetadata.getRetentionPeriod().toString());
        assertTrue(indexMetadata.isActionable());

        // Monthly entry
        indexMetadata = indexMetadataList.get(1);
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log201312"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_lgg201312"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20131212"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20141233"));
        assertFalse(indexMetadata.isPreCreate());
        assertEquals("'nf_errors_log'YYYYMM", indexMetadata.getIndexNamePattern());
        assertEquals("P20M", indexMetadata.getRetentionPeriod().toString());
        assertTrue(indexMetadata.isActionable());

        // Hourly entry (preCreate=true)
        indexMetadata = indexMetadataList.get(2);
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log2013121201"));
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log2013121200"));
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log2013121223"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_lgg2013121223"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log2013121224"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20141233"));
        assertTrue(indexMetadata.isPreCreate());
        assertEquals("'nf_errors_log'YYYYMMddHH", indexMetadata.getIndexNamePattern());
        assertEquals("PT20H", indexMetadata.getRetentionPeriod().toString());
        assertTrue(indexMetadata.isActionable());

        // Daily entry (preCreate=false)
        indexMetadata = indexMetadataList.get(3);
        assertTrue(indexMetadata.getIndexNameFilter().filter("nf_errors_log20131212"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_lgg20141230"));
        assertFalse(indexMetadata.getIndexNameFilter().filter("nf_errors_log20141233"));
        assertFalse(indexMetadata.isPreCreate());
        assertEquals("'nf_errors_log'YYYYMMdd", indexMetadata.getIndexNamePattern());
        assertEquals("P20D", indexMetadata.getRetentionPeriod().toString());
        assertTrue(indexMetadata.isActionable());
    }
}
5,072
0
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/test/java/com/netflix/raigad/indexmanagement/TestIndexUtils.java
package com.netflix.raigad.indexmanagement;

import com.netflix.raigad.indexmanagement.exception.UnsupportedAutoIndexException;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.junit.Test;

import java.io.IOException;
import java.util.List;

import static org.junit.Assert.assertEquals;

/**
 * Tests for {@link IndexMetadata} date arithmetic: the retention cutoff
 * computed backwards from "now" for each retention type, and the name of the
 * next index to pre-create.
 */
public class TestIndexUtils {

    /** Parses an integer timestamp with the given Joda pattern into a UTC DateTime. */
    private static DateTime dateTime(int v, String fmt) {
        return DateTimeFormat.forPattern(fmt).withZoneUTC().parseDateTime(String.valueOf(v));
    }

    @Test
    public void testPastRetentionCutoffDate() throws IOException, UnsupportedAutoIndexException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[ { \"retentionType\": \"yearly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\" },"
                        + "{ \"retentionType\": \"monthly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\" },"
                        + "{ \"retentionType\": \"hourly\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\", \"preCreate\": \"true\" },"
                        + "{ \"retentionType\": \"hourly\", \"retentionPeriod\": 40, \"indexName\": \"nf_errors_log\", \"preCreate\": \"true\" },"
                        + "{ \"retentionType\": \"daily\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log\", \"preCreate\": \"false\" }]");

        IndexMetadata yearlyMetadata = indexMetadataList.get(0);
        IndexMetadata monthlyMetadata = indexMetadataList.get(1);
        IndexMetadata hourlyMetadata20 = indexMetadataList.get(2);
        IndexMetadata hourlyMetadata40 = indexMetadataList.get(3);
        IndexMetadata dailyMetadata = indexMetadataList.get(4);

        DateTime now = new DateTime("2017-11-15T12:34:56Z");

        // Cutoff = now minus the retention period, truncated to the type's granularity.
        assertEquals(dateTime(1997, "yyyy"), yearlyMetadata.getPastRetentionCutoffDate(now));
        assertEquals(dateTime(201603, "yyyyMM"), monthlyMetadata.getPastRetentionCutoffDate(now));
        assertEquals(dateTime(20171026, "yyyyMMdd"), dailyMetadata.getPastRetentionCutoffDate(now));
        assertEquals(dateTime(2017111416, "yyyyMMddHH"), hourlyMetadata20.getPastRetentionCutoffDate(now));
        assertEquals(dateTime(2017111320, "yyyyMMddHH"), hourlyMetadata40.getPastRetentionCutoffDate(now));
    }

    @Test
    public void testIndexNameToPreCreate() throws IOException, UnsupportedAutoIndexException {
        List<IndexMetadata> indexMetadataList = IndexUtils.parseIndexMetadata(
                "[ { \"retentionType\": \"yearly\", \"retentionPeriod\": 20, \"indexName\": \"index\" },"
                        + "{ \"retentionType\": \"monthly\", \"retentionPeriod\": 20, \"indexName\": \"0\" },"
                        + "{ \"retentionType\": \"hourly\", \"retentionPeriod\": 20, \"indexName\": \"index1\", \"preCreate\": \"true\" },"
                        + "{ \"retentionType\": \"daily\", \"retentionPeriod\": 20, \"indexName\": \"nf_errors_log_useast1\", \"preCreate\": \"false\" }]");

        IndexMetadata yearlyMetadata = indexMetadataList.get(0);
        IndexMetadata monthlyMetadata = indexMetadataList.get(1);
        IndexMetadata hourlyMetadata = indexMetadataList.get(2);
        IndexMetadata dailyMetadata = indexMetadataList.get(3);

        DateTime now = new DateTime("2017-11-15T12:34:56Z");

        // Pre-created index name = base name + the NEXT period's timestamp suffix.
        assertEquals("index2018", yearlyMetadata.getIndexNameToPreCreate(now));
        assertEquals("0201712", monthlyMetadata.getIndexNameToPreCreate(now));
        assertEquals("nf_errors_log_useast120171116", dailyMetadata.getIndexNameToPreCreate(now));
        assertEquals("index12017111513", hourlyMetadata.getIndexNameToPreCreate(now));
    }
}
5,073
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/RaigadConfiguration.java
/** * Copyright 2017 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.raigad.configuration; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.AmazonEC2Client; import com.amazonaws.services.ec2.model.*; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.inject.Inject; import com.google.inject.Singleton; import com.netflix.config.*; import com.netflix.raigad.aws.ICredential; import com.netflix.raigad.utils.RetriableCallable; import com.netflix.raigad.utils.SystemUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; @Singleton public class RaigadConfiguration implements IConfiguration { private static final Logger logger = LoggerFactory.getLogger(RaigadConfiguration.class); public static final String MY_WEBAPP_NAME = "Raigad"; private static final String CONFIG_CLUSTER_NAME = MY_WEBAPP_NAME + ".es.clustername"; private static final String CONFIG_AVAILABILITY_ZONES = MY_WEBAPP_NAME + ".zones.available"; private static final String CONFIG_DATA_LOCATION = MY_WEBAPP_NAME + ".es.data.location"; private static final String CONFIG_LOG_LOCATION = MY_WEBAPP_NAME + ".es.log.location"; private static final String CONFIG_ES_START_SCRIPT = MY_WEBAPP_NAME + ".es.startscript"; private static final String CONFIG_ES_STOP_SCRIPT = MY_WEBAPP_NAME + ".es.stopscript"; private static final String 
CONFIG_ES_HOME = MY_WEBAPP_NAME + ".es.home"; private static final String CONFIG_FD_PING_INTERVAL = MY_WEBAPP_NAME + ".es.fd.pinginterval"; private static final String CONFIG_FD_PING_TIMEOUT = MY_WEBAPP_NAME + ".es.fd.pingtimeout"; private static final String CONFIG_HTTP_PORT = MY_WEBAPP_NAME + ".es.http.port"; private static final String CONFIG_TRANSPORT_TCP_PORT = MY_WEBAPP_NAME + ".es.transport.tcp.port"; private static final String CONFIG_MIN_MASTER_NODES = MY_WEBAPP_NAME + ".es.min.master.nodes"; private static final String CONFIG_NUM_REPLICAS = MY_WEBAPP_NAME + ".es.num.replicas"; private static final String CONFIG_NUM_SHARDS = MY_WEBAPP_NAME + ".es.num.shards"; private static final String CONFIG_PING_TIMEOUT = MY_WEBAPP_NAME + ".es.pingtimeout"; private static final String CONFIG_INDEX_REFRESH_INTERVAL = MY_WEBAPP_NAME + ".es.index.refresh.interval"; private static final String CONFIG_IS_MASTER_QUORUM_ENABLED = MY_WEBAPP_NAME + ".es.master.quorum.enabled"; private static final String CONFIG_IS_PING_MULTICAST_ENABLED = MY_WEBAPP_NAME + ".es.ping.multicast.enabled"; private static final String CONFIG_ES_DISCOVERY_TYPE = MY_WEBAPP_NAME + ".es.discovery.type"; private static final String CONFIG_BOOTCLUSTER_NAME = MY_WEBAPP_NAME + ".bootcluster"; private static final String CONFIG_INSTANCE_DATA_RETRIEVER = MY_WEBAPP_NAME + ".instanceDataRetriever"; private static final String CONFIG_CREDENTIAL_PROVIDER = MY_WEBAPP_NAME + ".credentialProvider"; private static final String CONFIG_SECURITY_GROUP_NAME = MY_WEBAPP_NAME + ".security.group.name"; private static final String CONFIG_IS_MULTI_DC_ENABLED = MY_WEBAPP_NAME + ".es.multi.dc.enabled"; private static final String CONFIG_IS_ASG_BASED_DEPLOYMENT_ENABLED = MY_WEBAPP_NAME + ".es.asg.based.deployment.enabled"; private static final String CONFIG_ES_CLUSTER_ROUTING_ATTRIBUTES = MY_WEBAPP_NAME + ".es.cluster.routing.attributes"; private static final String CONFIG_ES_PROCESS_NAME = MY_WEBAPP_NAME + ".es.processname"; 
private static final String CONFIG_ES_SHARD_ALLOCATION_ATTRIBUTE = MY_WEBAPP_NAME + ".es.shard.allocation.attribute"; private static final String CONFIG_IS_SHARD_ALLOCATION_POLICY_ENABLED = MY_WEBAPP_NAME + ".shard.allocation.policy.enabled"; private static final String CONFIG_EXTRA_PARAMS = MY_WEBAPP_NAME + ".extra.params"; private static final String CONFIG_IS_DEBUG_ENABLED = MY_WEBAPP_NAME + ".debug.enabled"; private static final String CONFIG_IS_SHARDS_PER_NODE_ENABLED = MY_WEBAPP_NAME + ".shards.per.node.enabled"; private static final String CONFIG_SHARDS_PER_NODE = MY_WEBAPP_NAME + ".shards.per.node"; private static final String CONFIG_INDEX_METADATA = MY_WEBAPP_NAME + ".index.metadata"; private static final String CONFIG_IS_INDEX_AUTOCREATION_ENABLED = MY_WEBAPP_NAME + ".index.autocreation.enabled"; private static final String CONFIG_AUTOCREATE_INDEX_TIMEOUT = MY_WEBAPP_NAME + ".autocreate.index.timeout"; private static final String CONFIG_AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS = MY_WEBAPP_NAME + ".autocreate.index.initial.start.delay.seconds"; private static final String CONFIG_AUTOCREATE_INDEX_SCHEDULE_MINUTES = MY_WEBAPP_NAME + ".autocreate.index.schedule.minutes"; private static final String CONFIG_BACKUP_LOCATION = MY_WEBAPP_NAME + ".backup.location"; private static final String CONFIG_BACKUP_HOUR = MY_WEBAPP_NAME + ".backup.hour"; private static final String CONFIG_BACKUP_IS_SNAPSHOT_ENABLED = MY_WEBAPP_NAME + ".snapshot.enabled"; private static final String CONFIG_BACKUP_IS_HOURLY_SNAPSHOT_ENABLED = MY_WEBAPP_NAME + ".hourly.snapshot.enabled"; private static final String CONFIG_BACKUP_COMMA_SEPARATED_INDICES = MY_WEBAPP_NAME + ".backup.comma.separated.indices"; private static final String CONFIG_BACKUP_PARTIAL_INDICES = MY_WEBAPP_NAME + ".backup.partial.indices"; private static final String CONFIG_BACKUP_INCLUDE_GLOBAL_STATE = MY_WEBAPP_NAME + ".backup.include.global.state"; private static final String CONFIG_BACKUP_WAIT_FOR_COMPLETION = 
MY_WEBAPP_NAME + ".backup.wait.for.completion"; private static final String CONFIG_BACKUP_INCLUDE_INDEX_NAME = MY_WEBAPP_NAME + ".backup.include.index.name"; private static final String CONFIG_BACKUP_CRON_TIMER_SECONDS = MY_WEBAPP_NAME + ".backup.cron.timer.seconds"; private static final String CONFIG_IS_RESTORE_ENABLED = MY_WEBAPP_NAME + ".restore.enabled"; private static final String CONFIG_RESTORE_REPOSITORY_NAME = MY_WEBAPP_NAME + ".restore.repository.name"; private static final String CONFIG_RESTORE_REPOSITORY_TYPE = MY_WEBAPP_NAME + ".restore.repository.type"; private static final String CONFIG_RESTORE_SNAPSHOT_NAME = MY_WEBAPP_NAME + ".restore.snapshot.name"; private static final String CONFIG_RESTORE_COMMA_SEPARATED_INDICES = MY_WEBAPP_NAME + ".restore.comma.separated.indices"; private static final String CONFIG_RESTORE_TASK_INITIAL_START_DELAY_SECONDS = MY_WEBAPP_NAME + ".restore.task.initial.start.delay.seconds"; private static final String CONFIG_RESTORE_SOURCE_CLUSTER_NAME = MY_WEBAPP_NAME + ".restore.source.cluster.name"; private static final String CONFIG_RESTORE_SOURCE_REPO_REGION = MY_WEBAPP_NAME + ".restore.source.repo.region"; private static final String CONFIG_RESTORE_LOCATION = MY_WEBAPP_NAME + ".restore.location"; private static final String CONFIG_AM_I_TRIBE_NODE = MY_WEBAPP_NAME + ".tribe.node.enabled"; private static final String CONFIG_AM_I_WRITE_ENABLED_TRIBE_NODE = MY_WEBAPP_NAME + ".tribe.node.write.enabled"; private static final String CONFIG_AM_I_METADATA_ENABLED_TRIBE_NODE = MY_WEBAPP_NAME + ".tribe.node.metadata.enabled"; private static final String CONFIG_TRIBE_COMMA_SEPARATED_SOURCE_CLUSTERS = MY_WEBAPP_NAME + ".tribe.comma.separated.source.clusters"; private static final String CONFIG_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE = MY_WEBAPP_NAME + ".tribe.node.source.cluster.enabled"; private static final String CONFIG_TRIBE_COMMA_SEPARATED_TRIBE_CLUSTERS = MY_WEBAPP_NAME + ".tribe.comma.separated.tribe.clusters"; private static final 
String CONFIG_IS_NODEMISMATCH_WITH_DISCOVERY_ENABLED = MY_WEBAPP_NAME + ".nodemismatch.health.metrics.enabled"; private static final String CONFIG_DESIRED_NUM_NODES_IN_CLUSTER = MY_WEBAPP_NAME + ".desired.num.nodes.in.cluster"; private static final String CONFIG_IS_EUREKA_HEALTH_CHECK_ENABLED = MY_WEBAPP_NAME + ".eureka.health.check.enabled"; private static final String CONFIG_IS_LOCAL_MODE_ENABLED = MY_WEBAPP_NAME + ".local.mode.enabled"; private static final String CONFIG_CASSANDRA_KEYSPACE_NAME = MY_WEBAPP_NAME + ".cassandra.keyspace.name"; private static final String CONFIG_CASSANDRA_THRIFT_PORT = MY_WEBAPP_NAME + ".cassandra.thrift.port"; private static final String CONFIG_IS_EUREKA_HOST_SUPPLIER_ENABLED = MY_WEBAPP_NAME + ".eureka.host.supplier.enabled"; private static final String CONFIG_COMMA_SEPARATED_CASSANDRA_HOSTNAMES = MY_WEBAPP_NAME + ".comma.separated.cassandra.hostnames"; private static final String CONFIG_IS_SECURITY_GROUP_IN_MULTI_DC = MY_WEBAPP_NAME + ".security.group.in.multi.dc.enabled"; private static final String CONFIG_IS_KIBANA_SETUP_REQUIRED = MY_WEBAPP_NAME + ".kibana.setup.required"; private static final String CONFIG_KIBANA_PORT = MY_WEBAPP_NAME + ".kibana.port"; private static final String CONFIG_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC = MY_WEBAPP_NAME + ".tribe.node.source.cluster.enabled.in.multi.dc"; private static final String CONFIG_REPORT_METRICS_FROM_MASTER_ONLY = MY_WEBAPP_NAME + ".report.metrics.from.master.only"; private static final String CONFIG_TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT = MY_WEBAPP_NAME + ".tribe.preferred.cluster.id.on.conflict"; // Amazon specific private static final String CONFIG_ASG_NAME = MY_WEBAPP_NAME + ".az.asgname"; private static final String CONFIG_STACK_NAME = MY_WEBAPP_NAME + ".az.stack"; private static final String CONFIG_REGION_NAME = MY_WEBAPP_NAME + ".az.region"; private static final String CONFIG_ACL_GROUP_NAME = MY_WEBAPP_NAME + ".acl.groupname"; private static final String 
CONFIG_ACL_GROUP_NAME_FOR_VPC = MY_WEBAPP_NAME + ".acl.groupname.vpc"; private static Boolean IS_DEPLOYED_IN_VPC = false; private static Boolean IS_VPC_EXTERNAL = false; private static final String MAC_ID = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/mac"); private static String VPC_ID = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/network/interfaces/macs/" + MAC_ID + "/vpc-id").trim(); private static String PUBLIC_HOSTNAME, PUBLIC_IP, ACL_GROUP_ID_FOR_VPC; { if (StringUtils.equals(VPC_ID, SystemUtils.NOT_FOUND_STR)) { PUBLIC_HOSTNAME = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/public-hostname").trim(); PUBLIC_IP = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/public-ipv4").trim(); } else { IS_DEPLOYED_IN_VPC = true; IS_VPC_EXTERNAL = true; PUBLIC_HOSTNAME = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/public-hostname").trim(); if (StringUtils.equals(PUBLIC_HOSTNAME, SystemUtils.NOT_FOUND_STR)) { // Looks like this is VPC internal, trying local hostname PUBLIC_HOSTNAME = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/local-hostname").trim(); IS_VPC_EXTERNAL = false; } logger.info("Node host name initialized with {}", PUBLIC_HOSTNAME); PUBLIC_IP = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/public-ipv4").trim(); if (StringUtils.equals(PUBLIC_IP, SystemUtils.NOT_FOUND_STR)) { // Looks like this is VPC internal, trying local IP PUBLIC_IP = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/local-ipv4").trim(); IS_VPC_EXTERNAL = false; } logger.info("Node IP initialized with {}", PUBLIC_IP); } } private static final String RAC = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/placement/availability-zone"); private static final String LOCAL_HOSTNAME = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/local-hostname").trim(); private static final String 
LOCAL_IP = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/local-ipv4").trim(); private static final String INSTANCE_ID = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/instance-id").trim(); private static final String INSTANCE_TYPE = SystemUtils.getDataFromUrl("http://169.254.169.254/latest/meta-data/instance-type").trim(); private static final String ES_NODE_NAME = RAC + "." + INSTANCE_ID; private static String ASG_NAME = System.getenv("ASG_NAME"); private static String STACK_NAME = System.getenv("STACK_NAME"); private static String REGION = System.getenv("EC2_REGION"); // Defaults private final String DEFAULT_CLUSTER_NAME = "es_samplecluster"; private List<String> DEFAULT_AVAILABILITY_ZONES = ImmutableList.of(); private static final String DEFAULT_DATA_LOCATION = "/mnt/data/es"; private static final String DEFAULT_LOG_LOCATION = "/logs/es"; private static final String DEFAULT_YAML_LOCATION = "/apps/elasticsearch/config/elasticsearch.yml"; private static final String DEFAULT_ES_START_SCRIPT = "/etc/init.d/elasticsearch start"; private static final String DEFAULT_ES_STOP_SCRIPT = "/etc/init.d/elasticsearch stop"; private static final String DEFAULT_ES_HOME = "/apps/elasticsearch"; private static final String DEFAULT_FD_PING_INTERVAL = "30s"; private static final String DEFAULT_FD_PING_TIMEOUT = "30s"; private static final int DEFAULT_HTTP_PORT = 7104; private static final int DEFAULT_TRANSPORT_TCP_PORT = 7102; private static final int DEFAULT_MIN_MASTER_NODES = 1; private static final int DEFAULT_NUM_REPLICAS = 2; private static final int DEFAULT_NUM_SHARDS = 5; private static final String DEFAULT_PING_TIMEOUT = "60s"; private static final String DEFAULT_INDEX_REFRESH_INTERVAL = "1m"; private static final boolean DEFAULT_IS_MASTER_QUORUM_ENABLED = true; private static final boolean DEFAULT_IS_PING_MULTICAST_ENABLED = false; private static final String DEFAULT_CONFIG_BOOTCLUSTER_NAME = "cass_metadata"; private static final 
String DEFAULT_CREDENTIAL_PROVIDER = "com.netflix.raigad.aws.IAMCredential"; private static final String DEFAULT_ES_DISCOVERY_TYPE = "raigad"; private static final boolean DEFAULT_IS_MULTI_DC_ENABLED = false; private static final boolean DEFAULT_IS_ASG_BASED_DEPLOYMENT_ENABLED = false; private static final String DEFAULT_ES_CLUSTER_ROUTING_ATTRIBUTES = "rack_id"; private static final String DEFAULT_ES_PROCESS_NAME = "org.elasticsearch.bootstrap.Elasticsearch"; private static final boolean DEFAULT_IS_SHARD_ALLOCATION_POLICY_ENABLED = false; private static final String DEFAULT_ES_SHARD_ALLOCATION_ATTRIBUTE = "all"; private static final String DEFAULT_CONFIG_EXTRA_PARAMS = null; private static final boolean DEFAULT_IS_DEBUG_ENABLED = false; private static final boolean DEFAULT_IS_SHARDS_PER_NODE_ENABLED = false; private static final int DEFAULT_SHARDS_PER_NODE = 5; private static final boolean DEFAULT_IS_INDEX_AUTOCREATION_ENABLED = false; private static final int DEFAULT_AUTOCREATE_INDEX_TIMEOUT = 300000; private static final int DEFAULT_AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS = 300; private static final int DEFAULT_AUTOCREATE_INDEX_SCHEDULE_MINUTES = 10; private static final String DEFAULT_INDEX_METADATA = null; private static final String DEFAULT_BACKUP_LOCATION = "elasticsearch-us-east-1-backup"; private static final int DEFAULT_BACKUP_HOUR = 1; private static final String DEFAULT_BACKUP_COMMA_SEPARATED_INDICES = "_all"; private static final boolean DEFAULT_BACKUP_PARTIAL_INDICES = false; private static final boolean DEFAULT_BACKUP_INCLUDE_GLOBAL_STATE = false; private static final boolean DEFAULT_BACKUP_WAIT_FOR_COMPLETION = true; private static final boolean DEFAULT_BACKUP_INCLUDE_INDEX_NAME = false; private static final boolean DEFAULT_IS_RESTORE_ENABLED = false; private static final String DEFAULT_RESTORE_REPOSITORY_NAME = "testrepo"; private static final String DEFAULT_RESTORE_REPOSITORY_TYPE = "s3"; private static final String 
DEFAULT_RESTORE_SNAPSHOT_NAME = ""; private static final String DEFAULT_RESTORE_COMMA_SEPARATED_INDICES = "_all"; private static final int DEFAULT_RESTORE_TASK_INITIAL_START_DELAY_SECONDS = 600; private static final String DEFAULT_RESTORE_SOURCE_CLUSTER_NAME = ""; private static final String DEFAULT_RESTORE_SOURCE_REPO_REGION = "us-east-1"; private static final String DEFAULT_RESTORE_LOCATION = "elasticsearch-us-east-1-backup"; private static final boolean DEFAULT_BACKUP_IS_SNAPSHOT_ENABLED = false; private static final boolean DEFAULT_BACKUP_IS_HOURLY_SNAPSHOT_ENABLED = false; private static final long DEFAULT_BACKUP_CRON_TIMER_SECONDS = 3600; private static final boolean DEFAULT_AM_I_TRIBE_NODE = false; private static final boolean DEFAULT_AM_I_WRITE_ENABLED_TRIBE_NODE = false; private static final boolean DEFAULT_AM_I_METADATA_ENABLED_TRIBE_NODE = false; private static final String DEFAULT_TRIBE_COMMA_SEPARATED_SOURCE_CLUSTERS = ""; private static final boolean DEFAULT_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE = false; private static final String DEFAULT_TRIBE_COMMA_SEPARATED_TRIBE_CLUSTERS = ""; private static final boolean DEFAULT_IS_NODEMISMATCH_WITH_DISCOVERY_ENABLED = false; private static final int DEFAULT_DESIRED_NUM_NODES_IN_CLUSTER = 6; private static final boolean DEFAULT_IS_EUREKA_HEALTH_CHECK_ENABLED = true; private static final boolean DEFAULT_IS_LOCAL_MODE_ENABLED = false; private static final String DEFAULT_CASSANDRA_KEYSPACE_NAME = "escarbootstrap"; private static final int DEFAULT_CASSANDRA_THRIFT_PORT = 7102; private static final boolean DEFAULT_IS_EUREKA_HOST_SUPPLIER_ENABLED = true; private static final String DEFAULT_COMMA_SEPARATED_CASSANDRA_HOSTNAMES = ""; private static final boolean DEFAULT_IS_SECURITY_GROUP_IN_MULTI_DC = false; private static final boolean DEFAULT_IS_KIBANA_SETUP_REQUIRED = false; private static final int DEFAULT_KIBANA_PORT = 8001; private static final boolean DEFAULT_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC = false; 
private static final boolean DEFAULT_REPORT_METRICS_FROM_MASTER_ONLY = false; private static final String DEFAULT_TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT = "t0"; private static final String DEFAULT_ACL_GROUP_NAME_FOR_VPC = "es_samplecluster"; private final IConfigSource config; private final ICredential provider; private final DynamicStringProperty CREDENTIAL_PROVIDER = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_CREDENTIAL_PROVIDER, DEFAULT_CREDENTIAL_PROVIDER); private final DynamicStringProperty ES_STARTUP_SCRIPT_LOCATION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_START_SCRIPT, DEFAULT_ES_START_SCRIPT); private final DynamicStringProperty ES_STOP_SCRIPT_LOCATION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_STOP_SCRIPT, DEFAULT_ES_STOP_SCRIPT); private final DynamicStringProperty DATA_LOCATION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_DATA_LOCATION, DEFAULT_DATA_LOCATION); private final DynamicStringProperty LOG_LOCATION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_LOG_LOCATION, DEFAULT_LOG_LOCATION); private final DynamicStringProperty ES_HOME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_HOME, DEFAULT_ES_HOME); private final DynamicStringProperty FD_PING_INTERVAL = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_FD_PING_INTERVAL, DEFAULT_FD_PING_INTERVAL); private final DynamicStringProperty FD_PING_TIMEOUT = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_FD_PING_TIMEOUT, DEFAULT_FD_PING_TIMEOUT); private final DynamicIntProperty ES_HTTP_PORT = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_HTTP_PORT, DEFAULT_HTTP_PORT); private final DynamicIntProperty ES_TRANSPORT_TCP_PORT = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_TRANSPORT_TCP_PORT, DEFAULT_TRANSPORT_TCP_PORT); private final DynamicIntProperty MINIMUM_MASTER_NODES = 
DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_MIN_MASTER_NODES, DEFAULT_MIN_MASTER_NODES); private final DynamicIntProperty NUM_REPLICAS = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_NUM_REPLICAS, DEFAULT_NUM_REPLICAS); private final DynamicIntProperty NUM_SHARDS = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_NUM_SHARDS, DEFAULT_NUM_SHARDS); private final DynamicStringProperty PING_TIMEOUT = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_PING_TIMEOUT, DEFAULT_PING_TIMEOUT); private final DynamicStringProperty INDEX_REFRESH_INTERVAL = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_INDEX_REFRESH_INTERVAL, DEFAULT_INDEX_REFRESH_INTERVAL); private final DynamicBooleanProperty IS_MASTER_QUORUM_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_MASTER_QUORUM_ENABLED, DEFAULT_IS_MASTER_QUORUM_ENABLED); private final DynamicBooleanProperty IS_PING_MULTICAST_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_PING_MULTICAST_ENABLED, DEFAULT_IS_PING_MULTICAST_ENABLED); private final DynamicStringProperty BOOTCLUSTER_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_BOOTCLUSTER_NAME, DEFAULT_CONFIG_BOOTCLUSTER_NAME); private final DynamicStringProperty ES_DISCOVERY_TYPE = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_DISCOVERY_TYPE, DEFAULT_ES_DISCOVERY_TYPE); private final DynamicStringProperty SECURITY_GROUP_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_SECURITY_GROUP_NAME, DEFAULT_CLUSTER_NAME); private final DynamicBooleanProperty IS_MULTI_DC_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_MULTI_DC_ENABLED, DEFAULT_IS_MULTI_DC_ENABLED); private final DynamicBooleanProperty IS_ASG_BASED_DEPLOYMENT_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_ASG_BASED_DEPLOYMENT_ENABLED, DEFAULT_IS_ASG_BASED_DEPLOYMENT_ENABLED); private final 
DynamicStringProperty ES_CLUSTER_ROUTING_ATTRIBUTES = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_CLUSTER_ROUTING_ATTRIBUTES, DEFAULT_ES_CLUSTER_ROUTING_ATTRIBUTES); private final DynamicBooleanProperty IS_SHARD_ALLOCATION_POLICY_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_SHARD_ALLOCATION_POLICY_ENABLED, DEFAULT_IS_SHARD_ALLOCATION_POLICY_ENABLED); private final DynamicStringProperty ES_SHARD_ALLOCATION_ATTRIBUTE = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_SHARD_ALLOCATION_ATTRIBUTE, DEFAULT_ES_SHARD_ALLOCATION_ATTRIBUTE); private final DynamicStringProperty EXTRA_PARAMS = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_EXTRA_PARAMS, DEFAULT_CONFIG_EXTRA_PARAMS); private final DynamicBooleanProperty IS_DEBUG_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_DEBUG_ENABLED, DEFAULT_IS_DEBUG_ENABLED); private final DynamicBooleanProperty IS_SHARDS_PER_NODE_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_SHARDS_PER_NODE_ENABLED, DEFAULT_IS_SHARDS_PER_NODE_ENABLED); private final DynamicIntProperty TOTAL_SHARDS_PER_NODES = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_SHARDS_PER_NODE, DEFAULT_SHARDS_PER_NODE); private final DynamicStringProperty INDEX_METADATA = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_INDEX_METADATA, DEFAULT_INDEX_METADATA); private final DynamicBooleanProperty IS_INDEX_AUTOCREATION_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_INDEX_AUTOCREATION_ENABLED, DEFAULT_IS_INDEX_AUTOCREATION_ENABLED); private final DynamicIntProperty AUTOCREATE_INDEX_TIMEOUT = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_AUTOCREATE_INDEX_TIMEOUT, DEFAULT_AUTOCREATE_INDEX_TIMEOUT); private final DynamicIntProperty AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS = 
DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS, DEFAULT_AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS); private final DynamicIntProperty AUTOCREATE_INDEX_SCHEDULE_MINUTES = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_AUTOCREATE_INDEX_SCHEDULE_MINUTES, DEFAULT_AUTOCREATE_INDEX_SCHEDULE_MINUTES); private final DynamicStringProperty ES_PROCESS_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ES_PROCESS_NAME, DEFAULT_ES_PROCESS_NAME); private final DynamicStringProperty BUCKET_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_BACKUP_LOCATION, DEFAULT_BACKUP_LOCATION); private final DynamicIntProperty BACKUP_HOUR = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_BACKUP_HOUR, DEFAULT_BACKUP_HOUR); private final DynamicStringProperty COMMA_SEPARATED_INDICES_TO_BACKUP = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_BACKUP_COMMA_SEPARATED_INDICES, DEFAULT_BACKUP_COMMA_SEPARATED_INDICES); private final DynamicBooleanProperty PARTIALLY_BACKUP_INDICES = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_PARTIAL_INDICES, DEFAULT_BACKUP_PARTIAL_INDICES); private final DynamicBooleanProperty INCLUDE_GLOBAL_STATE_DURING_BACKUP = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_INCLUDE_GLOBAL_STATE, DEFAULT_BACKUP_INCLUDE_GLOBAL_STATE); private final DynamicBooleanProperty WAIT_FOR_COMPLETION_OF_BACKUP = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_WAIT_FOR_COMPLETION, DEFAULT_BACKUP_WAIT_FOR_COMPLETION); private final DynamicBooleanProperty INCLUDE_INDEX_NAME_IN_SNAPSHOT_BACKUP = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_INCLUDE_INDEX_NAME, DEFAULT_BACKUP_INCLUDE_INDEX_NAME); private final DynamicBooleanProperty IS_RESTORE_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_RESTORE_ENABLED, DEFAULT_IS_RESTORE_ENABLED); private 
final DynamicStringProperty RESTORE_REPOSITORY_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_REPOSITORY_NAME, DEFAULT_RESTORE_REPOSITORY_NAME); private final DynamicStringProperty RESTORE_REPOSITORY_TYPE = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_REPOSITORY_TYPE, DEFAULT_RESTORE_REPOSITORY_TYPE); private final DynamicStringProperty RESTORE_SNAPSHOT_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_SNAPSHOT_NAME, DEFAULT_RESTORE_SNAPSHOT_NAME); private final DynamicStringProperty COMMA_SEPARATED_INDICES_TO_RESTORE = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_COMMA_SEPARATED_INDICES, DEFAULT_RESTORE_COMMA_SEPARATED_INDICES); private final DynamicIntProperty RESTORE_TASK_INITIAL_START_DELAY_SECONDS = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_RESTORE_TASK_INITIAL_START_DELAY_SECONDS, DEFAULT_RESTORE_TASK_INITIAL_START_DELAY_SECONDS); private final DynamicStringProperty RESTORE_SOURCE_CLUSTER_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_SOURCE_CLUSTER_NAME, DEFAULT_RESTORE_SOURCE_CLUSTER_NAME); private final DynamicStringProperty RESTORE_SOURCE_REPO_REGION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_SOURCE_REPO_REGION, DEFAULT_RESTORE_SOURCE_REPO_REGION); private final DynamicStringProperty RESTORE_LOCATION = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_RESTORE_LOCATION, DEFAULT_RESTORE_LOCATION); private final DynamicBooleanProperty IS_SNAPSHOT_BACKUP_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_IS_SNAPSHOT_ENABLED, DEFAULT_BACKUP_IS_SNAPSHOT_ENABLED); private final DynamicBooleanProperty IS_HOURLY_SNAPSHOT_BACKUP_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_BACKUP_IS_HOURLY_SNAPSHOT_ENABLED, DEFAULT_BACKUP_IS_HOURLY_SNAPSHOT_ENABLED); private final DynamicLongProperty BACKUP_CRON_TIMER_SECONDS = 
DynamicPropertyFactory.getInstance().getLongProperty(CONFIG_BACKUP_CRON_TIMER_SECONDS, DEFAULT_BACKUP_CRON_TIMER_SECONDS); private final DynamicBooleanProperty AM_I_TRIBE_NODE = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_AM_I_TRIBE_NODE, DEFAULT_AM_I_TRIBE_NODE); private final DynamicBooleanProperty AM_I_WRITE_ENABLED_TRIBE_NODE = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_AM_I_WRITE_ENABLED_TRIBE_NODE, DEFAULT_AM_I_WRITE_ENABLED_TRIBE_NODE); private final DynamicBooleanProperty AM_I_METADATA_ENABLED_TRIBE_NODE = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_AM_I_METADATA_ENABLED_TRIBE_NODE, DEFAULT_AM_I_METADATA_ENABLED_TRIBE_NODE); private final DynamicStringProperty COMMA_SEPARATED_SOURCE_CLUSTERS_IN_TRIBE = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_TRIBE_COMMA_SEPARATED_SOURCE_CLUSTERS, DEFAULT_TRIBE_COMMA_SEPARATED_SOURCE_CLUSTERS); private final DynamicBooleanProperty AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE, DEFAULT_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE); private final DynamicStringProperty COMMA_SEPARATED_TRIBE_CLUSTERS = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_TRIBE_COMMA_SEPARATED_TRIBE_CLUSTERS, DEFAULT_TRIBE_COMMA_SEPARATED_TRIBE_CLUSTERS); private final DynamicBooleanProperty IS_NODE_MISMATCH_WITH_DISCOVERY_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_NODEMISMATCH_WITH_DISCOVERY_ENABLED, DEFAULT_IS_NODEMISMATCH_WITH_DISCOVERY_ENABLED); private final DynamicIntProperty DESIRED_NUM_NODES_IN_CLUSTER = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_DESIRED_NUM_NODES_IN_CLUSTER, DEFAULT_DESIRED_NUM_NODES_IN_CLUSTER); private final DynamicBooleanProperty IS_EUREKA_HEALTH_CHECK_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_EUREKA_HEALTH_CHECK_ENABLED, DEFAULT_IS_EUREKA_HEALTH_CHECK_ENABLED); private 
final DynamicBooleanProperty IS_LOCAL_MODE_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_LOCAL_MODE_ENABLED, DEFAULT_IS_LOCAL_MODE_ENABLED); private final DynamicStringProperty CASSANDRA_KEYSPACE_NAME = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_CASSANDRA_KEYSPACE_NAME, DEFAULT_CASSANDRA_KEYSPACE_NAME); private final DynamicIntProperty CASSANDRA_THRIFT_PORT = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_CASSANDRA_THRIFT_PORT, DEFAULT_CASSANDRA_THRIFT_PORT); private final DynamicBooleanProperty IS_EUREKA_HOST_SUPPLIER_ENABLED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_EUREKA_HOST_SUPPLIER_ENABLED, DEFAULT_IS_EUREKA_HOST_SUPPLIER_ENABLED); private final DynamicStringProperty COMMA_SEPARATED_CASSANDRA_HOSTNAMES = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_COMMA_SEPARATED_CASSANDRA_HOSTNAMES, DEFAULT_COMMA_SEPARATED_CASSANDRA_HOSTNAMES); private final DynamicBooleanProperty IS_SECURITY_GROUP_IN_MULTI_DC = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_SECURITY_GROUP_IN_MULTI_DC, DEFAULT_IS_SECURITY_GROUP_IN_MULTI_DC); private final DynamicBooleanProperty IS_KIBANA_SETUP_REQUIRED = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_IS_KIBANA_SETUP_REQUIRED, DEFAULT_IS_KIBANA_SETUP_REQUIRED); private final DynamicIntProperty KIBANA_PORT = DynamicPropertyFactory.getInstance().getIntProperty(CONFIG_KIBANA_PORT, DEFAULT_KIBANA_PORT); private final DynamicBooleanProperty AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC, DEFAULT_AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC); private final DynamicBooleanProperty REPORT_METRICS_FROM_MASTER_ONLY = DynamicPropertyFactory.getInstance().getBooleanProperty(CONFIG_REPORT_METRICS_FROM_MASTER_ONLY, DEFAULT_REPORT_METRICS_FROM_MASTER_ONLY); private final DynamicStringProperty 
TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT, DEFAULT_TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT); private final DynamicStringProperty ACL_GROUP_NAME_FOR_VPC = DynamicPropertyFactory.getInstance().getStringProperty(CONFIG_ACL_GROUP_NAME_FOR_VPC, DEFAULT_ACL_GROUP_NAME_FOR_VPC); @Inject public RaigadConfiguration(ICredential provider, IConfigSource config) { this.provider = provider; this.config = config; } @Override public void initialize() { setupEnvVars(); this.config.initialize(ASG_NAME, REGION); setDefaultRACList(REGION); populateProps(); SystemUtils.createDirs(getDataFileLocation()); } private void setupEnvVars() { REGION = StringUtils.isBlank(REGION) ? System.getProperty("EC2_REGION") : REGION; if (StringUtils.isBlank(REGION)) { REGION = RAC.substring(0, RAC.length() - 1); } ASG_NAME = StringUtils.isBlank(ASG_NAME) ? System.getProperty("ASG_NAME") : ASG_NAME; if (StringUtils.isBlank(ASG_NAME)) { ASG_NAME = populateASGName(REGION, INSTANCE_ID); } STACK_NAME = StringUtils.isBlank(STACK_NAME) ? System.getProperty("STACK_NAME") : STACK_NAME; logger.info(String.format("REGION set to [%s], ASG Name set to [%s]", REGION, ASG_NAME)); } /** * Query amazon to get ASG name. Currently not available as part of instance * info api. 
*/ private String populateASGName(String region, String instanceId) { GetASGName getASGName = new GetASGName(region, instanceId); try { return getASGName.call(); } catch (Exception e) { logger.error("Failed to determine ASG name", e); return null; } } private class GetASGName extends RetriableCallable<String> { private static final int NUMBER_OF_RETRIES = 15; private static final long WAIT_TIME = 30000; private final String region; private final String instanceId; private final AmazonEC2 client; public GetASGName(String region, String instanceId) { super(NUMBER_OF_RETRIES, WAIT_TIME); this.region = region; this.instanceId = instanceId; client = new AmazonEC2Client(provider.getAwsCredentialProvider()); client.setEndpoint("ec2." + region + ".amazonaws.com"); } @Override public String retriableCall() throws IllegalStateException { DescribeInstancesRequest desc = new DescribeInstancesRequest().withInstanceIds(instanceId); DescribeInstancesResult res = client.describeInstances(desc); for (Reservation resr : res.getReservations()) { for (Instance ins : resr.getInstances()) { for (com.amazonaws.services.ec2.model.Tag tag : ins.getTags()) { if (tag.getKey().equals("aws:autoscaling:groupName")) return tag.getValue(); } } } logger.warn("Couldn't determine ASG name"); throw new IllegalStateException("Couldn't determine ASG name"); } } /** * Get the fist 3 available zones in the region */ public void setDefaultRACList(String region) { AmazonEC2 client = new AmazonEC2Client(provider.getAwsCredentialProvider()); client.setEndpoint("ec2." 
+ region + ".amazonaws.com"); DescribeAvailabilityZonesResult res = client.describeAvailabilityZones(); List<String> zone = Lists.newArrayList(); for (AvailabilityZone reg : res.getAvailabilityZones()) { if (reg.getState().equals("available")) { zone.add(reg.getZoneName()); } if (zone.size() == 3) { break; } } DEFAULT_AVAILABILITY_ZONES = ImmutableList.copyOf(zone); } private void populateProps() { config.set(CONFIG_ASG_NAME, ASG_NAME); config.set(CONFIG_REGION_NAME, REGION); } @Override public List<String> getRacs() { return config.getList(CONFIG_AVAILABILITY_ZONES, DEFAULT_AVAILABILITY_ZONES); } @Override public String getDC() { return config.get(CONFIG_REGION_NAME, ""); } @Override public void setDC(String region) { config.set(CONFIG_REGION_NAME, region); } @Override public String getASGName() { return config.get(CONFIG_ASG_NAME, ASG_NAME); } @Override public String getStackName() { return config.get(CONFIG_STACK_NAME, STACK_NAME); } @Override public String getACLGroupName() { return config.get(CONFIG_ACL_GROUP_NAME, this.getAppName()); } @Override public String getDataFileLocation() { return DATA_LOCATION.get(); } @Override public String getLogFileLocation() { return LOG_LOCATION.get(); } @Override public String getElasticsearchStartupScript() { return ES_STARTUP_SCRIPT_LOCATION.get(); } @Override public String getYamlLocation() { return DEFAULT_YAML_LOCATION; } @Override public String getBackupLocation() { return BUCKET_NAME.get(); } @Override public String getElasticsearchHome() { return ES_HOME.get(); } @Override public String getElasticsearchStopScript() { return ES_STOP_SCRIPT_LOCATION.get(); } @Override public String getFdPingInterval() { return FD_PING_INTERVAL.get(); } @Override public String getFdPingTimeout() { return FD_PING_TIMEOUT.get(); } @Override public int getHttpPort() { return ES_HTTP_PORT.get(); } @Override public int getTransportTcpPort() { return ES_TRANSPORT_TCP_PORT.get(); } @Override public int getMinimumMasterNodes() { return 
MINIMUM_MASTER_NODES.get(); } @Override public int getNumOfReplicas() { return NUM_REPLICAS.get(); } @Override public int getTotalShardsPerNode() { return TOTAL_SHARDS_PER_NODES.get(); } @Override public int getNumOfShards() { return NUM_SHARDS.get(); } @Override public String getPingTimeout() { return PING_TIMEOUT.get(); } @Override public String getRefreshInterval() { return INDEX_REFRESH_INTERVAL.get(); } @Override public boolean isMasterQuorumEnabled() { return IS_MASTER_QUORUM_ENABLED.get(); } @Override public boolean isPingMulticastEnabled() { return IS_PING_MULTICAST_ENABLED.get(); } @Override public String getHostIP() { return PUBLIC_IP; } @Override public String getHostname() { return PUBLIC_HOSTNAME; } @Override public String getInstanceName() { return INSTANCE_ID; } @Override public String getInstanceId() { return INSTANCE_ID; } @Override public String getHostLocalIP() { return LOCAL_IP; } @Override public String getRac() { return RAC; } @Override public String getAppName() { return config.get(CONFIG_CLUSTER_NAME, DEFAULT_CLUSTER_NAME); } @Override public String getBootClusterName() { return BOOTCLUSTER_NAME.get(); } @Override public String getElasticsearchDiscoveryType() { return ES_DISCOVERY_TYPE.get(); } @Override public boolean isMultiDC() { return IS_MULTI_DC_ENABLED.get(); } @Override public String getClusterRoutingAttributes() { return ES_CLUSTER_ROUTING_ATTRIBUTES.get(); } @Override public boolean isAsgBasedDedicatedDeployment() { return IS_ASG_BASED_DEPLOYMENT_ENABLED.get(); } @Override public String getElasticsearchProcessName() { return ES_PROCESS_NAME.get(); } /** * @return Elasticsearch Index Refresh Interval */ public String getIndexRefreshInterval() { return INDEX_REFRESH_INTERVAL.get(); } @Override public boolean doesElasticsearchStartManually() { return false; } @Override public String getClusterShardAllocationAttribute() { return ES_SHARD_ALLOCATION_ATTRIBUTE.get(); } @Override public boolean isCustomShardAllocationPolicyEnabled() { 
return IS_SHARD_ALLOCATION_POLICY_ENABLED.get(); } @Override public String getEsKeyName(String escarKey) { return config.get(escarKey); } @Override public boolean isDebugEnabled() { return IS_DEBUG_ENABLED.get(); } @Override public boolean isShardPerNodeEnabled() { return IS_SHARDS_PER_NODE_ENABLED.get(); } @Override public boolean isIndexAutoCreationEnabled() { return IS_INDEX_AUTOCREATION_ENABLED.get(); } @Override public String getIndexMetadata() { return INDEX_METADATA.get(); } @Override public int getAutoCreateIndexTimeout() { return AUTOCREATE_INDEX_TIMEOUT.get(); } @Override public int getAutoCreateIndexInitialStartDelaySeconds() { return AUTOCREATE_INDEX_INITIAL_START_DELAY_SECONDS.get(); } @Override public int getAutoCreateIndexScheduleMinutes() { return AUTOCREATE_INDEX_SCHEDULE_MINUTES.get(); } @Override public String getExtraConfigParams() { return EXTRA_PARAMS.get(); } @Override public int getBackupHour() { return BACKUP_HOUR.get(); } public boolean isSnapshotBackupEnabled() { return IS_SNAPSHOT_BACKUP_ENABLED.get(); } @Override public String getCommaSeparatedIndicesToBackup() { return COMMA_SEPARATED_INDICES_TO_BACKUP.get(); } @Override public boolean partiallyBackupIndices() { return PARTIALLY_BACKUP_INDICES.get(); } @Override public boolean includeGlobalStateDuringBackup() { return INCLUDE_GLOBAL_STATE_DURING_BACKUP.get(); } @Override public boolean waitForCompletionOfBackup() { return WAIT_FOR_COMPLETION_OF_BACKUP.get(); } @Override public boolean includeIndexNameInSnapshot() { return INCLUDE_INDEX_NAME_IN_SNAPSHOT_BACKUP.get(); } @Override public boolean isHourlySnapshotEnabled() { return IS_HOURLY_SNAPSHOT_BACKUP_ENABLED.get(); } @Override public long getBackupCronTimerInSeconds() { return BACKUP_CRON_TIMER_SECONDS.get(); } @Override public boolean isRestoreEnabled() { return IS_RESTORE_ENABLED.get(); } @Override public String getRestoreRepositoryName() { return RESTORE_REPOSITORY_NAME.get(); } @Override public String 
getRestoreSourceClusterName() { return RESTORE_SOURCE_CLUSTER_NAME.get(); } @Override public String getRestoreSourceRepositoryRegion() { return RESTORE_SOURCE_REPO_REGION.get(); } @Override public String getRestoreLocation() { return RESTORE_LOCATION.get(); } @Override public String getRestoreRepositoryType() { return RESTORE_REPOSITORY_TYPE.get(); } @Override public String getRestoreSnapshotName() { return RESTORE_SNAPSHOT_NAME.get(); } @Override public String getCommaSeparatedIndicesToRestore() { return COMMA_SEPARATED_INDICES_TO_RESTORE.get(); } @Override public int getRestoreTaskInitialDelayInSeconds() { return RESTORE_TASK_INITIAL_START_DELAY_SECONDS.get(); } @Override public boolean amITribeNode() { return AM_I_TRIBE_NODE.get(); } @Override public boolean amIWriteEnabledTribeNode() { return AM_I_WRITE_ENABLED_TRIBE_NODE.get(); } @Override public boolean amIMetadataEnabledTribeNode() { return AM_I_METADATA_ENABLED_TRIBE_NODE.get(); } @Override public String getCommaSeparatedSourceClustersForTribeNode() { return COMMA_SEPARATED_SOURCE_CLUSTERS_IN_TRIBE.get(); } @Override public boolean amISourceClusterForTribeNode() { return AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE.get(); } @Override public String getCommaSeparatedTribeClusterNames() { return COMMA_SEPARATED_TRIBE_CLUSTERS.get(); } @Override public boolean isNodeMismatchWithDiscoveryEnabled() { return IS_NODE_MISMATCH_WITH_DISCOVERY_ENABLED.get(); } @Override public int getDesiredNumberOfNodesInCluster() { return DESIRED_NUM_NODES_IN_CLUSTER.get(); } @Override public boolean isEurekaHealthCheckEnabled() { return IS_EUREKA_HEALTH_CHECK_ENABLED.get(); } @Override public boolean isLocalModeEnabled() { return IS_LOCAL_MODE_ENABLED.get(); } @Override public String getCassandraKeyspaceName() { return CASSANDRA_KEYSPACE_NAME.get(); } @Override public int getCassandraThriftPortForAstyanax() { return CASSANDRA_THRIFT_PORT.get(); } @Override public boolean isEurekaHostSupplierEnabled() { return 
IS_EUREKA_HOST_SUPPLIER_ENABLED.get(); } @Override public String getCommaSeparatedCassandraHostNames() { return COMMA_SEPARATED_CASSANDRA_HOSTNAMES.get(); } @Override public boolean isSecurityGroupInMultiDC() { return IS_SECURITY_GROUP_IN_MULTI_DC.get(); } @Override public boolean isKibanaSetupRequired() { return IS_KIBANA_SETUP_REQUIRED.get(); } @Override public int getKibanaPort() { return KIBANA_PORT.get(); } @Override public boolean amISourceClusterForTribeNodeInMultiDC() { return AM_I_SOURCE_CLUSTER_FOR_TRIBE_NODE_IN_MULTI_DC.get(); } @Override public boolean reportMetricsFromMasterOnly() { return REPORT_METRICS_FROM_MASTER_ONLY.get(); } @Override public String getTribePreferredClusterIdOnConflict() { return TRIBE_PREFERRED_CLUSTER_ID_ON_CONFLICT.get(); } @Override public String getEsNodeName() { return ES_NODE_NAME; } @Override public boolean isDeployedInVPC() { return IS_DEPLOYED_IN_VPC; } @Override public boolean isVPCExternal() { return IS_VPC_EXTERNAL; } @Override public String getACLGroupNameForVPC() { return ACL_GROUP_NAME_FOR_VPC.get(); } @Override public String getACLGroupIdForVPC() { return ACL_GROUP_ID_FOR_VPC; } @Override public void setACLGroupIdForVPC(String aclGroupIdForVPC) { ACL_GROUP_ID_FOR_VPC = aclGroupIdForVPC; } @Override public String getMacIdForInstance() { return MAC_ID; } }
5,074
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/RaigadConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import javax.inject.Inject;

/**
 * The configuration source Raigad actually uses: a composite of the
 * file-backed property source and the JVM system-property source.
 *
 * <p>NOTE(review): the two sources are handed to
 * {@link CompositeConfigSource} in this exact order; presumably that order
 * determines lookup precedence — confirm against CompositeConfigSource.
 */
public class RaigadConfigSource extends CompositeConfigSource {

    @Inject
    public RaigadConfigSource(PropertiesConfigSource fileProperties,
                              SystemPropertiesConfigSource systemProperties) {
        // Same ordering as always: file-backed properties, then system properties.
        super(fileProperties, systemProperties);
    }
}
5,075
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/AbstractConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.commons.lang.StringUtils;

import java.util.List;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Base implementations for most methods on {@link IConfigSource}.
 * <p>
 * Every typed getter reads the raw string via {@link #get(String)} and falls back to
 * the supplied default when the key is absent or the value does not parse.
 */
public abstract class AbstractConfigSource implements IConfigSource {

    private String asgName;
    private String region;

    @Override
    public void initialize(final String asgName, final String region) {
        this.asgName = checkNotNull(asgName, "ASG name is not defined");
        this.region = checkNotNull(region, "Region is not defined");
    }

    @Override
    public boolean contains(final String key) {
        return get(key) != null;
    }

    @Override
    public boolean isEmpty() {
        return size() == 0;
    }

    @Override
    public String get(final String key, final String defaultValue) {
        final String value = get(key);
        return (value != null) ? value : defaultValue;
    }

    @Override
    public boolean get(final String key, final boolean defaultValue) {
        final String value = get(key);
        if (value != null) {
            // BUG FIX: Boolean.parseBoolean never throws — it maps every string
            // that is not "true" (ignoring case) to false, so the old try/catch
            // was dead code and malformed values silently became false instead
            // of the caller's default. Only honor explicit true/false here.
            if ("true".equalsIgnoreCase(value)) {
                return true;
            }
            if ("false".equalsIgnoreCase(value)) {
                return false;
            }
        }
        return defaultValue;
    }

    @Override
    public Class<?> get(final String key, final Class<?> defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Class.forName(value);
            } catch (ClassNotFoundException e) {
                // Unknown class name: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public <T extends Enum<T>> T get(final String key, final T defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                // The default's declaring class tells us which enum to parse into.
                return Enum.valueOf(defaultValue.getDeclaringClass(), value);
            } catch (Exception e) {
                // Not a constant of that enum: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public int get(final String key, final int defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Integer.parseInt(value);
            } catch (Exception e) {
                // Unparseable number: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public long get(final String key, final long defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Long.parseLong(value);
            } catch (Exception e) {
                // Unparseable number: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public float get(final String key, final float defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Float.parseFloat(value);
            } catch (Exception e) {
                // Unparseable number: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public double get(final String key, final double defaultValue) {
        final String value = get(key);
        if (value != null) {
            try {
                return Double.parseDouble(value);
            } catch (Exception e) {
                // Unparseable number: fall through to the default.
            }
        }
        return defaultValue;
    }

    @Override
    public List<String> getList(String prop) {
        return getList(prop, ImmutableList.<String>of());
    }

    @Override
    public List<String> getList(String prop, List<String> defaultValue) {
        final String value = get(prop);
        if (value != null) {
            // Values are stored as a single comma-separated string.
            return getTrimmedStringList(value.split(","));
        }
        return defaultValue;
    }

    /** @return ASG name supplied to {@link #initialize(String, String)}. */
    protected String getAsgName() {
        return asgName;
    }

    /** @return region supplied to {@link #initialize(String, String)}. */
    protected String getRegion() {
        return region;
    }

    /** Strips whitespace from each entry of a split comma-separated value. */
    private List<String> getTrimmedStringList(String[] strings) {
        List<String> list = Lists.newArrayList();
        for (String s : strings) {
            list.add(StringUtils.strip(s));
        }
        return list;
    }
}
5,076
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/MemoryConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.common.collect.Maps;

import java.util.Map;

/**
 * Purely in-memory {@link IConfigSource}, backed by a concurrent map.
 * Useful for tests and for programmatically seeded configuration.
 */
public final class MemoryConfigSource extends AbstractConfigSource {

    /** Backing store; concurrent so reads and writes may interleave freely. */
    private final Map<String, String> values = Maps.newConcurrentMap();

    @Override
    public void initialize(IConfiguration config) {
        // No-op: this source is initialized via asgName/region instead.
    }

    @Override
    public String get(final String key) {
        return values.get(key);
    }

    @Override
    public void set(final String key, final String value) {
        values.put(key, value);
    }

    @Override
    public int size() {
        return values.size();
    }
}
5,077
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/IConfiguration.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.inject.ImplementedBy;

import java.util.List;

/**
 * Central configuration contract for Raigad: Elasticsearch process settings,
 * AWS/instance identity, backup/restore, and tribe-node options.
 * Bound to {@link RaigadConfiguration} by default via Guice.
 */
@ImplementedBy(RaigadConfiguration.class)
public interface IConfiguration {

    void initialize();

    /**
     * @return Path to the home dir of Elasticsearch
     */
    String getElasticsearchHome();

    String getYamlLocation();

    String getBackupLocation();

    /**
     * @return Path to Elasticsearch startup script
     */
    String getElasticsearchStartupScript();

    /**
     * @return Path to Elasticsearch stop script
     */
    String getElasticsearchStopScript();

    int getTransportTcpPort();

    int getHttpPort();

    int getNumOfShards();

    int getNumOfReplicas();

    int getTotalShardsPerNode();

    String getRefreshInterval();

    boolean isMasterQuorumEnabled();

    int getMinimumMasterNodes();

    String getPingTimeout();

    boolean isPingMulticastEnabled();

    String getFdPingInterval();

    String getFdPingTimeout();

    /**
     * @return Location of the local data dir
     */
    String getDataFileLocation();

    /**
     * @return Location of the local log dir
     */
    String getLogFileLocation();

    boolean doesElasticsearchStartManually();

    /**
     * @return Cluster name
     */
    String getAppName();

    /**
     * @return RAC (or zone for AWS)
     */
    String getRac();

    /**
     * @return List of all RAC used for the cluster
     */
    List<String> getRacs();

    /**
     * @return Local hostname
     */
    String getHostname();

    /**
     * @return Get instance name (for AWS)
     */
    String getInstanceName();

    /**
     * @return Get instance id (for AWS)
     */
    String getInstanceId();

    /**
     * @return Get the Data Center name (or region for AWS)
     */
    String getDC();

    /**
     * @param dc Set the current data center
     */
    void setDC(String dc);

    /**
     * Amazon specific setting to query ASG Membership
     */
    String getASGName();

    /**
     * Amazon specific setting to query ASG Membership
     */
    String getStackName();

    /**
     * Get the security group associated with nodes in this cluster
     */
    String getACLGroupName();

    /**
     * @return Get host IP
     */
    String getHostIP();

    /**
     * @return Get host Local IP
     */
    String getHostLocalIP();

    /**
     * @return Bootstrap cluster name (depends on another cass cluster)
     */
    String getBootClusterName();

    /**
     * @return Elasticsearch Process Name
     */
    String getElasticsearchProcessName();

    /**
     * @return Elasticsearch Discovery Type
     */
    String getElasticsearchDiscoveryType();

    /**
     * @return Whether it's a Multi-Region Setup
     */
    boolean isMultiDC();

    /**
     * @return Elasticsearch Index Refresh Interval
     */
    String getIndexRefreshInterval();

    String getClusterRoutingAttributes();

    boolean isAsgBasedDedicatedDeployment();

    boolean isCustomShardAllocationPolicyEnabled();

    String getClusterShardAllocationAttribute();

    /**
     * Providing a way to add New Config Params without any code change
     */
    String getExtraConfigParams();

    String getEsKeyName(String escarKey);

    boolean isDebugEnabled();

    boolean isShardPerNodeEnabled();

    boolean isIndexAutoCreationEnabled();

    String getIndexMetadata();

    int getAutoCreateIndexTimeout();

    int getAutoCreateIndexInitialStartDelaySeconds();

    int getAutoCreateIndexScheduleMinutes();

    /* Backup related Config properties */

    boolean isSnapshotBackupEnabled();

    String getCommaSeparatedIndicesToBackup();

    boolean partiallyBackupIndices();

    boolean includeGlobalStateDuringBackup();

    boolean waitForCompletionOfBackup();

    boolean includeIndexNameInSnapshot();

    boolean isHourlySnapshotEnabled();

    long getBackupCronTimerInSeconds();

    /**
     * @return Backup hour for snapshot backups (0 - 23)
     */
    int getBackupHour();

    /* Restore related Config properties */

    boolean isRestoreEnabled();

    String getRestoreRepositoryName();

    String getRestoreSourceClusterName();

    String getRestoreSourceRepositoryRegion();

    String getRestoreLocation();

    String getRestoreRepositoryType();

    String getRestoreSnapshotName();

    String getCommaSeparatedIndicesToRestore();

    int getRestoreTaskInitialDelayInSeconds();

    boolean amITribeNode();

    boolean amIWriteEnabledTribeNode();

    boolean amIMetadataEnabledTribeNode();

    String getCommaSeparatedSourceClustersForTribeNode();

    boolean amISourceClusterForTribeNode();

    String getCommaSeparatedTribeClusterNames();

    boolean isNodeMismatchWithDiscoveryEnabled();

    int getDesiredNumberOfNodesInCluster();

    boolean isEurekaHealthCheckEnabled();

    boolean isLocalModeEnabled();

    String getCassandraKeyspaceName();

    int getCassandraThriftPortForAstyanax();

    boolean isEurekaHostSupplierEnabled();

    String getCommaSeparatedCassandraHostNames();

    boolean isSecurityGroupInMultiDC();

    boolean isKibanaSetupRequired();

    int getKibanaPort();

    /**
     * @return Whether current cluster is Single Region cluster but is a Source Cluster in Multi-Region Tribe Node Setup
     */
    boolean amISourceClusterForTribeNodeInMultiDC();

    boolean reportMetricsFromMasterOnly();

    /**
     * To prefer the index from a specific tribe
     *
     * @return tribe id
     */
    String getTribePreferredClusterIdOnConflict();

    String getEsNodeName();

    /**
     * Check if instance is deployed in VPC
     *
     * @return true or false
     */
    boolean isDeployedInVPC();

    /**
     * Check if instance is deployed in VPC external
     *
     * @return true or false
     */
    boolean isVPCExternal();

    /**
     * Get the security group associated with nodes in this cluster in VPC
     */
    String getACLGroupNameForVPC();

    /**
     * Get the security group id for given Security Group in VPC
     */
    String getACLGroupIdForVPC();

    /**
     * Set the security group id for given Security Group in VPC
     */
    void setACLGroupIdForVPC(String aclGroupIdForVPC);

    /**
     * Get the MAC id for an instance
     */
    String getMacIdForInstance();
}
5,078
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/IConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.inject.ImplementedBy;

import java.util.List;

/**
 * Defines the configurations for an application.
 */
@ImplementedBy(RaigadConfigSource.class)
public interface IConfigSource {

    /**
     * Must be called before any other method. This method will allow implementations to do any setup that they require
     * before being called.
     */
    void initialize(String asgName, String region);

    /**
     * An alternative means of initialization, allowing an implementation to do setup using configuration.
     */
    void initialize(IConfiguration config);

    /**
     * A non-negative integer indicating a count of elements.
     *
     * @return non-negative integer indicating a count of elements.
     */
    int size();

    /**
     * Returns {@code true} if the size is zero. May be more efficient than calculating size.
     *
     * @return {@code true} if the size is zero otherwise {@code false}.
     */
    boolean isEmpty();

    /**
     * Check if the given key can be found in the config.
     *
     * @param key to look up value.
     * @return if the key is present
     */
    boolean contains(String key);

    /**
     * Get a String associated with the given configuration key.
     *
     * @param key to look up value.
     * @return value from config or null if not present.
     */
    String get(String key);

    /**
     * Get a String associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    String get(String key, String defaultValue);

    /**
     * Get a boolean associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    boolean get(String key, boolean defaultValue);

    /**
     * Get a Class associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    Class<?> get(String key, Class<?> defaultValue);

    /**
     * Get an Enum associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @param <T> enum type.
     * @return value from config or defaultValue if not present.
     */
    <T extends Enum<T>> T get(String key, T defaultValue);

    /**
     * Get an int associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    int get(String key, int defaultValue);

    /**
     * Get a long associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    long get(String key, long defaultValue);

    /**
     * Get a float associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    float get(String key, float defaultValue);

    /**
     * Get a double associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    double get(String key, double defaultValue);

    /**
     * Get a list of strings associated with the given configuration key.
     *
     * @param key to look up value.
     * @return value from config or an immutable list if not present.
     */
    List<String> getList(String key);

    /**
     * Get a list of strings associated with the given configuration key.
     *
     * @param key to look up value.
     * @param defaultValue if value is not present.
     * @return value from config or defaultValue if not present.
     */
    List<String> getList(String key, List<String> defaultValue);

    /**
     * Set the value for the given key.
     *
     * @param key to set value for.
     * @param value to set.
     */
    void set(String key, String value);
}
5,079
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/PropertiesConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.Maps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URL;
import java.util.Map;
import java.util.Properties;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Loads the 'Raigad.properties' file as a source.
 * Values are snapshotted into an in-memory concurrent map at initialize() time;
 * later edits to the file are not picked up.
 */
public class PropertiesConfigSource extends AbstractConfigSource {
    private static final Logger logger = LoggerFactory.getLogger(PropertiesConfigSource.class.getName());

    private static final String DEFAULT_RAIGAD_PROPERTIES = "Raigad.properties";

    private final Map<String, String> data = Maps.newConcurrentMap();
    private final String raigadFile;

    public PropertiesConfigSource() {
        this.raigadFile = DEFAULT_RAIGAD_PROPERTIES;
    }

    public PropertiesConfigSource(final Properties properties) {
        checkNotNull(properties);
        this.raigadFile = DEFAULT_RAIGAD_PROPERTIES;
        clone(properties);
    }

    @VisibleForTesting
    PropertiesConfigSource(final String file) {
        this.raigadFile = checkNotNull(file);
    }

    @Override
    public void initialize(final String asgName, final String region) {
        super.initialize(asgName, region);
        Properties properties = new Properties();
        URL url = PropertiesConfigSource.class.getClassLoader().getResource(raigadFile);
        if (url != null) {
            try {
                properties.load(url.openStream());
                clone(properties);
            } catch (IOException e) {
                // BUG FIX: the file exists but could not be read — previously this was
                // logged as "No Raigad.properties", hiding the real failure and the cause.
                logger.warn("Failed to load " + raigadFile, e);
            }
        } else {
            logger.info("No Raigad.properties. Ignore!");
        }
    }

    @Override
    public void initialize(IConfiguration config) {
        // No-op: this source is initialized via asgName/region instead.
    }

    @Override
    public String get(final String prop) {
        return data.get(prop);
    }

    @Override
    public void set(final String key, final String value) {
        Preconditions.checkNotNull(value, "Value can not be null for configurations.");
        data.put(key, value);
    }

    @Override
    public int size() {
        return data.size();
    }

    @Override
    public boolean contains(final String prop) {
        return data.containsKey(prop);
    }

    /**
     * Clones all the values from the properties. If the value is null or empty, it will be ignored.
     *
     * @param properties to clone
     */
    private void clone(final Properties properties) {
        if (properties.isEmpty()) {
            return;
        }
        // Properties is synchronized internally, but we lock across the whole
        // iteration so the snapshot is consistent.
        synchronized (properties) {
            for (final String key : properties.stringPropertyNames()) {
                final String value = properties.getProperty(key);
                if (!Strings.isNullOrEmpty(value)) {
                    data.put(key, value);
                }
            }
        }
    }
}
5,080
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/CustomConfigSource.java
package com.netflix.raigad.configuration;

import java.util.Collections;
import java.util.List;

/**
 * A no-op {@link IConfigSource}: holds nothing and stores nothing.
 * <p>
 * BUG FIXES relative to the original stub: {@code isEmpty()} used to return
 * {@code false} while {@code size()} was 0, violating the interface contract
 * ("returns true if the size is zero"); the defaulted getters ignored the
 * caller-supplied default and returned null/0/false; and {@code getList(String)}
 * returned null instead of an empty list. Since this source never contains a
 * value, every defaulted lookup now honors the interface contract and returns
 * the caller's default.
 */
public class CustomConfigSource implements IConfigSource {

    @Override
    public void initialize(String asgName, String region) {
        // No-op.
    }

    @Override
    public void initialize(IConfiguration config) {
        // No-op.
    }

    @Override
    public int size() {
        return 0;
    }

    @Override
    public boolean isEmpty() {
        // Always empty — must agree with size() == 0.
        return true;
    }

    @Override
    public boolean contains(String key) {
        return false;
    }

    @Override
    public String get(String key) {
        return null;
    }

    @Override
    public String get(String key, String defaultValue) {
        return defaultValue;
    }

    @Override
    public boolean get(String key, boolean defaultValue) {
        return defaultValue;
    }

    @Override
    public Class<?> get(String key, Class<?> defaultValue) {
        return defaultValue;
    }

    @Override
    public <T extends Enum<T>> T get(String key, T defaultValue) {
        return defaultValue;
    }

    @Override
    public int get(String key, int defaultValue) {
        return defaultValue;
    }

    @Override
    public long get(String key, long defaultValue) {
        return defaultValue;
    }

    @Override
    public float get(String key, float defaultValue) {
        return defaultValue;
    }

    @Override
    public double get(String key, double defaultValue) {
        return defaultValue;
    }

    @Override
    public List<String> getList(String key) {
        // Contract: "value from config or an immutable list if not present" — never null.
        return Collections.emptyList();
    }

    @Override
    public List<String> getList(String key, List<String> defaultValue) {
        return defaultValue;
    }

    @Override
    public void set(String key, String value) {
        // No-op: values are intentionally discarded.
    }
}
5,081
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/CompositeConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;

import java.util.Collection;

/**
 * A {@link IConfigSource} that delegates method calls to the underline sources. The order in which values are provided
 * depend on the {@link IConfigSource}s provided. If user asks for key 'foo', and this composite has three sources, it
 * will first check if the key is found in the first source, if not it will check the second and if not, the third, else
 * return null or false if {@link #contains(String)} was called.
 * <p/>
 * Implementation note: get methods with a default are implemented in {@link AbstractConfigSource}, if the underlying
 * source overrides one of these methods, then that implementation will be ignored.
 */
public class CompositeConfigSource extends AbstractConfigSource {

    // Immutable and non-empty by construction; iteration order defines lookup precedence.
    private final ImmutableCollection<? extends IConfigSource> sources;

    public CompositeConfigSource(final ImmutableCollection<? extends IConfigSource> sources) {
        Preconditions.checkArgument(!sources.isEmpty(), "Can not create a composite config source without config sources!");
        this.sources = sources;
    }

    public CompositeConfigSource(final Collection<? extends IConfigSource> sources) {
        this(ImmutableList.copyOf(sources));
    }

    public CompositeConfigSource(final Iterable<? extends IConfigSource> sources) {
        this(ImmutableList.copyOf(sources));
    }

    public CompositeConfigSource(final IConfigSource... sources) {
        this(ImmutableList.copyOf(sources));
    }

    @Override
    public void initialize(final String asgName, final String region) {
        // Initializes every delegate, in precedence order.
        for (final IConfigSource source : sources) {
            //TODO should this catch any potential exceptions?
            source.initialize(asgName, region);
        }
    }

    @Override
    public void initialize(IConfiguration config) {
        // No-op: this source is initialized via asgName/region instead.
    }

    @Override
    public int size() {
        // Sum of delegate sizes; keys present in several sources are counted once per source.
        int size = 0;
        for (final IConfigSource c : sources) {
            size += c.size();
        }
        return size;
    }

    @Override
    public boolean isEmpty() {
        return size() == 0;
    }

    @Override
    public boolean contains(final String key) {
        return get(key) != null;
    }

    @Override
    public String get(final String key) {
        Preconditions.checkNotNull(key);
        // First source with a non-null value wins — this is the precedence rule
        // described in the class Javadoc.
        for (final IConfigSource c : sources) {
            final String value = c.get(key);
            if (value != null) {
                return value;
            }
        }
        return null;
    }

    @Override
    public void set(final String key, final String value) {
        // Writes always go to the highest-precedence (first) source.
        Preconditions.checkNotNull(value, "Value can not be null for configurations.");
        final IConfigSource firstSource = Iterables.getFirst(sources, null);
        // firstSource shouldn't be null because the collection is immutable, and the collection is non empty.
        Preconditions.checkState(firstSource != null, "There was no IConfigSource found at the first location?");
        firstSource.set(key, value);
    }
}
5,082
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/configuration/SystemPropertiesConfigSource.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.configuration;

import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import org.apache.commons.lang.StringUtils;

import java.util.Map;
import java.util.Properties;

/**
 * Loads {@link System#getProperties()} as a source.
 * <p/>
 * Only properties whose key starts with {@link RaigadConfiguration#MY_WEBAPP_NAME}
 * are copied in, and only at initialize() time.
 * <p/>
 * Implementation note: {@link #set(String, String)} does not write back to system properties; it
 * writes into a private map, so mutations have no effect on the JVM or on other instances.
 */
public final class SystemPropertiesConfigSource extends AbstractConfigSource {

    /** Private snapshot of the matching system properties. */
    private final Map<String, String> snapshot = Maps.newConcurrentMap();

    @Override
    public void initialize(final String asgName, final String region) {
        super.initialize(asgName, region);

        final Properties systemProperties = System.getProperties();
        for (final String name : systemProperties.stringPropertyNames()) {
            // Keep only app-scoped keys with non-empty values.
            if (name.startsWith(RaigadConfiguration.MY_WEBAPP_NAME)) {
                final String value = systemProperties.getProperty(name);
                if (!StringUtils.isEmpty(value)) {
                    snapshot.put(name, value);
                }
            }
        }
    }

    @Override
    public void initialize(IConfiguration config) {
        // No-op: this source is initialized via asgName/region instead.
    }

    @Override
    public String get(final String key) {
        return snapshot.get(key);
    }

    @Override
    public void set(final String key, final String value) {
        Preconditions.checkNotNull(value, "Value can not be null for configurations");
        snapshot.put(key, value);
    }

    @Override
    public int size() {
        return snapshot.size();
    }
}
5,083
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/identity/InstanceManager.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.identity;

import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.netflix.raigad.configuration.IConfiguration;
import com.netflix.raigad.utils.RetriableCallable;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

/**
 * This class provides the central place to create and consume the identity of the instance.
 * On construction it (with retries) deregisters dead instances from the registry and then
 * registers the local instance.
 */
@Singleton
public class InstanceManager {
    private static final Logger logger = LoggerFactory.getLogger(InstanceManager.class);

    private static final String COMMA_SEPARATOR = ",";
    private static final String PARAM_SEPARATOR = "=";

    private final IRaigadInstanceFactory instanceFactory;
    private final IMembership membership;
    private final IConfiguration config;

    private RaigadInstance thisInstance;

    @Inject
    public InstanceManager(IRaigadInstanceFactory instanceFactory, IMembership membership,
                           IConfiguration config) throws Exception {
        this.instanceFactory = instanceFactory;
        this.membership = membership;
        this.config = config;
        init();
    }

    /** Deregisters dead instances, then registers this one; each step is retried. */
    private void init() throws Exception {
        logger.info("Deregistering dead instances");
        new RetriableCallable<Void>() {
            @Override
            public Void retriableCall() throws Exception {
                deregisterInstance(instanceFactory, config);
                return null;
            }
        }.call();

        logger.info("Registering this instance");
        thisInstance = new RetriableCallable<RaigadInstance>() {
            @Override
            public RaigadInstance retriableCall() throws Exception {
                return registerInstance(instanceFactory, config);
            }
        }.call();

        logger.info("Raigad instance details: " + thisInstance.toString());
    }

    /** Creates the registry entry describing the local instance. */
    private RaigadInstance registerInstance(IRaigadInstanceFactory instanceFactory,
                                            IConfiguration config) throws Exception {
        return instanceFactory.create(
                config.getAppName(),
                config.getDC() + "." + config.getInstanceId(),
                config.getInstanceId(),
                config.getHostname(),
                config.getHostIP(),
                config.getRac(),
                config.getDC(),
                config.getASGName(),
                null);
    }

    /**
     * Removes registry entries for instances that are no longer members of their ASG.
     * Instances in a different ASG/AZ, multi-DC peers, and tribe-source peers are skipped.
     */
    private void deregisterInstance(IRaigadInstanceFactory instanceFactory,
                                    IConfiguration config) throws Exception {
        final List<RaigadInstance> allInstances = getInstanceList();

        HashSet<String> asgNames = new HashSet<>();
        for (RaigadInstance raigadInstance : allInstances) {
            // Set.add is already a no-op for duplicates; no contains() check needed.
            asgNames.add(raigadInstance.getAsg());
        }

        logger.info("Known instances: {}", allInstances);
        logger.info("Known ASG's: {}", StringUtils.join(asgNames, ","));

        Map<String, List<String>> instancesPerAsg = membership.getRacMembership(asgNames);
        logger.info("Known instances per ASG: {}", instancesPerAsg);

        for (RaigadInstance knownInstance : allInstances) {
            // Test same region and if it is alive.
            // TODO: Provide a config property to choose same DC/Region
            if (instancesPerAsg.containsKey(knownInstance.getAsg())) {
                if (!knownInstance.getAsg().equals(config.getASGName())) {
                    logger.info("Skipping {} - different ASG", knownInstance.getInstanceId());
                    continue;
                }

                if (!knownInstance.getAvailabilityZone().equals(config.getRac())) {
                    logger.info("Skipping {} - different AZ", knownInstance.getInstanceId());
                    continue;
                }

                if (instancesPerAsg.get(config.getASGName()).contains(knownInstance.getInstanceId())) {
                    logger.info("Skipping {} - legitimate node", knownInstance.getInstanceId());
                    continue;
                }

                logger.info("Found dead instance: " + knownInstance.getInstanceId());
                instanceFactory.delete(knownInstance);
            } else if (config.isMultiDC()) {
                logger.info("Multi DC setup, skipping unknown instances (" + knownInstance.getInstanceId() + ")");
            } else if (config.amISourceClusterForTribeNode()) {
                logger.info("Tribe setup, skipping unknown instances (" + knownInstance.getInstanceId() + ")");
            } else {
                logger.info("Found dead instance: " + knownInstance.getInstanceId());
                instanceFactory.delete(knownInstance);
            }
        }
    }

    public RaigadInstance getInstance() {
        return thisInstance;
    }

    public List<RaigadInstance> getAllInstances() {
        return getInstanceList();
    }

    /**
     * Lists registry entries for this cluster; for a tribe node, entries of every
     * configured source cluster are included as well.
     */
    private List<RaigadInstance> getInstanceList() {
        List<RaigadInstance> instances = new ArrayList<>();

        // Considering same cluster will not serve as a tribe node and source cluster for the tribe node
        if (config.amITribeNode()) {
            String clusterParams = config.getCommaSeparatedSourceClustersForTribeNode();
            assert (clusterParams != null) : "I am a tribe node but I need one or more source clusters";

            String[] clusters = StringUtils.split(clusterParams, COMMA_SEPARATOR);
            assert (clusters.length != 0) : "One or more clusters needed";

            List<String> sourceClusters = new ArrayList<>();
            // Adding current cluster
            sourceClusters.add(config.getAppName());

            for (String cluster : clusters) {
                // Each entry is expected to be "<cluster-name>=<transport-port>".
                String[] clusterAndPort = cluster.split(PARAM_SEPARATOR);
                // BUG FIX: the original asserted length != 2, which fails on every
                // well-formed "name=port" entry and passes on malformed ones.
                assert (clusterAndPort.length == 2) : "Cluster name or transport port is missing in configuration";

                sourceClusters.add(clusterAndPort[0]);
                logger.info("Adding cluster = <{}> ", clusterAndPort[0]);
            }

            for (String sourceClusterName : sourceClusters) {
                instances.addAll(instanceFactory.getAllIds(sourceClusterName));
            }

            logger.info("Printing tribe node related nodes...");
            for (RaigadInstance instance : instances) {
                logger.info(instance.toString());
            }
        } else {
            instances.addAll(instanceFactory.getAllIds(config.getAppName()));
        }

        if (config.isDebugEnabled()) {
            for (RaigadInstance instance : instances) {
                logger.debug(instance.toString());
            }
        }

        return instances;
    }

    public List<RaigadInstance> getAllInstancesPerCluster(String clusterName) {
        return getInstanceListPerCluster(clusterName);
    }

    private List<RaigadInstance> getInstanceListPerCluster(String clusterName) {
        List<RaigadInstance> instances = new ArrayList<>();
        instances.addAll(instanceFactory.getAllIds(clusterName.trim().toLowerCase()));

        if (config.isDebugEnabled()) {
            for (RaigadInstance instance : instances) {
                logger.debug(instance.toString());
            }
        }

        return instances;
    }

    public boolean isMaster() {
        // For non-dedicated deployments, return true (every node can be a master)
        return (!config.isAsgBasedDedicatedDeployment() || config.getASGName().toLowerCase().contains("master"));
    }
}
5,084
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/identity/RaigadInstance.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.identity;

import java.io.Serializable;

/**
 * Value object describing a single registered Elasticsearch node: its cluster
 * ("app"), EC2 instance id, availability zone, datacenter/region, ASG, host
 * name/IP, and the registry's last-update timestamp.
 */
public class RaigadInstance implements Serializable {
    private static final long serialVersionUID = 5606412386974488659L;

    // Node id in the registry; by convention "{dc}.{instanceId}" — set by the factory
    private String id;
    private String app;
    private String instanceId;
    private String availabilityZone;
    private String dc;
    private String asgName;
    private String hostname;
    private String publicip;
    private long updatetime;
    private boolean outOfService;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getApp() {
        return app;
    }

    public void setApp(String app) {
        this.app = app;
    }

    public String getInstanceId() {
        return instanceId;
    }

    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    public String getAvailabilityZone() {
        return availabilityZone;
    }

    public void setAvailabilityZone(String availabilityZone) {
        this.availabilityZone = availabilityZone;
    }

    public String getHostName() {
        return hostname;
    }

    public void setHostName(String hostname) {
        this.hostname = hostname;
    }

    public String getHostIP() {
        return publicip;
    }

    public void setHostIP(String publicip) {
        this.publicip = publicip;
    }

    public String getDC() {
        return dc;
    }

    public void setDC(String dc) {
        this.dc = dc;
    }

    public String getAsg() {
        return asgName;
    }

    public void setAsg(String asgName) {
        this.asgName = asgName;
    }

    public long getUpdatetime() {
        return updatetime;
    }

    public void setUpdatetime(long updatetime) {
        this.updatetime = updatetime;
    }

    public boolean isOutOfService() {
        return outOfService;
    }

    public void setOutOfService(boolean outOfService) {
        this.outOfService = outOfService;
    }

    @Override
    public String toString() {
        return String
                .format("Hostname: %s, InstanceId: %s, App: %s, AvailabilityZone : %s, Id : %s, PublicIp : %s, DC : %s, ASG : %s, UpdateTime : %s",
                        getHostName(), getInstanceId(), getApp(), getAvailabilityZone(),
                        getId(), getHostIP(), getDC(), getAsg(), getUpdatetime());
    }
}
5,085
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/identity/CassandraInstanceFactory.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.identity;

import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.netflix.raigad.configuration.IConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * Factory to use Cassandra for managing instance data.
 * Delegates the actual reads/writes to {@link InstanceDataDAOCassandra}.
 */
@Singleton
public class CassandraInstanceFactory implements IRaigadInstanceFactory {
    private static final Logger logger = LoggerFactory.getLogger(CassandraInstanceFactory.class);

    @Inject
    IConfiguration config;

    @Inject
    InstanceDataDAOCassandra dao;

    /**
     * Registers a new instance entry in Cassandra.
     *
     * @throws RuntimeException wrapping any DAO failure
     */
    @Override
    public RaigadInstance create(String app, String id, String instanceID, String hostname,
                                 String ip, String zone, String dc, String asgName,
                                 Map<String, Object> volumes) {
        try {
            logger.info("Creating entry for instance {} (node ID {}, hostname {}, IP {}) in {} ES cluster in {}, {}",
                    instanceID, id, hostname, ip, app, zone, dc);

            RaigadInstance raigadInstance = new RaigadInstance();
            raigadInstance.setAvailabilityZone(zone);
            raigadInstance.setHostIP(ip);
            raigadInstance.setHostName(hostname);
            raigadInstance.setId(id);
            raigadInstance.setInstanceId(instanceID);
            raigadInstance.setDC(dc);
            raigadInstance.setApp(app);
            raigadInstance.setAsg(asgName);

            dao.createInstanceEntry(raigadInstance);

            return raigadInstance;
        } catch (Exception e) {
            // BUG FIX: the original logged only e.getMessage(), losing the stack trace
            logger.error("Failed to create instance entry for " + instanceID, e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns all registered instances of the given cluster.
     */
    @Override
    public List<RaigadInstance> getAllIds(String appName) {
        List<RaigadInstance> raigadInstances = new ArrayList<>(dao.getAllInstances(appName));

        if (config.isDebugEnabled()) {
            for (RaigadInstance instance : raigadInstances) {
                logger.debug("Instance details: " + instance.getInstanceId());
            }
        }

        return raigadInstances;
    }

    @Override
    public RaigadInstance getInstance(String appName, String dc, String id) {
        return dao.getInstance(appName, dc, id);
    }

    /**
     * Sorts in place by availability zone first, then by node id.
     */
    @Override
    public void sort(List<RaigadInstance> list) {
        Collections.sort(list, new Comparator<RaigadInstance>() {
            @Override
            public int compare(RaigadInstance esInstance1, RaigadInstance esInstance2) {
                int azCompare = esInstance1.getAvailabilityZone().compareTo(
                        esInstance2.getAvailabilityZone());
                if (azCompare == 0) {
                    return esInstance1.getId().compareTo(esInstance2.getId());
                } else {
                    return azCompare;
                }
            }
        });
    }

    /**
     * Deletes the registry entry for a dead instance.
     *
     * @throws RuntimeException wrapping any DAO failure
     */
    @Override
    public void delete(RaigadInstance instance) {
        try {
            dao.deleteInstanceEntry(instance);
        } catch (Exception e) {
            // BUG FIX: log the throwable (not just its message) so the cause is diagnosable
            logger.error("Unable to deregister Raigad instance " + instance.getInstanceId(), e);
            throw new RuntimeException("Unable to deregister Raigad instance", e);
        }
    }

    @Override
    public void update(RaigadInstance arg0) {
        // Intentionally a no-op: updates are not supported by this factory
    }

    @Override
    public void attachVolumes(RaigadInstance arg0, String arg1, String arg2) {
        // Intentionally a no-op: volume attachment is not supported by this factory
    }
}
5,086
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/identity/HostSupplier.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.identity;

import com.google.common.base.Supplier;
import com.google.inject.ImplementedBy;
import com.netflix.astyanax.connectionpool.Host;

import java.util.List;

/**
 * Source of Astyanax {@link Host} lists for a given cluster.
 * Default binding resolves hosts via Eureka ({@link EurekaHostsSupplier}).
 */
@ImplementedBy(EurekaHostsSupplier.class)
public interface HostSupplier {
    /**
     * Returns a supplier that, when invoked, yields the current hosts of {@code clusterName}.
     *
     * @param clusterName the cluster to look up
     * @return a (re-evaluable) supplier of the cluster's hosts
     */
    Supplier<List<Host>> getSupplier(String clusterName);
}
5,087
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/identity/InstanceDataDAOCassandra.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.identity;

import com.google.common.base.Supplier;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolType;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.model.*;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;
import com.netflix.astyanax.util.TimeUUIDUtils;
import com.netflix.raigad.configuration.IConfiguration;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * Use bootstrap cluster to find nodes.
 *
 * Instance rows live in the "instances" column family, keyed "{app}_{dc}_{instanceId}".
 * Writes are guarded by a TTL-based two-phase lock in the "locks" column family:
 * a short-lived "choosing" row elects a single contender, then a longer-lived
 * lock row is taken. See {@link #getLock(RaigadInstance)}.
 */
@Singleton
public class InstanceDataDAOCassandra {
    private static final Logger logger = LoggerFactory.getLogger(InstanceDataDAOCassandra.class);

    // Column names within the "instances" column family
    private static final String CN_CLUSTER = "cluster";
    private static final String CN_AZ = "availabilityZone";
    private static final String CN_INSTANCEID = "instanceId";
    private static final String CN_HOSTNAME = "hostname";
    private static final String CN_IP = "ip";
    private static final String CN_LOCATION = "location";
    private static final String CN_ASGNAME = "asgname";
    private static final String CN_UPDATETIME = "updatetime";

    public static final String CF_NAME_INSTANCES = "instances";
    public static final String CF_NAME_LOCKS = "locks";

    private final Keyspace bootKeyspace;
    private final IConfiguration config;
    private final EurekaHostsSupplier eurekaHostsSupplier;
    private final String BOOT_CLUSTER;
    private final String KS_NAME;
    private final int thriftPortForAstyanax;
    private final AstyanaxContext<Keyspace> ctx;

    public static final ColumnFamily<String, String> CF_INSTANCES =
            new ColumnFamily<String, String>(CF_NAME_INSTANCES, StringSerializer.get(), StringSerializer.get());
    public static final ColumnFamily<String, String> CF_LOCKS =
            new ColumnFamily<String, String>(CF_NAME_LOCKS, StringSerializer.get(), StringSerializer.get());

    /**
     * Validates the bootstrap-cluster configuration and starts the Astyanax context,
     * choosing the Eureka-backed or config-listed host supplier per configuration.
     *
     * @throws ConnectionException if the Astyanax context cannot be started
     * @throws RuntimeException if required configuration values are missing
     */
    @Inject
    public InstanceDataDAOCassandra(IConfiguration config, EurekaHostsSupplier eurekaHostsSupplier)
            throws ConnectionException {
        this.config = config;

        BOOT_CLUSTER = config.getBootClusterName();
        if (BOOT_CLUSTER == null || BOOT_CLUSTER.isEmpty()) {
            throw new RuntimeException("Boot cluster can not be blank. Please use getBootClusterName() property");
        }

        KS_NAME = config.getCassandraKeyspaceName();
        if (KS_NAME == null || KS_NAME.isEmpty()) {
            throw new RuntimeException("Cassandra keyspace can not be blank. Please use getCassandraKeyspaceName() property");
        }

        thriftPortForAstyanax = config.getCassandraThriftPortForAstyanax();
        if (thriftPortForAstyanax <= 0) {
            throw new RuntimeException("Thrift port for Astyanax can not be blank. Please use getCassandraThriftPortForAstyanax() property");
        }

        this.eurekaHostsSupplier = eurekaHostsSupplier;

        if (config.isEurekaHostSupplierEnabled()) {
            ctx = initWithThriftDriverWithEurekaHostsSupplier();
        } else {
            ctx = initWithThriftDriverWithExternalHostsSupplier();
        }

        ctx.start();
        bootKeyspace = ctx.getClient();
    }

    /**
     * Writes a new instance row, if one does not already exist, under the distributed lock.
     * Note: despite the inline comment below, an existing key results in a silent return,
     * not an exception.
     */
    public void createInstanceEntry(RaigadInstance instance) throws Exception {
        logger.info("Creating new instance entry");

        String key = getRowKey(instance);
        // If the key exists throw exception
        if (getInstance(instance.getApp(), instance.getDC(), instance.getId()) != null) {
            logger.info(String.format("Key already exists: %s", key));
            return;
        }

        // Grab the lock
        getLock(instance);

        MutationBatch mutationBatch = bootKeyspace.prepareMutationBatch();
        ColumnListMutation<String> columnListMutation = mutationBatch.withRow(CF_INSTANCES, key);
        // null TTL => columns never expire
        columnListMutation.putColumn(CN_CLUSTER, instance.getApp(), null);
        columnListMutation.putColumn(CN_AZ, instance.getAvailabilityZone(), null);
        columnListMutation.putColumn(CN_INSTANCEID, instance.getInstanceId(), null);
        columnListMutation.putColumn(CN_HOSTNAME, instance.getHostName(), null);
        columnListMutation.putColumn(CN_IP, instance.getHostIP(), null);
        columnListMutation.putColumn(CN_LOCATION, instance.getDC(), null);
        columnListMutation.putColumn(CN_ASGNAME, instance.getAsg(), null);
        columnListMutation.putColumn(CN_UPDATETIME, TimeUUIDUtils.getUniqueTimeUUIDinMicros(), null);
        mutationBatch.execute();
    }

    /**
     * Returns the instance matching (cluster, region, instanceId), or null if absent.
     * Implemented as a linear scan over {@link #getAllInstances(String)}.
     */
    public RaigadInstance getInstance(String cluster, String region, String instanceId) {
        List<RaigadInstance> instances = getAllInstances(cluster);
        for (RaigadInstance instance : instances) {
            if (instance.getInstanceId().equals(instanceId) && instance.getDC().equals(region)) {
                return instance;
            }
        }
        return null;
    }

    /**
     * Reads all instance rows for a cluster. In multi-DC (or multi-DC tribe-source) mode
     * the location filter is dropped so instances from every region are returned.
     *
     * @throws RuntimeException wrapping any read failure
     */
    public List<RaigadInstance> getAllInstances(String cluster) {
        List<RaigadInstance> list = new ArrayList<RaigadInstance>();
        try {
            String selectClause;
            if (config.isMultiDC() || config.amISourceClusterForTribeNodeInMultiDC()) {
                selectClause = String.format("SELECT * FROM %s WHERE %s = '%s' ",
                        CF_NAME_INSTANCES, CN_CLUSTER, cluster);
            } else {
                selectClause = String.format("SELECT * FROM %s WHERE %s = '%s' AND %s = '%s' ",
                        CF_NAME_INSTANCES, CN_CLUSTER, cluster, CN_LOCATION, config.getDC());
            }

            if (config.isDebugEnabled()) {
                logger.debug("Getting nodes for {}: {}", cluster, selectClause);
            }

            // NOTE(review): the keyspace name (KS_NAME) is passed as the column family
            // name here and in findKey(); the CQL above already names the real table,
            // but confirm this is intentional for Astyanax CQL-over-thrift queries.
            final ColumnFamily<String, String> CF_INSTANCES_NEW = ColumnFamily.newColumnFamily(
                    KS_NAME, StringSerializer.get(), StringSerializer.get());

            OperationResult<CqlResult<String, String>> result =
                    bootKeyspace.prepareQuery(CF_INSTANCES_NEW).withCql(selectClause).execute();

            for (Row<String, String> row : result.getResult().getRows()) {
                list.add(transform(row.getColumns()));
            }
        } catch (Exception e) {
            logger.warn("Caught unknown exception while reading: {}", e.getMessage());
            throw new RuntimeException(e);
        }

        if (config.isDebugEnabled()) {
            for (RaigadInstance instance : list) {
                logger.debug("Read instance: {}", instance.toString());
            }
        }

        return list;
    }

    /**
     * Deletes an instance row plus its lock and choosing rows, under the distributed lock.
     */
    public void deleteInstanceEntry(RaigadInstance instance) throws Exception {
        logger.info("Deleting dead instance entry");

        // Acquire the lock first
        getLock(instance);

        // Delete the row
        String key = findKey(instance.getApp(), instance.getInstanceId(), instance.getDC());
        if (key == null) {
            return; // don't fail it
        }

        MutationBatch m = bootKeyspace.prepareMutationBatch();
        m.withRow(CF_INSTANCES, key).delete();
        m.execute();

        key = getLockingKey(instance);
        // Delete key
        m = bootKeyspace.prepareMutationBatch();
        m.withRow(CF_LOCKS, key).delete();
        m.execute();

        // Have to delete choosing key as well to avoid issues with delete
        // followed by immediate writes
        key = getChoosingKey(instance);
        m = bootKeyspace.prepareMutationBatch();
        m.withRow(CF_LOCKS, key).delete();
        m.execute();
    }

    /**
     * Sorts in place by availability zone, then by node id.
     */
    protected void sort(List<RaigadInstance> list) {
        Collections.sort(list, new Comparator<RaigadInstance>() {
            @Override
            public int compare(RaigadInstance esInstance1, RaigadInstance esInstance2) {
                int azCompare = esInstance1.getAvailabilityZone().compareTo(esInstance2.getAvailabilityZone());
                if (azCompare == 0) {
                    return esInstance1.getId().compareTo(esInstance2.getId());
                } else {
                    return azCompare;
                }
            }
        });
    }

    /*
     * To get a lock on the row - Create a choosing row and make sure there are
     * no contenders. If there are bail out. Also delete the column when bailing
     * out. - Once there are no contenders, grab the lock if it is not already
     * taken.
     */
    private void getLock(RaigadInstance instance) throws Exception {
        String choosingkey = getChoosingKey(instance);
        MutationBatch m = bootKeyspace.prepareMutationBatch();
        ColumnListMutation<String> clm = m.withRow(CF_LOCKS, choosingkey);

        // Expire in 6 sec
        clm.putColumn(instance.getInstanceId(), instance.getInstanceId(), new Integer(6));
        m.execute();

        // If anyone else wrote to the choosing row in the same window, back off
        int count = bootKeyspace.prepareQuery(CF_LOCKS).getKey(choosingkey).getCount().execute().getResult();
        if (count > 1) {
            // Need to delete my entry
            m.withRow(CF_LOCKS, choosingkey).deleteColumn(instance.getInstanceId());
            m.execute();
            throw new Exception(String.format("More than 1 contender for lock %s %d", choosingkey, count));
        }

        // Check whether someone else already holds the lock row
        String lockKey = getLockingKey(instance);
        OperationResult<ColumnList<String>> result =
                bootKeyspace.prepareQuery(CF_LOCKS).getKey(lockKey).execute();
        if (result.getResult().size() > 0
                && !result.getResult().getColumnByIndex(0).getName().equals(instance.getInstanceId())) {
            throw new Exception(String.format("Lock already taken %s", lockKey));
        }

        // Take the lock with a 600-second TTL, wait briefly, then verify we still
        // hold it exclusively (guards against a concurrent writer in the gap)
        clm = m.withRow(CF_LOCKS, lockKey);
        clm.putColumn(instance.getInstanceId(), instance.getInstanceId(), new Integer(600));
        m.execute();
        Thread.sleep(100);

        result = bootKeyspace.prepareQuery(CF_LOCKS).getKey(lockKey).execute();
        if (result.getResult().size() == 1
                && result.getResult().getColumnByIndex(0).getName().equals(instance.getInstanceId())) {
            logger.info("Got lock " + lockKey);
            return;
        } else {
            throw new Exception(String.format("Cannot insert lock %s", lockKey));
        }
    }

    /**
     * Returns the row key of the instance matching (cluster, instanceId, dc),
     * or null when no such row exists.
     *
     * @throws RuntimeException wrapping any read failure
     */
    public String findKey(String cluster, String instanceId, String dc) {
        try {
            final String selectClause = String.format(
                    "SELECT * FROM %s WHERE %s = '%s' and %s = '%s' and %s = '%s' ",
                    CF_NAME_INSTANCES, CN_CLUSTER, cluster, CN_INSTANCEID, instanceId, CN_LOCATION, dc);
            logger.info(selectClause);

            final ColumnFamily<String, String> CF_INSTANCES_NEW = ColumnFamily.newColumnFamily(KS_NAME,
                    StringSerializer.get(), StringSerializer.get());

            OperationResult<CqlResult<String, String>> result = bootKeyspace.prepareQuery(CF_INSTANCES_NEW)
                    .withCql(selectClause).execute();

            if (result == null || result.getResult().getRows().size() == 0) {
                return null;
            }

            Row<String, String> row = result.getResult().getRows().getRowByIndex(0);
            return row.getKey();
        } catch (Exception e) {
            logger.warn("Caught an Unknown Exception during find a row matching cluster[" + cluster
                    + "], id[" + instanceId + "], and region[" + dc + "] ... -> " + e.getMessage());
            throw new RuntimeException(e);
        }
    }

    /**
     * Builds a RaigadInstance from a row's columns. The update time is taken from
     * the write timestamp of the "cluster" column; the node id is re-derived as
     * "{location}.{instanceId}".
     */
    private RaigadInstance transform(ColumnList<String> columns) {
        RaigadInstance instance = new RaigadInstance();
        Map<String, String> columnMap = new HashMap<>();

        for (Column<String> column : columns) {
            columnMap.put(column.getName(), column.getStringValue());
            if (column.getName().equals(CN_CLUSTER)) {
                instance.setUpdatetime(column.getTimestamp());
            }
        }

        instance.setId(columnMap.get(CN_LOCATION) + "." + columnMap.get(CN_INSTANCEID));
        instance.setApp(columnMap.get(CN_CLUSTER));
        instance.setAvailabilityZone(columnMap.get(CN_AZ));
        instance.setHostName(columnMap.get(CN_HOSTNAME));
        instance.setHostIP(columnMap.get(CN_IP));
        instance.setInstanceId(columnMap.get(CN_INSTANCEID));
        instance.setAsg(columnMap.get(CN_ASGNAME));
        instance.setDC(columnMap.get(CN_LOCATION));

        return instance;
    }

    // "{app}_{dc}_{instanceId}-choosing" — pre-lock contention row
    private String getChoosingKey(RaigadInstance instance) {
        return instance.getApp() + "_" + instance.getDC() + "_" + instance.getInstanceId() + "-choosing";
    }

    // "{app}_{dc}_{instanceId}-lock" — the actual lock row
    private String getLockingKey(RaigadInstance instance) {
        return instance.getApp() + "_" + instance.getDC() + "_" + instance.getInstanceId() + "-lock";
    }

    // "{app}_{dc}_{instanceId}" — the instance data row
    private String getRowKey(RaigadInstance instance) {
        return instance.getApp() + "_" + instance.getDC() + "_" + instance.getInstanceId();
    }

    /**
     * Astyanax context whose hosts are resolved through Eureka.
     */
    private AstyanaxContext<Keyspace> initWithThriftDriverWithEurekaHostsSupplier() {
        logger.info("Boot cluster (BOOT_CLUSTER) is {}, keyspace name (KS_NAME) is {}", BOOT_CLUSTER, KS_NAME);
        return new AstyanaxContext.Builder()
                .forCluster(BOOT_CLUSTER)
                .forKeyspace(KS_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(
                                        NodeDiscoveryType.DISCOVERY_SERVICE))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(
                                "MyConnectionPool")
                                .setMaxConnsPerHost(3)
                                .setPort(thriftPortForAstyanax))
                .withHostSupplier(eurekaHostsSupplier.getSupplier(BOOT_CLUSTER))
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());
    }

    /**
     * Astyanax context whose hosts come from the static config-supplied host list.
     */
    private AstyanaxContext<Keyspace> initWithThriftDriverWithExternalHostsSupplier() {
        logger.info("Boot cluster (BOOT_CLUSTER) is {}, keyspace name (KS_NAME) is {}", BOOT_CLUSTER, KS_NAME);
        return new AstyanaxContext.Builder()
                .forCluster(BOOT_CLUSTER)
                .forKeyspace(KS_NAME)
                .withAstyanaxConfiguration(
                        new AstyanaxConfigurationImpl()
                                .setDiscoveryType(
                                        NodeDiscoveryType.DISCOVERY_SERVICE)
                                .setConnectionPoolType(
                                        ConnectionPoolType.ROUND_ROBIN))
                .withConnectionPoolConfiguration(
                        new ConnectionPoolConfigurationImpl(
                                "MyConnectionPool")
                                .setMaxConnsPerHost(3)
                                .setPort(thriftPortForAstyanax))
                .withHostSupplier(getSupplier())
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());
    }

    /**
     * Builds a host supplier from the comma-separated host list in configuration.
     */
    private Supplier<List<Host>> getSupplier() {
        return new Supplier<List<Host>>() {
            @Override
            public List<Host> get() {
                List<Host> hosts = new ArrayList<>();

                List<String> cassandraHostnames =
                        new ArrayList<>(Arrays.asList(StringUtils.split(config.getCommaSeparatedCassandraHostNames(), ",")));

                if (cassandraHostnames.size() == 0) {
                    throw new RuntimeException("Cassandra host names can not be blank, at least one host is needed."
                            + "Please use getCommaSeparatedCassandraHostNames() property.");
                }

                for (String cassHost : cassandraHostnames) {
                    logger.info("Adding Cassandra host {}", cassHost);
                    hosts.add(new Host(cassHost, thriftPortForAstyanax));
                }

                return hosts;
            }
        };
    }
}
5,088
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/identity/IMembership.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.identity;

import com.google.inject.ImplementedBy;
import com.netflix.raigad.aws.AWSMembership;

import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * Interface to manage membership meta information such as size of RAC, list of
 * nodes in RAC etc. Also perform ACL updates used in multi-regional clusters.
 * Default binding is the AWS implementation ({@link AWSMembership}).
 */
@ImplementedBy(AWSMembership.class)
public interface IMembership {
    /**
     * Get a list of instances per RAC.
     *
     * @param autoScalingGroupNames the group names to query
     * @return map from group name to its member instance ids
     */
    Map<String, List<String>> getRacMembership(Collection<String> autoScalingGroupNames);

    /**
     * @return Size of current RAC
     */
    int getRacMembershipSize();

    /**
     * @return number of RACs
     */
    int getRacCount();

    /**
     * Add security group ACLs for the given port range.
     *
     * @param listIPs CIDR/IP entries to allow
     * @param from lowest port in the range
     * @param to highest port in the range
     */
    void addACL(Collection<String> listIPs, int from, int to);

    /**
     * Remove security group ACLs for the given port range.
     *
     * @param listIPs CIDR/IP entries to revoke
     * @param from lowest port in the range
     * @param to highest port in the range
     */
    void removeACL(Collection<String> listIPs, int from, int to);

    /**
     * List all ACLs currently allowed for the given port range.
     */
    List<String> listACL(int from, int to);

    /**
     * Expand the membership size by {@code count}.
     *
     * @param count number of additional members
     */
    void expandRacMembership(int count);

    /**
     * Return from-to ports for given ACL.
     *
     * @param acl the ACL entry to look up
     * @return ACL to ports map (from-to), eg. 1.2.3.4 -> 5001, 5002
     */
    Map<String, List<Integer>> getACLPortMap(String acl);
}
5,089
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/identity/IRaigadInstanceFactory.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.identity;

import java.util.List;
import java.util.Map;

/**
 * Interface for managing Elasticsearch instance data.
 * Provides functionality to register, update, delete or list instances from the registry.
 */
public interface IRaigadInstanceFactory {
    /**
     * Return a list of all Elasticsearch server nodes registered.
     *
     * @param appName the cluster name
     * @return a list of all nodes in {@code appName}
     */
    List<RaigadInstance> getAllIds(String appName);

    /**
     * Return the Elasticsearch server node with the given {@code id}.
     *
     * @param appName the cluster name
     * @param dc the datacenter/region of the node
     * @param id the node id
     * @return the node with the given {@code id}, or {@code null} if none found
     */
    RaigadInstance getInstance(String appName, String dc, String id);

    /**
     * Create/Register an instance of the server with its info.
     *
     * @param app the cluster name
     * @param id the registry node id
     * @param instanceID the cloud instance id
     * @param hostname the host name
     * @param ip the host IP
     * @param rac the availability zone / rack
     * @param dc the datacenter/region
     * @param asgname the auto-scaling group name
     * @param volumes optional volume metadata, may be {@code null}
     * @return the new node
     */
    RaigadInstance create(String app, String id, String instanceID, String hostname, String ip,
                          String rac, String dc, String asgname, Map<String, Object> volumes);

    /**
     * Delete the server node from the registry.
     *
     * @param inst the node to delete
     */
    void delete(RaigadInstance inst);

    /**
     * Update the details of the server node in registry.
     *
     * @param inst the node to update
     */
    void update(RaigadInstance inst);

    /**
     * Sort the list by instance ID.
     *
     * @param instances the list of nodes to sort in place
     */
    void sort(List<RaigadInstance> instances);

    /**
     * Attach volumes if required.
     *
     * @param instance the node to attach volumes to
     * @param mountPath the mount path
     * @param device the device name
     */
    void attachVolumes(RaigadInstance instance, String mountPath, String device);
}
5,090
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/identity/EurekaHostsSupplier.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.identity;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.netflix.appinfo.AmazonInfo;
import com.netflix.appinfo.AmazonInfo.MetaDataKey;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.astyanax.connectionpool.Host;
import com.netflix.discovery.DiscoveryClient;
import com.netflix.discovery.shared.Application;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;

/**
 * Simple class that implements HostSupplier. It provides a Supplier<List<Host>>
 * using the {DiscoveryManager} which is the eureka client.
 *
 * Note that the class needs the eureka application name to discover all instances for that application.
 */
@Singleton
public class EurekaHostsSupplier implements HostSupplier {
    private static final Logger LOG = LoggerFactory.getLogger(EurekaHostsSupplier.class);

    private final DiscoveryClient discoveryClient;

    @Inject
    public EurekaHostsSupplier(DiscoveryClient discoveryClient) {
        this.discoveryClient = discoveryClient;
    }

    /**
     * Returns a supplier that, on each call, queries Eureka for {@code clusterName}
     * (upper-cased) and maps every UP instance to an Astyanax {@link Host}.
     * Returns an empty list when the application is unknown or has no instances.
     */
    @Override
    public Supplier<List<Host>> getSupplier(final String clusterName) {
        return new Supplier<List<Host>>() {
            @Override
            public List<Host> get() {
                if (discoveryClient == null) {
                    LOG.error("Discovery client cannot be null");
                    throw new RuntimeException("EurekaHostsSupplier needs a non-null DiscoveryClient");
                }
                LOG.debug("Raigad fetching instance list for app: " + clusterName);

                // Eureka registers application names in upper case
                Application app = discoveryClient.getApplication(clusterName.toUpperCase());
                List<Host> hosts = new ArrayList<Host>();
                if (app == null) {
                    LOG.warn("Cluster '{}' not found in eureka", clusterName);
                    return hosts;
                }

                List<InstanceInfo> ins = app.getInstances();
                if (ins == null || ins.isEmpty()) {
                    LOG.warn("Cluster '{}' found in eureka but has no instances", clusterName);
                    return hosts;
                }

                // Keep only instances whose Eureka status is UP, then convert each to a Host
                hosts = Lists.newArrayList(Collections2.transform(
                        Collections2.filter(ins, new Predicate<InstanceInfo>() {
                            @Override
                            public boolean apply(InstanceInfo input) {
                                return input.getStatus() == InstanceInfo.InstanceStatus.UP;
                            }
                        }), new Function<InstanceInfo, Host>() {
                            @Override
                            public Host apply(InstanceInfo info) {
                                // NOTE(review): this assumes EC2-style hostnames like
                                // "ec2-1-2-3-4.region.compute.amazonaws.com", so that splitting
                                // the first label on '-' yields the four IP octets at parts[1..4].
                                // Any other hostname shape throws ArrayIndexOutOfBoundsException
                                // here — TODO confirm the expected hostname format.
                                String[] parts = StringUtils.split(
                                        StringUtils.split(info.getHostName(), ".")[0], '-');

                                Host host = new Host(info.getHostName(), info.getPort())
                                        .addAlternateIpAddress(
                                                StringUtils.join(new String[] { parts[1], parts[2],
                                                        parts[3], parts[4] }, "."))
                                        .addAlternateIpAddress(info.getIPAddr())
                                        .setId(info.getId());

                                try {
                                    // Rack = availability zone, when AWS metadata is available
                                    if (info.getDataCenterInfo() instanceof AmazonInfo) {
                                        AmazonInfo amazonInfo = (AmazonInfo) info.getDataCenterInfo();
                                        host.setRack(amazonInfo.get(MetaDataKey.availabilityZone));
                                    }
                                } catch (Throwable t) {
                                    // Best-effort: rack assignment failure must not drop the host
                                    LOG.error("Error getting rack for host " + host.getName(), t);
                                }

                                return host;
                            }
                        }));

                LOG.debug("Raigad found hosts from eureka - num hosts: " + hosts.size());
                return hosts;
            }
        };
    }
}
5,091
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/resources/ElasticsearchAdmin.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.resources;

import com.google.inject.Inject;
import com.netflix.raigad.configuration.IConfiguration;
import com.netflix.raigad.defaultimpl.IElasticsearchProcess;
import com.netflix.raigad.indexmanagement.ElasticsearchIndexManager;
import com.netflix.raigad.utils.SystemUtils;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;

/**
 * Administrative REST endpoints for the local Elasticsearch process:
 * start/stop, index management, repository listing, and shard-allocation toggles.
 */
@Path("/v1/esadmin")
@Produces(MediaType.APPLICATION_JSON)
public class ElasticsearchAdmin {
    private static final Logger logger = LoggerFactory.getLogger(ElasticsearchAdmin.class);

    private static final String REST_SUCCESS = "[\"ok\"]";
    private static final String SHARD_REALLOCATION_PROPERTY = "cluster.routing.allocation.enable";

    private final IConfiguration config;
    private final IElasticsearchProcess esProcess;
    private final ElasticsearchIndexManager esIndexManager;

    @Inject
    public ElasticsearchAdmin(IConfiguration config, IElasticsearchProcess esProcess,
                              ElasticsearchIndexManager esIndexManager) {
        this.config = config;
        this.esProcess = esProcess;
        this.esIndexManager = esIndexManager;
    }

    /** Starts the local Elasticsearch process. */
    @GET
    @Path("/start")
    public Response esStart() throws IOException {
        logger.info("Starting Elasticsearch now through a REST call...");
        esProcess.start();
        return Response.ok(REST_SUCCESS, MediaType.APPLICATION_JSON).build();
    }

    /** Stops the local Elasticsearch process. */
    @GET
    @Path("/stop")
    public Response esStop() throws IOException {
        logger.info("Stopping Elasticsearch now through a REST call...");
        esProcess.stop();
        return Response.ok(REST_SUCCESS, MediaType.APPLICATION_JSON).build();
    }

    /** Triggers one run of the index manager (index creation/retention). */
    @GET
    @Path("/run_indexmanager")
    public Response manageIndex() throws Exception {
        logger.info("Running index manager through a REST call...");
        esIndexManager.runIndexManagement();
        return Response.ok(REST_SUCCESS, MediaType.APPLICATION_JSON).build();
    }

    /** Returns the snapshot repositories registered with the local node. */
    @GET
    @Path("/existingRepositories")
    public Response esExistingRepositories() throws Exception {
        logger.info("Retrieving existing repositories through a REST call...");
        String url = "http://127.0.0.1:" + config.getHttpPort() + "/_snapshot/";
        String response = SystemUtils.runHttpGetCommand(url);
        JSONObject jsonObject = (JSONObject) new JSONParser().parse(response);
        return Response.ok(jsonObject, MediaType.APPLICATION_JSON).build();
    }

    /**
     * Enables shard allocation via a cluster-settings update.
     *
     * @param type "transient" or "persistent"
     * @throws IOException if {@code type} is neither of the two allowed values
     */
    @GET
    @Path("/shard_allocation_enable/{type}")
    public Response esShardAllocationEnable(@PathParam("type") String type) throws IOException {
        logger.info("Enabling shard allocation through a REST call...");
        // NOTE: unlike the disable endpoint, this one has always returned the raw
        // Elasticsearch response body; preserved for compatibility with callers.
        String response = updateShardAllocation(type, "all");
        return Response.ok(response, MediaType.APPLICATION_JSON).build();
    }

    /**
     * Disables shard allocation via a cluster-settings update.
     *
     * @param type "transient" or "persistent"
     * @throws IOException if {@code type} is neither of the two allowed values
     */
    @GET
    @Path("/shard_allocation_disable/{type}")
    public Response esShardAllocationDisable(@PathParam("type") String type) throws IOException {
        logger.info("Disabling shard allocation through a REST call...");
        updateShardAllocation(type, "none");
        return Response.ok(REST_SUCCESS, MediaType.APPLICATION_JSON).build();
    }

    /**
     * PUTs a cluster-settings update setting {@value #SHARD_REALLOCATION_PROPERTY}
     * to {@code value} in the given settings scope.
     *
     * @param type  "transient" or "persistent"
     * @param value allocation mode, e.g. "all" or "none"
     * @return the raw Elasticsearch response body
     * @throws IOException if {@code type} is invalid
     */
    private String updateShardAllocation(String type, String value) throws IOException {
        if (!type.equalsIgnoreCase("transient") && !type.equalsIgnoreCase("persistent")) {
            throw new IOException("Parameter must be equal to transient or persistent");
        }

        String url = "http://127.0.0.1:" + config.getHttpPort() + "/_cluster/settings";

        JSONObject settings = new JSONObject();
        JSONObject property = new JSONObject();
        property.put(SHARD_REALLOCATION_PROPERTY, value);
        settings.put(type, property);

        return SystemUtils.runHttpPutCommand(url, settings.toJSONString());
    }
}
5,092
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/resources/NodeHealthCheck.java
/** * Copyright 2017 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.raigad.resources; import com.netflix.raigad.utils.ElasticsearchProcessMonitor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; /** * Created by alfasi on 4/23/15. */ @Path("/v1/healthcheck") @Produces(MediaType.APPLICATION_JSON) public class NodeHealthCheck { private static final Logger logger = LoggerFactory.getLogger(NodeHealthCheck.class); private static final String REST_SUCCESS = "[\"ok\"]"; @GET @Path("/isesprocessrunning") public Response checkHealth() { logger.info("Got REST call to check Node-health..."); if (!ElasticsearchProcessMonitor.isElasticsearchRunning()) { return Response.serverError().status(500).build(); } return Response.ok(REST_SUCCESS, MediaType.APPLICATION_JSON).build(); } }
5,093
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/resources/ElasticsearchConfig.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.resources;

import com.google.gson.JsonObject;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import com.netflix.raigad.configuration.IConfigSource;
import com.netflix.raigad.configuration.IConfiguration;
import com.netflix.raigad.identity.RaigadInstance;
import com.netflix.raigad.startup.RaigadServer;
import com.netflix.raigad.utils.ElasticsearchUtils;
import com.netflix.raigad.utils.TribeUtils;
import org.apache.commons.lang.StringUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;

/**
 * This servlet will provide the configuration API service as and when Elasticsearch requests for it.
 */
@Path("/v1/esconfig")
public class ElasticsearchConfig {
    private static final Logger logger = LoggerFactory.getLogger(ElasticsearchConfig.class);

    private final RaigadServer raigadServer;
    private final TribeUtils tribeUtils;
    private final IConfigSource configSrc;

    @Inject
    public ElasticsearchConfig(RaigadServer raigadServer, TribeUtils tribeUtils,
                               @Named("custom") IConfigSource configSrc, IConfiguration config) {
        this.raigadServer = raigadServer;
        this.tribeUtils = tribeUtils;
        this.configSrc = configSrc;
        this.configSrc.initialize(config);
    }

    /**
     * Returns the nodes of this cluster as JSON, or a 500 when the instance
     * manager cannot supply them.
     */
    @GET
    @Path("/get_nodes")
    @Produces(MediaType.TEXT_PLAIN)
    public Response getNodes() {
        try {
            logger.info("Getting cluster nodes");

            final List<RaigadInstance> instances = raigadServer.getInstanceManager().getAllInstances();
            if (instances == null) {
                logger.error("Error getting cluster nodes");
                return Response.serverError().build();
            }

            logger.info("Got {} instances", instances.size());

            JSONObject raigadJson = ElasticsearchUtils.transformRaigadInstanceToJson(instances);
            return Response.ok(raigadJson.toString()).build();
        } catch (Exception e) {
            logger.error("Error getting nodes (getNodes)", e);
            return Response.serverError().build();
        }
    }

    /**
     * Returns the nodes of the source tribe cluster identified by {@code id}.
     * The tribe ID is mapped to a cluster name via the Elasticsearch YAML config.
     */
    @GET
    @Path("/get_tribe_nodes/{id}")
    @Produces(MediaType.TEXT_PLAIN)
    public Response getTribeNodes(@PathParam("id") String id) {
        try {
            logger.info("Getting nodes for the source tribe cluster [{}]", id);

            // Find source cluster name from the tribe ID by reading YAML file
            String sourceTribeClusterName = tribeUtils.getTribeClusterNameFromId(id);
            if (StringUtils.isEmpty(sourceTribeClusterName)) {
                logger.error("Source tribe cluster name is null or empty, check configuration");
                return Response.serverError().build();
            }

            logger.info("Found source tribe cluster {} with ID [{}]", sourceTribeClusterName, id);

            final List<RaigadInstance> instances =
                    raigadServer.getInstanceManager().getAllInstancesPerCluster(sourceTribeClusterName);
            if (instances == null) {
                logger.error("Error getting source tribe cluster nodes for {}", sourceTribeClusterName);
                return Response.serverError().build();
            }

            logger.info("Got {} instances for {}", instances.size(), sourceTribeClusterName);

            JSONObject raigadJson = ElasticsearchUtils.transformRaigadInstanceToJson(instances);
            return Response.ok(raigadJson.toString()).build();
        } catch (Exception e) {
            logger.error("Exception getting nodes (getTribeNodes)", e);
            return Response.serverError().build();
        }
    }

    /**
     * A means to fetch Fast Properties via REST.
     *
     * @param propNames comma-separated list of property names
     * @return JSON object mapping each resolvable property name to its value;
     *         204 when no names were supplied
     */
    @GET
    @Path("/get_prop/{names}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getProperty(@PathParam("names") String propNames) {
        if (propNames.isEmpty()) {
            return Response.status(Response.Status.NO_CONTENT).build();
        }

        final JsonObject fastPropResults = new JsonObject();
        final String[] pNamesSplit = propNames.split(",");

        Arrays.stream(pNamesSplit).forEach(propName -> {
            try {
                String value = this.configSrc.get(propName);
                fastPropResults.addProperty(propName, value);
            } catch (Exception e) {
                // Bug fix: the original built an error Response here and discarded
                // it, silently swallowing the failure. Log it instead; the property
                // is simply omitted from the result, as before.
                logger.error("Exception fetching property {}", propName, e);
            }
        });

        return Response.ok(fastPropResults.toString()).build();
    }
}
5,094
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/resources/InjectedWebListener.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.resources;

import com.google.inject.AbstractModule;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.Scopes;
import com.google.inject.name.Names;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.netflix.governator.guice.LifecycleInjector;
import com.netflix.governator.lifecycle.LifecycleManager;
import com.netflix.raigad.aws.IAMCredential;
import com.netflix.raigad.aws.ICredential;
import com.netflix.raigad.backup.AbstractRepository;
import com.netflix.raigad.backup.AbstractRepositorySettingsParams;
import com.netflix.raigad.backup.S3Repository;
import com.netflix.raigad.backup.S3RepositorySettingsParams;
import com.netflix.raigad.configuration.CompositeConfigSource;
import com.netflix.raigad.configuration.CustomConfigSource;
import com.netflix.raigad.configuration.IConfigSource;
import com.netflix.raigad.configuration.IConfiguration;
import com.netflix.raigad.identity.CassandraInstanceFactory;
import com.netflix.raigad.identity.EurekaHostsSupplier;
import com.netflix.raigad.identity.HostSupplier;
import com.netflix.raigad.identity.IRaigadInstanceFactory;
import com.netflix.raigad.scheduler.GuiceJobFactory;
import com.netflix.raigad.startup.RaigadServer;
import com.sun.jersey.api.core.PackagesResourceConfig;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.spi.container.servlet.ServletContainer;
import org.quartz.SchedulerFactory;
import org.quartz.impl.StdSchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Servlet context listener that bootstraps the Guice/Governator injector,
 * wires all Raigad bindings, and starts the Raigad server and its jobs.
 */
public class InjectedWebListener extends GuiceServletContextListener {
    protected static final Logger logger = LoggerFactory.getLogger(InjectedWebListener.class);

    @Override
    protected Injector getInjector() {
        List<Module> modules = new ArrayList<>();
        modules.add(new JaxServletModule());
        modules.add(new RaigadGuiceModule());

        Injector injector;
        try {
            injector = LifecycleInjector.builder().withModules(modules).build().createInjector();
            startJobs(injector);
            LifecycleManager lifecycleManager = injector.getInstance(LifecycleManager.class);
            lifecycleManager.start();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }

        return injector;
    }

    /** Initializes configuration and the Raigad server once the injector exists. */
    private void startJobs(Injector injector) throws Exception {
        injector.getInstance(IConfiguration.class).initialize();
        logger.info("** Now starting to initialize Raigad server from OSS");
        injector.getInstance(RaigadServer.class).initialize();
    }

    /** Maps the Jersey REST resources under /REST via the Guice servlet container. */
    private static class JaxServletModule extends ServletModule {
        @Override
        protected void configureServlets() {
            Map<String, String> initParams = new HashMap<>();
            initParams.put(PackagesResourceConfig.PROPERTY_PACKAGES, "unbound");
            initParams.put("com.sun.jersey.config.property.packages", "com.netflix.raigad.resources");
            initParams.put(ServletContainer.PROPERTY_FILTER_CONTEXT_PATH, "/REST");
            serve("/REST/*").with(GuiceContainer.class, initParams);
        }
    }

    /** Core Raigad bindings: identity, credentials, backup repositories, scheduling, config. */
    private static class RaigadGuiceModule extends AbstractModule {
        @Override
        protected void configure() {
            logger.info("** Binding OSS Config classes.");

            // Fix bug in Jersey-Guice integration exposed by child injectors
            binder().bind(GuiceContainer.class).asEagerSingleton();
            binder().bind(GuiceJobFactory.class).asEagerSingleton();

            binder().bind(IRaigadInstanceFactory.class).to(CassandraInstanceFactory.class);

            // TODO: Use config.getCredentialProvider() instead of IAMCredential
            binder().bind(ICredential.class).to(IAMCredential.class);

            binder().bind(AbstractRepository.class)
                    .annotatedWith(Names.named("s3")).to(S3Repository.class);
            binder().bind(AbstractRepositorySettingsParams.class)
                    .annotatedWith(Names.named("s3")).to(S3RepositorySettingsParams.class);

            bind(SchedulerFactory.class).to(StdSchedulerFactory.class).asEagerSingleton();
            bind(HostSupplier.class).to(EurekaHostsSupplier.class).in(Scopes.SINGLETON);

            binder().bind(IConfigSource.class)
                    .annotatedWith(Names.named("custom")).to(CompositeConfigSource.class);
        }
    }
}
5,095
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/resources/ElasticsearchBackup.java
/** * Copyright 2017 Netflix, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.raigad.resources; import com.google.inject.Inject; import com.netflix.raigad.backup.RestoreBackupManager; import com.netflix.raigad.backup.SnapshotBackupManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; @Path("/v1/esbackup") @Produces(MediaType.APPLICATION_JSON) public class ElasticsearchBackup { private static final Logger logger = LoggerFactory.getLogger(ElasticsearchBackup.class); private static final String REST_SUCCESS = "[\"ok\"]"; private static final String REST_REPOSITORY_NAME = "repository_name"; private static final String REST_REPOSITORY_TYPE = "repository_type"; private static final String REST_SNAPSHOT_NAME = "snapshot"; private static final String REST_INDICES_NAME = "indices"; private static final String REST_RESTORE_RENAME_PATTERN = "rename_pattern"; private static final String REST_RESTORE_RENAME_REPLACEMENT = "rename_replacement"; private final SnapshotBackupManager snapshotBackupManager; private final RestoreBackupManager restoreBackupManager; @Inject public ElasticsearchBackup(SnapshotBackupManager snapshotBackupManager, RestoreBackupManager restoreBackupManager) { this.snapshotBackupManager = snapshotBackupManager; this.restoreBackupManager = 
restoreBackupManager; } @GET @Path("/do_snapshot") public Response snapshot() throws Exception { logger.info("Running snapshot through a REST call..."); snapshotBackupManager.runSnapshotBackup(); return Response.ok(REST_SUCCESS, MediaType.APPLICATION_JSON).build(); } @GET @Path("/do_restore") public Response restore(@QueryParam(REST_REPOSITORY_NAME) String repoName, @QueryParam(REST_REPOSITORY_TYPE) String repoType, @QueryParam(REST_SNAPSHOT_NAME) String snapName, @QueryParam(REST_INDICES_NAME) String indicesName) throws Exception { logger.info("Running restore through a REST call..."); restoreBackupManager.runRestore(repoName, repoType, snapName, indicesName, null, null); return Response.ok(REST_SUCCESS, MediaType.APPLICATION_JSON).build(); } @GET @Path("/do_restore_with_rename") public Response restoreWithRename(@QueryParam(REST_REPOSITORY_NAME) String repoName, @QueryParam(REST_REPOSITORY_TYPE) String repoType, @QueryParam(REST_SNAPSHOT_NAME) String snapName, @QueryParam(REST_INDICES_NAME) String indicesName, @QueryParam(REST_RESTORE_RENAME_PATTERN) String renamePattern, @QueryParam(REST_RESTORE_RENAME_REPLACEMENT) String renameReplacement) throws Exception { logger.info("Running Restore with rename through REST call ..."); restoreBackupManager.runRestore(repoName, repoType, snapName, indicesName, renamePattern, renameReplacement); return Response.ok(REST_SUCCESS, MediaType.APPLICATION_JSON).build(); } }
5,096
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/resources/SecurityGroupAdmin.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.resources;

import com.google.inject.Inject;
import com.netflix.raigad.identity.IMembership;
import org.apache.commons.validator.routines.InetAddressValidator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.Collections;

/**
 * This http endpoint allows direct updates (adding/removing) (CIDR) IP addresses and port
 * ranges to the security group for this app.
 */
@Path("/v1/secgroup")
@Produces(MediaType.TEXT_PLAIN)
public class SecurityGroupAdmin {
    private static final Logger log = LoggerFactory.getLogger(SecurityGroupAdmin.class);

    private static final Integer DEFAULT_MASK = 32;

    private final IMembership membership;

    @Inject
    public SecurityGroupAdmin(IMembership membership) {
        this.membership = membership;
    }

    /**
     * Adds a CIDR/port-range ACL to the security group.
     *
     * @param ipAddress IP address to allow (must be a valid address)
     * @param mask      CIDR mask; masks below /8 (or missing) are replaced with /32
     * @param fromPort  start of the port range
     * @param toPort    end of the port range
     */
    @POST
    public Response addACL(
            @QueryParam("ip") String ipAddress,
            @QueryParam("mask") Integer mask,
            @QueryParam("fromPort") int fromPort,
            @QueryParam("toPort") int toPort) {
        if (!InetAddressValidator.getInstance().isValid(ipAddress)) {
            // Bug fix: the format string had no {} placeholder, so the offending
            // address was never logged
            log.error("Invalid IP address: {}", ipAddress);
            return Response.status(Response.Status.BAD_REQUEST).build();
        }

        if (mask == null || mask < 8) {
            log.info("IP mask is too wide or not provided, using /32");
            mask = DEFAULT_MASK;
        }

        try {
            membership.addACL(Collections.singletonList(toCidr(ipAddress, mask)), fromPort, toPort);
        } catch (Exception e) {
            log.error("Error adding ACL to a security group", e);
            return Response.serverError().build();
        }

        return Response.ok().build();
    }

    /**
     * Removes a CIDR/port-range ACL from the security group.
     *
     * @param ipAddress IP address to remove (must be a valid address)
     * @param mask      CIDR mask; defaults to /32 when missing
     * @param fromPort  start of the port range
     * @param toPort    end of the port range
     */
    @DELETE
    public Response removeACL(
            @QueryParam("ip") String ipAddress,
            @QueryParam("mask") Integer mask,
            @QueryParam("fromPort") int fromPort,
            @QueryParam("toPort") int toPort) {
        if (!InetAddressValidator.getInstance().isValid(ipAddress)) {
            // Bug fix: missing {} placeholder, as in addACL
            log.error("Invalid IP address: {}", ipAddress);
            return Response.status(Response.Status.BAD_REQUEST).build();
        }

        if (mask == null) {
            log.info("IP mask not provided, using /32");
            mask = DEFAULT_MASK;
        }

        try {
            membership.removeACL(Collections.singletonList(toCidr(ipAddress, mask)), fromPort, toPort);
        } catch (Exception e) {
            log.error("Error removing ACL from a security group", e);
            return Response.serverError().build();
        }

        return Response.ok().build();
    }

    /** Formats an address and mask as CIDR notation, e.g. "10.0.0.1/32". */
    private static String toCidr(String ipAddress, int mask) {
        return String.format("%s/%d", ipAddress, mask);
    }
}
5,097
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/scheduler/TaskTimer.java
/**
 * Copyright 2017 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.scheduler;

import org.quartz.Trigger;

import java.text.ParseException;

/**
 * Interface to represent time/interval
 */
public interface TaskTimer {
    /**
     * @return the Quartz trigger describing when the associated task should fire
     * @throws ParseException if the underlying schedule expression is invalid
     */
    // Interface members are implicitly public; the redundant modifier was removed
    Trigger getTrigger() throws ParseException;
}
5,098
0
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad
Create_ds/Raigad/raigad/src/main/java/com/netflix/raigad/scheduler/CronTimer.java
/**
 * Copyright 2017 Netflix, Inc.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.raigad.scheduler;

import org.apache.commons.lang.StringUtils;
import org.quartz.CronTrigger;
import org.quartz.Scheduler;
import org.quartz.Trigger;

import java.text.ParseException;

/**
 * Runs jobs at the specified absolute time and frequency
 */
public class CronTimer implements TaskTimer {
    private String cronExpression;
    private String triggerName;

    public enum DayOfWeek {
        SUN, MON, TUE, WED, THU, FRI, SAT
    }

    /**
     * Hourly cron: fires at the given minute and second of every hour.
     */
    public CronTimer(int minute, int sec) {
        cronExpression = sec + " " + minute + " * * * ?";
    }

    /**
     * Hourly cron with explicit TriggerName.
     */
    public CronTimer(int minute, int sec, String triggerName) {
        // Delegate to the hourly constructor so the expression is built in one place
        this(minute, sec);
        this.triggerName = triggerName;
    }

    /**
     * Daily cron: fires at the given hour, minute, and second of every day.
     */
    public CronTimer(int hour, int minute, int sec) {
        cronExpression = sec + " " + minute + " " + hour + " * * ?";
    }

    /**
     * Daily cron with explicit TriggerName.
     */
    public CronTimer(int hour, int minute, int sec, String triggerName) {
        // Delegate to the daily constructor so the expression is built in one place
        this(hour, minute, sec);
        this.triggerName = triggerName;
    }

    /**
     * Weekly cron: fires on the given day of week at the given time.
     */
    public CronTimer(DayOfWeek dayOfWeek, int hour, int minute, int sec) {
        cronExpression = sec + " " + minute + " " + hour + " * * " + dayOfWeek;
    }

    /**
     * Arbitrary cron expression, used verbatim.
     */
    public CronTimer(String expression) {
        this.cronExpression = expression;
    }

    /**
     * @return a Quartz trigger for the configured cron expression; when a
     *         trigger name was supplied it is appended to "CronTrigger" so
     *         multiple timers get distinct trigger identities
     * @throws ParseException if the cron expression is invalid
     */
    @Override
    public Trigger getTrigger() throws ParseException {
        if (StringUtils.isNotBlank(triggerName)) {
            return new CronTrigger("CronTrigger" + triggerName, Scheduler.DEFAULT_GROUP, cronExpression);
        } else {
            return new CronTrigger("CronTrigger", Scheduler.DEFAULT_GROUP, cronExpression);
        }
    }
}
5,099