index
int64
0
0
repo_id
stringlengths
9
205
file_path
stringlengths
31
246
content
stringlengths
1
12.2M
__index_level_0__
int64
0
10k
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/result/QueriesEdgeResult.java
package com.amazonaws.services.neptune.propertygraph.io.result;

import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.structure.Direction;
import org.apache.tinkerpop.gremlin.structure.T;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * {@link PGResult} implementation for an edge returned by a user-supplied
 * query, backed by the raw element map produced by the Gremlin traversal.
 * Structural tokens ({@code T.id}, {@code T.label}, {@code Direction.OUT},
 * {@code Direction.IN}) are read from the original map; all remaining entries
 * are exposed as properties.
 */
public class QueriesEdgeResult implements PGResult {

    private final Map<?, ?> edgeMap;
    private final Map<?, ?> properties;

    public QueriesEdgeResult(Map<?, ?> input) {
        edgeMap = input;
        // Copy before stripping the structural tokens so the caller's map is not mutated.
        properties = new HashMap<>(input);
        properties.remove(T.label);
        properties.remove(T.id);
        properties.remove(Direction.OUT);
        properties.remove(Direction.IN);
    }

    @Override
    public GraphElementType getGraphElementType() {
        return GraphElementType.edges;
    }

    @Override
    public List<String> getLabel() {
        return Collections.singletonList(String.valueOf(edgeMap.get(T.label)));
    }

    @Override
    public String getId() {
        return String.valueOf(edgeMap.get(T.id));
    }

    @Override
    @SuppressWarnings("unchecked")
    public Map<String, Object> getProperties() {
        return (Map<String, Object>) properties;
    }

    @Override
    public String getFrom() {
        return String.valueOf(vertexMap(Direction.OUT).get(T.id));
    }

    @Override
    public String getTo() {
        return String.valueOf(vertexMap(Direction.IN).get(T.id));
    }

    @Override
    public List<String> getFromLabels() {
        return Collections.singletonList(String.valueOf(vertexMap(Direction.OUT).get(T.label)));
    }

    @Override
    public List<String> getToLabels() {
        return Collections.singletonList(String.valueOf(vertexMap(Direction.IN).get(T.label)));
    }

    // The OUT/IN entries hold the endpoint vertex maps; centralizes the unchecked cast.
    @SuppressWarnings("unchecked")
    private Map<String, Object> vertexMap(Direction direction) {
        return (Map<String, Object>) edgeMap.get(direction);
    }
}
4,300
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/result/ExportPGNodeResult.java
package com.amazonaws.services.neptune.propertygraph.io.result;

import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;

import java.util.List;
import java.util.Map;

/**
 * Node result backed by the projected map produced by the export traversal,
 * keyed by the token strings "~label", "~id" and "properties".
 * Edge-only accessors always fail, since a node has no endpoints.
 */
public class ExportPGNodeResult implements PGResult {

    private final Map<String, Object> nodeMap;

    public ExportPGNodeResult(Map<String, Object> input) {
        nodeMap = input;
    }

    @Override
    public GraphElementType getGraphElementType() {
        return GraphElementType.nodes;
    }

    @Override
    @SuppressWarnings("unchecked")
    public List<String> getLabel() {
        return (List<String>) nodeMap.get("~label");
    }

    @Override
    public String getId() {
        return String.valueOf(nodeMap.get("~id"));
    }

    @Override
    @SuppressWarnings("unchecked")
    public Map<String, Object> getProperties() {
        return (Map<String, Object>) nodeMap.get("properties");
    }

    @Override
    public String getFrom() {
        throw edgeOnlyAccess("getFrom()");
    }

    @Override
    public String getTo() {
        throw edgeOnlyAccess("getTo()");
    }

    @Override
    public List<String> getFromLabels() {
        throw edgeOnlyAccess("getFromLabels()");
    }

    @Override
    public List<String> getToLabels() {
        throw edgeOnlyAccess("getToLabels()");
    }

    // Every edge-only accessor fails identically; the message matches the original wording.
    private static IllegalStateException edgeOnlyAccess(String method) {
        return new IllegalStateException("Illegal attempt to " + method + " from a Node Result");
    }
}
4,301
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/result/PGEdgeResult.java
package com.amazonaws.services.neptune.propertygraph.io.result;

import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Edge result backed by the projected map produced by the export traversal,
 * keyed by the token strings "~id", "~label", "~from", "~to",
 * "~fromLabels", "~toLabels" and "properties".
 */
public class PGEdgeResult implements PGResult {

    private final Map<String, Object> edgeMap;

    public PGEdgeResult(Map<String, Object> input) {
        edgeMap = input;
    }

    @Override
    public GraphElementType getGraphElementType() {
        // Bug fix: this result represents an edge, not a node
        // (was GraphElementType.nodes, matching no other edge result in this package).
        return GraphElementType.edges;
    }

    @Override
    public List<String> getLabel() {
        List<String> labels = new ArrayList<>();
        labels.add(String.valueOf(edgeMap.get("~label")));
        return labels;
    }

    @Override
    public String getId() {
        return String.valueOf(edgeMap.get("~id"));
    }

    @Override
    @SuppressWarnings("unchecked")
    public Map<String, Object> getProperties() {
        return (Map<String, Object>) edgeMap.get("properties");
    }

    @Override
    public String getFrom() {
        return String.valueOf(edgeMap.get("~from"));
    }

    @Override
    public String getTo() {
        return String.valueOf(edgeMap.get("~to"));
    }

    @Override
    @SuppressWarnings("unchecked")
    public List<String> getFromLabels() {
        return (List<String>) edgeMap.get("~fromLabels");
    }

    @Override
    @SuppressWarnings("unchecked")
    public List<String> getToLabels() {
        return (List<String>) edgeMap.get("~toLabels");
    }
}
4,302
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/result/PGResult.java
package com.amazonaws.services.neptune.propertygraph.io.result;

import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;

import java.util.List;
import java.util.Map;

/**
 * A single property-graph element (node or edge) produced during export.
 * Edge-only accessors ({@link #getFrom()}, {@link #getTo()},
 * {@link #getFromLabels()}, {@link #getToLabels()}) may throw
 * {@link IllegalStateException} on node implementations.
 */
public interface PGResult {

    /** Whether this result is a node or an edge. */
    GraphElementType getGraphElementType();

    /** The element's label(s). */
    List<String> getLabel();

    /** The element's id, rendered as a string. */
    String getId();

    /** Non-structural properties of the element. */
    Map<String, Object> getProperties();

    /** Id of the source vertex (edges only). */
    String getFrom();

    /** Id of the target vertex (edges only). */
    String getTo();

    /** Label(s) of the source vertex (edges only). */
    List<String> getFromLabels();

    /** Label(s) of the target vertex (edges only). */
    List<String> getToLabels();
}
4,303
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/io/result/QueriesNodeResult.java
package com.amazonaws.services.neptune.propertygraph.io.result;

import com.amazonaws.services.neptune.propertygraph.schema.GraphElementType;
import org.apache.tinkerpop.gremlin.structure.T;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * {@link PGResult} implementation for a node returned by a user-supplied
 * query, backed by the raw element map produced by the Gremlin traversal.
 * Structural tokens ({@code T.id}, {@code T.label}) are read from the
 * original map; all remaining entries are exposed as properties.
 */
public class QueriesNodeResult implements PGResult {

    private final Map<?, ?> nodeMap;
    private final Map<?, ?> properties;

    public QueriesNodeResult(Map<?, ?> input) {
        nodeMap = input;
        // Copy before stripping the structural tokens so the caller's map is not mutated.
        properties = new HashMap<>(input);
        properties.remove(T.label);
        properties.remove(T.id);
    }

    @Override
    public GraphElementType getGraphElementType() {
        return GraphElementType.nodes;
    }

    @Override
    public List<String> getLabel() {
        return Collections.singletonList(String.valueOf(nodeMap.get(T.label)));
    }

    @Override
    public String getId() {
        return String.valueOf(nodeMap.get(T.id));
    }

    @Override
    @SuppressWarnings("unchecked")
    public Map<String, Object> getProperties() {
        return (Map<String, Object>) properties;
    }

    @Override
    public String getFrom() {
        throw new IllegalStateException("Illegal attempt to getFrom() from a Node Result");
    }

    @Override
    public String getTo() {
        throw new IllegalStateException("Illegal attempt to getTo() from a Node Result");
    }

    @Override
    public List<String> getFromLabels() {
        throw new IllegalStateException("Illegal attempt to getFromLabels() from a Node Result");
    }

    @Override
    public List<String> getToLabels() {
        throw new IllegalStateException("Illegal attempt to getToLabels() from a Node Result");
    }
}
4,304
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/GraphSchema.java
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.propertygraph.schema;

import com.amazonaws.services.neptune.propertygraph.io.Jsonizable;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

/**
 * Schema of an exported property graph: one {@link GraphElementSchemas}
 * per element type (nodes, edges). Serializable to and from JSON.
 */
public class GraphSchema implements Jsonizable<Boolean> {

    /**
     * Rebuilds a schema from JSON; element types whose array node is absent
     * are simply skipped.
     */
    public static GraphSchema fromJson(JsonNode json) {
        Map<GraphElementType, GraphElementSchemas> graphElementsSchemas = new HashMap<>();
        for (GraphElementType graphElementType : GraphElementType.values()) {
            JsonNode node = json.path(graphElementType.name());
            if (!node.isMissingNode() && node.isArray()) {
                graphElementsSchemas.put(graphElementType, GraphElementSchemas.fromJson((ArrayNode) node));
            }
        }
        return new GraphSchema(graphElementsSchemas);
    }

    private final Map<GraphElementType, GraphElementSchemas> graphElementsSchemas;

    public GraphSchema() {
        this(new HashMap<>());
    }

    public GraphSchema(Map<GraphElementType, GraphElementSchemas> graphElementsSchemas) {
        this.graphElementsSchemas = graphElementsSchemas;
    }

    /** Folds one element's properties into the schema for its element type. */
    public void update(GraphElementType graphElementType, Map<?, Object> properties, boolean allowStructuralElements) {
        graphElementSchemasFor(graphElementType).update(properties, allowStructuralElements);
    }

    public GraphElementSchemas copyOfGraphElementSchemasFor(GraphElementType graphElementType) {
        return graphElementSchemasFor(graphElementType).createCopy();
    }

    /**
     * Returns the (live) schemas for the given element type, creating an empty
     * entry on first access.
     */
    public GraphElementSchemas graphElementSchemasFor(GraphElementType graphElementType) {
        // computeIfAbsent replaces the original containsKey/put/get sequence.
        return graphElementsSchemas.computeIfAbsent(graphElementType, t -> new GraphElementSchemas());
    }

    public Collection<GraphElementSchemas> graphElementSchemas() {
        return graphElementsSchemas.values();
    }

    public boolean isEmpty() {
        return graphElementsSchemas.isEmpty();
    }

    public boolean hasNodeSchemas() {
        return graphElementsSchemas.containsKey(GraphElementType.nodes);
    }

    public boolean hasEdgeSchemas() {
        return graphElementsSchemas.containsKey(GraphElementType.edges);
    }

    @Override
    public JsonNode toJson(Boolean includeFilenames) {
        ObjectNode json = JsonNodeFactory.instance.objectNode();
        for (Map.Entry<GraphElementType, GraphElementSchemas> entry : graphElementsSchemas.entrySet()) {
            String key = entry.getKey().name();
            ArrayNode arrayNode = entry.getValue().toJson(includeFilenames);
            json.set(key, arrayNode);
        }
        return json;
    }
}
4,305
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/GraphElementType.java
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.propertygraph.schema;

import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.propertygraph.EdgesClient;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.GraphClient;
import com.amazonaws.services.neptune.propertygraph.NodesClient;
import com.amazonaws.services.neptune.propertygraph.io.EdgesWriterFactory;
import com.amazonaws.services.neptune.propertygraph.io.NodesWriterFactory;
import com.amazonaws.services.neptune.propertygraph.io.WriterFactory;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;

import java.util.Arrays;
import java.util.Collection;

/**
 * The two kinds of property-graph element handled by the export — nodes and
 * edges. Each constant supplies its structural token names and factories for
 * the matching graph client and writer factory.
 */
public enum GraphElementType {
    nodes {
        // Token names for a node element.
        @Override
        public Collection<String> tokenNames() {
            return Arrays.asList("~id", "~label");
        }

        @Override
        public GraphClient<? extends PGResult> graphClient(GraphTraversalSource g, boolean tokensOnly, ExportStats stats, FeatureToggles featureToggles) {
            return new NodesClient(g, tokensOnly, stats, featureToggles);
        }

        @Override
        public WriterFactory<? extends PGResult> writerFactory() {
            return new NodesWriterFactory();
        }
    },
    edges {
        // Token names for an edge element: edges additionally carry endpoint tokens.
        @Override
        public Collection<String> tokenNames() {
            return Arrays.asList("~id", "~label", "~from", "~to");
        }

        @Override
        public GraphClient<? extends PGResult> graphClient(GraphTraversalSource g, boolean tokensOnly, ExportStats stats, FeatureToggles featureToggles) {
            return new EdgesClient(g, tokensOnly, stats, featureToggles);
        }

        @Override
        public WriterFactory<? extends PGResult> writerFactory() {
            return new EdgesWriterFactory();
        }
    };

    /** Names of the structural tokens ("~id" etc.) for this element type. */
    public abstract Collection<String> tokenNames();

    /** Creates the client used to read elements of this type from the graph. */
    public abstract GraphClient<? extends PGResult> graphClient(GraphTraversalSource g, boolean tokensOnly, ExportStats stats, FeatureToggles featureToggles);

    /** Creates the factory for writers that serialize elements of this type. */
    public abstract WriterFactory<? extends PGResult> writerFactory();
}
4,306
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/PropertySchemaStats.java
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.propertygraph.schema;

import java.util.EnumMap;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Accumulates per-property statistics while scanning graph elements:
 * cardinality bounds, observation and value counts, and a histogram of
 * observed data types. A locked instance only updates its observation count;
 * cardinality bounds and data-type counts stay frozen.
 */
public class PropertySchemaStats {

    private final Object property;
    private final boolean lock;
    private int minCardinality;
    private int maxCardinality;
    private long observationCount;
    private long numberValuesCount;
    private final EnumMap<DataType, Integer> dataTypeCounts;

    public PropertySchemaStats(Object property) {
        // -1 cardinality bounds mean "no observations recorded yet".
        this(property, -1, -1, 0, 0, new EnumMap<>(DataType.class), false);
    }

    public PropertySchemaStats(Object property,
                               int minCardinality,
                               int maxCardinality,
                               long observationCount,
                               long numberValuesCount,
                               EnumMap<DataType, Integer> dataTypeCounts,
                               boolean lock) {
        this.property = property;
        this.minCardinality = minCardinality;
        this.maxCardinality = maxCardinality;
        this.observationCount = observationCount;
        this.numberValuesCount = numberValuesCount;
        this.dataTypeCounts = dataTypeCounts;
        this.lock = lock;
    }

    /**
     * Records one observation. Cardinality bounds, value count and data-type
     * counts are only updated when the stats are not locked.
     */
    public void recordObservation(PropertySchema.PropertyValueMetadata propertyValueMetadata) {
        observationCount++;
        if (!lock) {
            int size = propertyValueMetadata.size();
            if (minCardinality < 0) {
                // First observation initializes both bounds.
                minCardinality = size;
                maxCardinality = size;
            }
            maxCardinality = Math.max(maxCardinality, size);
            minCardinality = Math.min(minCardinality, size);
            numberValuesCount += size;
            propertyValueMetadata.addTo(dataTypeCounts);
        }
    }

    public Object property() {
        return property;
    }

    public long observationCount() {
        return observationCount;
    }

    public long numberValuesCount() {
        return numberValuesCount;
    }

    public int minCardinality() {
        return minCardinality;
    }

    public int maxCardinality() {
        return maxCardinality;
    }

    public boolean isUniformCardinality() {
        return minCardinality == maxCardinality;
    }

    public EnumMap<DataType, Integer> dataTypeCounts() {
        return dataTypeCounts;
    }

    /**
     * Combines these stats with {@code other}, returning a new unlocked
     * instance keyed by this instance's property.
     */
    public PropertySchemaStats union(PropertySchemaStats other) {
        EnumMap<DataType, Integer> combinedCounts = new EnumMap<>(DataType.class);
        combinedCounts.putAll(dataTypeCounts);
        // Map.merge replaces the original containsKey/get/put sequence.
        other.dataTypeCounts.forEach((dataType, count) -> combinedCounts.merge(dataType, count, Integer::sum));
        return new PropertySchemaStats(
                property,
                Math.min(minCardinality, other.minCardinality()),
                Math.max(maxCardinality, other.maxCardinality()),
                observationCount + other.observationCount(),
                numberValuesCount + other.numberValuesCount(),
                combinedCounts,
                false);
    }

    /** Deep-copies the data-type histogram; the copy is unlocked. */
    public PropertySchemaStats createCopy() {
        EnumMap<DataType, Integer> newDataTypeCounts = new EnumMap<>(DataType.class);
        newDataTypeCounts.putAll(dataTypeCounts);
        return new PropertySchemaStats(property, minCardinality, maxCardinality, observationCount, numberValuesCount, newDataTypeCounts, false);
    }

    /**
     * Returns a locked copy with the observation count reset to zero.
     * NOTE(review): shares the live dataTypeCounts map rather than copying it —
     * presumably safe because the copy is locked and never mutates it; confirm.
     */
    public PropertySchemaStats createLockedCopyForFreshObservations() {
        return new PropertySchemaStats(
                property,
                minCardinality,
                maxCardinality,
                0,
                numberValuesCount,
                dataTypeCounts,
                true
        );
    }

    @Override
    public String toString() {
        String s = dataTypeCounts.entrySet().stream().
                map(e -> e.getKey().name() + ":" + e.getValue()).
                collect(Collectors.joining(","));
        return property + " {" +
                "propertyCount=" + observationCount +
                ", minCardinality=" + minCardinality +
                ", maxCardinality=" + maxCardinality +
                ", recordCount=" + numberValuesCount +
                ", dataTypeCounts=[" + s + "]" +
                "}";
    }
}
4,307
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/MasterLabelSchema.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.propertygraph.schema;

import java.util.Collection;
import java.util.stream.Collectors;

/**
 * Pairs a consolidated label schema with the per-file schemas it was
 * assembled from.
 */
public class MasterLabelSchema {

    private final LabelSchema schema;
    private final Collection<FileSpecificLabelSchema> fileSchemas;

    public MasterLabelSchema(LabelSchema labelSchema,
                             Collection<FileSpecificLabelSchema> fileSpecificLabelSchemas) {
        this.schema = labelSchema;
        this.fileSchemas = fileSpecificLabelSchemas;
    }

    /** The consolidated schema for this label. */
    public LabelSchema labelSchema() {
        return schema;
    }

    /** The per-file schemas that contributed to the consolidated schema. */
    public Collection<FileSpecificLabelSchema> fileSpecificLabelSchemas() {
        return fileSchemas;
    }

    /** Output ids of every contributing file schema. */
    public Collection<String> outputIds() {
        return fileSchemas.stream()
                .map(s -> s.outputId())
                .collect(Collectors.toList());
    }
}
4,308
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/MasterLabelSchemas.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.propertygraph.schema;

import com.amazonaws.services.neptune.propertygraph.Label;

import java.util.Collection;
import java.util.Map;

/**
 * All master label schemas for one element type (nodes or edges),
 * indexed by label.
 */
public class MasterLabelSchemas {

    private final Map<Label, MasterLabelSchema> schemasByLabel;
    private final GraphElementType elementType;

    public MasterLabelSchemas(Map<Label, MasterLabelSchema> masterLabelSchemas,
                              GraphElementType graphElementType) {
        this.schemasByLabel = masterLabelSchemas;
        this.elementType = graphElementType;
    }

    /** All master schemas, one per label. */
    public Collection<MasterLabelSchema> schemas() {
        return schemasByLabel.values();
    }

    /** The element type these schemas belong to. */
    public GraphElementType graphElementType() {
        return elementType;
    }

    /** Flattens the master schemas into a single GraphElementSchemas instance. */
    public GraphElementSchemas toGraphElementSchemas() {
        GraphElementSchemas result = new GraphElementSchemas();
        schemasByLabel.values().forEach(
                master -> result.addLabelSchema(master.labelSchema(), master.outputIds()));
        return result;
    }
}
4,309
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/DataType.java
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.propertygraph.schema;

import com.amazonaws.services.neptune.propertygraph.io.CsvPrinterOptions;
import com.fasterxml.jackson.core.JsonGenerator;
import org.apache.commons.lang.StringUtils;
import java.io.IOException;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.Collection;
import java.util.stream.Collectors;

/**
 * Data types inferred for property values, each knowing how to describe,
 * convert, compare, and print values of that type (to CSV strings and JSON).
 *
 * NOTE: constant names shadow java.lang types, so method bodies must use
 * fully-qualified names such as java.lang.Boolean. Also, getBroadestType
 * relies on ordinal() — i.e. on the declaration order
 * None, Boolean, Byte, Short, Integer, Long, Float, Double, String, Date —
 * for numeric widening, so do not reorder the constants.
 */
public enum DataType {

    /** Placeholder used before any value has been observed. */
    None {
        @Override
        public String typeDescription() { return ""; }

        @Override
        public boolean isNumeric() { return false; }

        // Pass-through: no conversion is meaningful for an unknown type.
        @Override
        public Object convert(Object value) { return value; }

        // Always reports "less than"; violates the comparator contract, but
        // None values have no meaningful ordering.
        @Override
        public int compare(Object v1, Object v2) { return -1; }
    },
    Boolean {
        @Override
        public String typeDescription() { return ":bool"; }

        @Override
        public void printTo(JsonGenerator generator, Object value) throws IOException {
            generator.writeBoolean((java.lang.Boolean) Boolean.convert(value));
        }

        @Override
        public void printTo(JsonGenerator generator, String key, Object value) throws IOException {
            generator.writeBooleanField(key, (java.lang.Boolean) Boolean.convert(value));
        }

        @Override
        public boolean isNumeric() { return false; }

        @Override
        public Object convert(Object value) {
            return java.lang.Boolean.parseBoolean(java.lang.String.valueOf(value));
        }

        @Override
        public int compare(Object v1, Object v2) {
            return java.lang.Boolean.compare((boolean) v1, (boolean) v2);
        }
    },
    // Numeric constants below inherit the default typeDescription()
    // (":" + lowercase name) unless they override it.
    Byte {
        @Override
        public void printTo(JsonGenerator generator, Object value) throws IOException {
            generator.writeNumber((java.lang.Byte) Byte.convert(value));
        }

        @Override
        public void printTo(JsonGenerator generator, String key, Object value) throws IOException {
            generator.writeNumberField(key, (java.lang.Byte) Byte.convert(value));
        }

        @Override
        public boolean isNumeric() { return true; }

        @Override
        public Object convert(Object value) {
            return java.lang.Byte.parseByte(java.lang.String.valueOf(value));
        }

        // (Byte) here resolves to java.lang.Byte — the enum constant cannot be
        // used as a type — so this compares via unboxing.
        @Override
        public int compare(Object v1, Object v2) {
            return java.lang.Byte.compare((Byte) v1, (Byte) v2);
        }
    },
    Short {
        @Override
        public void printTo(JsonGenerator generator, Object value) throws IOException {
            generator.writeNumber((java.lang.Short) Short.convert(value));
        }

        @Override
        public void printTo(JsonGenerator generator, String key, Object value) throws IOException {
            generator.writeNumberField(key, (java.lang.Short) Short.convert(value));
        }

        @Override
        public boolean isNumeric() { return true; }

        @Override
        public Object convert(Object value) {
            return java.lang.Short.parseShort(java.lang.String.valueOf(value));
        }

        @Override
        public int compare(Object v1, Object v2) {
            return java.lang.Short.compare((short) v1, (short) v2);
        }
    },
    Integer {
        @Override
        public String typeDescription() { return ":int"; }

        // Unlike the other numeric constants, prints the raw value without
        // routing through convert(); assumes the value is already an int.
        @Override
        public void printTo(JsonGenerator generator, Object value) throws IOException {
            generator.writeNumber((int) value);
        }

        @Override
        public void printTo(JsonGenerator generator, String key, Object value) throws IOException {
            generator.writeNumberField(key, (int) value);
        }

        @Override
        public boolean isNumeric() { return true; }

        @Override
        public Object convert(Object value) {
            return java.lang.Integer.parseInt(java.lang.String.valueOf(value));
        }

        @Override
        public int compare(Object v1, Object v2) {
            return java.lang.Integer.compare((int) v1, (int) v2);
        }
    },
    Long {
        @Override
        public void printTo(JsonGenerator generator, Object value) throws IOException {
            generator.writeNumber((java.lang.Long) Long.convert(value));
        }

        @Override
        public void printTo(JsonGenerator generator, String key, Object value) throws IOException {
            generator.writeNumberField(key, (java.lang.Long) Long.convert(value));
        }

        @Override
        public boolean isNumeric() { return true; }

        @Override
        public Object convert(Object value) {
            return java.lang.Long.parseLong(java.lang.String.valueOf(value));
        }

        @Override
        public int compare(Object v1, Object v2) {
            return java.lang.Long.compare((long) v1, (long) v2);
        }
    },
    Float {
        @Override
        public void printTo(JsonGenerator generator, Object value) throws IOException {
            generator.writeNumber((java.lang.Float) Float.convert(value));
        }

        @Override
        public void printTo(JsonGenerator generator, String key, Object value) throws IOException {
            generator.writeNumberField(key, (java.lang.Float) Float.convert(value));
        }

        @Override
        public boolean isNumeric() { return true; }

        @Override
        public Object convert(Object value) {
            return java.lang.Float.parseFloat(java.lang.String.valueOf(value));
        }

        @Override
        public int compare(Object v1, Object v2) {
            return java.lang.Float.compare((float) v1, (float) v2);
        }
    },
    Double {
        @Override
        public void printTo(JsonGenerator generator, Object value) throws IOException {
            generator.writeNumber((java.lang.Double) Double.convert(value));
        }

        @Override
        public void printTo(JsonGenerator generator, String key, Object value) throws IOException {
            generator.writeNumberField(key, (java.lang.Double) Double.convert(value));
        }

        @Override
        public boolean isNumeric() { return true; }

        @Override
        public Object convert(Object value) {
            return java.lang.Double.parseDouble(java.lang.String.valueOf(value));
        }

        @Override
        public int compare(Object v1, Object v2) {
            return java.lang.Double.compare((double) v1, (double) v2);
        }
    },
    String {
        @Override
        public String format(Object value) { return format(value, false); }

        // CSV formatting: doubles embedded quotes, optionally escapes newlines,
        // wraps non-empty results in double quotes; empty strings print as "".
        @Override
        public String format(Object value, boolean escapeNewline) {
            java.lang.String escaped = escapeDoubleQuotes(value);
            if (escapeNewline){
                escaped = escapeNewlineChar(escaped);
            }
            if (StringUtils.isNotEmpty(escaped)) {
                return java.lang.String.format("\"%s\"", escaped);
            } else {
                return "";
            }
        }

        // Literal "\n" replacement — the return type String resolves to
        // java.lang.String despite the enclosing constant's name.
        private String escapeNewlineChar(String value) {
            return value.replace("\n", "\\n");
        }

        // Multi-value CSV cell: escapes separators and quotes in each value,
        // then joins with the configured multi-value separator inside one
        // quoted field.
        @Override
        public String formatList(Collection<?> values, CsvPrinterOptions options) {
            if (values.isEmpty()) {
                return "";
            }
            return java.lang.String.format("\"%s\"", values.stream().
                    map(v -> DataType.escapeSeparators(v, options.multiValueSeparator())).
                    map(DataType::escapeDoubleQuotes).
                    map(v -> options.escapeNewline() ? escapeNewlineChar(v) : v).
                    collect(Collectors.joining(options.multiValueSeparator())));
        }

        @Override
        public boolean isNumeric() { return false; }

        @Override
        public Object convert(Object value) {
            return java.lang.String.valueOf(value);
        }

        @Override
        public int compare(Object v1, Object v2) {
            return java.lang.String.valueOf(v1).compareTo(java.lang.String.valueOf(v2));
        }
    },
    Date {
        @Override
        public String format(Object value) { return format(value, false); }

        // Formats java.util.Date values as ISO-8601 instants; any other type
        // falls back to toString() via the ClassCastException handler.
        @Override
        public String format(Object value, boolean escapeNewline) {
            try {
                java.util.Date date = (java.util.Date) value;
                return DateTimeFormatter.ISO_INSTANT.format(date.toInstant());
            } catch (ClassCastException e) {
                return value.toString();
            }
        }

        @Override
        public void printTo(JsonGenerator generator, Object value) throws IOException {
            generator.writeString(format(value));
        }

        @Override
        public void printTo(JsonGenerator generator, String key, Object value) throws IOException {
            generator.writeStringField(key, format(value));
        }

        @Override
        public void printAsStringTo(JsonGenerator generator, String key, Object value) throws IOException {
            generator.writeStringField(key, format(value));
        }

        @Override
        public boolean isNumeric() { return false; }

        // Accepts java.util.Date (and subclasses) as-is; otherwise parses the
        // value's string form as an ISO instant.
        @Override
        public Object convert(Object value) {
            if (java.util.Date.class.isAssignableFrom(value.getClass())) {
                return value;
            }
            Instant instant = Instant.parse(value.toString());
            return new java.util.Date(instant.toEpochMilli());
        }

        @Override
        public int compare(Object v1, Object v2) {
            return ((java.util.Date) v1).compareTo((java.util.Date) v2);
        }
    };

    /**
     * Maps a Java class to a DataType by simple name; classes without a
     * matching constant (no IllegalArgumentException match) become String.
     */
    public static DataType dataTypeFor(Class<?> cls) {
        String name = cls.getSimpleName();
        try {
            return DataType.valueOf(name);
        } catch (IllegalArgumentException e) {
            return DataType.String;
        }
    }

    /**
     * Widens two observed types to one that can represent both: identical
     * types stay; None yields to the other; Boolean mixed with anything, or
     * anything mixed with String, widens to String; otherwise the constant
     * with the higher ordinal (the wider numeric type) wins.
     */
    public static DataType getBroadestType(DataType oldType, DataType newType) {
        if (oldType == newType) {
            return newType;
        } else if (oldType == None) {
            return newType;
        } else if (oldType == Boolean) {
            return String;
        } else if (oldType == String || newType == String) {
            return String;
        } else {
            if (newType.ordinal() > oldType.ordinal()) {
                return newType;
            } else {
                return oldType;
            }
        }
    }

    /**
     * Escapes occurrences of the separator with a backslash. The first
     * replace un-escapes any already-escaped separators so they are not
     * double-escaped by the second replace.
     */
    public static String escapeSeparators(Object value, String separator) {
        if (separator.isEmpty()) {
            return value.toString();
        }
        String temp = value.toString().replace("\\" + separator, separator);
        return temp.replace(separator, "\\" + separator);
    }

    /** CSV-style quote escaping: each double quote becomes two. */
    public static String escapeDoubleQuotes(Object value) {
        return value.toString().replace("\"", "\"\"");
    }

    /** Default type suffix, e.g. ":long"; overridden by None, Boolean, Integer. */
    public String typeDescription() {
        return java.lang.String.format(":%s", name().toLowerCase());
    }

    public String format(Object value) {
        return value.toString();
    }

    public String format(Object value, boolean escapeNewline) {
        return value.toString();
    }

    public void printTo(JsonGenerator generator, Object value) throws IOException {
        generator.writeString(value.toString());
    }

    public void printTo(JsonGenerator generator, String key, Object value) throws IOException {
        generator.writeStringField(key, value.toString());
    }

    public void printAsStringTo(JsonGenerator generator, String key, Object value) throws IOException {
        generator.writeStringField(key, value.toString());
    }

    /** Default list formatting: format each value and join with the separator. */
    public String formatList(Collection<?> values, CsvPrinterOptions options) {
        return values.stream().map(v -> format(v, options.escapeNewline())).collect(Collectors.joining(options.multiValueSeparator()));
    }

    public abstract boolean isNumeric();

    public abstract Object convert(Object value);

    public abstract int compare(Object v1, Object v2);
}
4,310
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/GraphElementSchemas.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.propertygraph.schema;

import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.tinkerpop.gremlin.structure.T;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Registry of {@link LabelSchema}s keyed by {@link Label}, together with the output file ids
 * associated with each label. Supports round-tripping to/from a JSON representation via
 * {@link #fromJson(ArrayNode)} and {@link #toJson()}.
 */
public class GraphElementSchemas {

    /**
     * Rebuilds a {@code GraphElementSchemas} from the JSON produced by {@link #toJson(boolean)}.
     * Property entries may be either objects (with dataType/isMultiValue/isNullable/allTypes
     * fields) or plain strings (in which case a default, non-nullable, untyped schema is used).
     */
    public static GraphElementSchemas fromJson(ArrayNode arrayNode) {
        GraphElementSchemas graphElementSchemas = new GraphElementSchemas();
        for (JsonNode node : arrayNode) {
            Label label = Label.fromJson(node.path("label"));
            Collection<String> filenames = new ArrayList<>();
            if (node.has("files")) {
                ArrayNode filenamesArray = (ArrayNode) node.path("files");
                for (JsonNode jsonNode : filenamesArray) {
                    filenames.add(jsonNode.textValue());
                }
            }
            graphElementSchemas.addLabelSchema(new LabelSchema(label), filenames);
            if (node.has("properties")) {
                ArrayNode propertiesArray = (ArrayNode) node.path("properties");
                for (JsonNode propertyNode : propertiesArray) {
                    if (propertyNode.isObject()) {
                        String key = propertyNode.path("property").textValue();
                        // Missing dataType means "not yet inferred" (DataType.None)
                        DataType dataType = propertyNode.has("dataType") ?
                                Enum.valueOf(DataType.class, propertyNode.path("dataType").textValue()) :
                                DataType.None;
                        boolean isMultiValue = propertyNode.has("isMultiValue") &&
                                propertyNode.path("isMultiValue").booleanValue();
                        boolean isNullable = propertyNode.has("isNullable") &&
                                propertyNode.path("isNullable").booleanValue();
                        EnumSet<DataType> allTypes = EnumSet.noneOf(DataType.class);
                        if (propertyNode.has("allTypes") ){
                            ArrayNode allTypesNode = (ArrayNode) propertyNode.path("allTypes");
                            for (JsonNode jsonNode : allTypesNode) {
                                allTypes.add(DataType.valueOf(jsonNode.textValue()));
                            }
                        }
                        graphElementSchemas.getSchemaFor(label).put(
                                key,
                                new PropertySchema(key, isNullable, dataType, isMultiValue, allTypes));
                    } else {
                        // Legacy/short form: a bare property name with default schema attributes
                        String property = propertyNode.textValue();
                        graphElementSchemas.getSchemaFor(label).put(
                                property,
                                new PropertySchema(property, false, DataType.None, false, EnumSet.noneOf(DataType.class)));
                    }
                }
            }
        }
        return graphElementSchemas;
    }

    // Label -> (schema + output file ids) pairs
    private final Map<Label, LabelSchemaContainer> labelSchemas = new HashMap<>();

    /** Registers a label schema with no associated output files. */
    public void addLabelSchema(LabelSchema labelSchema) {
        addLabelSchema(labelSchema, Collections.emptyList());
    }

    /** Registers a label schema; replaces any existing entry for the same label. */
    public void addLabelSchema(LabelSchema labelSchema, Collection<String> outputIds) {
        labelSchemas.put(labelSchema.label(), new LabelSchemaContainer(labelSchema, outputIds));
    }

    /** Returns all registered label schemas (no particular order — backing map is a HashMap). */
    public Collection<LabelSchema> labelSchemas() {
        return labelSchemas.values().stream().map(LabelSchemaContainer::labelSchema).collect(Collectors.toList());
    }

    /**
     * Returns the schema for the given label, creating and registering an empty one
     * on first access.
     */
    public LabelSchema getSchemaFor(Label label) {
        if (!labelSchemas.containsKey(label)) {
            addLabelSchema(new LabelSchema(label));
        }
        return labelSchemas.get(label).labelSchema();
    }

    /** Returns the output file ids for the label, or an empty collection if the label is unknown. */
    public Collection<String> getOutputIdsFor(Label label) {
        if (!labelSchemas.containsKey(label)) {
            return Collections.emptyList();
        }
        return labelSchemas.get(label).outputIds();
    }

    public boolean hasSchemaFor(Label label) {
        return labelSchemas.containsKey(label);
    }

    /**
     * Updates the schema for the label found under the {@code T.label} key of {@code properties}.
     * The label value may be a single value or a List of label values.
     *
     * @param allowStructuralElements if true, structural tokens (T.label, T.id, ...) are also
     *                                recorded as properties
     */
    public void update(Map<?, ?> properties, boolean allowStructuralElements) {
        Object value = properties.get(T.label);
        Label label;
        if (List.class.isAssignableFrom(value.getClass())){
            @SuppressWarnings("unchecked")
            List<String> values = (List<String>) value;
            label = new Label(values);
        } else {
            label = new Label(String.valueOf(value));
        }
        update(label, properties, allowStructuralElements);
    }

    /**
     * Updates the label's schema from one element's property map: properties absent from this
     * element become nullable, and each present property's type info is widened via
     * {@link PropertySchema#accept(Object, boolean)}.
     */
    public void update(Label label, Map<?, ?> properties, boolean allowStructuralElements) {
        LabelSchema labelSchema = getSchemaFor(label);

        // A previously seen property missing from this element must be nullable
        for (PropertySchema propertySchema : labelSchema.propertySchemas()) {
            if (!properties.containsKey(propertySchema.property())) {
                propertySchema.makeNullable();
            }
        }

        for (Map.Entry<?, ?> entry : properties.entrySet()) {
            Object property = entry.getKey();
            if (allowStructuralElements || !(isToken(property))) {
                if (!labelSchema.containsProperty(property)) {
                    labelSchema.put(property, new PropertySchema(property));
                }
                labelSchema.getPropertySchema(property).accept(entry.getValue(), true);
            }
        }
    }

    public Collection<Label> labels() {
        return labelSchemas.keySet();
    }

    // TinkerPop structural tokens are not user properties
    private boolean isToken(Object key) {
        return key.equals(T.label) || key.equals(T.id) || key.equals(T.key) || key.equals(T.value);
    }

    public ArrayNode toJson() {
        return toJson(false);
    }

    /**
     * Serializes all label schemas to JSON. File ids are only emitted when
     * {@code includeFilenames} is true.
     */
    public ArrayNode toJson(boolean includeFilenames) {

        ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode();

        for (Map.Entry<Label, LabelSchemaContainer> entry : labelSchemas.entrySet()) {
            Label label = entry.getKey();

            ObjectNode labelNode = JsonNodeFactory.instance.objectNode();
            labelNode.set("label", label.toJson());

            LabelSchema labelSchema = entry.getValue().labelSchema();

            ArrayNode propertiesNode = JsonNodeFactory.instance.arrayNode();

            for (PropertySchema propertySchema : labelSchema.propertySchemas()) {
                ArrayNode allTypesNode = JsonNodeFactory.instance.arrayNode();
                for (DataType dataType : propertySchema.allTypes()) {
                    allTypesNode.add(dataType.name());
                }
                ObjectNode propertyNode = JsonNodeFactory.instance.objectNode();
                propertyNode.put("property", propertySchema.property().toString());
                propertyNode.put("dataType", propertySchema.dataType().name());
                propertyNode.put("isMultiValue", propertySchema.isMultiValue());
                propertyNode.put("isNullable", propertySchema.isNullable());
                propertyNode.set("allTypes", allTypesNode);
                propertiesNode.add(propertyNode);
            }

            labelNode.set("properties", propertiesNode);

            if (includeFilenames){
                ArrayNode filesNode = JsonNodeFactory.instance.arrayNode();
                for (String outputId : entry.getValue().outputIds()) {
                    filesNode.add(outputId);
                }
                labelNode.set("files", filesNode);
            }

            arrayNode.add(labelNode);
        }

        return arrayNode;
    }

    /** Deep copy implemented by round-tripping through JSON (note: file ids are not included). */
    public GraphElementSchemas createCopy() {
        return fromJson(toJson());
    }

    /** Pairs a label's schema with the ids of the files into which its elements were written. */
    private static class LabelSchemaContainer {
        private final LabelSchema labelSchema;
        private final Collection<String> outputIds;

        private LabelSchemaContainer(LabelSchema labelSchema, Collection<String> outputIds) {
            this.labelSchema = labelSchema;
            this.outputIds = outputIds;
        }

        public LabelSchema labelSchema() {
            return labelSchema;
        }

        public Collection<String> outputIds() {
            return outputIds;
        }
    }
}
4,311
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/CreateGraphSchemaCommand.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; public interface CreateGraphSchemaCommand { GraphSchema execute() throws Exception; }
4,312
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/ExportSpecification.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.propertygraph.schema;

import com.amazonaws.services.neptune.cluster.ConcurrencyConfig;
import com.amazonaws.services.neptune.export.FeatureToggle;
import com.amazonaws.services.neptune.export.FeatureToggles;
import com.amazonaws.services.neptune.io.Status;
import com.amazonaws.services.neptune.io.StatusOutputFormat;
import com.amazonaws.services.neptune.propertygraph.*;
import com.amazonaws.services.neptune.propertygraph.io.ExportPropertyGraphTask;
import com.amazonaws.services.neptune.propertygraph.io.GraphElementHandler;
import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphTargetConfig;
import com.amazonaws.services.neptune.propertygraph.io.result.PGResult;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;

import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

/**
 * Describes what to export for a single element type (nodes or edges): which labels,
 * which Gremlin filters, and whether only structural tokens (no properties) are exported.
 * Acts as a factory for schema-discovery operations, range factories and export tasks.
 */
public class ExportSpecification {
    private final GraphElementType graphElementType;
    private final LabelsFilter labelsFilter;
    private final GremlinFilters gremlinFilters;
    // When true, only tokens (~id, ~label, ...) are exported, so no property schema is needed
    private final boolean tokensOnly;
    private final ExportStats stats;
    private final FeatureToggles featureToggles;

    public ExportSpecification(GraphElementType graphElementType,
                               LabelsFilter labelsFilter,
                               GremlinFilters gremlinFilters,
                               ExportStats stats,
                               boolean tokensOnly,
                               FeatureToggles featureToggles) {
        this.graphElementType = graphElementType;
        this.labelsFilter = labelsFilter;
        this.gremlinFilters = gremlinFilters;
        this.tokensOnly = tokensOnly;
        this.stats = stats;
        this.featureToggles = featureToggles;
    }

    /**
     * Populates {@code graphSchema} by scanning all elements in range. No-op when
     * {@code tokensOnly} is set, because token-only exports need no property schema.
     */
    public void scan(GraphSchema graphSchema, GraphTraversalSource g) {
        if (tokensOnly) {
            return;
        }

        GraphClient<? extends PGResult> graphClient =
                graphElementType.graphClient(g, tokensOnly, stats, featureToggles);

        graphClient.queryForSchema(
                new CreateSchemaHandler(graphElementType, graphSchema),
                Range.ALL,
                labelsFilter,
                gremlinFilters);
    }

    /**
     * Populates {@code graphSchema} by sampling up to {@code sampleSize} elements per label.
     * No-op when {@code tokensOnly} is set.
     */
    public void sample(GraphSchema graphSchema, GraphTraversalSource g, long sampleSize) {
        if (tokensOnly) {
            return;
        }

        GraphClient<? extends PGResult> graphClient =
                graphElementType.graphClient(g, tokensOnly, stats, featureToggles);
        Collection<Label> labels = labelsFilter.getLabelsUsing(graphClient);

        for (Label label : labels) {
            graphClient.queryForSchema(
                    new CreateSchemaHandler(graphElementType, graphSchema),
                    new Range(0, sampleSize),
                    labelsFilter.filterFor(label),
                    gremlinFilters);
        }
    }

    /** Human-readable description, e.g. for progress messages. */
    public String description() {
        return labelsFilter.description(graphElementType.name());
    }

    public RangeFactory createRangeFactory(GraphTraversalSource g,
                                           RangeConfig rangeConfig,
                                           ConcurrencyConfig concurrencyConfig) {
        return RangeFactory.create(
                graphElementType.graphClient(g, tokensOnly, stats, featureToggles),
                labelsFilter,
                gremlinFilters,
                rangeConfig,
                concurrencyConfig);
    }

    /**
     * Creates the export task for this specification. Note: the {@code gremlinFilters}
     * parameter intentionally shadows the field — callers supply the filters to use.
     */
    public ExportPropertyGraphTask createExportTask(GraphSchema graphSchema,
                                                    GraphTraversalSource g,
                                                    PropertyGraphTargetConfig targetConfig,
                                                    GremlinFilters gremlinFilters,
                                                    RangeFactory rangeFactory,
                                                    Status status,
                                                    AtomicInteger index,
                                                    AtomicInteger fileDescriptorCount,
                                                    int maxFileDescriptorCount) {
        return new ExportPropertyGraphTask(
                graphSchema.copyOfGraphElementSchemasFor(graphElementType),
                labelsFilter,
                graphElementType.graphClient(g, tokensOnly, stats, featureToggles),
                graphElementType.writerFactory(),
                targetConfig,
                rangeFactory,
                gremlinFilters,
                status,
                index,
                fileDescriptorCount,
                maxFileDescriptorCount
        );
    }

    /**
     * Merges the per-file label schemas produced by parallel export tasks into one master
     * schema per label (union of all property schemas observed for that label), keeping
     * a record of which file-specific schemas contributed.
     */
    public MasterLabelSchemas createMasterLabelSchemas(Collection<FileSpecificLabelSchemas> fileSpecificLabelSchemasCollection) {

        Set<Label> labels = new HashSet<>();
        fileSpecificLabelSchemasCollection.forEach(s -> labels.addAll(s.labels()));

        Map<Label, MasterLabelSchema> masterLabelSchemas = new HashMap<>();

        for (Label label : labels) {

            LabelSchema masterLabelSchema = new LabelSchema(label);
            Collection<FileSpecificLabelSchema> fileSpecificLabelSchemas = new ArrayList<>();

            for (FileSpecificLabelSchemas fileSpecificLabelSchemasForTask : fileSpecificLabelSchemasCollection) {
                if (fileSpecificLabelSchemasForTask.hasSchemasForLabel(label)) {
                    // De-dupe identical LabelSchema instances before unioning
                    Set<LabelSchema> labelSchemaSet = new HashSet<>();
                    for (FileSpecificLabelSchema fileSpecificLabelSchema :
                            fileSpecificLabelSchemasForTask.fileSpecificLabelSchemasFor(label)) {
                        fileSpecificLabelSchemas.add(fileSpecificLabelSchema);
                        labelSchemaSet.add(fileSpecificLabelSchema.labelSchema());
                    }
                    for (LabelSchema labelSchema : labelSchemaSet) {
                        masterLabelSchema = masterLabelSchema.union(labelSchema);
                    }
                }
            }

            masterLabelSchemas.put(
                    label,
                    new MasterLabelSchema(masterLabelSchema, fileSpecificLabelSchemas));
        }

        return new MasterLabelSchemas(masterLabelSchemas, graphElementType);
    }

    /**
     * Splits this specification into one specification per label. Only applied to edges,
     * or when the ExportByIndividualLabels feature toggle is on; otherwise returns this
     * specification unchanged.
     */
    public Collection<ExportSpecification> splitByLabel() {
        if (graphElementType == GraphElementType.edges ||
                featureToggles.containsFeature(FeatureToggle.ExportByIndividualLabels)) {
            return labelsFilter.split().stream()
                    .map(l -> new ExportSpecification(graphElementType, l, gremlinFilters, stats, tokensOnly, featureToggles))
                    .collect(Collectors.toList());
        } else {
            return Collections.singletonList(this);
        }
    }

    public GraphElementType getGraphElementType() {
        return graphElementType;
    }

    public LabelsFilter getLabelsFilter() {
        return labelsFilter;
    }

    /** Feeds each scanned/sampled element's property map into the graph schema. */
    private static class CreateSchemaHandler implements GraphElementHandler<Map<?, Object>> {

        private final GraphElementType graphElementType;
        private final GraphSchema graphSchema;
        private final Status status;

        private CreateSchemaHandler(GraphElementType graphElementType, GraphSchema graphSchema) {
            this.graphElementType = graphElementType;
            this.graphSchema = graphSchema;
            this.status = new Status(StatusOutputFormat.Dot);
        }

        @Override
        public void handle(Map<?, Object> properties, boolean allowTokens) {
            status.update();
            graphSchema.update(graphElementType, properties, allowTokens);
        }

        @Override
        public void close() throws Exception {
            // Do nothing
        }
    }
}
4,313
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/FileSpecificLabelSchemas.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphExportFormat; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; public class FileSpecificLabelSchemas { private final Map<Label, Collection<FileSpecificLabelSchema>> fileSpecificLabelSchemas = new HashMap<>(); public void add(String outputId, PropertyGraphExportFormat format, LabelSchema labelSchema) { if (!fileSpecificLabelSchemas.containsKey(labelSchema.label())) { fileSpecificLabelSchemas.put(labelSchema.label(), new ArrayList<>()); } Collection<FileSpecificLabelSchema> schemas = fileSpecificLabelSchemas.get(labelSchema.label()); for (FileSpecificLabelSchema schema : schemas) { if (schema.outputId().equals(outputId)){ return; } } schemas.add(new FileSpecificLabelSchema(outputId, format, labelSchema)); } public Collection<Label> labels() { return fileSpecificLabelSchemas.keySet(); } public boolean hasSchemasForLabel(Label label){ return fileSpecificLabelSchemas.containsKey(label); } public Collection<FileSpecificLabelSchema> fileSpecificLabelSchemasFor(Label label){ return fileSpecificLabelSchemas.get(label); } }
4,314
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/TokensOnly.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; public enum TokensOnly { off, nodes { @Override public boolean nodeTokensOnly() { return true; } }, edges { @Override public boolean edgeTokensOnly() { return true; } }, both { @Override public boolean nodeTokensOnly() { return true; } @Override public boolean edgeTokensOnly() { return true; } }; public boolean nodeTokensOnly() { return false; } public boolean edgeTokensOnly() { return false; } }
4,315
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/LabelSchema.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.propertygraph.schema;

import com.amazonaws.services.neptune.propertygraph.Label;
import org.apache.commons.lang.StringUtils;

import java.util.*;

/**
 * Schema for one label: an ordered map of property name to {@link PropertySchema}, plus
 * parallel per-property observation stats. Both maps are LinkedHashMaps so property order
 * is preserved (important for columnar output formats) and must stay in lockstep.
 */
public class LabelSchema {

    private final Label label;
    // Insertion-ordered; keyed by the same property objects as propertySchemaStats
    private final Map<Object, PropertySchema> propertySchemas = new LinkedHashMap<>();
    private final Map<Object, PropertySchemaStats> propertySchemaStats = new LinkedHashMap<>();

    public LabelSchema(Label label) {
        this.label = label;
    }

    /** Adds a property schema with fresh, empty stats. */
    public void put(Object property, PropertySchema propertySchema) {
        put(property, propertySchema, new PropertySchemaStats(property));
    }

    // Keeps the schema and stats maps in sync; the key must match the schema's own property name
    private void put(Object property, PropertySchema propertySchema, PropertySchemaStats stats) {
        if (!property.equals(propertySchema.property())) {
            throw new IllegalStateException(String.format("Property name mismatch: %s, %s", property, propertySchema.property()));
        }
        propertySchemas.put(property, propertySchema);
        propertySchemaStats.put(property, stats);
    }

    public boolean containsProperty(Object property) {
        return propertySchemas.containsKey(property);
    }

    public PropertySchema getPropertySchema(Object property) {
        return propertySchemas.get(property);
    }

    /**
     * Records one observation of a property value in the stats. For nullable properties,
     * empty values are not counted; non-nullable properties are always counted.
     */
    public void recordObservation(PropertySchema propertySchema,
                                  Object value,
                                  PropertySchema.PropertyValueMetadata propertyValueMetadata) {
        if (propertySchema.isNullable()) {
            if (StringUtils.isNotEmpty(String.valueOf(value))) {
                propertySchemaStats.get(propertySchema.property()).recordObservation(propertyValueMetadata);
            }
        } else {
            propertySchemaStats.get(propertySchema.property()).recordObservation(propertyValueMetadata);
        }
    }

    public PropertySchemaStats getPropertySchemaStats(Object property) {
        return propertySchemaStats.get(property);
    }

    public Collection<PropertySchema> propertySchemas() {
        return propertySchemas.values();
    }

    public Collection<PropertySchemaStats> propertySchemaStats() {
        return propertySchemaStats.values();
    }

    public int propertyCount() {
        return propertySchemas.size();
    }

    public Label label() {
        return label;
    }

    /** Deep copy: label, schemas and stats are all copied. */
    public LabelSchema createCopy() {

        LabelSchema result = new LabelSchema(label.createCopy());

        for (PropertySchema schema : propertySchemas.values()) {
            Object property = schema.property();
            result.put(property, schema.createCopy(), propertySchemaStats.get(property).createCopy());
        }

        return result;
    }

    /** Replaces every stats entry with a locked copy ready for a fresh round of observations. */
    public void initStats() {
        Set<Object> keys = propertySchemaStats.keySet();
        for (Object key : keys) {
            PropertySchemaStats oldStats = this.propertySchemaStats.get(key);
            this.propertySchemaStats.put(
                    key,
                    oldStats.createLockedCopyForFreshObservations());
        }
    }

    /**
     * Returns a new LabelSchema combining this schema with {@code other}: shared properties
     * are unioned (schema and stats), properties only in {@code other} are copied in.
     * Neither input is mutated.
     */
    public LabelSchema union(LabelSchema other) {

        LabelSchema result = createCopy();

        for (PropertySchema otherSchema : other.propertySchemas()) {
            Object property = otherSchema.property();
            PropertySchemaStats otherSchemaStats = other.getPropertySchemaStats(property);
            if (result.containsProperty(property)) {
                PropertySchema oldSchema = result.getPropertySchema(property);
                PropertySchema newSchema = oldSchema.union(otherSchema);
                PropertySchemaStats oldStats = result.getPropertySchemaStats(property);
                PropertySchemaStats newStats = oldStats.union(otherSchemaStats);
                result.put(property, newSchema, newStats);
            } else {
                result.put(property, otherSchema.createCopy(), otherSchemaStats.createCopy());
            }
        }

        return result;
    }

    /**
     * True when both schemas have the same label and the same property schemas in the
     * same order (order matters — columnar files depend on it). Stats are not compared.
     */
    public boolean isSameAs(LabelSchema other) {

        if (!label().equals(other.label())) {
            return false;
        }

        if (propertySchemas().size() != other.propertySchemas().size()) {
            return false;
        }

        Iterator<PropertySchema> thisIterator = propertySchemas().iterator();
        Iterator<PropertySchema> otherIterator = other.propertySchemas().iterator();

        while (thisIterator.hasNext()) {
            PropertySchema thisPropertySchema = thisIterator.next();
            PropertySchema otherPropertySchema = otherIterator.next();
            if (!thisPropertySchema.equals(otherPropertySchema)) {
                return false;
            }
        }

        return true;
    }
}
4,316
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/PropertySchema.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.propertygraph.schema;

import java.util.*;

/**
 * Schema for a single property of a label: its (possibly inferred) data type, whether it is
 * multi-valued, whether it is nullable, and the set of all concrete types observed for it.
 * Instances are mutable: {@link #accept(Object, boolean)} widens the schema as values are seen.
 */
public class PropertySchema {

    // Property key — usually a String, but may be a TinkerPop T token (T.id, T.label, ...)
    private final Object property;
    // True when no data type was supplied up front, so types are inferred from values
    private final boolean inferDataType;
    private boolean isNullable;
    private DataType dataType;
    private boolean isMultiValue;
    // Every concrete type observed for this property (dataType is the broadest of these)
    private final EnumSet<DataType> allTypes;

    public PropertySchema(Object property) {
        this(property, false, DataType.None, false, EnumSet.noneOf(DataType.class));
    }

    public PropertySchema(Object property,
                          boolean isNullable,
                          DataType dataType,
                          boolean isMultiValue,
                          EnumSet<DataType> allTypes) {
        this.property = property;
        this.inferDataType = dataType == DataType.None;
        this.isNullable = isNullable;
        this.dataType = dataType;
        this.isMultiValue = isMultiValue;
        this.allTypes = allTypes;
    }

    public Object property() {
        return property;
    }

    /**
     * Widens this schema to accommodate {@code value}: marks it multi-value if the value
     * is a list with size != 1, and (when inferring, or when {@code updateDataType} is set)
     * broadens {@code dataType} and records the observed types.
     *
     * NOTE: if the user specified a data type in a filter but actual values cannot be cast
     * to it, the user-specified type is still respected in the output schema (config.json)
     * and CSV headers. Arguably the output schema should instead be guaranteed to cover all
     * exported values.
     *
     * @return metadata counting how many values of each type were observed in this call
     */
    public PropertyValueMetadata accept(Object value, boolean updateDataType) {

        PropertyValueMetadata propertyValueMetadata = new PropertyValueMetadata();

        if (isList(value)) {
            List<?> values = (List<?>) value;
            if (values.size() != 1) {
                isMultiValue = true;
            }
            if (inferDataType || updateDataType) {
                for (Object v : values) {
                    DataType newType = DataType.dataTypeFor(v.getClass());
                    allTypes.add(newType);
                    propertyValueMetadata.updateFor(newType);
                    dataType = DataType.getBroadestType(dataType, newType);
                }
            }
        } else {
            if (inferDataType || updateDataType) {
                DataType newType = DataType.dataTypeFor(value.getClass());
                allTypes.add(newType);
                propertyValueMetadata.updateFor(newType);
                dataType = DataType.getBroadestType(dataType, newType);
            }
        }

        return propertyValueMetadata;
    }

    public void makeNullable() {
        isNullable = true;
    }

    private boolean isList(Object value) {
        return value instanceof List<?>;
    }

    public DataType dataType() {
        return dataType;
    }

    public boolean isMultiValue() {
        return isMultiValue;
    }

    public boolean isNullable() {
        return isNullable;
    }

    public Collection<DataType> allTypes() {
        return allTypes;
    }

    /** Column header, e.g. {@code name:string} or {@code tags:string[]} for multi-value. */
    public String nameWithDataType(boolean escapeCharacters) {
        return isMultiValue ?
                String.format("%s%s[]", propertyName(property, escapeCharacters), dataType.typeDescription()) :
                String.format("%s%s", propertyName(property, escapeCharacters), dataType.typeDescription());
    }

    public String nameWithoutDataType(boolean escapeCharacters) {
        return propertyName(property, escapeCharacters);
    }

    public String nameWithDataType() {
        return nameWithDataType(false);
    }

    public String nameWithoutDataType() {
        return nameWithoutDataType(false);
    }

    // Maps TinkerPop tokens to their Neptune CSV names; optionally escapes ':' in keys
    private String propertyName(Object key, boolean escapeCharacters) {
        if (key.equals(org.apache.tinkerpop.gremlin.structure.T.label)) {
            return "~label";
        }
        if (key.equals(org.apache.tinkerpop.gremlin.structure.T.id)) {
            return "~id";
        }
        if (key.equals(org.apache.tinkerpop.gremlin.structure.T.key)) {
            return "~key";
        }
        if (key.equals(org.apache.tinkerpop.gremlin.structure.T.value)) {
            return "~value";
        }
        if (escapeCharacters) {
            return String.valueOf(key).replace(":", "\\:");
        } else {
            return String.valueOf(key);
        }
    }

    @Override
    public String toString() {
        return "PropertySchema{" +
                "property=" + property +
                ", isNullable=" + isNullable +
                ", dataType=" + dataType +
                ", isMultiValue=" + isMultiValue +
                ", allTypes=" + allTypes +
                '}';
    }

    /**
     * Returns an independent copy. The property key is stringified (matching the JSON
     * round-trip behavior in GraphElementSchemas).
     *
     * Fix: the copy now gets its own clone of {@code allTypes}. Previously the EnumSet was
     * shared by reference, so a later {@link #accept(Object, boolean)} on the copy silently
     * mutated the original's observed-type set ({@link #union(PropertySchema)} already cloned).
     */
    public PropertySchema createCopy() {
        return new PropertySchema(property.toString(), isNullable, dataType, isMultiValue, allTypes.clone());
    }

    /**
     * Returns a schema covering both this schema and {@code other}: nullable/multi-value
     * if either is, broadest data type, union of observed types.
     *
     * Fix: the fast path that returns {@code this} unchanged now also requires the two
     * {@code allTypes} sets to be equal; previously, matching dataType/nullable/multi-value
     * flags caused the other schema's observed types to be dropped.
     */
    public PropertySchema union(PropertySchema other) {
        if (other.isMultiValue() == isMultiValue &&
                other.dataType() == dataType &&
                other.isNullable() == isNullable &&
                other.allTypes.equals(allTypes)) {
            return this;
        }
        boolean newIsNullable = other.isNullable() || isNullable;
        boolean newIsMultiValue = other.isMultiValue() || isMultiValue;
        DataType newDataType = DataType.getBroadestType(dataType, other.dataType());
        EnumSet<DataType> unionAllTypes = allTypes.clone();
        unionAllTypes.addAll(other.allTypes);
        return new PropertySchema(
                property.toString(),
                newIsNullable,
                newDataType,
                newIsMultiValue,
                unionAllTypes
        );
    }

    // Note: equals/hashCode deliberately ignore allTypes (dataType already summarizes it)
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        PropertySchema schema = (PropertySchema) o;
        return isNullable == schema.isNullable &&
                isMultiValue == schema.isMultiValue &&
                property.equals(schema.property) &&
                dataType == schema.dataType;
    }

    @Override
    public int hashCode() {
        return Objects.hash(property, isNullable, dataType, isMultiValue);
    }

    /** Per-call tally of how many values of each DataType were observed. */
    public static class PropertyValueMetadata {

        private final EnumMap<DataType, Integer> dataTypeCounts = new EnumMap<DataType, Integer>(DataType.class);

        /** Total number of values observed across all types. */
        public int size(){
            int i = 0;
            for (Integer value : dataTypeCounts.values()) {
                i += value;
            }
            return i;
        }

        void updateFor(DataType dataType){
            int i = dataTypeCounts.containsKey(dataType) ? dataTypeCounts.get(dataType): 0;
            dataTypeCounts.put(dataType, i + 1);
        }

        /** Adds this tally into the supplied accumulator map. */
        public void addTo(EnumMap<DataType, Integer> m){
            for (Map.Entry<DataType, Integer> entry : dataTypeCounts.entrySet()) {
                DataType key = entry.getKey();
                int i = m.containsKey(key) ? m.get(key) : 0;
                m.put(key, i + entry.getValue());
            }
        }
    }
}
4,317
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/FileSpecificLabelSchema.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphExportFormat; public class FileSpecificLabelSchema { private final String outputId; private final PropertyGraphExportFormat format; private final LabelSchema labelSchema; public FileSpecificLabelSchema(String outputId, PropertyGraphExportFormat format, LabelSchema labelSchema) { this.outputId = outputId; this.format = format; this.labelSchema = labelSchema; } public String outputId() { return outputId; } public PropertyGraphExportFormat getFormat() { return format; } public LabelSchema labelSchema() { return labelSchema; } }
4,318
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/CreateGraphSchemaFromScan.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.util.Activity; import com.amazonaws.services.neptune.util.Timer; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import java.util.Collection; public class CreateGraphSchemaFromScan implements CreateGraphSchemaCommand { private final Collection<ExportSpecification> exportSpecifications; private final GraphTraversalSource g; public CreateGraphSchemaFromScan(Collection<ExportSpecification> exportSpecifications, GraphTraversalSource g) { this.exportSpecifications = exportSpecifications; this.g = g; } @Override public GraphSchema execute() { GraphSchema graphSchema = new GraphSchema(); for (ExportSpecification exportSpecification : exportSpecifications) { Timer.timedActivity("creating " + exportSpecification.description() + " schema from graph scan", (Activity.Runnable) () -> { System.err.println("Creating " + exportSpecification.description() + " schema"); exportSpecification.scan(graphSchema, g); }); } return graphSchema; } }
4,319
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/schema/CreateGraphSchemaFromSample.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.propertygraph.schema; import com.amazonaws.services.neptune.util.Activity; import com.amazonaws.services.neptune.util.Timer; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import java.util.Collection; public class CreateGraphSchemaFromSample implements CreateGraphSchemaCommand { private final Collection<ExportSpecification> exportSpecifications; private final GraphTraversalSource g; private final long sampleSize; public CreateGraphSchemaFromSample(Collection<ExportSpecification> exportSpecifications, GraphTraversalSource g, long sampleSize) { this.exportSpecifications = exportSpecifications; this.sampleSize = sampleSize; this.g = g; } @Override public GraphSchema execute() { GraphSchema graphSchema = new GraphSchema(); for (ExportSpecification exportSpecification : exportSpecifications) { Timer.timedActivity("creating " + exportSpecification.description() + " schema from sampling graph", (Activity.Runnable) () -> { System.err.println("Creating " + exportSpecification.description() + " schema"); exportSpecification.sample(graphSchema, g, sampleSize); }); } return graphSchema; } }
4,320
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/propertygraph/airline/NameQueriesTypeConverter.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */

package com.amazonaws.services.neptune.propertygraph.airline;

import com.amazonaws.services.neptune.propertygraph.NamedQueries;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.rvesse.airline.model.ArgumentsMetadata;
import com.github.rvesse.airline.model.OptionMetadata;
import com.github.rvesse.airline.parser.ParseState;
import com.github.rvesse.airline.types.TypeConverter;
import com.github.rvesse.airline.types.TypeConverterProvider;
import com.github.rvesse.airline.types.numerics.NumericTypeConverter;

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Airline type converter that parses a command-line value into {@link NamedQueries}.
 * Accepts either a JSON representation, or the textual form
 * {@code name=query1;query2;...} (the name defaults to "query" when no '=' is present).
 */
public class NameQueriesTypeConverter implements TypeConverter, TypeConverterProvider {

    @Override
    public Object convert(String s, Class<?> aClass, String value) {

        // First try the JSON form
        try {
            JsonNode parsed = new ObjectMapper().readTree(value);
            return NamedQueries.fromJson(parsed);
        } catch (JsonProcessingException e) {
            // Not JSON representation of queries, so continue
        }

        // Textual form: optional "name=" prefix, then semicolon-separated queries
        int separatorIndex = value.indexOf("=");

        String queryName;
        String queryText;

        if (separatorIndex < 0){
            queryName = "query";
            queryText = value;
        } else {
            queryName = value.substring(0, separatorIndex).trim();
            queryText = value.substring(separatorIndex + 1);
        }

        List<String> queries = Arrays.stream(queryText.split(";"))
                .map(String::trim)
                .collect(Collectors.toList());

        return new NamedQueries(queryName, queries);
    }

    @Override
    public void setNumericConverter(NumericTypeConverter numericTypeConverter) {
        // Do nothing
    }

    @Override
    public <T> TypeConverter getTypeConverter(OptionMetadata optionMetadata, ParseState<T> parseState) {
        return this;
    }

    @Override
    public <T> TypeConverter getTypeConverter(ArgumentsMetadata argumentsMetadata, ParseState<T> parseState) {
        return this;
    }
}
4,321
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/ProfilesConfig.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles; import java.util.Collection; public class ProfilesConfig { private final Collection<String> profiles; public ProfilesConfig(Collection<String> profiles) { this.profiles = profiles; } public boolean containsProfile(String profile){ return profiles.contains(profile); } }
4,322
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/incremental_export/IncrementalExportEventHandler.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.incremental_export;

import com.amazonaws.services.neptune.cluster.Cluster;
import com.amazonaws.services.neptune.cluster.EventId;
import com.amazonaws.services.neptune.cluster.GetLastEventId;
import com.amazonaws.services.neptune.cluster.NeptuneClusterMetadata;
import com.amazonaws.services.neptune.export.Args;
import com.amazonaws.services.neptune.export.CompletionFileWriter;
import com.amazonaws.services.neptune.export.ExportToS3NeptuneExportEventHandler;
import com.amazonaws.services.neptune.export.NeptuneExportServiceEventHandler;
import com.amazonaws.services.neptune.io.Directories;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.io.PropertyGraphExportFormat;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.amazonaws.services.neptune.rdf.io.RdfExportFormat;
import com.amazonaws.services.neptune.util.CheckedActivity;
import com.amazonaws.services.neptune.util.Timer;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.slf4j.LoggerFactory;

import java.util.concurrent.atomic.AtomicLong;

/**
 * Event handler for the incremental-export profile. Rewrites the export
 * command's format/partition options according to the incremental-export
 * {@code command} ("apply" vs. anything else), records the stream's last
 * event id once the export completes, and writes incremental-export
 * metadata (partitions, last event id) into the completion file.
 */
public class IncrementalExportEventHandler implements NeptuneExportServiceEventHandler, CompletionFileWriter {

    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(IncrementalExportEventHandler.class);

    // Captured at construction time and used to name the timestamp partition.
    private final long timestamp;
    // Populated from the stream's last event id in onExportComplete().
    private final AtomicLong commitNum = new AtomicLong(0);
    private final AtomicLong opNum = new AtomicLong(0);
    private final String exportId;
    private final String stageId;
    private final String command;

    /**
     * @param additionalParams JSON params; reads exportId, stageId and command
     *                         from the "incremental_export" node. Missing fields
     *                         yield null values ({@code JsonNode.textValue()}).
     */
    public IncrementalExportEventHandler(ObjectNode additionalParams) {
        this.timestamp = System.currentTimeMillis();
        JsonNode incrementalExport = additionalParams.path("incremental_export");
        this.exportId = incrementalExport.path("exportId").textValue();
        this.stageId = incrementalExport.path("stageId").textValue();
        this.command = incrementalExport.path("command").textValue();
        logger.info("Incremental export params: exportId: {}, stageId: {}, command: {}", exportId, stageId, command);
    }

    /**
     * Adds an "incrementalExport" node (export/stage ids, timestamp partition,
     * last stream event id) to the completion file payload.
     */
    @Override
    public void updateCompletionFile(ObjectNode completionFilePayload) {
        ArrayNode partitions = JsonNodeFactory.instance.arrayNode();
        ObjectNode partition = JsonNodeFactory.instance.objectNode();
        partition.put("name", "timestamp");
        partition.put("value", String.valueOf(timestamp));
        partitions.add(partition);

        ObjectNode lastEventId = JsonNodeFactory.instance.objectNode();
        lastEventId.put("commitNum", commitNum.get());
        lastEventId.put("opNum", opNum.get());

        ObjectNode incrementalExportNode = JsonNodeFactory.instance.objectNode();
        completionFilePayload.set("incrementalExport", incrementalExportNode);
        incrementalExportNode.put("exportId", exportId);
        incrementalExportNode.put("stageId", stageId);
        incrementalExportNode.set("partitions", partitions);
        incrementalExportNode.set("lastEventId", lastEventId);
    }

    /**
     * Replaces any user-supplied --format/--partition-directories options:
     * "apply" exports stream-formatted JSON into a timestamp partition
     * (without an export subdirectory); any other command exports CSV/N-Quads.
     */
    @Override
    public void onBeforeExport(Args args, ExportToS3NeptuneExportEventHandler.S3UploadParams s3UploadParams) {
        if (args.contains("--format")) {
            args.removeOptions("--format");
        }
        if (args.contains("--partition-directories")) {
            args.removeOptions("--partition-directories");
        }
        boolean createExportSubdirectory = true;
        // NOTE(review): command is null if absent from the params — this would NPE here; verify callers always supply it.
        if (command.equals("apply")) {
            args.addOption("--partition-directories", String.format("timestamp=%s", timestamp));
            createExportSubdirectory = false;
            if (args.contains("export-pg")) {
                args.addOption("--format", PropertyGraphExportFormat.neptuneStreamsSimpleJson.name());
            } else {
                args.addOption("--format", RdfExportFormat.neptuneStreamsSimpleJson.name());
            }
        } else {
            if (args.contains("export-pg")) {
                args.addOption("--format", PropertyGraphExportFormat.csv.name());
            } else {
                args.addOption("--format", RdfExportFormat.nquads.name());
            }
        }
        s3UploadParams.setCreateExportSubdirectory(createExportSubdirectory).setOverwriteExisting(true);
    }

    @Override
    public void onError() {
        // Do nothing
    }

    @Override
    public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster) throws Exception {
        // No graph schema available: delegate with null, which selects the SPARQL stream endpoint below.
        onExportComplete(directories, stats, cluster, null);
    }

    @Override
    public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception {
        // A null schema indicates an RDF export, so query the SPARQL stream; otherwise Gremlin.
        Timer.timedActivity("getting LastEventId from stream",
                (CheckedActivity.Runnable) () -> getLastEventIdFromStream(cluster, graphSchema == null ? "sparql" : "gremlin"));
    }

    /**
     * Fetches the last event id from the cluster's change-log stream and stores
     * its commitNum/opNum for later inclusion in the completion file. A null
     * result leaves the stored values at their defaults (0).
     */
    private void getLastEventIdFromStream(Cluster cluster, String streamEndpointType) {
        EventId eventId = new GetLastEventId(cluster.clusterMetadata(), cluster.connectionConfig(), streamEndpointType).execute();
        if (eventId != null) {
            commitNum.set(eventId.commitNum());
            opNum.set(eventId.opNum());
        }
    }
}
4,323
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/NeptuneMachineLearningExportEventHandlerV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.neptune.cluster.Cluster;
import com.amazonaws.services.neptune.export.Args;
import com.amazonaws.services.neptune.export.ExportToS3NeptuneExportEventHandler;
import com.amazonaws.services.neptune.export.NeptuneExportServiceEventHandler;
import com.amazonaws.services.neptune.io.Directories;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.PropertyName;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.PropertyGraphTrainingDataConfigWriterV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.RdfTrainingDataConfigWriter;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.io.CsvPrinterOptions;
import com.amazonaws.services.neptune.propertygraph.io.JsonPrinterOptions;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.amazonaws.services.neptune.util.CheckedActivity;
import com.amazonaws.services.neptune.util.S3ObjectInfo;
import com.amazonaws.services.neptune.util.Timer;
import com.amazonaws.services.neptune.util.TransferManagerWrapper;
import com.amazonaws.services.s3.Headers;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.SSEAlgorithm;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.Upload;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang.StringUtils;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;

import static com.amazonaws.services.neptune.export.NeptuneExportService.NEPTUNE_ML_PROFILE_NAME;

/**
 * Event handler for the neptune_ml profile (training-data config v2).
 * Before the export it rewrites the command-line args for the detected data
 * model (property graph vs. RDF); after the export it writes one training-job
 * configuration JSON file per {@link TrainingDataWriterConfigV2} and, when an
 * output S3 path is configured, uploads each file to S3.
 */
public class NeptuneMachineLearningExportEventHandlerV2 implements NeptuneExportServiceEventHandler {

    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(NeptuneMachineLearningExportEventHandlerV2.class);

    private final String outputS3Path;
    private final String s3Region;
    private final Args args;
    // RDF when the command line contains "export-rdf", otherwise PropertyGraph.
    private final NeptuneMLSourceDataModel dataModel;
    // One training-job config file is produced per element.
    private final Collection<TrainingDataWriterConfigV2> trainingJobWriterConfigCollection;
    private final Collection<String> profiles;
    private final boolean createExportSubdirectory;
    private final PrinterOptions printerOptions;
    private final boolean includeEdgeFeatures;
    private final String sseKmsKeyId;
    private final AWSCredentialsProvider s3CredentialsProvider;

    /**
     * @param additionalParams JSON params; the "neptune_ml" node (if present)
     *                         configures the training-data writers and the
     *                         optional "disableEdgeFeatures" flag.
     */
    public NeptuneMachineLearningExportEventHandlerV2(String outputS3Path,
                                                      String s3Region,
                                                      boolean createExportSubdirectory,
                                                      ObjectNode additionalParams,
                                                      Args args,
                                                      Collection<String> profiles,
                                                      String sseKmsKeyId,
                                                      AWSCredentialsProvider s3CredentialsProvider) {
        logger.info("Adding neptune_ml event handler");

        CsvPrinterOptions csvPrinterOptions = CsvPrinterOptions.builder()
                .setMultiValueSeparator(";")
                .setEscapeCsvHeaders(args.contains("--escape-csv-headers"))
                .build();
        JsonPrinterOptions jsonPrinterOptions = JsonPrinterOptions.builder()
                .setStrictCardinality(true)
                .build();

        this.outputS3Path = outputS3Path;
        this.s3Region = s3Region;
        this.createExportSubdirectory = createExportSubdirectory;
        this.args = args;
        this.dataModel = args.contains("export-rdf") ?
                NeptuneMLSourceDataModel.RDF :
                NeptuneMLSourceDataModel.PropertyGraph;
        this.trainingJobWriterConfigCollection = createTrainingJobConfigCollection(additionalParams);
        this.profiles = profiles;
        this.printerOptions = new PrinterOptions(csvPrinterOptions, jsonPrinterOptions);
        this.includeEdgeFeatures = shouldIncludeEdgeFeatures(additionalParams);
        this.sseKmsKeyId = sseKmsKeyId;
        this.s3CredentialsProvider = s3CredentialsProvider;
    }

    // Edge features are included by default; only an explicit
    // neptune_ml.disableEdgeFeatures=true turns them off.
    private boolean shouldIncludeEdgeFeatures(ObjectNode additionalParams) {
        JsonNode neptuneMlNode = additionalParams.path(NEPTUNE_ML_PROFILE_NAME);
        if (neptuneMlNode.isMissingNode()) {
            return true;
        }
        if (neptuneMlNode.has("disableEdgeFeatures") && neptuneMlNode.path("disableEdgeFeatures").asBoolean()) {
            return false;
        }
        return true;
    }

    // Parses neptune_ml config into writer configs; falls back to a single
    // default config when the node is absent.
    private Collection<TrainingDataWriterConfigV2> createTrainingJobConfigCollection(ObjectNode additionalParams) {
        JsonNode neptuneMlNode = additionalParams.path(NEPTUNE_ML_PROFILE_NAME);
        if (neptuneMlNode.isMissingNode()) {
            logger.info("No 'neptune_ml' config node in additional params so creating default training config");
            return Collections.singletonList(new TrainingDataWriterConfigV2());
        } else {
            Collection<TrainingDataWriterConfigV2> trainingJobWriterConfig =
                    TrainingDataWriterConfigV2.fromJson(neptuneMlNode, this.dataModel);
            logger.info("Training job writer config: {}", trainingJobWriterConfig);
            return trainingJobWriterConfig;
        }
    }

    /**
     * Lets the data model adjust the export args, then forces a fresh
     * timestamp-based --export-id (replacing any user-supplied value).
     */
    @Override
    public void onBeforeExport(Args args, ExportToS3NeptuneExportEventHandler.S3UploadParams s3UploadParams) {
        logger.info("ARGS: {}", args.toString());

        dataModel.updateArgsBeforeExport(args, trainingJobWriterConfigCollection);

        if (args.contains("--export-id")) {
            args.removeOptions("--export-id");
        }
        args.addOption("--export-id", new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()));
    }

    @Override
    public void onError() {
        // Do nothing
    }

    @Override
    public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster) throws Exception {
        // No schema supplied: use an empty one so the config writer still runs.
        onExportComplete(directories, stats, cluster, new GraphSchema());
    }

    @Override
    public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception {
        // Column naming in the config depends on whether type suffixes were
        // stripped from the exported CSV headers.
        PropertyName propertyName = args.contains("--exclude-type-definitions") ?
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITHOUT_DATATYPE :
                PropertyGraphTrainingDataConfigWriterV2.COLUMN_NAME_WITH_DATATYPE;

        try (TransferManagerWrapper transferManager = new TransferManagerWrapper(s3Region, s3CredentialsProvider)) {
            for (TrainingDataWriterConfigV2 trainingJobWriterConfig : trainingJobWriterConfigCollection) {
                createTrainingJobConfigurationFile(trainingJobWriterConfig,
                        directories.rootDirectory(),
                        graphSchema,
                        propertyName,
                        transferManager);
            }
        }
    }

    /**
     * Writes a single training-job configuration file into the export root.
     * For RDF, collects the relative paths of all exported data files first;
     * for property graphs, writes the config from the graph schema. The file
     * is then uploaded to S3 when an output path is configured.
     */
    private void createTrainingJobConfigurationFile(TrainingDataWriterConfigV2 trainingDataWriterConfig,
                                                    Path outputPath,
                                                    GraphSchema graphSchema,
                                                    PropertyName propertyName,
                                                    TransferManagerWrapper transferManager) throws Exception {

        File outputDirectory = outputPath.toFile();
        String filename = String.format("%s.json", trainingDataWriterConfig.name());
        File trainingJobConfigurationFile = new File(outputPath.toFile(), filename);

        try (Writer writer = new PrintWriter(trainingJobConfigurationFile)) {
            if (dataModel == NeptuneMLSourceDataModel.RDF) {
                // Gather every file under each first-level subdirectory,
                // relative to the export root.
                // NOTE(review): listFiles() returns null if the path is not a
                // readable directory — that would NPE here; confirm the export
                // layout guarantees these directories exist.
                Collection<String> filenames = new ArrayList<>();
                File[] directories = outputDirectory.listFiles(File::isDirectory);
                for (File directory : directories) {
                    File[] files = directory.listFiles(File::isFile);
                    for (File file : files) {
                        filenames.add(outputDirectory.toPath().relativize(file.toPath()).toString());
                    }
                }
                new RdfTrainingDataConfigWriter(
                        filenames,
                        createJsonGenerator(writer),
                        trainingDataWriterConfig).write();
            } else {
                new PropertyGraphTrainingDataConfigWriterV2(
                        graphSchema,
                        createJsonGenerator(writer),
                        propertyName,
                        printerOptions,
                        trainingDataWriterConfig).write(includeEdgeFeatures);
            }
        }

        if (StringUtils.isNotEmpty(outputS3Path)) {
            Timer.timedActivity("uploading training job configuration file to S3",
                    (CheckedActivity.Runnable) () -> {
                        S3ObjectInfo outputS3ObjectInfo = calculateOutputS3Path(outputDirectory);
                        uploadTrainingJobConfigurationFileToS3(
                                filename,
                                transferManager.get(),
                                trainingJobConfigurationFile,
                                outputS3ObjectInfo);
                    });
        }
    }

    /**
     * Uploads the config file to S3 (SSE-KMS metadata, profile tags) and
     * blocks until the upload completes. Re-interrupts on InterruptedException.
     */
    private void uploadTrainingJobConfigurationFileToS3(String filename,
                                                        TransferManager transferManager,
                                                        File trainingJobConfigurationFile,
                                                        S3ObjectInfo outputS3ObjectInfo) throws IOException {

        S3ObjectInfo s3ObjectInfo = outputS3ObjectInfo.withNewKeySuffix(filename);

        try (InputStream inputStream = new FileInputStream(trainingJobConfigurationFile)) {
            PutObjectRequest putObjectRequest = new PutObjectRequest(s3ObjectInfo.bucket(),
                    s3ObjectInfo.key(),
                    inputStream,
                    S3ObjectInfo.createObjectMetadata(trainingJobConfigurationFile.length(), sseKmsKeyId))
                    .withTagging(ExportToS3NeptuneExportEventHandler.createObjectTags(profiles));

            Upload upload = transferManager.upload(putObjectRequest);

            upload.waitForUploadResult();

        } catch (InterruptedException e) {
            logger.warn(e.getMessage());
            Thread.currentThread().interrupt();
        }
    }

    // The export-subdirectory name is appended to the S3 key only when the
    // export was written into its own subdirectory.
    private S3ObjectInfo calculateOutputS3Path(File outputDirectory) {
        S3ObjectInfo outputBaseS3ObjectInfo = new S3ObjectInfo(outputS3Path);

        if (createExportSubdirectory) {
            return outputBaseS3ObjectInfo.withNewKeySuffix(outputDirectory.getName());
        } else {
            return outputBaseS3ObjectInfo;
        }
    }

    // Pretty-printing generator over the supplied writer.
    private JsonGenerator createJsonGenerator(Writer writer) throws IOException {
        JsonGenerator generator = new JsonFactory().createGenerator(writer);
        generator.setPrettyPrinter(new DefaultPrettyPrinter());
        return generator;
    }
}
4,324
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/NeptuneMLSourceDataModel.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml;

import com.amazonaws.services.neptune.export.Args;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParseProperty;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.LabelConfigV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.RdfTaskTypeV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing.ParseNodeTaskTypeV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing.ParseRdfTaskType;
import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.rdf.RdfExportScope;
import com.amazonaws.services.neptune.rdf.io.RdfExportFormat;
import com.fasterxml.jackson.databind.JsonNode;

import java.util.Collection;

/**
 * Source data model for a Neptune ML export. Each constant encapsulates the
 * model-specific behavior: how export args are adjusted before the export
 * runs, the terminology used in messages (Label/Property vs. Class/Predicate),
 * and how task types and target properties are parsed from the config.
 */
public enum NeptuneMLSourceDataModel {
    PropertyGraph {
        @Override
        void updateArgsBeforeExport(Args args, Collection<TrainingDataWriterConfigV2> trainingJobWriterConfigCollection) {
            // Strip datatype suffixes from CSV column headers.
            if (!args.contains("--exclude-type-definitions")) {
                args.addFlag("--exclude-type-definitions");
            }
            // Force the edgeAndVertexLabels strategy, replacing edgeLabelsOnly if set.
            if (args.contains("--edge-label-strategy", EdgeLabelStrategy.edgeLabelsOnly.name())) {
                args.removeOptions("--edge-label-strategy");
            }
            if (!args.contains("--edge-label-strategy", EdgeLabelStrategy.edgeAndVertexLabels.name())) {
                args.addOption("--edge-label-strategy", EdgeLabelStrategy.edgeAndVertexLabels.name());
            }
            if (!args.contains("--merge-files")) {
                args.addFlag("--merge-files");
            }
            // A supplied filter/config implies the config-driven export command.
            if (args.contains("export-pg") &&
                    args.containsAny("--config", "--filter", "-c", "--config-file", "--filter-config-file")) {
                args.replace("export-pg", "export-pg-from-config");
            }
        }

        @Override
        public String nodeTypeName() {
            return "Label";
        }

        @Override
        public String nodeAttributeNameSingular() {
            return "Property";
        }

        @Override
        public String nodeAttributeNamePlural() {
            return "Properties";
        }

        @Override
        public String parseTaskType(JsonNode json, ParsingContext propertyContext, Label nodeType, String property) {
            return new ParseNodeTaskTypeV2(json, propertyContext).parseTaskType().name();
        }

        @Override
        public String parseProperty(JsonNode json, ParsingContext propertyContext, Label nodeType) {
            // Property graphs require exactly one target property.
            return new ParseProperty(json, propertyContext.withLabel(nodeType), this).parseSingleProperty();
        }

        @Override
        public String labelFields() {
            return "'node' or 'edge'";
        }

        @Override
        public boolean isRdfLinkPrediction(JsonNode json) {
            // Link prediction is an RDF-only concept.
            return false;
        }
    },
    RDF {
        @Override
        void updateArgsBeforeExport(Args args, Collection<TrainingDataWriterConfigV2> trainingJobWriterConfigCollection) {
            // RDF training data is always exported as N-Triples.
            args.removeOptions("--format");
            args.addOption("--format", RdfExportFormat.ntriples.name());

            // Only classification/regression tasks need literal (non-edge) data;
            // otherwise restrict the export scope to edges.
            boolean exportEdgesOnly = true;
            for (TrainingDataWriterConfigV2 trainingDataWriterConfigV2 : trainingJobWriterConfigCollection) {
                for (LabelConfigV2 labelConfig : trainingDataWriterConfigV2.nodeConfig().getAllClassificationSpecifications()) {
                    String taskType = labelConfig.taskType();
                    if (taskType.equals(RdfTaskTypeV2.classification.name()) || taskType.equals(RdfTaskTypeV2.regression.name())) {
                        exportEdgesOnly = false;
                    }
                }
            }
            if (!args.contains("--rdf-export-scope") && exportEdgesOnly) {
                args.addOption("--rdf-export-scope", RdfExportScope.edges.name());
            }
        }

        @Override
        public String nodeTypeName() {
            return "Class";
        }

        @Override
        public String nodeAttributeNameSingular() {
            return "Predicate";
        }

        @Override
        public String nodeAttributeNamePlural() {
            return "Predicates";
        }

        @Override
        public String parseTaskType(JsonNode json, ParsingContext propertyContext, Label nodeType, String property) {
            RdfTaskTypeV2 taskType = new ParseRdfTaskType(json, propertyContext).parseTaskType();
            // RDF task types carry extra constraints (e.g. on property/nodeType).
            taskType.validate(property, nodeType);
            return taskType.name();
        }

        @Override
        public String parseProperty(JsonNode json, ParsingContext propertyContext, Label nodeType) {
            // RDF targets may omit the property (e.g. link prediction).
            return new ParseProperty(json, propertyContext.withLabel(nodeType), this).parseNullableSingleProperty();
        }

        @Override
        public String labelFields() {
            return "'node'";
        }

        @Override
        public boolean isRdfLinkPrediction(JsonNode json) {
            return parseTaskType(json, new ParsingContext("RDF target"), null, null).equals(RdfTaskTypeV2.link_prediction.name());
        }
    };

    // Adjusts export args for this data model before the export runs.
    abstract void updateArgsBeforeExport(Args args, Collection<TrainingDataWriterConfigV2> trainingJobWriterConfigCollection);

    // Model-specific terminology used in config parsing and error messages.
    public abstract String nodeTypeName();

    public abstract String nodeAttributeNameSingular();

    public abstract String nodeAttributeNamePlural();

    // Parses and validates the training task type from the config node.
    public abstract String parseTaskType(JsonNode json, ParsingContext propertyContext, Label nodeType, String property);

    // Parses the target property from the config node.
    public abstract String parseProperty(JsonNode json, ParsingContext propertyContext, Label nodeType);

    // Valid values for a target's "type" field, used in error messages.
    public abstract String labelFields();

    public abstract boolean isRdfLinkPrediction(JsonNode json);
}
4,325
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/NeptuneMachineLearningExportEventHandlerV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.neptune.cluster.Cluster;
import com.amazonaws.services.neptune.export.Args;
import com.amazonaws.services.neptune.export.ExportToS3NeptuneExportEventHandler;
import com.amazonaws.services.neptune.export.NeptuneExportServiceEventHandler;
import com.amazonaws.services.neptune.io.Directories;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.PropertyName;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.PropertyGraphTrainingDataConfigWriterV1;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.TrainingDataWriterConfigV1;
import com.amazonaws.services.neptune.propertygraph.EdgeLabelStrategy;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.io.CsvPrinterOptions;
import com.amazonaws.services.neptune.propertygraph.io.JsonPrinterOptions;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import com.amazonaws.services.neptune.util.CheckedActivity;
import com.amazonaws.services.neptune.util.S3ObjectInfo;
import com.amazonaws.services.neptune.util.Timer;
import com.amazonaws.services.neptune.util.TransferManagerWrapper;
import com.amazonaws.services.s3.Headers;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.SSEAlgorithm;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.Upload;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang.StringUtils;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.nio.file.Path;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;

import static com.amazonaws.services.neptune.export.NeptuneExportService.NEPTUNE_ML_PROFILE_NAME;

/**
 * Event handler for the neptune_ml profile (training-data config v1).
 * Before a property-graph export it rewrites the command-line args for ML
 * training data; after the export it writes one training-job configuration
 * JSON file per {@link TrainingDataWriterConfigV1} and, when an output S3
 * path is configured, uploads each file to S3.
 */
public class NeptuneMachineLearningExportEventHandlerV1 implements NeptuneExportServiceEventHandler {

    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(NeptuneMachineLearningExportEventHandlerV1.class);

    private final String outputS3Path;
    private final String s3Region;
    private final Args args;
    // One training-job config file is produced per element.
    private final Collection<TrainingDataWriterConfigV1> trainingJobWriterConfigCollection;
    private final Collection<String> profiles;
    private final boolean createExportSubdirectory;
    private final PrinterOptions printerOptions;
    private final String sseKmsKeyId;
    private final AWSCredentialsProvider s3CredentialsProvider;

    /**
     * @param additionalParams JSON params; the "neptune_ml" node (if present)
     *                         configures the training-data writers.
     */
    public NeptuneMachineLearningExportEventHandlerV1(String outputS3Path,
                                                      String s3Region,
                                                      boolean createExportSubdirectory,
                                                      ObjectNode additionalParams,
                                                      Args args,
                                                      Collection<String> profiles,
                                                      String sseKmsKeyId,
                                                      AWSCredentialsProvider s3CredentialsProvider) {
        logger.info("Adding neptune_ml event handler");

        CsvPrinterOptions csvPrinterOptions = CsvPrinterOptions.builder()
                .setMultiValueSeparator(";")
                .setEscapeCsvHeaders(args.contains("--escape-csv-headers"))
                .build();
        JsonPrinterOptions jsonPrinterOptions = JsonPrinterOptions.builder()
                .setStrictCardinality(true)
                .build();

        this.outputS3Path = outputS3Path;
        this.s3Region = s3Region;
        this.createExportSubdirectory = createExportSubdirectory;
        this.args = args;
        this.trainingJobWriterConfigCollection = createTrainingJobConfigCollection(additionalParams);
        this.profiles = profiles;
        this.printerOptions = new PrinterOptions(csvPrinterOptions, jsonPrinterOptions);
        this.sseKmsKeyId = sseKmsKeyId;
        this.s3CredentialsProvider = s3CredentialsProvider;
    }

    // Parses neptune_ml config into writer configs; falls back to a single
    // default config when the node is absent.
    private Collection<TrainingDataWriterConfigV1> createTrainingJobConfigCollection(ObjectNode additionalParams) {
        JsonNode neptuneMlNode = additionalParams.path(NEPTUNE_ML_PROFILE_NAME);
        if (neptuneMlNode.isMissingNode()) {
            logger.info("No 'neptune_ml' config node in additional params so creating default training config");
            return Collections.singletonList(new TrainingDataWriterConfigV1());
        } else {
            Collection<TrainingDataWriterConfigV1> trainingJobWriterConfig =
                    TrainingDataWriterConfigV1.fromJson(neptuneMlNode);
            logger.info("Training job writer config: {}", trainingJobWriterConfig);
            return trainingJobWriterConfig;
        }
    }

    /**
     * Adjusts property-graph export args for ML training data (strip type
     * definitions, force edgeAndVertexLabels, merge files, switch to the
     * config-driven command when a filter/config is supplied), then forces a
     * fresh timestamp-based --export-id.
     */
    @Override
    public void onBeforeExport(Args args, ExportToS3NeptuneExportEventHandler.S3UploadParams s3UploadParams) {
        if (args.contains("export-pg")) {
            if (!args.contains("--exclude-type-definitions")) {
                args.addFlag("--exclude-type-definitions");
            }
            if (args.contains("--edge-label-strategy", EdgeLabelStrategy.edgeLabelsOnly.name())) {
                args.removeOptions("--edge-label-strategy");
            }
            if (!args.contains("--edge-label-strategy", EdgeLabelStrategy.edgeAndVertexLabels.name())) {
                args.addOption("--edge-label-strategy", EdgeLabelStrategy.edgeAndVertexLabels.name());
            }
            if (args.containsAny("--config", "--filter", "-c", "--config-file", "--filter-config-file")) {
                args.replace("export-pg", "export-pg-from-config");
            }
            if (!args.contains("--merge-files")) {
                args.addFlag("--merge-files");
            }
        }
        if (args.contains("--export-id")) {
            args.removeOptions("--export-id");
        }
        args.addOption("--export-id", new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()));
    }

    @Override
    public void onError() {
        // Do nothing
    }

    @Override
    public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster) throws Exception {
        //Do nothing
    }

    @Override
    public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception {
        // Column naming in the config depends on whether type suffixes were
        // stripped from the exported CSV headers.
        PropertyName propertyName = args.contains("--exclude-type-definitions") ?
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITHOUT_DATATYPE :
                PropertyGraphTrainingDataConfigWriterV1.COLUMN_NAME_WITH_DATATYPE;

        try (TransferManagerWrapper transferManager = new TransferManagerWrapper(s3Region, s3CredentialsProvider)) {
            for (TrainingDataWriterConfigV1 trainingJobWriterConfig : trainingJobWriterConfigCollection) {
                createTrainingJobConfigurationFile(trainingJobWriterConfig,
                        directories.rootDirectory(),
                        graphSchema,
                        propertyName,
                        transferManager);
            }
        }
    }

    /**
     * Writes a single training-job configuration file into the export root and
     * uploads it to S3 when an output path is configured.
     */
    private void createTrainingJobConfigurationFile(TrainingDataWriterConfigV1 trainingJobWriterConfig,
                                                    Path outputPath,
                                                    GraphSchema graphSchema,
                                                    PropertyName propertyName,
                                                    TransferManagerWrapper transferManager) throws Exception {

        File outputDirectory = outputPath.toFile();
        String filename = String.format("%s.json", trainingJobWriterConfig.name());
        File trainingJobConfigurationFile = new File(outputPath.toFile(), filename);

        try (Writer writer = new PrintWriter(trainingJobConfigurationFile)) {
            new PropertyGraphTrainingDataConfigWriterV1(
                    graphSchema,
                    createJsonGenerator(writer),
                    propertyName,
                    printerOptions,
                    trainingJobWriterConfig).write();
        }

        if (StringUtils.isNotEmpty(outputS3Path)) {
            Timer.timedActivity("uploading training job configuration file to S3",
                    (CheckedActivity.Runnable) () -> {
                        S3ObjectInfo outputS3ObjectInfo = calculateOutputS3Path(outputDirectory);
                        uploadTrainingJobConfigurationFileToS3(
                                filename,
                                transferManager.get(),
                                trainingJobConfigurationFile,
                                outputS3ObjectInfo);
                    });
        }
    }

    /**
     * Uploads the config file to S3 (SSE-KMS metadata, profile tags) and
     * blocks until the upload completes. Re-interrupts on InterruptedException.
     */
    private void uploadTrainingJobConfigurationFileToS3(String filename,
                                                        TransferManager transferManager,
                                                        File trainingJobConfigurationFile,
                                                        S3ObjectInfo outputS3ObjectInfo) throws IOException {

        S3ObjectInfo s3ObjectInfo = outputS3ObjectInfo.withNewKeySuffix(filename);

        try (InputStream inputStream = new FileInputStream(trainingJobConfigurationFile)) {
            // (Removed an unused `new ObjectMetadata()` local — metadata is built
            // via S3ObjectInfo.createObjectMetadata, as in the V2 handler.)
            PutObjectRequest putObjectRequest = new PutObjectRequest(s3ObjectInfo.bucket(),
                    s3ObjectInfo.key(),
                    inputStream,
                    S3ObjectInfo.createObjectMetadata(trainingJobConfigurationFile.length(), sseKmsKeyId))
                    .withTagging(ExportToS3NeptuneExportEventHandler.createObjectTags(profiles));

            Upload upload = transferManager.upload(putObjectRequest);

            upload.waitForUploadResult();

        } catch (InterruptedException e) {
            logger.warn(e.getMessage());
            Thread.currentThread().interrupt();
        }
    }

    // The export-subdirectory name is appended to the S3 key only when the
    // export was written into its own subdirectory.
    private S3ObjectInfo calculateOutputS3Path(File outputDirectory) {
        S3ObjectInfo outputBaseS3ObjectInfo = new S3ObjectInfo(outputS3Path);

        if (createExportSubdirectory) {
            return outputBaseS3ObjectInfo.withNewKeySuffix(outputDirectory.getName());
        } else {
            return outputBaseS3ObjectInfo;
        }
    }

    // Pretty-printing generator over the supplied writer.
    private JsonGenerator createJsonGenerator(Writer writer) throws IOException {
        JsonGenerator generator = new JsonFactory().createGenerator(writer);
        generator.setPrettyPrinter(new DefaultPrettyPrinter());
        return generator;
    }
}
4,326
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/PropertyGraphTrainingDataConfigWriterV1.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

    http://www.apache.org/licenses/LICENSE-2.0

or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v1;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.PropertyName;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Norm;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.*;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.*;
import com.fasterxml.jackson.core.JsonGenerator;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Writes a Neptune ML (v1) training-data configuration JSON document that describes
 * the exported property-graph CSV files: one entry per node/edge output file, listing
 * class labels, feature columns, and feature types derived from the export's
 * {@link GraphSchema} and the user-supplied {@link TrainingDataWriterConfigV1}.
 *
 * <p>Output is produced directly through the supplied {@link JsonGenerator}; the call
 * order of the private {@code write*} methods therefore defines the JSON structure.
 * Schema/config mismatches are not fatal: they are collected into {@link #warnings}
 * and emitted under a top-level "warnings" array.</p>
 */
public class PropertyGraphTrainingDataConfigWriterV1 {

    /** Column-name strategy that renders property columns with their data-type suffix (e.g. "age:int"). */
    public static final PropertyName COLUMN_NAME_WITH_DATATYPE = new PropertyName() {
        @Override
        public String escaped(PropertySchema propertySchema, PrinterOptions printerOptions) {
            return propertySchema.nameWithDataType(printerOptions.csv().escapeCsvHeaders());
        }

        @Override
        public String unescaped(PropertySchema propertySchema) {
            return propertySchema.nameWithDataType();
        }
    };

    /** Column-name strategy that renders property columns without a data-type suffix. */
    public static final PropertyName COLUMN_NAME_WITHOUT_DATATYPE = new PropertyName() {
        @Override
        public String escaped(PropertySchema propertySchema, PrinterOptions printerOptions) {
            return propertySchema.nameWithoutDataType(printerOptions.csv().escapeCsvHeaders());
        }

        @Override
        public String unescaped(PropertySchema propertySchema) {
            return propertySchema.nameWithoutDataType();
        }
    };

    private final GraphSchema graphSchema;
    private final JsonGenerator generator;
    private final PropertyName propertyName;
    private final TrainingDataWriterConfigV1 config;
    private final PrinterOptions printerOptions;
    // Accumulates non-fatal schema/config mismatch messages; flushed by writeWarnings().
    private final Collection<String> warnings = new ArrayList<>();

    /**
     * Creates a writer with a default (empty) training-data configuration.
     */
    public PropertyGraphTrainingDataConfigWriterV1(GraphSchema graphSchema,
                                                   JsonGenerator generator,
                                                   PropertyName propertyName,
                                                   PrinterOptions printerOptions) {
        this(graphSchema, generator, propertyName, printerOptions, new TrainingDataWriterConfigV1());
    }

    /**
     * @param graphSchema    schema of the exported graph (node and edge label schemas)
     * @param generator      destination JSON generator; caller owns its lifecycle
     * @param propertyName   strategy for rendering property column names (with/without data type)
     * @param printerOptions CSV printing options (used for header escaping)
     * @param config         user-supplied labels/features configuration
     */
    public PropertyGraphTrainingDataConfigWriterV1(GraphSchema graphSchema,
                                                   JsonGenerator generator,
                                                   PropertyName propertyName,
                                                   PrinterOptions printerOptions,
                                                   TrainingDataWriterConfigV1 config
    ) {
        this.graphSchema = graphSchema;
        this.generator = generator;
        this.propertyName = propertyName;
        this.printerOptions = printerOptions;
        this.config = config;
    }

    /**
     * Writes the whole configuration document: a "graph" array (node entries, then
     * edge entries) followed by a "warnings" array, then flushes the generator.
     *
     * @throws IOException if the underlying generator fails
     */
    public void write() throws IOException {
        generator.writeStartObject();
        generator.writeArrayFieldStart("graph");
        writeNodes();
        writeEdges();
        generator.writeEndArray();
        generator.writeArrayFieldStart("warnings");
        writeWarnings();
        generator.writeEndArray();
        generator.writeEndObject();
        generator.flush();
    }

    // Emits each accumulated warning as a string element of the enclosing array.
    private void writeWarnings() throws IOException {
        for (String warning : warnings) {
            generator.writeString(warning);
        }
    }

    // One JSON object per node output file: file name, separator, optional class label,
    // then the feature specifications for the node's properties.
    private void writeNodes() throws IOException {
        GraphElementType graphElementType = GraphElementType.nodes;
        GraphElementSchemas graphElementSchemas = graphSchema.graphElementSchemasFor(graphElementType);

        for (Label nodeLabel : graphElementSchemas.labels()) {

            Collection<String> outputIds = graphElementSchemas.getOutputIdsFor(nodeLabel);
            LabelSchema labelSchema = graphElementSchemas.getSchemaFor(nodeLabel);

            for (String outputId : outputIds) {
                generator.writeStartObject();
                writeFileName(graphElementType, outputId);
                writeSeparator(",");

                if (config.hasNodeClassificationSpecificationForNode(nodeLabel)) {
                    writeNodeLabel(labelSchema, config.getNodeClassificationPropertyForNode(nodeLabel));
                }

                writeNodeFeatures(nodeLabel, labelSchema.propertySchemas(), labelSchema);

                generator.writeEndObject();
            }
        }
    }

    // Writes the "labels" array for a node-classification target, or records a warning
    // if the configured target property does not exist on this node label.
    private void writeNodeLabel(LabelSchema labelSchema, LabelConfigV1 labelConfig) throws IOException {
        Label label = labelSchema.label();
        if (labelSchema.containsProperty(labelConfig.property())) {
            generator.writeArrayFieldStart("labels");
            PropertySchema propertySchema = labelSchema.getPropertySchema(labelConfig.property());
            generator.writeStartObject();
            generator.writeStringField("label_type", "node");
            generator.writeStringField("sub_label_type", labelConfig.labelType());
            generator.writeArrayFieldStart("cols");
            generator.writeString("~id");
            generator.writeString(propertyName.escaped(propertySchema, printerOptions));
            generator.writeEndArray();
            writeSplitRates(labelConfig);
            // Multi-value class-label columns are ';'-delimited within the CSV cell.
            if (propertySchema.isMultiValue()) {
                writeSeparator(";");
            }
            generator.writeStringField("node_type", label.labelsAsString());
            generator.writeEndObject();
            generator.writeEndArray();
        } else {
            warnings.add(
                    String.format("Unable to add node class label: Node of type '%s' does not contain property '%s'.",
                            label.fullyQualifiedLabel(),
                            labelConfig.property()));
        }
    }

    // Writes the train/validation/test "split_rate" array from the label config.
    private void writeSplitRates(LabelConfigV1 labelConfig) throws IOException {
        generator.writeArrayFieldStart("split_rate");
        for (Double rate : labelConfig.splitRates()) {
            generator.writeNumber(rate);
        }
        generator.writeEndArray();
    }

    // Writes the "features" array for a node label. Properties that are the
    // classification target are skipped; properties with an explicit override are
    // handled by the override pass below instead of the default mapping.
    private void writeNodeFeatures(Label label, Collection<PropertySchema> propertySchemas, LabelSchema labelSchema) throws IOException {
        boolean arrayStartHasBeenWritten = false;

        for (PropertySchema propertySchema : propertySchemas) {
            String column = propertySchema.nameWithoutDataType();
            if (!config.isNodeClassificationPropertyForNode(label, column)) {
                if (!arrayStartHasBeenWritten) {
                    generator.writeArrayFieldStart("features");
                    arrayStartHasBeenWritten = true;
                }
                if (!config.hasNodeFeatureOverrideForNodeProperty(label, column)) {
                    writeNodeFeature(label, propertySchema, labelSchema);
                }
            }
        }

        // NOTE(review): overrides are emitted even when the "features" array was never
        // started (e.g. every property is a class label) — presumably overrides always
        // co-occur with at least one non-label property; verify against callers.
        for (FeatureOverrideConfigV1 featureOverride : config.getNodeFeatureOverrides(label)) {
            writeNodeFeatureOverride(label, featureOverride, propertySchemas, labelSchema);
        }

        if (arrayStartHasBeenWritten) {
            generator.writeEndArray();
        }
    }

    // Default per-property feature mapping: numeric types -> numerical (min-max norm),
    // String/Boolean -> category. Other data types produce no feature entry.
    private void writeNodeFeature(Label label, PropertySchema propertySchema, LabelSchema labelSchema) throws IOException {
        if (propertySchema.dataType() == DataType.Float ||
                propertySchema.dataType() == DataType.Double) {
            writeNumericalNodeFeature(label, Collections.singletonList(propertySchema), Norm.min_max, labelSchema);
        }

        if (propertySchema.dataType() == DataType.Byte ||
                propertySchema.dataType() == DataType.Short ||
                propertySchema.dataType() == DataType.Integer ||
                propertySchema.dataType() == DataType.Long) {
            writeNumericalNodeFeature(label, Collections.singletonList(propertySchema), Norm.min_max, labelSchema);
        }

        if (propertySchema.dataType() == DataType.String ||
                propertySchema.dataType() == DataType.Boolean) {
            writeCategoricalNodeFeature(label, Collections.singletonList(propertySchema));
        }
    }

    // Applies a user-supplied feature override (category or numerical) to a single
    // property or to a multi-property group; missing properties produce warnings.
    private void writeNodeFeatureOverride(Label label,
                                          FeatureOverrideConfigV1 featureOverride,
                                          Collection<PropertySchema> propertySchemas,
                                          LabelSchema labelSchema) throws IOException {
        if (featureOverride.isSinglePropertyOverride()) {
            PropertySchema propertySchema = propertySchemas.stream()
                    .filter(p -> p.nameWithoutDataType().equals(featureOverride.firstProperty()))
                    .findFirst()
                    .orElse(null);
            if (propertySchema == null) {
                warnings.add(String.format("Unable to add node feature: Node of type '%s' does not contain property '%s'.",
                        label.fullyQualifiedLabel(),
                        featureOverride.firstProperty()));
            } else {
                FeatureTypeV1 featureType = featureOverride.featureType();
                if (FeatureTypeV1.category == featureType) {
                    writeCategoricalNodeFeature(label, Collections.singletonList(propertySchema), featureOverride.separator());
                } else if (FeatureTypeV1.numerical == featureType) {
                    writeNumericalNodeFeature(label, Collections.singletonList(propertySchema), featureOverride.norm(), labelSchema, featureOverride.separator());
                }
            }
        } else {
            boolean allPropertiesPresent = featureOverride.properties().stream()
                    .allMatch(p -> propertySchemas.stream()
                            .anyMatch(s -> s.nameWithoutDataType().equals(p)));
            if (!allPropertiesPresent) {
                warnings.add(String.format("Unable to add multi-property node feature: Node of type '%s' does not contain one or more of the following properties: %s.",
                        label.fullyQualifiedLabel(),
                        featureOverride.properties().stream()
                                .map(s -> String.format("'%s'", s))
                                .collect(Collectors.joining(", "))));
            } else {
                FeatureTypeV1 featureType = featureOverride.featureType();
                List<PropertySchema> multiPropertySchemas = propertySchemas.stream()
                        .filter(p -> featureOverride.properties().contains(p.nameWithoutDataType()))
                        .collect(Collectors.toList());
                if (FeatureTypeV1.category == featureType) {
                    writeCategoricalNodeFeature(label, multiPropertySchemas);
                } else if (FeatureTypeV1.numerical == featureType) {
                    writeNumericalNodeFeature(label, multiPropertySchemas, featureOverride.norm(), labelSchema);
                }
            }
        }
    }

    // Convenience overload using a default separator.
    private void writeCategoricalNodeFeature(Label label, Collection<PropertySchema> propertySchemas) throws IOException {
        writeCategoricalNodeFeature(label, propertySchemas, new Separator());
    }

    // Writes a "category" feature — unless a word2vec spec exists for a single-property
    // feature, in which case the word2vec form is written instead.
    private void writeCategoricalNodeFeature(Label label, Collection<PropertySchema> propertySchemas, Separator separator) throws IOException {
        boolean isSinglePropertyFeature = propertySchemas.size() == 1;
        PropertySchema firstPropertySchema = propertySchemas.iterator().next();

        if (isSinglePropertyFeature && config.hasWord2VecSpecification(label, firstPropertySchema.nameWithoutDataType())) {
            writeWord2VecFeature(label, firstPropertySchema);
        } else {
            generator.writeStartObject();
            generator.writeStringField("feat_type", "node");
            generator.writeStringField("sub_feat_type", "category");
            generator.writeArrayFieldStart("cols");
            generator.writeString("~id");
            for (PropertySchema propertySchema : propertySchemas) {
                generator.writeString(propertyName.escaped(propertySchema, printerOptions));
            }
            generator.writeEndArray();
            // Separator is only meaningful for a single (possibly multi-value) column.
            if (isSinglePropertyFeature) {
                separator.writeTo(generator, firstPropertySchema.isMultiValue());
            }
            generator.writeStringField("node_type", label.labelsAsString());
            generator.writeEndObject();
        }
    }

    // Writes a "word2vec" feature entry for a single text property, including the
    // configured language list.
    private void writeWord2VecFeature(Label label, PropertySchema propertySchema) throws IOException {
        Word2VecConfig word2VecConfig = config.getWord2VecSpecification(label, propertySchema.nameWithoutDataType());
        generator.writeStartObject();
        generator.writeStringField("feat_type", "node");
        generator.writeStringField("sub_feat_type", "word2vec");
        generator.writeArrayFieldStart("cols");
        generator.writeString("~id");
        generator.writeString(propertyName.escaped(propertySchema, printerOptions));
        generator.writeEndArray();
        generator.writeArrayFieldStart("language");
        for (String language : word2VecConfig.languages()) {
            generator.writeString(language);
        }
        generator.writeEndArray();
        generator.writeStringField("node_type", label.labelsAsString());
        generator.writeEndObject();
    }

    // Convenience overload; NOTE(review): passes a null separator (the edge-side
    // counterpart passes new Separator()) — safe only because the 5-arg node version
    // below never dereferences the separator parameter.
    private void writeNumericalNodeFeature(Label label, Collection<PropertySchema> propertySchemas, Norm norm, LabelSchema labelSchema) throws IOException {
        writeNumericalNodeFeature(label, propertySchemas, norm, labelSchema, null);
    }

    // Writes a "numerical" feature — unless a bucket spec exists for a single-property
    // feature (bucket form written instead). Multi-value numerical node properties are
    // rejected with a warning. The separator parameter is currently unused here.
    private void writeNumericalNodeFeature(Label label, Collection<PropertySchema> propertySchemas, Norm norm, LabelSchema labelSchema, Separator separator) throws IOException {
        boolean isSinglePropertyFeature = propertySchemas.size() == 1;
        PropertySchema firstPropertySchema = propertySchemas.iterator().next();

        if (isSinglePropertyFeature && config.hasNumericalBucketSpecification(label, firstPropertySchema.nameWithoutDataType())) {
            writeNumericalBucketFeature(label, firstPropertySchema);
        } else {
            List<String> multiValueProperties = propertySchemas.stream()
                    .filter(PropertySchema::isMultiValue)
                    .map(PropertySchema::nameWithoutDataType)
                    .collect(Collectors.toList());
            if (!multiValueProperties.isEmpty()) {
                warnings.add(String.format("Unable to add numerical node feature: Node of type '%s' has one or more multi-value numerical properties: %s.",
                        label.fullyQualifiedLabel(),
                        multiValueProperties));
                return;
            }
            generator.writeStartObject();
            generator.writeStringField("feat_type", "node");
            FeatureTypeV1.numerical.addTo(generator);
            generator.writeArrayFieldStart("cols");
            generator.writeString("~id");
            for (PropertySchema propertySchema : propertySchemas) {
                generator.writeString(propertyName.escaped(propertySchema, printerOptions));
            }
            generator.writeEndArray();
            norm.addTo(generator);
            generator.writeStringField("node_type", label.labelsAsString());
            generator.writeEndObject();
        }
    }

    // Writes a "bucket_numerical" feature (range, bucket count, sliding window) for a
    // single-value numeric property; multi-value properties are rejected with a warning.
    private void writeNumericalBucketFeature(Label label, PropertySchema propertySchema) throws IOException {
        NumericalBucketFeatureConfigV1 featureConfig = config.getNumericalBucketSpecification(label, propertySchema.nameWithoutDataType());
        if (propertySchema.isMultiValue()) {
            warnings.add(String.format(
                    "Unable to add numerical bucket feature: Property '%s' of node type '%s' is a multi-value property.",
                    propertySchema.nameWithoutDataType(),
                    label.fullyQualifiedLabel()));
        } else {
            generator.writeStartObject();
            generator.writeStringField("feat_type", "node");
            generator.writeStringField("sub_feat_type", "bucket_numerical");
            generator.writeArrayFieldStart("cols");
            generator.writeString("~id");
            generator.writeString(propertyName.escaped(propertySchema, printerOptions));
            generator.writeEndArray();
            generator.writeArrayFieldStart("range");
            generator.writeObject(featureConfig.range().low());
            generator.writeObject(featureConfig.range().high());
            generator.writeEndArray();
            generator.writeNumberField("bucket_cnt", featureConfig.bucketCount());
            generator.writeNumberField("slide_window_size", featureConfig.slideWindowSize());
            generator.writeStringField("node_type", label.labelsAsString());
            generator.writeEndObject();
        }
    }

    // One JSON object per edge output file. Property-less edges get a bare "edges"
    // entry (~from/~to only); otherwise class labels and features are written.
    private void writeEdges() throws IOException {
        GraphElementType graphElementType = GraphElementType.edges;
        GraphElementSchemas graphElementSchemas = graphSchema.graphElementSchemasFor(graphElementType);

        for (Label edgeLabel : graphElementSchemas.labels()) {

            Collection<String> outputIds = graphElementSchemas.getOutputIdsFor(edgeLabel);
            LabelSchema labelSchema = graphElementSchemas.getSchemaFor(edgeLabel);

            for (String outputId : outputIds) {
                generator.writeStartObject();
                writeFileName(graphElementType, outputId);
                writeSeparator(",");

                if (graphElementSchemas.getSchemaFor(edgeLabel).propertyCount() == 0) {
                    generator.writeArrayFieldStart("edges");
                    generator.writeStartObject();
                    writeEdgeSpecType();
                    writeCols();
                    writeEdgeType(edgeLabel);
                    generator.writeEndObject();
                    generator.writeEndArray();
                } else {
                    if (config.hasEdgeClassificationSpecificationForEdge(edgeLabel)) {
                        writeEdgeLabel(labelSchema, config.getEdgeClassificationPropertyForEdge(edgeLabel));
                    }
                    writeEdgeFeatures(edgeLabel, labelSchema.propertySchemas(), labelSchema);
                }

                generator.writeEndObject();
            }
        }
    }

    // Writes the "features" array for an edge label. Only single-value numeric
    // properties get a default feature; overrides are handled in a second pass.
    private void writeEdgeFeatures(Label label, Collection<PropertySchema> propertySchemas, LabelSchema labelSchema) throws IOException {
        boolean arrayStartHasBeenWritten = false;

        for (PropertySchema propertySchema : propertySchemas) {
            if (!config.isEdgeClassificationPropertyForEdge(label, propertySchema.nameWithoutDataType())) {
                if (!arrayStartHasBeenWritten) {
                    generator.writeArrayFieldStart("features");
                    arrayStartHasBeenWritten = true;
                }
                if (!propertySchema.isMultiValue()) {
                    if (!config.hasEdgeFeatureOverrideForEdgeProperty(label, propertySchema.nameWithoutDataType())) {
                        writeNumericalEdgeFeature(label, Collections.singletonList(propertySchema), Norm.min_max, labelSchema);
                    }
                }
            }
        }

        for (FeatureOverrideConfigV1 featureOverride : config.getEdgeFeatureOverrides(label)) {
            writeEdgeFeatureOverride(label, featureOverride, propertySchemas, labelSchema);
        }

        if (arrayStartHasBeenWritten) {
            generator.writeEndArray();
        }
    }

    // Applies a user-supplied edge feature override; only numerical overrides are
    // supported for edges. Missing properties produce warnings.
    private void writeEdgeFeatureOverride(Label label,
                                          FeatureOverrideConfigV1 featureOverride,
                                          Collection<PropertySchema> propertySchemas,
                                          LabelSchema labelSchema) throws IOException {
        if (featureOverride.isSinglePropertyOverride()) {
            PropertySchema propertySchema = propertySchemas.stream()
                    .filter(p -> p.nameWithoutDataType().equals(featureOverride.firstProperty()))
                    .findFirst()
                    .orElse(null);
            if (propertySchema == null) {
                warnings.add(String.format("Unable to add edge feature: Edge of type '%s' does not contain property '%s'.",
                        label.fullyQualifiedLabel(),
                        featureOverride.firstProperty()));
            } else {
                FeatureTypeV1 featureType = featureOverride.featureType();
                if (FeatureTypeV1.numerical == featureType) {
                    writeNumericalEdgeFeature(label, Collections.singletonList(propertySchema), featureOverride.norm(), labelSchema, featureOverride.separator());
                }
            }
        } else {
            boolean allPropertiesPresent = featureOverride.properties().stream()
                    .allMatch(p -> propertySchemas.stream()
                            .anyMatch(s -> s.nameWithoutDataType().equals(p)));
            if (!allPropertiesPresent) {
                warnings.add(String.format("Unable to add multi-property edge feature: Edge of type '%s' does not contain one or more of the following properties: %s.",
                        label.fullyQualifiedLabel(),
                        featureOverride.properties().stream()
                                .map(s -> String.format("'%s'", s))
                                .collect(Collectors.joining(", "))));
            } else {
                FeatureTypeV1 featureType = featureOverride.featureType();
                List<PropertySchema> multiPropertySchemas = propertySchemas.stream()
                        .filter(p -> featureOverride.properties().contains(p.nameWithoutDataType()))
                        .collect(Collectors.toList());
                if (FeatureTypeV1.numerical == featureType) {
                    writeNumericalEdgeFeature(label, multiPropertySchemas, featureOverride.norm(), labelSchema);
                }
            }
        }
    }

    // Convenience overload using a default separator.
    private void writeNumericalEdgeFeature(Label label, Collection<PropertySchema> propertySchemas, Norm norm, LabelSchema labelSchema) throws IOException {
        writeNumericalEdgeFeature(label, propertySchemas, norm, labelSchema, new Separator());
    }

    // Writes an edge "numerical" feature over ~from/~to plus the property columns.
    // Single multi-value properties must have uniform cardinality across rows;
    // otherwise a warning is recorded and nothing is written.
    private void writeNumericalEdgeFeature(Label label, Collection<PropertySchema> propertySchemas, Norm norm, LabelSchema labelSchema, Separator separator) throws IOException {
        boolean isSinglePropertyFeature = propertySchemas.size() == 1;
        PropertySchema firstPropertySchema = propertySchemas.iterator().next();

        if (isSinglePropertyFeature) {
            PropertySchemaStats propertySchemaStats = labelSchema.getPropertySchemaStats(firstPropertySchema.property());
            if (firstPropertySchema.isMultiValue() && !propertySchemaStats.isUniformCardinality()) {
                warnings.add(String.format("Unable to add numerical edge feature: Edge of type '%s' has a multi-value numerical property '%s' with differing numbers of values.",
                        label.fullyQualifiedLabel(),
                        firstPropertySchema.property()));
                return;
            }
        }

        generator.writeStartObject();
        generator.writeStringField("feat_type", "edge");
        FeatureTypeV1.numerical.addTo(generator);
        generator.writeArrayFieldStart("cols");
        generator.writeString("~from");
        generator.writeString("~to");
        for (PropertySchema propertySchema : propertySchemas) {
            generator.writeString(propertyName.escaped(propertySchema, printerOptions));
        }
        generator.writeEndArray();
        norm.addTo(generator);
        if (isSinglePropertyFeature) {
            separator.writeTo(generator, firstPropertySchema.isMultiValue());
        }
        writeEdgeType(label);
        generator.writeEndObject();
    }

    // Writes the "labels" array for an edge-classification target, or records a warning
    // if the configured target property does not exist on this edge label.
    // NOTE(review): the warning uses labelsAsString() while the node counterpart uses
    // fullyQualifiedLabel() — possibly an intentional difference; confirm.
    private void writeEdgeLabel(LabelSchema labelSchema, LabelConfigV1 labelConfig) throws IOException {
        Label label = labelSchema.label();
        if (labelSchema.containsProperty(labelConfig.property())) {
            PropertySchema propertySchema = labelSchema.getPropertySchema(labelConfig.property());
            generator.writeArrayFieldStart("labels");
            generator.writeStartObject();
            generator.writeStringField("label_type", "edge");
            generator.writeStringField("sub_label_type", labelConfig.labelType());
            generator.writeArrayFieldStart("cols");
            generator.writeString("~from");
            generator.writeString("~to");
            generator.writeString(propertyName.escaped(propertySchema, printerOptions));
            generator.writeEndArray();
            writeSplitRates(labelConfig);
            if (propertySchema.isMultiValue()) {
                writeSeparator(";");
            }
            writeEdgeType(label);
            generator.writeEndObject();
            generator.writeEndArray();
        } else {
            warnings.add(
                    String.format("Unable to add edge class label: Edge of type '%s' does not contain property '%s'.",
                            label.labelsAsString(),
                            labelConfig.property()));
        }
    }

    // Writes the CSV field separator for the current file entry.
    private void writeSeparator(String separator) throws IOException {
        generator.writeStringField("separator", separator);
    }

    // Writes "file_name" as "<nodes|edges>/<basename of the output id>".
    private void writeFileName(GraphElementType graphElementType, String outputId) throws IOException {
        generator.writeStringField("file_name", String.format("%s/%s", graphElementType.name(), new File(outputId).getName()));
    }

    private void writeEdgeSpecType() throws IOException {
        generator.writeStringField("edge_spec_type", "edge");
    }

    // Writes the minimal column list for a property-less edge file.
    private void writeCols() throws IOException {
        generator.writeArrayFieldStart("cols");
        generator.writeString("~from");
        generator.writeString("~to");
        generator.writeEndArray();
    }

    // Writes the [from-labels, edge-label, to-labels] triple identifying an edge type.
    private void writeEdgeType(Label label) throws IOException {
        generator.writeArrayFieldStart("edge_type");
        generator.writeString(label.fromLabelsAsString());
        generator.writeString(label.labelsAsString());
        generator.writeString(label.toLabelsAsString());
        generator.writeEndArray();
    }
}
4,327
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/PropertyGraphTrainingDataConfigBuilderV1.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

    http://www.apache.org/licenses/LICENSE-2.0

or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v1;

import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.LabelConfigV1;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureOverrideConfigV1;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.NumericalBucketFeatureConfigV1;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.TrainingDataWriterConfigV1;
import com.amazonaws.services.neptune.propertygraph.Label;

import java.util.*;

/**
 * Fluent builder for {@link TrainingDataWriterConfigV1} instances, primarily intended
 * for assembling test and sample configurations. Every {@code with*} method mutates
 * this builder and returns it, so calls can be chained before {@link #build()}.
 *
 * <p>Class labels registered here always use the default 0.7/0.1/0.2
 * train/validation/test split.</p>
 */
public class PropertyGraphTrainingDataConfigBuilderV1 {

    /** @return a fresh builder with no labels, features, or overrides configured */
    public static PropertyGraphTrainingDataConfigBuilderV1 builder() {
        return new PropertyGraphTrainingDataConfigBuilderV1();
    }

    // Package-visible accumulators, populated by the with* methods below.
    Map<Label, LabelConfigV1> nodeClassLabels = new HashMap<>();
    Map<Label, LabelConfigV1> edgeClassLabels = new HashMap<>();
    Collection<Word2VecConfig> word2VecNodeFeatures = new ArrayList<>();
    Collection<NumericalBucketFeatureConfigV1> numericalBucketFeatures = new ArrayList<>();
    Collection<FeatureOverrideConfigV1> nodeFeatureOverrides = new ArrayList<>();
    Collection<FeatureOverrideConfigV1> edgeFeatureOverrides = new ArrayList<>();
    // Default train/validation/test split applied to every class label.
    Collection<Double> splitRates = Arrays.asList(0.7, 0.1, 0.2);

    /** Registers {@code column} as the node-classification target for {@code label}. */
    public PropertyGraphTrainingDataConfigBuilderV1 withNodeClassLabel(Label label, String column) {
        nodeClassLabels.put(label, new LabelConfigV1("node_class_label", column, splitRates));
        return this;
    }

    /** Registers {@code column} as the edge-classification target for {@code label}. */
    public PropertyGraphTrainingDataConfigBuilderV1 withEdgeClassLabel(Label label, String column) {
        edgeClassLabels.put(label, new LabelConfigV1("edge_class_label", column, splitRates));
        return this;
    }

    /** Adds a word2vec feature over {@code column} for the given node label and languages. */
    public PropertyGraphTrainingDataConfigBuilderV1 withWord2VecNodeFeature(Label label, String column, String... languages) {
        word2VecNodeFeatures.add(new Word2VecConfig(label, column, Arrays.asList(languages)));
        return this;
    }

    /** Adds a bucketed numerical feature over {@code column} for the given node label. */
    public PropertyGraphTrainingDataConfigBuilderV1 withNumericalBucketFeature(Label label, String column, Range range, int bucketCount, int slideWindowSize) {
        numericalBucketFeatures.add(new NumericalBucketFeatureConfigV1(label, column, range, bucketCount, slideWindowSize));
        return this;
    }

    /** Adds a per-property feature override for a node label. */
    public PropertyGraphTrainingDataConfigBuilderV1 withNodeFeatureOverride(FeatureOverrideConfigV1 override) {
        nodeFeatureOverrides.add(override);
        return this;
    }

    /** Adds a per-property feature override for an edge label. */
    public PropertyGraphTrainingDataConfigBuilderV1 withEdgeFeatureOverride(FeatureOverrideConfigV1 override) {
        edgeFeatureOverrides.add(override);
        return this;
    }

    /**
     * Builds the configuration from everything registered so far.
     * The resulting training job is always named "training-job".
     */
    public TrainingDataWriterConfigV1 build() {
        return new TrainingDataWriterConfigV1("training-job",
                nodeClassLabels,
                edgeClassLabels,
                word2VecNodeFeatures,
                numericalBucketFeatures,
                nodeFeatureOverrides,
                edgeFeatureOverrides);
    }
}
4,328
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/config/FeatureTypeV1.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

    http://www.apache.org/licenses/LICENSE-2.0

or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.config;

import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonNode;

import java.io.IOException;

/**
 * Neptune ML v1 feature types. The enum constant name doubles as the
 * "sub_feat_type" value written into the training-data configuration
 * (see {@link #addTo(JsonGenerator)}), so constant names and their order
 * must not change.
 *
 * <p>Refactoring note: the previously duplicated separator check in
 * {@code numerical} and {@code bucket_numerical} is now shared via
 * {@link #rejectSeparator(JsonNode, String, Label)}, and the no-op
 * implementations for {@code category} and {@code word2vec} collapse into
 * a concrete default {@link #validateOverride(JsonNode, String, Label)}.</p>
 */
public enum FeatureTypeV1 {

    category,

    numerical {
        @Override
        public void validateOverride(JsonNode node, String description, Label label) {
            // Numerical feature properties are single-valued, so a separator is invalid.
            rejectSeparator(node, description, label);
        }
    },

    word2vec,

    bucket_numerical {
        @Override
        public void validateOverride(JsonNode node, String description, Label label) {
            // Bucketed numerical features share the single-value restriction.
            rejectSeparator(node, description, label);
        }
    };

    /**
     * Writes this feature type as a "sub_feat_type" field on the current JSON object.
     *
     * @throws IOException if the generator fails
     */
    public void addTo(JsonGenerator generator) throws IOException {
        generator.writeStringField("sub_feat_type", name());
    }

    /**
     * Validates a user-supplied feature override node for this feature type.
     * By default all override fields are accepted; the numerical variants
     * reject a 'separator' field.
     *
     * @param node        the override's JSON configuration
     * @param description human-readable context for error messages
     * @param label       the node/edge label the override applies to
     * @throws IllegalArgumentException if the override is invalid for this type
     */
    public void validateOverride(JsonNode node, String description, Label label) {
        // Accept any override fields for non-numerical feature types.
    }

    // Shared check: numerical-style features cannot carry multiple values per cell,
    // so a 'separator' field in the override is a configuration error.
    private static void rejectSeparator(JsonNode node, String description, Label label) {
        if (node.has("separator")) {
            throw new IllegalArgumentException(String.format("Invalid 'separator' field for %s for '%s': numerical feature properties cannot contain multiple values.", description, label.fullyQualifiedLabel()));
        }
    }
}
4,329
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/config/NumericalBucketFeatureConfigV1.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

    http://www.apache.org/licenses/LICENSE-2.0

or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.amazonaws.services.neptune.propertygraph.Label;

/**
 * Immutable description of a bucketed-numerical feature: for the given
 * {@code property} of the given {@code label}, values in {@code range} are
 * discretized into {@code bucketCount} buckets using a sliding window of
 * size {@code slideWindowSize}.
 */
public class NumericalBucketFeatureConfigV1 {

    private final Label label;
    private final String property;
    private final Range range;
    private final int bucketCount;
    private final int slideWindowSize;

    /**
     * @param label           node label the feature applies to
     * @param property        name of the numeric property to bucket
     * @param range           inclusive value range covered by the buckets
     * @param bucketCount     number of buckets to divide the range into
     * @param slideWindowSize sliding-window size used when assigning buckets
     */
    public NumericalBucketFeatureConfigV1(Label label,
                                          String property,
                                          Range range,
                                          int bucketCount,
                                          int slideWindowSize) {
        this.label = label;
        this.property = property;
        this.range = range;
        this.bucketCount = bucketCount;
        this.slideWindowSize = slideWindowSize;
    }

    /** @return node label the feature applies to */
    public Label label() {
        return label;
    }

    /** @return name of the bucketed property */
    public String property() {
        return property;
    }

    /** @return number of buckets */
    public int bucketCount() {
        return bucketCount;
    }

    /** @return sliding-window size */
    public int slideWindowSize() {
        return slideWindowSize;
    }

    /** @return value range covered by the buckets */
    public Range range() {
        return range;
    }

    @Override
    public String toString() {
        // Format kept identical to the original hand-concatenated version.
        return String.format(
                "NumericalBucketFeatureConfig{label=%s, property='%s', range=%s, bucketCount=%d, slideWindowSize=%d}",
                label, property, range, bucketCount, slideWindowSize);
    }
}
4,330
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/config/TrainingDataWriterConfigV1.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License.
A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing.ParseLabelsV1;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParseSplitRate;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing.*;
import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Immutable configuration for writing Neptune ML (v1 profile) training data.
 * <p>
 * An instance aggregates, per training job: node/edge classification targets
 * ({@link LabelConfigV1}), word2vec and bucket-numerical node features, and
 * node/edge feature overrides. Instances are normally built from a JSON
 * configuration document via {@link #fromJson(JsonNode)}.
 */
public class TrainingDataWriterConfigV1 {

    /** Default train/validation/test split used when the config supplies none. */
    public static final Collection<Double> DEFAULT_SPLIT_RATES_V1 = Arrays.asList(0.7, 0.1, 0.2);

    /** Base name used when the JSON config omits an explicit "name" field. */
    private static final String DEFAULT_NAME_V1 = "training-job-configuration";

    /**
     * Parses one or more training job configurations from JSON.
     * <p>
     * Accepts either a single JSON object or an array of objects; array entries
     * are numbered from 1 so that unnamed entries get distinct default names.
     *
     * @param json a config object, or an array of config objects
     * @return one config per JSON object, in document order
     * @throws IllegalStateException if two resulting configs share a name
     */
    public static Collection<TrainingDataWriterConfigV1> fromJson(JsonNode json) {
        Collection<TrainingDataWriterConfigV1> results = new ArrayList<>();

        if (json.isArray()) {
            ArrayNode configNodes = (ArrayNode) json;
            int index = 1;
            for (JsonNode configNode : configNodes) {
                results.add(getTrainingJobWriterConfig(configNode, index++));
            }
        } else {
            results.add(getTrainingJobWriterConfig(json, 1));
        }

        // Duplicate names collapse in the set, so a smaller set means a clash.
        Set<String> names = results.stream().map(TrainingDataWriterConfigV1::name).collect(Collectors.toSet());

        if (names.size() < results.size()) {
            throw new IllegalStateException(String.format("Training job configuration names must be unique: %s", names));
        }

        return results;
    }

    /**
     * Builds a single config from one JSON object.
     *
     * @param json  the config object (may contain "name", "split_rate",
     *              "targets" and "features" fields)
     * @param index 1-based position of this object within the enclosing array;
     *              used to derive a unique default name for index > 1
     */
    private static TrainingDataWriterConfigV1 getTrainingJobWriterConfig(JsonNode json, int index) {
        Map<Label, LabelConfigV1> nodeClassLabels = new HashMap<>();
        Map<Label, LabelConfigV1> edgeClassLabels = new HashMap<>();
        Collection<Word2VecConfig> word2VecNodeFeatures = new ArrayList<>();
        Collection<NumericalBucketFeatureConfigV1> numericalBucketFeatures = new ArrayList<>();
        Collection<FeatureOverrideConfigV1> nodeFeatureOverrides = new ArrayList<>();
        Collection<FeatureOverrideConfigV1> edgeFeatureOverrides = new ArrayList<>();

        // Config-level split rates become the default for every target that
        // does not declare its own.
        Collection<Double> defaultSplitRates = new ParseSplitRate(json, DEFAULT_SPLIT_RATES_V1, new ParsingContext("config")).parseSplitRates();

        // Unnamed configs after the first get an index suffix to keep names unique.
        String name = json.has("name") ?
                json.get("name").textValue() :
                index > 1 ? String.format("%s-%s", DEFAULT_NAME_V1, index) : DEFAULT_NAME_V1;

        if (json.has("targets")) {
            JsonNode labels = json.path("targets");
            Collection<JsonNode> labelNodes = new ArrayList<>();
            // "targets" may be a single object or an array; normalize to a list.
            if (labels.isArray()) {
                labels.forEach(labelNodes::add);
            } else {
                labelNodes.add(labels);
            }
            ParseLabelsV1 parseLabels = new ParseLabelsV1(labelNodes, defaultSplitRates);
            parseLabels.validate();
            nodeClassLabels.putAll(parseLabels.parseNodeClassLabels());
            edgeClassLabels.putAll(parseLabels.parseEdgeClassLabels());
        }

        if (json.has("features")) {
            JsonNode features = json.path("features");
            Collection<JsonNode> featureNodes = new ArrayList<>();
            // "features" may also be a single object or an array.
            if (features.isArray()) {
                features.forEach(featureNodes::add);
            } else {
                featureNodes.add(features);
            }
            ParseFeaturesV1 parseFeatures = new ParseFeaturesV1(featureNodes);
            parseFeatures.validate();
            word2VecNodeFeatures.addAll(parseFeatures.parseWord2VecNodeFeatures());
            numericalBucketFeatures.addAll(parseFeatures.parseNumericalBucketFeatures());
            nodeFeatureOverrides.addAll(parseFeatures.parseNodeFeatureOverrides());
            edgeFeatureOverrides.addAll(parseFeatures.parseEdgeFeatureOverrides());
        }

        return new TrainingDataWriterConfigV1(
                name,
                nodeClassLabels,
                edgeClassLabels,
                word2VecNodeFeatures,
                numericalBucketFeatures,
                nodeFeatureOverrides,
                edgeFeatureOverrides);
    }

    private final String name;
    private final Map<Label, LabelConfigV1> nodeClassLabels;
    private final Map<Label, LabelConfigV1> edgeClassLabels;
    private final Collection<Word2VecConfig> word2VecNodeFeatures;
    private final Collection<NumericalBucketFeatureConfigV1> numericalBucketFeatures;
    private final Collection<FeatureOverrideConfigV1> nodeFeatureOverrides;
    private final Collection<FeatureOverrideConfigV1> edgeFeatureOverrides;

    /** Creates an empty config with the default name and no targets or features. */
    public TrainingDataWriterConfigV1() {
        this(DEFAULT_NAME_V1,
                Collections.emptyMap(),
                Collections.emptyMap(),
                Collections.emptyList(),
                Collections.emptyList(),
                Collections.emptyList(),
                Collections.emptyList());
    }

    /**
     * Creates a fully-populated config.
     * <p>
     * NOTE(review): the supplied collections are stored as-is, not copied —
     * callers are expected not to mutate them afterwards.
     */
    public TrainingDataWriterConfigV1(String name,
                                      Map<Label, LabelConfigV1> nodeClassLabels,
                                      Map<Label, LabelConfigV1> edgeClassLabels,
                                      Collection<Word2VecConfig> word2VecNodeFeatures,
                                      Collection<NumericalBucketFeatureConfigV1> numericalBucketFeatures,
                                      Collection<FeatureOverrideConfigV1> nodeFeatureOverrides,
                                      Collection<FeatureOverrideConfigV1> edgeFeatureOverrides) {
        this.name = name;
        this.nodeClassLabels = nodeClassLabels;
        this.edgeClassLabels = edgeClassLabels;
        this.word2VecNodeFeatures = word2VecNodeFeatures;
        this.numericalBucketFeatures = numericalBucketFeatures;
        this.nodeFeatureOverrides = nodeFeatureOverrides;
        this.edgeFeatureOverrides = edgeFeatureOverrides;
    }

    /** Returns whether a node classification target exists for the given node label. */
    public boolean hasNodeClassificationSpecificationForNode(Label nodeType) {
        return nodeClassLabels.containsKey(nodeType);
    }

    /** Returns the node classification target for the given node label, or null if none. */
    public LabelConfigV1 getNodeClassificationPropertyForNode(Label nodeType) {
        return nodeClassLabels.get(nodeType);
    }

    /** Returns whether the given property is the classification target property for this node label. */
    public boolean isNodeClassificationPropertyForNode(Label nodeType, String property) {
        if (hasNodeClassificationSpecificationForNode(nodeType)) {
            return getNodeClassificationPropertyForNode(nodeType).property().equals(property);
        } else {
            return false;
        }
    }

    /** Returns whether an edge classification target exists for the given edge label. */
    public boolean hasEdgeClassificationSpecificationForEdge(Label edgeType) {
        return edgeClassLabels.containsKey(edgeType);
    }

    /** Returns the edge classification target for the given edge label, or null if none. */
    public LabelConfigV1 getEdgeClassificationPropertyForEdge(Label nodeType) {
        return edgeClassLabels.get(nodeType);
    }

    /** Returns whether the given property is the classification target property for this edge label. */
    public boolean isEdgeClassificationPropertyForEdge(Label edgeType, String property) {
        if (hasEdgeClassificationSpecificationForEdge(edgeType)) {
            return getEdgeClassificationPropertyForEdge(edgeType).property().equals(property);
        } else {
            return false;
        }
    }

    /** Returns whether a word2vec feature is configured for this node label and property. */
    public boolean hasWord2VecSpecification(Label nodeType, String property) {
        return getWord2VecSpecification(nodeType, property) != null;
    }

    /** Returns the word2vec feature for this node label and property, or null if none. */
    public Word2VecConfig getWord2VecSpecification(Label nodeType, String property) {
        return word2VecNodeFeatures.stream()
                .filter(config ->
                        config.label().equals(nodeType) &&
                                config.property().equals(property))
                .findFirst()
                .orElse(null);
    }

    /** Returns whether a bucket-numerical feature is configured for this node label and property. */
    public boolean hasNumericalBucketSpecification(Label nodeType, String property) {
        return getNumericalBucketSpecification(nodeType, property) != null;
    }

    /** Returns the bucket-numerical feature for this node label and property, or null if none. */
    public NumericalBucketFeatureConfigV1 getNumericalBucketSpecification(Label nodeType, String property) {
        return numericalBucketFeatures.stream()
                .filter(config ->
                        config.label().equals(nodeType) &&
                                config.property().equals(property))
                .findFirst()
                .orElse(null);
    }

    /** Returns whether any node feature override covers this node label and property. */
    public boolean hasNodeFeatureOverrideForNodeProperty(Label nodeType, String property) {
        return nodeFeatureOverrides.stream()
                .anyMatch(override ->
                        override.label().equals(nodeType) &&
                                override.properties().contains(property));
    }

    /** Returns all node feature overrides for the given node label (possibly empty). */
    public Collection<FeatureOverrideConfigV1> getNodeFeatureOverrides(Label nodeType) {
        return nodeFeatureOverrides.stream()
                .filter(c -> c.label().equals(nodeType))
                .collect(Collectors.toList());
    }

    /** Returns the first node feature override covering this node label and property, or null. */
    public FeatureOverrideConfigV1 getNodeFeatureOverride(Label nodeType, String property) {
        return nodeFeatureOverrides.stream()
                .filter(config ->
                        config.label().equals(nodeType) &&
                                config.properties().contains(property))
                .findFirst()
                .orElse(null);
    }

    /** Returns whether any edge feature override covers this edge label and property. */
    public boolean hasEdgeFeatureOverrideForEdgeProperty(Label edgeType, String property) {
        return edgeFeatureOverrides.stream()
                .anyMatch(override ->
                        override.label().equals(edgeType) &&
                                override.properties().contains(property));
    }

    /** Returns all edge feature overrides for the given edge label (possibly empty). */
    public Collection<FeatureOverrideConfigV1> getEdgeFeatureOverrides(Label edgeType) {
        return edgeFeatureOverrides.stream()
                .filter(c -> c.label().equals(edgeType))
                .collect(Collectors.toList());
    }

    /** Returns the first edge feature override covering this edge label and property, or null. */
    public FeatureOverrideConfigV1 getEdgeFeatureOverride(Label edgeType, String property) {
        return edgeFeatureOverrides.stream()
                .filter(config ->
                        config.label().equals(edgeType) &&
                                config.properties().contains(property))
                .findFirst()
                .orElse(null);
    }

    @Override
    public String toString() {
        return "TrainingJobWriterConfig{" +
                "nodeClassLabels=" + nodeClassLabels +
                ", edgeClassLabels=" + edgeClassLabels +
                ", word2VecNodeFeatures=" + word2VecNodeFeatures +
                ", numericalBucketFeatures=" + numericalBucketFeatures +
                ", nodeFeatureOverrides=" + nodeFeatureOverrides +
                ", edgeFeatureOverrides=" + edgeFeatureOverrides +
                '}';
    }

    /** Returns the unique name of this training job configuration. */
    public String name() {
        return name;
    }
}
4,331
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/config/LabelConfigV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v1.config; import java.util.Collection; import java.util.Optional; public class LabelConfigV1 { private final String labelType; private final String property; private final Collection<Double> splitRates; public LabelConfigV1(String labelType, String property, Collection<Double> splitRates) { this.labelType = labelType; this.property = property; this.splitRates = splitRates; if (this.splitRates.size() != 3) { throw new IllegalArgumentException("split rates must contain 3 values"); } Optional<Double> sum = this.splitRates.stream().reduce(Double::sum); if (sum.orElse(0.0) != 1.0) { throw new IllegalArgumentException("split rate values must add up to 1.0"); } } public String property() { return property; } public Collection<Double> splitRates() { return splitRates; } public String labelType() { return labelType; } }
4,332
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/config/FeatureOverrideConfigV1.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License.
A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Norm;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator;
import com.amazonaws.services.neptune.propertygraph.Label;

import java.util.Collection;

/**
 * An explicit feature override (v1 profile): forces one or more properties of
 * a node or edge label to be encoded with a specific feature type, norm and
 * separator, instead of the automatically inferred encoding.
 */
public class FeatureOverrideConfigV1 {

    private final Label label;
    private final Collection<String> properties;
    private final FeatureTypeV1 featureType;
    private final Norm norm;
    private final Separator separator;

    public FeatureOverrideConfigV1(Label label,
                                   Collection<String> properties,
                                   FeatureTypeV1 featureType,
                                   Norm norm,
                                   Separator separator) {
        this.label = label;
        this.properties = properties;
        this.featureType = featureType;
        this.norm = norm;
        this.separator = separator;
    }

    /** Returns the node or edge label this override applies to. */
    public Label label() {
        return label;
    }

    /** Returns all property names covered by this override. */
    public Collection<String> properties() {
        return properties;
    }

    /** Returns true when the override targets exactly one property. */
    public boolean isSinglePropertyOverride() {
        return properties.size() == 1;
    }

    /** Returns the first covered property, in the collection's iteration order. */
    public String firstProperty() {
        return properties.iterator().next();
    }

    /** Returns the forced feature type. */
    public FeatureTypeV1 featureType() {
        return featureType;
    }

    /** Returns the normalization to apply. */
    public Norm norm() {
        return norm;
    }

    /** Returns the separator for multi-valued properties. */
    public Separator separator() {
        return separator;
    }
}
4,333
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseFeaturesV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Norm; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.*; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureOverrideConfigV1; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureTypeV1; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.NumericalBucketFeatureConfigV1; import com.amazonaws.services.neptune.propertygraph.Label; import com.fasterxml.jackson.databind.JsonNode; import java.util.ArrayList; import java.util.Collection; public class ParseFeaturesV1 { private final Collection<JsonNode> nodes; public ParseFeaturesV1(Collection<JsonNode> nodes) { this.nodes = nodes; } public void validate() { for (JsonNode node : nodes) { if (!isWord2VecNodeFeature(node) && !isNumericalBucketFeature(node) && !isNodeFeatureOverride(node) && !isEdgeFeatureOverride(node)) { throw new IllegalArgumentException("Illegal feature element: expected category or numerical feature definitions for nodes and edges, or word2vec or bucket_numerical feature definitions for nodes"); 
} } } public Collection<Word2VecConfig> parseWord2VecNodeFeatures() { Collection<Word2VecConfig> word2VecFeatures = new ArrayList<>(); for (JsonNode node : nodes) { if (isWord2VecNodeFeature(node)) { String description = "word2vec feature"; ParsingContext context = new ParsingContext(FeatureTypeV1.word2vec.name() + " feature"); Label nodeType = new ParseNodeType(node, context).parseNodeType(); String property = new ParseProperty(node, context.withLabel(nodeType)).parseSingleProperty(); Collection<String> language = new ParseWord2VecLanguage(node).parseLanguage(); Word2VecConfig config = new Word2VecConfig(nodeType, property, language); word2VecFeatures.add(config); } } return word2VecFeatures; } public Collection<NumericalBucketFeatureConfigV1> parseNumericalBucketFeatures() { Collection<NumericalBucketFeatureConfigV1> numericalBucketFeatures = new ArrayList<>(); for (JsonNode node : nodes) { if (isNumericalBucketFeature(node)) { String description = FeatureTypeV1.bucket_numerical.name(); ParsingContext context = new ParsingContext(FeatureTypeV1.bucket_numerical.name() + " feature"); Label nodeType = new ParseNodeType(node, context).parseNodeType(); FeatureTypeV1.bucket_numerical.validateOverride(node, description, nodeType); String property = new ParseProperty(node, context.withLabel(nodeType)).parseSingleProperty(); ParsingContext propertyContext = context.withLabel(nodeType).withProperty(property); Range range = new ParseRange(node, "range", propertyContext).parseRange(); int bucketCount = new ParseBucketCountV1(node, propertyContext).parseBucketCount(); int slideWindowSize = new ParseSlideWindowSize(node, propertyContext).parseSlideWindowSize(); NumericalBucketFeatureConfigV1 config = new NumericalBucketFeatureConfigV1(nodeType, property, range, bucketCount, slideWindowSize); numericalBucketFeatures.add(config); } } return numericalBucketFeatures; } public Collection<FeatureOverrideConfigV1> parseNodeFeatureOverrides() { Collection<FeatureOverrideConfigV1> 
featureOverrides = new ArrayList<>(); for (JsonNode node : nodes) { if (isNodeFeatureOverride(node)) { String description = "node feature"; ParsingContext context = new ParsingContext("node feature"); Label nodeType = new ParseNodeType(node, context).parseNodeType(); Collection<String> properties = new ParseProperty(node, context.withLabel(nodeType)).parseMultipleProperties(); ParsingContext propertiesContext = context.withLabel(nodeType).withProperties(properties); FeatureTypeV1 type = new ParseFeatureTypeV1(node, propertiesContext).parseFeatureType(); type.validateOverride(node, description, nodeType); Norm norm = new ParseNorm(node, propertiesContext).parseNorm(); Separator separator = new ParseSeparator(node).parseSeparator(); FeatureOverrideConfigV1 config = new FeatureOverrideConfigV1(nodeType, properties, type, norm, separator); featureOverrides.add(config); } } return featureOverrides; } public Collection<FeatureOverrideConfigV1> parseEdgeFeatureOverrides() { Collection<FeatureOverrideConfigV1> featureOverrides = new ArrayList<>(); for (JsonNode node : nodes) { if (isEdgeFeatureOverride(node)) { ParsingContext context = new ParsingContext("edge feature"); String description = "edge feature"; Label edgeType = new ParseEdgeType(node, context).parseEdgeType(); Collection<String> properties = new ParseProperty(node, context.withLabel(edgeType)).parseMultipleProperties(); ParsingContext propertiesContext = context.withLabel(edgeType).withProperties(properties); FeatureTypeV1 type = new ParseFeatureTypeV1(node, propertiesContext).parseFeatureType(); type.validateOverride(node, description, edgeType); Norm norm = new ParseNorm(node, propertiesContext).parseNorm(); Separator separator = new ParseSeparator(node).parseSeparator(); featureOverrides.add(new FeatureOverrideConfigV1(edgeType, properties, type, norm, separator)); } } return featureOverrides; } private boolean isWord2VecNodeFeature(JsonNode node) { return isNodeFeature(node) && 
isWord2VecType(node.get("type").textValue()); } private boolean isNumericalBucketFeature(JsonNode node) { return isNodeFeature(node) && isBucketNumericalType(node.get("type").textValue()); } private boolean isNodeFeatureOverride(JsonNode node) { if (isNodeFeature(node)) { String type = node.get("type").textValue(); return (isNumericalType(type) || isCategoricalType(type)); } return false; } private boolean isEdgeFeatureOverride(JsonNode node) { if (isEdgeFeature(node)) { String type = node.get("type").textValue(); return (isNumericalType(type) || isCategoricalType(type)); } return false; } private boolean isNodeFeature(JsonNode node) { return node.has("node") && node.has("type"); } private boolean isEdgeFeature(JsonNode node) { return node.has("edge") && node.has("type"); } private boolean isWord2VecType(String type) { return FeatureTypeV1.word2vec.name().equals(type); } private boolean isBucketNumericalType(String type) { return FeatureTypeV1.bucket_numerical.name().equals(type); } private boolean isCategoricalType(String type) { return FeatureTypeV1.category.name().equals(type); } private boolean isNumericalType(String type) { return FeatureTypeV1.numerical.name().equals(type); } }
4,334
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseFeatureTypeV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.FeatureTypeV1; import com.fasterxml.jackson.databind.JsonNode; import java.util.Arrays; public class ParseFeatureTypeV1 { private final JsonNode json; private final ParsingContext context; public ParseFeatureTypeV1(JsonNode json, ParsingContext context) { this.json = json; this.context = context; } public FeatureTypeV1 parseFeatureType() { if (json.has("type") && json.get("type").isTextual()) { String type = json.get("type").textValue(); if ( type.equals("numerical") || type.equals("category")){ return FeatureTypeV1.valueOf(type); } else { throw ErrorMessageHelper.invalidFieldValue("type", type, context, Arrays.asList("numerical", "category")); } } throw ErrorMessageHelper.errorParsingField("type", context, "'numerical' or 'category' value"); } }
4,335
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseBucketCountV1.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License.
A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.JsonNode;

/**
 * Extracts the "num_buckets" value from a bucket_numerical feature element.
 */
public class ParseBucketCountV1 {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseBucketCountV1(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * Reads the "num_buckets" field.
     *
     * @return the bucket count
     * @throws IllegalArgumentException (via {@link ErrorMessageHelper}) when the
     *         field is missing or not an integer
     */
    public int parseBucketCount() {
        // path() yields a missing node when absent, so one isInt() check covers
        // both "field missing" and "field not an integer".
        JsonNode bucketCountNode = json.path("num_buckets");
        if (!bucketCountNode.isInt()) {
            throw ErrorMessageHelper.errorParsingField("num_buckets", context, "an integer");
        }
        return bucketCountNode.asInt();
    }
}
4,336
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseLabelsV1.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.v1.config.LabelConfigV1; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.*; import com.amazonaws.services.neptune.propertygraph.Label; import com.fasterxml.jackson.databind.JsonNode; import java.util.Collection; import java.util.HashMap; import java.util.Map; public class ParseLabelsV1 { private final Collection<JsonNode> nodes; private final Collection<Double> defaultSplitRates; public ParseLabelsV1(Collection<JsonNode> nodes, Collection<Double> defaultSplitRates) { this.nodes = nodes; this.defaultSplitRates = defaultSplitRates; } public Map<Label, LabelConfigV1> parseNodeClassLabels() { Map<Label, LabelConfigV1> nodeClassLabels = new HashMap<>(); for (JsonNode node : nodes) { if (isNodeClass(node)) { ParsingContext context = new ParsingContext("node label"); Label nodeType = new ParseNodeType(node, context).parseNodeType(); String property = new ParseProperty(node, context.withLabel(nodeType)).parseSingleProperty(); ParsingContext propertyContext = context.withLabel(nodeType).withProperty(property); String labelType = new ParseLabelTypeV1("node", node, propertyContext).parseLabel(); Collection<Double> splitRates = new ParseSplitRate(node, defaultSplitRates, propertyContext).parseSplitRates(); nodeClassLabels.put(nodeType, new LabelConfigV1(labelType, property, splitRates)); 
} } return nodeClassLabels; } public Map<Label, LabelConfigV1> parseEdgeClassLabels() { Map<Label, LabelConfigV1> edgeClassLabels = new HashMap<>(); for (JsonNode node : nodes) { if (isEdgeClass(node)) { ParsingContext context = new ParsingContext("edge label"); Label edgeType = new ParseEdgeType(node, context).parseEdgeType(); String property = new ParseProperty(node, context.withLabel(edgeType)).parseSingleProperty(); ParsingContext propertyContext = context.withLabel(edgeType).withProperty(property); String labelType = new ParseLabelTypeV1("edge", node, propertyContext).parseLabel(); Collection<Double> splitRates = new ParseSplitRate(node, defaultSplitRates, propertyContext).parseSplitRates(); edgeClassLabels.put(edgeType, new LabelConfigV1(labelType, property, splitRates)); } } return edgeClassLabels; } public void validate() { for (JsonNode node : nodes) { if (!isNodeClass(node) && !isEdgeClass(node)) { throw new IllegalArgumentException("Illegal label element. Expected 'node' or 'edge' field, and a 'property' field."); } } } private boolean isNodeClass(JsonNode node) { return node.has("node") && node.has("property"); } private boolean isEdgeClass(JsonNode node) { return node.has("edge") && node.has("property"); } }
4,337
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v1/parsing/ParseLabelTypeV1.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License.
A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file.
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v1.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.JsonNode;

import java.util.Arrays;

/**
 * Derives a label-type identifier ("&lt;prefix&gt;_class_label" or
 * "&lt;prefix&gt;_regression_label") from a target element's "type" field.
 * A missing or non-textual "type" defaults to classification.
 */
public class ParseLabelTypeV1 {

    private final String prefix;
    private final JsonNode json;
    private final ParsingContext context;

    public ParseLabelTypeV1(String prefix, JsonNode json, ParsingContext context) {
        this.prefix = prefix;
        this.json = json;
        this.context = context;
    }

    /**
     * @return the label-type identifier for this target
     * @throws IllegalArgumentException (via {@link ErrorMessageHelper}) when
     *         "type" is textual but neither "classification" nor "regression"
     */
    public String parseLabel() {
        JsonNode typeNode = json.get("type");
        if (typeNode == null || !typeNode.isTextual()) {
            // Absent or non-textual type: classification is the default.
            return classLabel();
        }
        String type = typeNode.textValue();
        switch (type) {
            case "regression":
                return regressionLabel();
            case "classification":
                return classLabel();
            default:
                throw ErrorMessageHelper.invalidFieldValue("type", type, context, Arrays.asList("classification", "regression"));
        }
    }

    private String regressionLabel() {
        return String.format("%s_regression_label", prefix);
    }

    private String classLabel() {
        return String.format("%s_class_label", prefix);
    }
}
4,338
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/PropertyGraphTrainingDataConfigWriterV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2; import com.amazonaws.services.neptune.profiles.neptune_ml.common.PropertyName; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.*; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.*; import com.amazonaws.services.neptune.propertygraph.Label; import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions; import com.amazonaws.services.neptune.propertygraph.schema.*; import com.fasterxml.jackson.core.JsonGenerator; import org.apache.commons.lang.StringUtils; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.stream.Collectors; public class PropertyGraphTrainingDataConfigWriterV2 { public static final PropertyName COLUMN_NAME_WITH_DATATYPE = new PropertyName() { @Override public String escaped(PropertySchema propertySchema, PrinterOptions printerOptions) { return propertySchema.nameWithDataType(printerOptions.csv().escapeCsvHeaders()); } @Override public String unescaped(PropertySchema propertySchema) { return propertySchema.nameWithDataType(); } }; public static final PropertyName COLUMN_NAME_WITHOUT_DATATYPE = new 
PropertyName() { @Override public String escaped(PropertySchema propertySchema, PrinterOptions printerOptions) { return propertySchema.nameWithoutDataType(printerOptions.csv().escapeCsvHeaders()); } @Override public String unescaped(PropertySchema propertySchema) { return propertySchema.nameWithoutDataType(); } }; private final GraphSchema graphSchema; private final JsonGenerator generator; private final PropertyName propertyName; private final PrinterOptions printerOptions; private final TrainingDataWriterConfigV2 config; private final Collection<String> warnings = new ArrayList<>(); public PropertyGraphTrainingDataConfigWriterV2(GraphSchema graphSchema, JsonGenerator generator, PropertyName propertyName, PrinterOptions printerOptions) { this(graphSchema, generator, propertyName, printerOptions, new TrainingDataWriterConfigV2()); } public PropertyGraphTrainingDataConfigWriterV2(GraphSchema graphSchema, JsonGenerator generator, PropertyName propertyName, PrinterOptions printerOptions, TrainingDataWriterConfigV2 config) { this.graphSchema = graphSchema; this.generator = generator; this.propertyName = propertyName; this.printerOptions = printerOptions; this.config = config; } public void write() throws IOException { write(true); } public void write(boolean includeEdgeFeatures) throws IOException { generator.writeStartObject(); generator.writeStringField("version", "v2.0"); generator.writeStringField("query_engine", "gremlin"); generator.writeObjectFieldStart("graph"); writeNodes(); writeEdges(includeEdgeFeatures); generator.writeEndObject(); generator.writeArrayFieldStart("warnings"); writeWarnings(); generator.writeEndArray(); generator.writeEndObject(); generator.flush(); } private void writeWarnings() throws IOException { for (String warning : warnings) { generator.writeString(warning); } } private void writeNodes() throws IOException { GraphElementType graphElementType = GraphElementType.nodes; GraphElementSchemas graphElementSchemas = 
    // NOTE(review): this chunk begins mid-method. The statement below is the tail of an
    // expression whose start lies outside this view (presumably
    // `GraphElementSchemas graphElementSchemas = graphSchema.graphElementSchemasFor(...)`)
    // — confirm against the full file.
    graphSchema.graphElementSchemasFor(graphElementType);
        // Tail of writeNodes(): one "nodes" entry per (node label, output file) pair.
        generator.writeArrayFieldStart("nodes");
        for (Label nodeLabel : graphElementSchemas.labels()) {
            Collection<String> outputIds = graphElementSchemas.getOutputIdsFor(nodeLabel);
            LabelSchema labelSchema = graphElementSchemas.getSchemaFor(nodeLabel);
            for (String outputId : outputIds) {
                generator.writeStartObject();
                writeFileName(graphElementType, outputId);
                writeCommaSeparator();
                writeNodeType(labelSchema);
                writeNodeFeatures(labelSchema);
                writeNodeLabels(labelSchema);
                generator.writeEndObject();
            }
        }
        generator.writeEndArray();
    }

    /**
     * Writes the "edges" array: one entry per (edge label, output file) pair, each with a
     * file name, separator, source/relation/dest type triple, optional features, and any
     * edge classification labels.
     *
     * @param includeEdgeFeatures when false, the "features" section is omitted entirely
     */
    private void writeEdges(boolean includeEdgeFeatures) throws IOException {
        GraphElementType graphElementType = GraphElementType.edges;
        GraphElementSchemas graphElementSchemas = graphSchema.graphElementSchemasFor(graphElementType);
        generator.writeArrayFieldStart("edges");
        for (Label edgeLabel : graphElementSchemas.labels()) {
            Collection<String> outputIds = graphElementSchemas.getOutputIdsFor(edgeLabel);
            LabelSchema labelSchema = graphElementSchemas.getSchemaFor(edgeLabel);
            for (String outputId : outputIds) {
                generator.writeStartObject();
                writeFileName(graphElementType, outputId);
                writeCommaSeparator();
                writeEdgeType(labelSchema);
                if (includeEdgeFeatures) {
                    writeEdgeFeatures(labelSchema);
                }
                writeEdgeLabels(labelSchema);
                generator.writeEndObject();
            }
        }
        generator.writeEndArray();
    }

    /** Writes the node type descriptor: ["~id", "<label(s)>"]. */
    private void writeNodeType(LabelSchema labelSchema) throws IOException {
        generator.writeArrayFieldStart("node");
        generator.writeString("~id");
        generator.writeString(labelSchema.label().labelsAsString());
        generator.writeEndArray();
    }

    /**
     * Writes the edge type triple: "source" (["~from", fromLabels]), "relation"
     * (["", edgeLabels]) and "dest" (["~to", toLabels]).
     */
    private void writeEdgeType(LabelSchema labelSchema) throws IOException {
        generator.writeArrayFieldStart("source");
        generator.writeString("~from");
        generator.writeString(labelSchema.label().fromLabelsAsString());
        generator.writeEndArray();
        generator.writeArrayFieldStart("relation");
        generator.writeString(""); // relation column name is intentionally empty
        generator.writeString(labelSchema.label().labelsAsString());
        generator.writeEndArray();
        generator.writeArrayFieldStart("dest");
        generator.writeString("~to");
        generator.writeString(labelSchema.label().toLabelsAsString());
        generator.writeEndArray();
    }

    /**
     * Writes the "features" array for a node label. Properties used as classification
     * targets are skipped. When feature encoding is disallowed every property gets a
     * "none" feature; otherwise each matching feature specification in the node config
     * emits its feature entry (note: a property can emit more than one entry if it
     * matches several specifications). Feature overrides are appended last.
     */
    private void writeNodeFeatures(LabelSchema labelSchema) throws IOException {
        Label label = labelSchema.label();
        Collection<PropertySchema> propertySchemas = labelSchema.propertySchemas();
        generator.writeArrayFieldStart("features");
        ElementConfig nodeConfig = config.nodeConfig();
        for (PropertySchema propertySchema : propertySchemas) {
            String column = propertySchema.nameWithoutDataType();
            // A property that is a classification target is not also a feature.
            if (nodeConfig.hasClassificationSpecificationForProperty(label, column)) {
                continue;
            }
            if (!config.allowFeatureEncoding()) {
                writeNoneFeature(propertySchema);
            } else {
                if (nodeConfig.allowAutoInferFeature(label, column)) {
                    writeAutoInferredFeature(propertySchema);
                }
                if (nodeConfig.hasNoneFeatureSpecification(label, column)) {
                    writeNoneFeature(propertySchema);
                }
                if (nodeConfig.hasTfIdfSpecification(label, column)) {
                    writeTfIdfFeature(propertySchema, nodeConfig.getTfIdfSpecification(label, column));
                }
                if (nodeConfig.hasDatetimeSpecification(label, column)) {
                    writeDatetimeFeature(propertySchema, nodeConfig.getDatetimeSpecification(label, column));
                }
                if (nodeConfig.hasWord2VecSpecification(label, column)) {
                    writeWord2VecFeature(propertySchema, nodeConfig.getWord2VecSpecification(label, column));
                }
                if (nodeConfig.hasFastTextSpecification(label, column)) {
                    writeFastTextFeature(propertySchema, nodeConfig.getFastTextSpecification(label, column));
                }
                if (nodeConfig.hasSbertSpecification(label, column)) {
                    writeSbertFeature(propertySchema, nodeConfig.getSbertSpecification(label, column));
                }
                if (nodeConfig.hasNumericalBucketSpecification(label, column)) {
                    writeNumericalBucketFeature(propertySchema, nodeConfig.getNumericalBucketSpecification(label, column));
                }
            }
        }
        for (FeatureOverrideConfigV2 featureOverride : nodeConfig.getFeatureOverrides(label)) {
            writeFeatureOverride(labelSchema, featureOverride, nodeConfig);
        }
        generator.writeEndArray();
    }

    /**
     * Writes the "features" array for an edge label. Mirrors
     * {@link #writeNodeFeatures(LabelSchema)} exactly, but driven by the edge config.
     */
    private void writeEdgeFeatures(LabelSchema labelSchema) throws IOException {
        Label label = labelSchema.label();
        Collection<PropertySchema> propertySchemas = labelSchema.propertySchemas();
        generator.writeArrayFieldStart("features");
        ElementConfig edgeConfig = config.edgeConfig();
        for (PropertySchema propertySchema : propertySchemas) {
            String column = propertySchema.nameWithoutDataType();
            if (edgeConfig.hasClassificationSpecificationForProperty(label, column)) {
                continue;
            }
            if (!config.allowFeatureEncoding()) {
                writeNoneFeature(propertySchema);
            } else {
                if (edgeConfig.allowAutoInferFeature(label, column)) {
                    writeAutoInferredFeature(propertySchema);
                }
                if (edgeConfig.hasNoneFeatureSpecification(label, column)) {
                    writeNoneFeature(propertySchema);
                }
                if (edgeConfig.hasTfIdfSpecification(label, column)) {
                    writeTfIdfFeature(propertySchema, edgeConfig.getTfIdfSpecification(label, column));
                }
                if (edgeConfig.hasDatetimeSpecification(label, column)) {
                    writeDatetimeFeature(propertySchema, edgeConfig.getDatetimeSpecification(label, column));
                }
                if (edgeConfig.hasWord2VecSpecification(label, column)) {
                    writeWord2VecFeature(propertySchema, edgeConfig.getWord2VecSpecification(label, column));
                }
                if (edgeConfig.hasFastTextSpecification(label, column)) {
                    writeFastTextFeature(propertySchema, edgeConfig.getFastTextSpecification(label, column));
                }
                if (edgeConfig.hasSbertSpecification(label, column)) {
                    writeSbertFeature(propertySchema, edgeConfig.getSbertSpecification(label, column));
                }
                if (edgeConfig.hasNumericalBucketSpecification(label, column)) {
                    writeNumericalBucketFeature(propertySchema, edgeConfig.getNumericalBucketSpecification(label, column));
                }
            }
        }
        for (FeatureOverrideConfigV2 featureOverride : edgeConfig.getFeatureOverrides(label)) {
            writeFeatureOverride(labelSchema, featureOverride, edgeConfig);
        }
        generator.writeEndArray();
    }

    /**
     * Writes the "labels" (classification targets) array for a node label, if any.
     * A target property that is absent from the label schema is reported as a warning
     * rather than failing the export.
     */
    private void writeNodeLabels(LabelSchema labelSchema) throws IOException {
        Label label = labelSchema.label();
        ElementConfig nodeConfig = config.nodeConfig();
        if (nodeConfig.hasClassificationSpecificationsFor(label)) {
            generator.writeArrayFieldStart("labels");
            for (LabelConfigV2 labelConfig : nodeConfig.getClassificationSpecifications(label)) {
                if (labelSchema.containsProperty(labelConfig.property())) {
                    PropertySchema propertySchema = labelSchema.getPropertySchema(labelConfig.property());
                    writeLabel(propertySchema, labelConfig);
                } else {
                    ParsingContext context = new ParsingContext("node classification property").withLabel(label).withProperty(labelConfig.property());
                    warnings.add(String.format("Unrecognized %s.", context));
                }
            }
            generator.writeEndArray();
        }
    }

    /** Writes one label entry: ["<property>", "<taskType>"] plus split rates and separator. */
    private void writeLabel(PropertySchema propertySchema, LabelConfigV2 labelConfig) throws IOException {
        generator.writeStartObject();
        generator.writeArrayFieldStart("label");
        generator.writeString(labelConfig.property());
        generator.writeString(labelConfig.taskType());
        generator.writeEndArray();
        writeSplitRates(labelConfig);
        labelConfig.separator().writeTo(generator, propertySchema.isMultiValue());
        generator.writeEndObject();
    }

    /**
     * Writes the "labels" array for an edge label, if any. Unlike the node variant, an
     * empty target property is allowed here (emitted with an empty-named property
     * schema) — e.g. for tasks targeting the edge itself rather than a property.
     */
    private void writeEdgeLabels(LabelSchema labelSchema) throws IOException {
        Label label = labelSchema.label();
        ElementConfig edgeConfig = config.edgeConfig();
        if (edgeConfig.hasClassificationSpecificationsFor(label)) {
            generator.writeArrayFieldStart("labels");
            for (LabelConfigV2 labelConfig : edgeConfig.getClassificationSpecifications(label)) {
                if (StringUtils.isEmpty(labelConfig.property())) {
                    writeLabel(new PropertySchema(""), labelConfig);
                } else if (labelSchema.containsProperty(labelConfig.property())) {
                    PropertySchema propertySchema = labelSchema.getPropertySchema(labelConfig.property());
                    writeLabel(propertySchema, labelConfig);
                } else {
                    ParsingContext context = new ParsingContext("edge classification property").withLabel(label).withProperty(labelConfig.property());
                    warnings.add(String.format("Unrecognized %s.", context));
                }
            }
            generator.writeEndArray();
        }
    }

    /** Writes the "split_rate" array (train/validation/test proportions). */
    private void writeSplitRates(LabelConfigV2 labelConfig) throws IOException {
        generator.writeArrayFieldStart("split_rate");
        for (Double rate : labelConfig.splitRates()) {
            generator.writeNumber(rate);
        }
        generator.writeEndArray();
    }

    /**
     * Applies a user-supplied feature override to all of its matching properties.
     * Properties named by the override but absent from the schema (or consumed as
     * classification targets) produce warnings. Only category/numerical/auto/none
     * override types are supported; "none" deliberately emits nothing.
     */
    private void writeFeatureOverride(LabelSchema labelSchema,
                                      FeatureOverrideConfigV2 featureOverride,
                                      ElementConfig elementConfig) throws IOException {
        FeatureTypeV2 featureType = featureOverride.featureType();
        Label label = labelSchema.label();
        // Properties targeted by the override, excluding classification targets.
        Collection<PropertySchema> propertySchemas = labelSchema.propertySchemas().stream()
                .filter(p -> featureOverride.properties().contains(p.nameWithoutDataType()) &&
                        !elementConfig.hasClassificationSpecificationForProperty(label, p.nameWithoutDataType()))
                .collect(Collectors.toList());
        Collection<String> propertyNames = propertySchemas.stream()
                .map(PropertySchema::nameWithoutDataType)
                .collect(Collectors.toList());
        Collection<String> missingProperties = featureOverride.properties().stream()
                .filter(p -> !propertyNames.contains(p))
                .collect(Collectors.toList());
        for (String missingProperty : missingProperties) {
            ParsingContext context = new ParsingContext(featureType.name() + " feature override").withLabel(label).withProperty(missingProperty);
            warnings.add(String.format("Unable to add %s. Property is missing, or is being used to label the node.", context));
        }
        if (FeatureTypeV2.category == featureType) {
            writeCategoricalFeature(propertySchemas, featureOverride);
        } else if (FeatureTypeV2.numerical == featureType) {
            writeNumericalFeature(propertySchemas, featureOverride);
        } else if (FeatureTypeV2.auto == featureType) {
            writeAutoFeature(propertySchemas, featureOverride);
        } else if (FeatureTypeV2.none == featureType) {
            // Do nothing
        } else {
            // NOTE(review): message says "node" even though this method also runs for
            // edge overrides — possibly misleading wording; confirm before changing.
            warnings.add(String.format("Unsupported feature type override for node: %s.", featureType.name()));
        }
    }

    /**
     * Infers a feature from the property's data type: strings/booleans -> auto
     * (no imputer); numeric types -> numerical min-max (or auto for multi-value),
     * with median imputer; dates -> datetime with year/month/weekday/hour parts.
     * Other data types emit nothing.
     */
    private void writeAutoInferredFeature(PropertySchema propertySchema) throws IOException {
        if (propertySchema.dataType() == DataType.String ||
                propertySchema.dataType() == DataType.Boolean) {
            writeAutoFeature(Collections.singletonList(propertySchema), ImputerTypeV2.none);
        }
        if (propertySchema.dataType() == DataType.Byte ||
                propertySchema.dataType() == DataType.Short ||
                propertySchema.dataType() == DataType.Integer ||
                propertySchema.dataType() == DataType.Long ||
                propertySchema.dataType() == DataType.Float ||
                propertySchema.dataType() == DataType.Double) {
            if (propertySchema.isMultiValue()) {
                writeAutoFeature(Collections.singletonList(propertySchema), ImputerTypeV2.median);
            } else {
                writeNumericalFeature(
                        Collections.singletonList(propertySchema),
                        Norm.min_max,
                        ImputerTypeV2.median);
            }
        }
        if (propertySchema.dataType() == DataType.Date) {
            writeDatetimeFeature(
                    Collections.singletonList(propertySchema),
                    Arrays.asList(
                            DatetimePartV2.year,
                            DatetimePartV2.month,
                            DatetimePartV2.weekday,
                            DatetimePartV2.hour));
        }
    }

    /** Convenience overload taking the feature type enum rather than its name. */
    private void writeFeature(PropertySchema propertySchema, FeatureTypeV2 featureType) throws IOException {
        writeFeature(propertySchema, featureType.name());
    }

    /** Writes the core feature triple: [column, feature name, feature type]. */
    private void writeFeature(PropertySchema propertySchema, String featureType) throws IOException {
        generator.writeArrayFieldStart("feature");
        generator.writeString(propertyName.escaped(propertySchema, printerOptions)); // column
        generator.writeString(propertyName.escaped(propertySchema, printerOptions)); // feature name
        generator.writeString(featureType);
        generator.writeEndArray();
    }

    /**
     * Writes a text_tfidf feature with optional ngram_range, min_df and max_features.
     * Multi-value properties are not supported and fall back to auto-inference.
     */
    private void writeTfIdfFeature(PropertySchema propertySchema, TfIdfConfigV2 tfIdfSpecification) throws IOException {
        if (propertySchema.isMultiValue()) {
            warnings.add(String.format("%s feature does not support multi-value properties. Auto-inferring a feature for '%s'.",
                    FeatureTypeV2.text_tfidf,
                    propertySchema.nameWithoutDataType()));
            writeAutoInferredFeature(propertySchema);
            return;
        }
        generator.writeStartObject();
        writeFeature(propertySchema, FeatureTypeV2.text_tfidf);
        Range range = tfIdfSpecification.ngramRange();
        if (range != null) {
            generator.writeArrayFieldStart("ngram_range");
            generator.writeObject(range.low());
            generator.writeObject(range.high());
            generator.writeEndArray();
        }
        Integer minDf = tfIdfSpecification.minDf();
        if (minDf != null) {
            generator.writeNumberField("min_df", minDf);
        }
        Integer maxFeatures = tfIdfSpecification.maxFeatures();
        if (maxFeatures != null) {
            generator.writeNumberField("max_features", maxFeatures);
        }
        generator.writeEndObject();
    }

    /**
     * Writes a bucket_numerical feature with optional range, bucket_cnt and
     * slide_window_size. A missing (or 'none') imputer is reported as a warning
     * because preprocessing aborts on missing values without one. Multi-value
     * properties fall back to auto-inference.
     */
    private void writeNumericalBucketFeature(PropertySchema propertySchema, NumericalBucketFeatureConfigV2 numericalBucketSpecification) throws IOException {
        if (propertySchema.isMultiValue()) {
            warnings.add(String.format("%s feature does not support multi-value properties. Auto-inferring a feature for '%s'.",
                    FeatureTypeV2.bucket_numerical,
                    propertySchema.nameWithoutDataType()));
            writeAutoInferredFeature(propertySchema);
            return;
        }
        generator.writeStartObject();
        writeFeature(propertySchema, FeatureTypeV2.bucket_numerical);
        Range range = numericalBucketSpecification.range();
        if (range != null) {
            generator.writeArrayFieldStart("range");
            generator.writeObject(range.low());
            generator.writeObject(range.high());
            generator.writeEndArray();
        }
        Integer bucketCount = numericalBucketSpecification.bucketCount();
        if (bucketCount != null) {
            generator.writeNumberField("bucket_cnt", bucketCount);
        }
        Integer slideWindowSize = numericalBucketSpecification.slideWindowSize();
        if (slideWindowSize != null) {
            generator.writeNumberField("slide_window_size", slideWindowSize);
        }
        ImputerTypeV2 imputer = numericalBucketSpecification.imputerType();
        if (imputer != null && imputer != ImputerTypeV2.none) {
            generator.writeStringField("imputer", imputer.formattedName());
        } else {
            // NOTE(review): "an missing value" is a typo in this runtime warning string;
            // left untouched here since this is a documentation-only pass.
            warnings.add(String.format("'imputer' value missing for %s feature for '%s'. Preprocessing will exit when it encounters an missing value.",
                    FeatureTypeV2.bucket_numerical,
                    propertySchema.nameWithoutDataType()));
        }
        generator.writeEndObject();
    }

    /**
     * Writes a text_word2vec feature with an optional "language" array. Each supplied
     * language is emitted as-is; unknown languages only produce a warning (the
     * embedding is then not guaranteed valid). Multi-value properties fall back to
     * auto-inference.
     */
    private void writeWord2VecFeature(PropertySchema propertySchema, Word2VecConfig word2VecSpecification) throws IOException {
        if (propertySchema.isMultiValue()) {
            warnings.add(String.format("%s feature does not support multi-value properties. Auto-inferring a feature for '%s'.",
                    FeatureTypeV2.text_word2vec,
                    propertySchema.nameWithoutDataType()));
            writeAutoInferredFeature(propertySchema);
            return;
        }
        generator.writeStartObject();
        writeFeature(propertySchema, FeatureTypeV2.text_word2vec);
        if (!word2VecSpecification.languages().isEmpty()) {
            generator.writeArrayFieldStart("language");
            for (String language : word2VecSpecification.languages()) {
                generator.writeString(language);
                try {
                    // valueOf is used purely as a membership check against the supported set.
                    SupportedWord2VecLanguages.valueOf(language);
                } catch (IllegalArgumentException e) {
                    warnings.add(String.format("Unsupported language for text_word2vec feature for '%s': '%s'. " +
                                    "Supported languages are: %s. " +
                                    "The output embedding is not guaranteed to be valid if you supply another language.",
                            propertySchema.nameWithoutDataType(),
                            language,
                            ErrorMessageHelper.quoteList(Arrays.stream(SupportedWord2VecLanguages.values()).map(Enum::name).collect(Collectors.toList()))));
                }
            }
            generator.writeEndArray();
        }
        generator.writeEndObject();
    }

    /**
     * Writes a text_fasttext feature with a mandatory "language" field and optional
     * "max_length". Unknown languages only produce a warning. Multi-value properties
     * fall back to auto-inference.
     */
    private void writeFastTextFeature(PropertySchema propertySchema, FastTextConfig fastTextConfig) throws IOException {
        if (propertySchema.isMultiValue()) {
            warnings.add(String.format("%s feature does not support multi-value properties. Auto-inferring a feature for '%s'.",
                    FeatureTypeV2.text_fasttext,
                    propertySchema.nameWithoutDataType()));
            writeAutoInferredFeature(propertySchema);
            return;
        }
        generator.writeStartObject();
        writeFeature(propertySchema, FeatureTypeV2.text_fasttext);
        String language = fastTextConfig.language();
        try {
            // Membership check only — the language is written regardless.
            SupportedFastTextLanguages.valueOf(language);
        } catch (IllegalArgumentException e) {
            warnings.add(String.format("Unsupported language for text_fasttext feature for '%s': '%s'. " +
                            "Supported languages are: %s. " +
                            "The output embedding is not guaranteed to be valid if you supply another language.",
                    propertySchema.nameWithoutDataType(),
                    language,
                    ErrorMessageHelper.quoteList(Arrays.stream(SupportedFastTextLanguages.values()).map(Enum::name).collect(Collectors.toList()))));
        }
        generator.writeStringField("language", language);
        if (fastTextConfig.maxLength().isPresent()) {
            generator.writeNumberField("max_length", fastTextConfig.maxLength().get());
        }
        generator.writeEndObject();
    }

    /**
     * Writes an SBERT text feature (feature type taken from the config's name).
     * Multi-value properties fall back to auto-inference.
     */
    private void writeSbertFeature(PropertySchema propertySchema, SbertConfig sbertConfig) throws IOException {
        if (propertySchema.isMultiValue()) {
            warnings.add(String.format("%s feature does not support multi-value properties. Auto-inferring a feature for '%s'.",
                    FeatureTypeV2.text_sbert,
                    propertySchema.nameWithoutDataType()));
            writeAutoInferredFeature(propertySchema);
            return;
        }
        generator.writeStartObject();
        writeFeature(propertySchema, sbertConfig.name());
        generator.writeEndObject();
    }

    /** Writes a "none" (pass-through, unencoded) feature for a single property. */
    private void writeNoneFeature(PropertySchema propertySchema) throws IOException {
        writeNoneFeature(Collections.singletonList(propertySchema));
    }

    /** Writes a "none" feature entry for each supplied property. */
    private void writeNoneFeature(Collection<PropertySchema> propertySchemas) throws IOException {
        for (PropertySchema propertySchema : propertySchemas) {
            generator.writeStartObject();
            writeFeature(propertySchema, FeatureTypeV2.none);
            generator.writeEndObject();
        }
    }

    /** Convenience overload for a single property with a full datetime config. */
    private void writeDatetimeFeature(PropertySchema propertySchema, DatetimeConfigV2 datetimeConfig) throws IOException {
        writeDatetimeFeature(Collections.singletonList(propertySchema), datetimeConfig.datetimeParts());
    }

    /**
     * Writes a datetime feature with an optional "datetime_parts" array for each
     * supplied property. A multi-value property aborts the whole batch (early return)
     * and emits a single auto feature instead.
     */
    private void writeDatetimeFeature(Collection<PropertySchema> propertySchemas, Collection<DatetimePartV2> datetimeParts) throws IOException {
        for (PropertySchema propertySchema : propertySchemas) {
            if (propertySchema.isMultiValue()) {
                warnings.add(String.format("Unable to add datetime feature for '%s'. Multi-value datetime features not currently supported. Adding an auto feature instead.",
                        propertySchema.nameWithoutDataType()));
                writeAutoFeature(Collections.singletonList(propertySchema), ImputerTypeV2.none);
                // NOTE(review): `return` (not `continue`) skips any remaining properties in
                // this batch as well — confirm that is intentional.
                return;
            }
            generator.writeStartObject();
            writeFeature(propertySchema, FeatureTypeV2.datetime);
            if (!datetimeParts.isEmpty()) {
                generator.writeArrayFieldStart("datetime_parts");
                for (DatetimePartV2 datetimePart : datetimeParts) {
                    generator.writeString(datetimePart.name());
                }
                generator.writeEndArray();
            }
            generator.writeEndObject();
        }
    }

    /** Override-driven numerical feature: norm/imputer/separator come from the override. */
    private void writeNumericalFeature(Collection<PropertySchema> propertySchemas, FeatureOverrideConfigV2 featureOverride) throws IOException {
        writeNumericalFeature(propertySchemas, featureOverride.norm(), featureOverride.imputer(), featureOverride.separator());
    }

    /** Numerical feature with a default separator. */
    private void writeNumericalFeature(Collection<PropertySchema> propertySchemas, Norm norm, ImputerTypeV2 imputer) throws IOException {
        writeNumericalFeature(propertySchemas, norm, imputer, new Separator());
    }

    /**
     * Writes a numerical feature entry per property: separator (for multi-value),
     * mandatory "norm", and "imputer" unless it is 'none'.
     */
    private void writeNumericalFeature(Collection<PropertySchema> propertySchemas, Norm norm, ImputerTypeV2 imputer, Separator separator) throws IOException {
        for (PropertySchema propertySchema : propertySchemas) {
            generator.writeStartObject();
            writeFeature(propertySchema, FeatureTypeV2.numerical);
            separator.writeTo(generator, propertySchema.isMultiValue());
            generator.writeStringField("norm", norm.formattedName());
            if (imputer != ImputerTypeV2.none) {
                generator.writeStringField("imputer", imputer.formattedName());
            }
            generator.writeEndObject();
        }
    }

    /** Writes a category feature entry per property, with the override's separator. */
    private void writeCategoricalFeature(Collection<PropertySchema> propertySchemas, FeatureOverrideConfigV2 featureOverride) throws IOException {
        for (PropertySchema propertySchema : propertySchemas) {
            generator.writeStartObject();
            writeFeature(propertySchema, FeatureTypeV2.category);
            featureOverride.separator().writeTo(generator, propertySchema.isMultiValue());
            generator.writeEndObject();
        }
    }

    /** Override-driven auto feature: imputer/separator come from the override. */
    private void writeAutoFeature(Collection<PropertySchema> propertySchemas, FeatureOverrideConfigV2 featureOverride) throws IOException {
        writeAutoFeature(propertySchemas, featureOverride.imputer(), featureOverride.separator());
    }

    /** Auto feature with a default separator. */
    private void writeAutoFeature(Collection<PropertySchema> propertySchemas, ImputerTypeV2 imputer) throws IOException {
        writeAutoFeature(propertySchemas, imputer, new Separator());
    }

    /**
     * Writes an "auto" feature entry per property: separator (for multi-value) and
     * "imputer" unless it is 'none'.
     */
    private void writeAutoFeature(Collection<PropertySchema> propertySchemas, ImputerTypeV2 imputer, Separator separator) throws IOException {
        for (PropertySchema propertySchema : propertySchemas) {
            generator.writeStartObject();
            writeFeature(propertySchema, FeatureTypeV2.auto);
            separator.writeTo(generator, propertySchema.isMultiValue());
            if (imputer != ImputerTypeV2.none) {
                generator.writeStringField("imputer", imputer.formattedName());
            }
            generator.writeEndObject();
        }
    }

    /** Writes "file_name" as "<elementType>/<basename of outputId>". */
    private void writeFileName(GraphElementType graphElementType, String outputId) throws IOException {
        generator.writeStringField("file_name", String.format("%s/%s", graphElementType.name(), new File(outputId).getName()));
    }

    /** Writes the CSV column separator field (always a comma). */
    private void writeCommaSeparator() throws IOException {
        generator.writeStringField("separator", ",");
    }
}
4,339
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/RdfTrainingDataConfigWriter.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v2;

import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.EdgeTaskTypeV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.LabelConfigV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.RdfTaskTypeV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TrainingDataWriterConfigV2;
import com.fasterxml.jackson.core.JsonGenerator;
import org.apache.commons.lang.StringUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Writes the Neptune ML training-data configuration JSON for an RDF (SPARQL) export:
 * a "graph"/"rdfs" section describing each exported file's training targets, plus any
 * warnings accumulated while writing.
 */
public class RdfTrainingDataConfigWriter {

    // Names of the exported RDF files to reference from the config.
    private final Collection<String> filenames;
    // Destination JSON generator; caller owns its lifecycle (only flush() is called here).
    private final JsonGenerator generator;
    // User-supplied training configuration (classification specs, split rates, ...).
    private final TrainingDataWriterConfigV2 config;
    // Warnings gathered during writeRdfs(); emitted in the top-level "warnings" array.
    private final Collection<String> warnings = new ArrayList<>();

    public RdfTrainingDataConfigWriter(Collection<String> filenames,
                                       JsonGenerator generator,
                                       TrainingDataWriterConfigV2 config) {
        this.filenames = filenames;
        this.generator = generator;
        this.config = config;
    }

    /**
     * Writes the whole document: version header, "graph" section and "warnings" array.
     * Note: writeRdfs() runs before the warnings array is written, so all warnings it
     * collects are included.
     */
    public void write() throws IOException {
        generator.writeStartObject();
        generator.writeStringField("version", "v2.0");
        generator.writeStringField("query_engine", "sparql");
        generator.writeObjectFieldStart("graph");
        writeRdfs();
        generator.writeEndObject();
        generator.writeArrayFieldStart("warnings");
        writeWarnings();
        generator.writeEndArray();
        generator.writeEndObject();
        generator.flush();
    }

    /** Emits each accumulated warning as a string element (enclosing array written by caller). */
    private void writeWarnings() throws IOException {
        for (String warning : warnings) {
            generator.writeString(warning);
        }
    }

    /**
     * Writes the "rdfs" array.
     *
     * With no classification specifications, every file gets a default link_prediction
     * target using the default split rates. Otherwise, for each RDF task type that has
     * specifications, one entry per file is written: link_prediction targets carry
     * optional subject/predicate/object filters (missing fields produce warnings and
     * widen the target to all edges); classification/regression targets carry the node
     * type and predicate (a missing predicate also produces a warning).
     */
    private void writeRdfs() throws IOException {
        generator.writeArrayFieldStart("rdfs");
        Collection<LabelConfigV2> classificationSpecifications = config.nodeConfig().getAllClassificationSpecifications();
        if (classificationSpecifications.isEmpty()) {
            // Default: link_prediction over every file with the default split rates.
            for (String filename : filenames) {
                generator.writeStartObject();
                generator.writeStringField("file_name", filename);
                generator.writeObjectFieldStart("label");
                generator.writeStringField("task_type", EdgeTaskTypeV2.link_prediction.name());
                generator.writeArrayFieldStart("targets");
                generator.writeStartObject();
                generator.writeArrayFieldStart("split_rate");
                for (Double splitRate : config.defaultSplitRates()) {
                    generator.writeNumber(splitRate);
                }
                generator.writeEndArray();
                generator.writeEndObject();
                generator.writeEndArray();
                generator.writeEndObject();
                generator.writeEndObject();
            }
        } else {
            for (RdfTaskTypeV2 taskType : RdfTaskTypeV2.values()) {
                // Specifications whose taskType string matches this enum constant.
                List<LabelConfigV2> taskSpecificConfigs = classificationSpecifications.stream()
                        .filter(c -> c.taskType().equals(taskType.name()))
                        .collect(Collectors.toList());
                if (taskSpecificConfigs.isEmpty()) {
                    continue;
                }
                if (taskType == RdfTaskTypeV2.link_prediction) {
                    for (String filename : filenames) {
                        generator.writeStartObject();
                        generator.writeStringField("file_name", filename);
                        generator.writeObjectFieldStart("label");
                        generator.writeStringField("task_type", taskType.name());
                        generator.writeArrayFieldStart("targets");
                        for (LabelConfigV2 taskSpecificConfig : taskSpecificConfigs) {
                            generator.writeStartObject();
                            if (StringUtils.isNotEmpty(taskSpecificConfig.subject())) {
                                generator.writeStringField("subject", taskSpecificConfig.subject());
                            } else {
                                warnings.add("'subject' field is missing for link_prediction task, so all edges will be treated as the training target.");
                            }
                            if (StringUtils.isNotEmpty(taskSpecificConfig.property())) {
                                generator.writeStringField("predicate", taskSpecificConfig.property());
                            } else {
                                warnings.add("'predicate' field is missing for link_prediction task, so all edges will be treated as the training target.");
                            }
                            if (StringUtils.isNotEmpty(taskSpecificConfig.object())) {
                                generator.writeStringField("object", taskSpecificConfig.object());
                            } else {
                                warnings.add("'object' field is missing for link_prediction task, so all edges will be treated as the training target.");
                            }
                            generator.writeArrayFieldStart("split_rate");
                            for (Double splitRate : taskSpecificConfig.splitRates()) {
                                generator.writeNumber(splitRate);
                            }
                            generator.writeEndArray();
                            generator.writeEndObject();
                        }
                        generator.writeEndArray();
                        generator.writeEndObject();
                        generator.writeEndObject();
                    }
                } else {
                    // classification / regression targets.
                    for (String filename : filenames) {
                        generator.writeStartObject();
                        generator.writeStringField("file_name", filename);
                        generator.writeObjectFieldStart("label");
                        generator.writeStringField("task_type", taskType.name());
                        generator.writeArrayFieldStart("targets");
                        for (LabelConfigV2 taskSpecificConfig : taskSpecificConfigs) {
                            generator.writeStartObject();
                            generator.writeStringField("node", taskSpecificConfig.label().labelsAsString());
                            String property = taskSpecificConfig.property();
                            if (StringUtils.isNotEmpty(property)) {
                                generator.writeStringField("predicate", property);
                            } else {
                                warnings.add(String.format("'predicate' field is missing for %s task. If the target nodes have more than one predicate defining the target node feature, the training task will fail with an error.", taskType));
                            }
                            generator.writeArrayFieldStart("split_rate");
                            for (Double splitRate : taskSpecificConfig.splitRates()) {
                                generator.writeNumber(splitRate);
                            }
                            generator.writeEndArray();
                            generator.writeEndObject();
                        }
                        generator.writeEndArray();
                        generator.writeEndObject();
                        generator.writeEndObject();
                    }
                }
            }
        }
        generator.writeEndArray();
    }
}
4,340
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/RdfTaskTypeV2.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.propertygraph.Label;

/**
 * Training task types supported for RDF (SPARQL) Neptune ML training-data
 * configurations.
 */
public enum RdfTaskTypeV2 {

    classification,
    regression,
    link_prediction;

    /**
     * Intentionally a no-op: predicate validation is currently disabled (an earlier
     * implementation rejected a missing/empty 'predicate'; a missing predicate now
     * appears to be tolerated and surfaced as a warning by the config writers
     * instead — confirm before re-enabling any strict check here).
     */
    public void validate(String predicate, Label label) {
        // Do nothing
    }
}
4,341
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/ImputerTypeV2.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Imputation strategies for missing feature values in a Neptune ML training-data
 * configuration. The serialized form is {@link #formattedName()} ('most_frequent'
 * serializes as "most-frequent"); 'none' is internal and excluded from the list of
 * publicly advertised values, though {@link #fromString(String)} still accepts it.
 */
public enum ImputerTypeV2 {

    mean,

    median,

    most_frequent {
        @Override
        public String formattedName() {
            // External spelling differs from the enum constant name.
            return "most-frequent";
        }
    },

    none {
        @Override
        public boolean isPublic() {
            // Sentinel meaning "no imputer"; hidden from user-facing lists.
            return false;
        }
    };

    /**
     * Resolves an imputer type from its formatted (external) name.
     *
     * @throws IllegalArgumentException if no imputer type matches; the message lists
     *                                  the public formatted names
     */
    public static ImputerTypeV2 fromString(String s) {
        for (ImputerTypeV2 candidate : values()) {
            if (candidate.formattedName().equals(s)) {
                return candidate;
            }
        }
        String validTypes = String.join(", ", publicFormattedNames());
        throw new IllegalArgumentException(
                String.format("Invalid imputer type: %s (valid types are: %s)", s, validTypes));
    }

    /** Returns the formatted names of all public imputer types, in declaration order. */
    public static Collection<String> publicFormattedNames() {
        Collection<String> names = new ArrayList<>();
        for (ImputerTypeV2 candidate : values()) {
            if (candidate.isPublic()) {
                names.add(candidate.formattedName());
            }
        }
        return names;
    }

    /** External (serialized) spelling; defaults to the enum constant name. */
    public String formattedName() {
        return name();
    }

    /** Whether this value should be advertised to users; defaults to true. */
    public boolean isPublic() {
        return true;
    }

    @Override
    public String toString() {
        return formattedName();
    }
}
4,342
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/FeatureOverrideConfigV2.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Norm;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator;
import com.amazonaws.services.neptune.propertygraph.Label;

import java.util.Collection;

/**
 * Immutable value object describing a user-supplied feature override: for a given
 * element label, force the named properties to be encoded as a specific feature type
 * with the given normalization, separator and imputer settings.
 */
public class FeatureOverrideConfigV2 {

    private final Label elementLabel;
    private final Collection<String> propertyNames;
    private final FeatureTypeV2 overrideFeatureType;
    private final Norm normalization;
    private final Separator valueSeparator;
    private final ImputerTypeV2 imputer;

    public FeatureOverrideConfigV2(Label label,
                                   Collection<String> properties,
                                   FeatureTypeV2 featureType,
                                   Norm norm,
                                   Separator separator,
                                   ImputerTypeV2 imputerType) {
        this.elementLabel = label;
        this.propertyNames = properties;
        this.overrideFeatureType = featureType;
        this.normalization = norm;
        this.valueSeparator = separator;
        this.imputer = imputerType;
    }

    /** Element label this override applies to. */
    public Label label() {
        return elementLabel;
    }

    /** True when the override targets exactly one property. */
    public boolean isSinglePropertyOverride() {
        return propertyNames.size() == 1;
    }

    /** First targeted property (iteration order of the underlying collection). */
    public String firstProperty() {
        return propertyNames.iterator().next();
    }

    /** All targeted property names. */
    public Collection<String> properties() {
        return propertyNames;
    }

    /** Feature type to apply to the targeted properties. */
    public FeatureTypeV2 featureType() {
        return overrideFeatureType;
    }

    /** Normalization to use for numerical features. */
    public Norm norm() {
        return normalization;
    }

    /** Separator for multi-value properties. */
    public Separator separator() {
        return valueSeparator;
    }

    /** Imputation strategy for missing values. */
    public ImputerTypeV2 imputer() {
        return imputer;
    }
}
4,343
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/DatetimeConfigV2.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.propertygraph.Label;

import java.util.Collection;

/**
 * Immutable value object describing a datetime feature specification: which element
 * label and property it applies to, and which datetime parts (hour, weekday, month,
 * year) to extract.
 */
public class DatetimeConfigV2 {

    private final Label elementLabel;
    private final String propertyName;
    private final Collection<DatetimePartV2> parts;

    public DatetimeConfigV2(Label label, String property, Collection<DatetimePartV2> datetimeParts) {
        this.elementLabel = label;
        this.propertyName = property;
        this.parts = datetimeParts;
    }

    /** Element label this specification applies to. */
    public Label label() {
        return elementLabel;
    }

    /** Name of the datetime-valued property. */
    public String property() {
        return propertyName;
    }

    /** Datetime parts to extract as sub-features. */
    public Collection<DatetimePartV2> datetimeParts() {
        return parts;
    }

    @Override
    public String toString() {
        // Same rendering as the original hand-built concatenation.
        return String.format("DatetimeConfigV2{label=%s, property='%s', datetimeParts=%s}",
                elementLabel, propertyName, parts);
    }
}
4,344
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/DatetimePartV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; public enum DatetimePartV2 { hour, weekday, month, year }
4,345
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/LabelConfigV2.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator;
import com.amazonaws.services.neptune.propertygraph.Label;

import java.util.Collection;

/**
 * Immutable value object describing a classification/regression/link-prediction
 * target ("label") specification: the element type and property it targets,
 * optional subject/object filters (RDF link prediction), train/validation/test
 * split rates and the multi-value separator.
 */
public class LabelConfigV2 {

    private final Label targetType;
    private final String task;
    private final String targetProperty;
    private final String subjectFilter;
    private final String objectFilter;
    private final Collection<Double> splits;
    private final Separator valueSeparator;

    public LabelConfigV2(Label nodeType,
                         String taskType,
                         String property,
                         String subject,
                         String object,
                         Collection<Double> splitRates,
                         Separator separator) {
        this.targetType = nodeType;
        this.task = taskType;
        this.targetProperty = property;
        this.subjectFilter = subject;
        this.objectFilter = object;
        this.splits = splitRates;
        this.valueSeparator = separator;
    }

    /** Element (node/edge) type the target applies to. */
    public Label label() {
        return targetType;
    }

    /** Task type name (e.g. classification, regression, link_prediction). */
    public String taskType() {
        return task;
    }

    /** Target property (RDF: predicate); may be empty. */
    public String property() {
        return targetProperty;
    }

    /** RDF link-prediction subject filter; may be empty. */
    public String subject() {
        return subjectFilter;
    }

    /** RDF link-prediction object filter; may be empty. */
    public String object() {
        return objectFilter;
    }

    /** Train/validation/test split proportions. */
    public Collection<Double> splitRates() {
        return splits;
    }

    /** Separator for multi-value target properties. */
    public Separator separator() {
        return valueSeparator;
    }
}
4,346
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/TfIdfConfigV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.amazonaws.services.neptune.propertygraph.Label;

/**
 * Immutable specification of a TF-IDF text feature: the element label and
 * property it applies to, the n-gram range, the minimum document frequency,
 * and the maximum number of features.
 */
public class TfIdfConfigV2 {

    private final Label label;
    private final String property;
    private final Range ngramRange;
    private final int minDf;
    private final int maxFeatures;

    public TfIdfConfigV2(Label label, String property, Range ngramRange, int minDf, int maxFeatures) {
        this.label = label;
        this.property = property;
        this.ngramRange = ngramRange;
        this.minDf = minDf;
        this.maxFeatures = maxFeatures;
    }

    public Label label() {
        return label;
    }

    public String property() {
        return property;
    }

    public Range ngramRange() {
        return ngramRange;
    }

    public Integer minDf() {
        return minDf;
    }

    public Integer maxFeatures() {
        return maxFeatures;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("TfIdfConfigV2{");
        sb.append("label=").append(label);
        sb.append(", property='").append(property).append('\'');
        sb.append(", ngramRange=").append(ngramRange);
        sb.append(", minDf=").append(minDf);
        sb.append(", maxFeatures=").append(maxFeatures);
        sb.append('}');
        return sb.toString();
    }
}
4,347
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/TrainingDataWriterConfigV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParseSplitRate;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing.ParseFeaturesV2;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing.ParseLabelsV2;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Parses and holds a v2 training-data writer configuration: a name, a
 * feature-encoding flag, default train/validation/test split rates, and
 * per-element (node/edge) label and feature configurations.
 */
public class TrainingDataWriterConfigV2 {

    // Default 90/10/0 train/validation/test split applied when the JSON
    // supplies no split rates of its own.
    public static final Collection<Double> DEFAULT_SPLIT_RATES_V2 = Arrays.asList(0.9, 0.1, 0.0);
    private static final String DEFAULT_NAME_V2 = "training-data-configuration";

    /**
     * Builds one config per job from the supplied JSON. Accepts three shapes:
     * a top-level array of job objects, an object with a "jobs" array, or a
     * single job object.
     *
     * @throws IllegalStateException if two parsed configs share the same name
     */
    public static Collection<TrainingDataWriterConfigV2> fromJson(JsonNode json, NeptuneMLSourceDataModel dataModel) {
        Collection<TrainingDataWriterConfigV2> results = new ArrayList<>();
        if (json.isArray()) {
            ArrayNode configNodes = (ArrayNode) json;
            // index is 1-based and used only to derive default names
            int index = 1;
            for (JsonNode configNode : configNodes) {
                results.add(getTrainingJobWriterConfig(configNode, index++, dataModel));
            }
        } else {
            if (json.has("jobs")) {
                ArrayNode configNodes = (ArrayNode) json.get("jobs");
                int index = 1;
                for (JsonNode configNode : configNodes) {
                    results.add(getTrainingJobWriterConfig(configNode, index++, dataModel));
                }
            } else {
                // Single job object
                results.add(getTrainingJobWriterConfig(json, 1, dataModel));
            }
        }
        // Reject duplicate names: a smaller set than list means a collision.
        Set<String> names = results.stream().map(TrainingDataWriterConfigV2::name).collect(Collectors.toSet());
        if (names.size() < results.size()) {
            throw new IllegalStateException(String.format("Training data configuration names must be unique: %s", names));
        }
        return results;
    }

    /**
     * Parses a single job object into a config. Reads the optional "name",
     * "feature_encoding", "targets" and "features" fields; anything absent
     * falls back to defaults.
     */
    private static TrainingDataWriterConfigV2 getTrainingJobWriterConfig(JsonNode json, int index, NeptuneMLSourceDataModel dataModel) {
        Collection<Double> defaultSplitRates = new ParseSplitRate(json, DEFAULT_SPLIT_RATES_V2, new ParsingContext("config")).parseSplitRates();

        // Accumulators for node-side and edge-side label/feature specs.
        Collection<LabelConfigV2> nodeClassLabels = new ArrayList<>();
        Collection<LabelConfigV2> edgeClassLabels = new ArrayList<>();
        Collection<NoneFeatureConfig> noneNodeFeatures = new ArrayList<>();
        Collection<TfIdfConfigV2> tfIdfNodeFeatures = new ArrayList<>();
        Collection<DatetimeConfigV2> datetimeNodeFeatures = new ArrayList<>();
        Collection<Word2VecConfig> word2VecNodeFeatures = new ArrayList<>();
        Collection<FastTextConfig> fastTextNodeFeatures = new ArrayList<>();
        Collection<SbertConfig> sbertNodeFeatures = new ArrayList<>();
        Collection<NumericalBucketFeatureConfigV2> numericalBucketNodeFeatures = new ArrayList<>();
        Collection<NoneFeatureConfig> noneEdgeFeatures = new ArrayList<>();
        Collection<TfIdfConfigV2> tfIdfEdgeFeatures = new ArrayList<>();
        Collection<DatetimeConfigV2> datetimeEdgeFeatures = new ArrayList<>();
        Collection<Word2VecConfig> word2VecEdgeFeatures = new ArrayList<>();
        Collection<FastTextConfig> fastTextEdgeFeatures = new ArrayList<>();
        Collection<SbertConfig> sbertEdgeFeatures = new ArrayList<>();
        Collection<NumericalBucketFeatureConfigV2> numericalBucketEdgeFeatures = new ArrayList<>();
        Collection<FeatureOverrideConfigV2> nodeFeatureOverrides = new ArrayList<>();
        Collection<FeatureOverrideConfigV2> edgeFeatureOverrides = new ArrayList<>();

        // Explicit "name" wins; otherwise "training-data-configuration" for the
        // first job and "training-data-configuration-<index>" for subsequent ones.
        String name = json.has("name") ?
                json.get("name").textValue() :
                index > 1 ? String.format("%s-%s", DEFAULT_NAME_V2, index) : DEFAULT_NAME_V2;

        FeatureEncodingFlag featureEncodingFlag = FeatureEncodingFlag.auto;
        if (json.has("feature_encoding")) {
            try {
                featureEncodingFlag = FeatureEncodingFlag.valueOf(json.path("feature_encoding").textValue());
            } catch (IllegalArgumentException e) {
                // Deliberate: an unrecognized value falls back to the default
                // of auto rather than failing the whole config.
                // Use default value of auto
            }
        }

        if (json.has("targets")) {
            JsonNode labels = json.path("targets");
            // Normalize "targets" to a collection whether it is an array or a
            // single object.
            Collection<JsonNode> labelNodes = new ArrayList<>();
            if (labels.isArray()) {
                labels.forEach(labelNodes::add);
            } else {
                labelNodes.add(labels);
            }
            ParseLabelsV2 parseLabels = new ParseLabelsV2(labelNodes, defaultSplitRates, dataModel);
            parseLabels.validate();
            nodeClassLabels.addAll(parseLabels.parseNodeClassLabels());
            edgeClassLabels.addAll(parseLabels.parseEdgeClassLabels());
        }

        if (json.has("features")) {
            JsonNode features = json.path("features");
            // Same array-or-object normalization as for "targets".
            Collection<JsonNode> featureNodes = new ArrayList<>();
            if (features.isArray()) {
                features.forEach(featureNodes::add);
            } else {
                featureNodes.add(features);
            }
            ParseFeaturesV2 parseFeatures = new ParseFeaturesV2(featureNodes);
            parseFeatures.validate();
            // Each feature kind is parsed twice: once filtered for nodes,
            // once filtered for edges.
            noneNodeFeatures.addAll(parseFeatures.parseNoneFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier));
            tfIdfNodeFeatures.addAll(parseFeatures.parseTfIdfFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier));
            datetimeNodeFeatures.addAll(parseFeatures.parseDatetimeFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier));
            word2VecNodeFeatures.addAll(parseFeatures.parseWord2VecFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier));
            fastTextNodeFeatures.addAll(parseFeatures.parseFastTextFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier));
            sbertNodeFeatures.addAll(parseFeatures.parseSbertFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier));
            numericalBucketNodeFeatures.addAll(parseFeatures.parseNumericalBucketFeatures(ParseFeaturesV2.NodeFeatureFilter, ParseFeaturesV2.NodeLabelSupplier));
            noneEdgeFeatures.addAll(parseFeatures.parseNoneFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier));
            tfIdfEdgeFeatures.addAll(parseFeatures.parseTfIdfFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier));
            datetimeEdgeFeatures.addAll(parseFeatures.parseDatetimeFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier));
            word2VecEdgeFeatures.addAll(parseFeatures.parseWord2VecFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier));
            fastTextEdgeFeatures.addAll(parseFeatures.parseFastTextFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier));
            sbertEdgeFeatures.addAll(parseFeatures.parseSbertFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier));
            numericalBucketEdgeFeatures.addAll(parseFeatures.parseNumericalBucketFeatures(ParseFeaturesV2.EdgeFeatureFilter, ParseFeaturesV2.EdgeLabelSupplier));
            nodeFeatureOverrides.addAll(parseFeatures.parseNodeFeatureOverrides());
            edgeFeatureOverrides.addAll(parseFeatures.parseEdgeFeatureOverrides());
        }

        ElementConfig nodeConfig = new ElementConfig(
                nodeClassLabels,
                noneNodeFeatures,
                tfIdfNodeFeatures,
                datetimeNodeFeatures,
                word2VecNodeFeatures,
                fastTextNodeFeatures,
                sbertNodeFeatures,
                numericalBucketNodeFeatures,
                nodeFeatureOverrides);

        ElementConfig edgeConfig = new ElementConfig(
                edgeClassLabels,
                noneEdgeFeatures,
                tfIdfEdgeFeatures,
                datetimeEdgeFeatures,
                word2VecEdgeFeatures,
                fastTextEdgeFeatures,
                sbertEdgeFeatures,
                numericalBucketEdgeFeatures,
                edgeFeatureOverrides);

        return new TrainingDataWriterConfigV2(name, featureEncodingFlag, defaultSplitRates, nodeConfig, edgeConfig);
    }

    private final String name;
    private final FeatureEncodingFlag featureEncodingFlag;
    private final Collection<Double> defaultSplitRates;
    private final ElementConfig nodeConfig;
    private final ElementConfig edgeConfig;

    /** Creates a config with all defaults (auto encoding, empty element configs). */
    public TrainingDataWriterConfigV2() {
        this(DEFAULT_NAME_V2, FeatureEncodingFlag.auto, DEFAULT_SPLIT_RATES_V2, ElementConfig.EMPTY_CONFIG, ElementConfig.EMPTY_CONFIG);
    }

    public TrainingDataWriterConfigV2(String name,
                                      FeatureEncodingFlag featureEncodingFlag,
                                      Collection<Double> defaultSplitRates,
                                      ElementConfig nodeConfig,
                                      ElementConfig edgeConfig) {
        this.name = name;
        this.featureEncodingFlag = featureEncodingFlag;
        this.defaultSplitRates = defaultSplitRates;
        this.nodeConfig = nodeConfig;
        this.edgeConfig = edgeConfig;
    }

    public String name() {
        return name;
    }

    /** True when the flag is {@code auto} (i.e. feature encoding is allowed). */
    public boolean allowFeatureEncoding() {
        return featureEncodingFlag == FeatureEncodingFlag.auto;
    }

    public Collection<Double> defaultSplitRates() {
        return defaultSplitRates;
    }

    public ElementConfig nodeConfig() {
        return nodeConfig;
    }

    public ElementConfig edgeConfig() {
        return edgeConfig;
    }

    // NOTE(review): featureEncodingFlag is intentionally or accidentally
    // omitted from this diagnostic output — confirm before relying on it.
    @Override
    public String toString() {
        return "TrainingDataWriterConfigV2{" +
                "name='" + name + '\'' +
                ", defaultSplitRates=" + defaultSplitRates +
                ", nodeConfig=" + nodeConfig +
                ", edgeConfig=" + edgeConfig +
                '}';
    }
}
4,348
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/NumericalBucketFeatureConfigV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.amazonaws.services.neptune.propertygraph.Label;

/**
 * Immutable specification of a bucketed numerical feature: the element label
 * and property, the value range, the number of buckets, the slide window
 * size, and the imputer used for missing values.
 */
public class NumericalBucketFeatureConfigV2 {

    private final Label label;
    private final String property;
    private final Range range;
    private final int bucketCount;
    private final int slideWindowSize;
    private final ImputerTypeV2 imputerType;

    public NumericalBucketFeatureConfigV2(Label label,
                                          String property,
                                          Range range,
                                          int bucketCount,
                                          int slideWindowSize,
                                          ImputerTypeV2 imputerType) {
        this.label = label;
        this.property = property;
        this.range = range;
        this.bucketCount = bucketCount;
        this.slideWindowSize = slideWindowSize;
        this.imputerType = imputerType;
    }

    public Label label() {
        return label;
    }

    public String property() {
        return property;
    }

    public Integer bucketCount() {
        return bucketCount;
    }

    public Integer slideWindowSize() {
        return slideWindowSize;
    }

    public Range range() {
        return range;
    }

    public ImputerTypeV2 imputerType() {
        return imputerType;
    }

    // Fixed: previously printed the stale name "NumericalBucketFeatureConfig"
    // (missing the V2 suffix), inconsistent with the sibling config classes
    // whose toString() matches their class name.
    @Override
    public String toString() {
        return "NumericalBucketFeatureConfigV2{" +
                "label=" + label +
                ", property='" + property + '\'' +
                ", range=" + range +
                ", bucketCount=" + bucketCount +
                ", slideWindowSize=" + slideWindowSize +
                ", imputerType=" + imputerType +
                '}';
    }
}
4,349
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/FeatureTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.JsonNode;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;

/**
 * The feature types accepted in a v2 training-data configuration. Each
 * constant can veto override fields that do not apply to it via
 * {@link #validateOverride(JsonNode, ParsingContext)}.
 */
public enum FeatureTypeV2 {
    bucket_numerical {
        @Override
        public void validateOverride(JsonNode json, ParsingContext context) {
            // Bucket numerical features operate on a single numeric value, so
            // a multi-value separator makes no sense here.
            if (json.has("separator")) {
                throw new IllegalArgumentException(String.format("Invalid 'separator' field for %s. Bucket numerical feature property cannot contain multiple values.", context));
            }
        }
    },
    text_word2vec {
        @Override
        public Collection<String> validNames() {
            // Accept the legacy alias "word2vec" as well as the canonical name.
            return Arrays.asList(name(), "word2vec");
        }

        @Override
        public void validateOverride(JsonNode json, ParsingContext context) {
            rejectImputer(json, context);
        }
    },
    text_fasttext {
        @Override
        public void validateOverride(JsonNode json, ParsingContext context) {
            rejectImputer(json, context);
        }
    },
    text_sbert {
        @Override
        public void validateOverride(JsonNode json, ParsingContext context) {
            rejectImputer(json, context);
        }
    },
    text_sbert128 {
        @Override
        public void validateOverride(JsonNode json, ParsingContext context) {
            rejectImputer(json, context);
        }
    },
    text_sbert512 {
        @Override
        public void validateOverride(JsonNode json, ParsingContext context) {
            rejectImputer(json, context);
        }
    },
    category {
        @Override
        public void validateOverride(JsonNode json, ParsingContext context) {
            rejectImputer(json, context);
        }
    },
    numerical,
    text_tfidf {
        @Override
        public void validateOverride(JsonNode json, ParsingContext context) {
            rejectImputer(json, context);
        }
    },
    datetime,
    auto,
    none;

    /**
     * Shared validation for the feature types that do not support an
     * 'imputer' override. Extracted to remove seven identical copies of this
     * check from the constant bodies; the error message is unchanged.
     */
    private static void rejectImputer(JsonNode json, ParsingContext context) {
        if (json.has("imputer")) {
            throw new IllegalArgumentException(String.format("Invalid 'imputer' field for %s.", context));
        }
    }

    /**
     * Validates an override specification for this feature type. The default
     * accepts everything; individual constants override to reject fields that
     * do not apply.
     */
    public void validateOverride(JsonNode node, ParsingContext context) {
        //Do nothing
    }

    /** The names this type may be referred to by in configuration JSON. */
    public Collection<String> validNames() {
        return Collections.singletonList(name());
    }

    /**
     * Resolves a configuration string (canonical name or alias) to a feature
     * type.
     *
     * @throws IllegalArgumentException if no type matches
     */
    public static FeatureTypeV2 fromString(String s) {
        for (FeatureTypeV2 featureType : FeatureTypeV2.values()) {
            for (String validName : featureType.validNames()) {
                if (validName.equals(s)) {
                    return featureType;
                }
            }
        }
        throw new IllegalArgumentException(String.format("Invalid feature type: %s (valid types are: %s)", s, String.join(", ", publicFormattedNames())));
    }

    /** The canonical names of all feature types, for error messages. */
    public static Collection<String> publicFormattedNames() {
        Collection<String> results = new ArrayList<>();
        for (FeatureTypeV2 featureType : FeatureTypeV2.values()) {
            results.add(featureType.name());
        }
        return results;
    }
}
4,350
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/NoneFeatureConfig.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.propertygraph.Label;

/**
 * Immutable marker specification identifying an element label/property pair
 * whose feature type is 'none'.
 */
public class NoneFeatureConfig {

    private final Label label;
    private final String property;

    public NoneFeatureConfig(Label label, String property) {
        this.label = label;
        this.property = property;
    }

    public Label label() {
        return label;
    }

    public String property() {
        return property;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("NoneFeatureConfig{");
        sb.append("label=").append(label);
        sb.append(", property='").append(property).append('\'');
        sb.append('}');
        return sb.toString();
    }
}
4,351
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/ElementConfig.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Word2VecConfig;
import com.amazonaws.services.neptune.propertygraph.Label;

import java.util.Collection;
import java.util.Collections;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * The label and feature specifications for one element kind (nodes or edges)
 * in a v2 training-data configuration. Provides lookup of a specification by
 * element label and property, and decides whether a property is free to have
 * its feature type auto-inferred.
 */
public class ElementConfig {

    /** A config with no labels, features, or overrides. */
    public static final ElementConfig EMPTY_CONFIG = new ElementConfig(
            Collections.emptyList(),
            Collections.emptyList(),
            Collections.emptyList(),
            Collections.emptyList(),
            Collections.emptyList(),
            Collections.emptyList(),
            Collections.emptyList(),
            Collections.emptyList(),
            Collections.emptyList());

    /**
     * Finds the first config whose label and property both match, or null.
     * Extracted to replace eight identical stream/filter/findFirst bodies.
     */
    private static <T> T firstMatch(Collection<T> configs,
                                    Function<T, Label> labelOf,
                                    Function<T, String> propertyOf,
                                    Label label,
                                    String property) {
        return configs.stream()
                .filter(c -> labelOf.apply(c).equals(label) && propertyOf.apply(c).equals(property))
                .findFirst()
                .orElse(null);
    }

    private final Collection<LabelConfigV2> classLabels;
    private final Collection<NoneFeatureConfig> noneFeatures;
    private final Collection<TfIdfConfigV2> tfIdfFeatures;
    private final Collection<DatetimeConfigV2> datetimeFeatures;
    private final Collection<Word2VecConfig> word2VecFeatures;
    private final Collection<FastTextConfig> fastTextFeatures;
    private final Collection<SbertConfig> sbertFeatures;
    private final Collection<NumericalBucketFeatureConfigV2> numericalBucketFeatures;
    private final Collection<FeatureOverrideConfigV2> featureOverrides;

    public ElementConfig(Collection<LabelConfigV2> classLabels,
                         Collection<NoneFeatureConfig> noneFeatures,
                         Collection<TfIdfConfigV2> tfIdfFeatures,
                         Collection<DatetimeConfigV2> datetimeFeatures,
                         Collection<Word2VecConfig> word2VecFeatures,
                         Collection<FastTextConfig> fastTextFeatures,
                         Collection<SbertConfig> sbertFeatures,
                         Collection<NumericalBucketFeatureConfigV2> numericalBucketFeatures,
                         Collection<FeatureOverrideConfigV2> featureOverrides) {
        this.classLabels = classLabels;
        this.noneFeatures = noneFeatures;
        this.tfIdfFeatures = tfIdfFeatures;
        this.datetimeFeatures = datetimeFeatures;
        this.word2VecFeatures = word2VecFeatures;
        this.fastTextFeatures = fastTextFeatures;
        this.sbertFeatures = sbertFeatures;
        this.numericalBucketFeatures = numericalBucketFeatures;
        this.featureOverrides = featureOverrides;
    }

    /**
     * A property may have its feature auto-inferred only when no explicit
     * specification of any kind (classification target, feature, or override)
     * claims it.
     */
    public boolean allowAutoInferFeature(Label label, String property) {
        return !(hasClassificationSpecificationForProperty(label, property)
                || hasNoneFeatureSpecification(label, property)
                || hasTfIdfSpecification(label, property)
                || hasDatetimeSpecification(label, property)
                || hasWord2VecSpecification(label, property)
                || hasFastTextSpecification(label, property)
                || hasSbertSpecification(label, property)
                || hasNumericalBucketSpecification(label, property)
                || hasFeatureOverrideForProperty(label, property));
    }

    public boolean hasClassificationSpecificationsFor(Label label) {
        return !getClassificationSpecifications(label).isEmpty();
    }

    public Collection<LabelConfigV2> getClassificationSpecifications(Label label) {
        return classLabels.stream().filter(c -> c.label().equals(label)).collect(Collectors.toList());
    }

    public boolean hasClassificationSpecificationForProperty(Label label, String property) {
        return getClassificationSpecifications(label).stream().anyMatch(s -> s.property().equals(property));
    }

    public Collection<LabelConfigV2> getAllClassificationSpecifications() {
        return classLabels;
    }

    public boolean hasNoneFeatureSpecification(Label label, String property) {
        return getNoneFeatureSpecification(label, property) != null;
    }

    public NoneFeatureConfig getNoneFeatureSpecification(Label label, String property) {
        return firstMatch(noneFeatures, NoneFeatureConfig::label, NoneFeatureConfig::property, label, property);
    }

    public boolean hasTfIdfSpecification(Label label, String property) {
        return getTfIdfSpecification(label, property) != null;
    }

    public TfIdfConfigV2 getTfIdfSpecification(Label label, String property) {
        return firstMatch(tfIdfFeatures, TfIdfConfigV2::label, TfIdfConfigV2::property, label, property);
    }

    public boolean hasDatetimeSpecification(Label label, String property) {
        return getDatetimeSpecification(label, property) != null;
    }

    public DatetimeConfigV2 getDatetimeSpecification(Label label, String property) {
        return firstMatch(datetimeFeatures, DatetimeConfigV2::label, DatetimeConfigV2::property, label, property);
    }

    public boolean hasWord2VecSpecification(Label label, String property) {
        return getWord2VecSpecification(label, property) != null;
    }

    public Word2VecConfig getWord2VecSpecification(Label label, String property) {
        return firstMatch(word2VecFeatures, Word2VecConfig::label, Word2VecConfig::property, label, property);
    }

    public boolean hasFastTextSpecification(Label label, String property) {
        return getFastTextSpecification(label, property) != null;
    }

    public FastTextConfig getFastTextSpecification(Label label, String property) {
        return firstMatch(fastTextFeatures, FastTextConfig::label, FastTextConfig::property, label, property);
    }

    public boolean hasSbertSpecification(Label label, String property) {
        return getSbertSpecification(label, property) != null;
    }

    public SbertConfig getSbertSpecification(Label label, String property) {
        return firstMatch(sbertFeatures, SbertConfig::label, SbertConfig::property, label, property);
    }

    public boolean hasNumericalBucketSpecification(Label label, String property) {
        return getNumericalBucketSpecification(label, property) != null;
    }

    public NumericalBucketFeatureConfigV2 getNumericalBucketSpecification(Label label, String property) {
        return firstMatch(numericalBucketFeatures, NumericalBucketFeatureConfigV2::label, NumericalBucketFeatureConfigV2::property, label, property);
    }

    // Overrides match differently from features: an override carries a
    // collection of properties, so we test containment rather than equality.
    public boolean hasFeatureOverrideForProperty(Label label, String property) {
        return featureOverrides.stream()
                .anyMatch(override -> override.label().equals(label) && override.properties().contains(property));
    }

    public Collection<FeatureOverrideConfigV2> getFeatureOverrides(Label label) {
        return featureOverrides.stream()
                .filter(c -> c.label().equals(label))
                .collect(Collectors.toList());
    }

    public FeatureOverrideConfigV2 getFeatureOverride(Label label, String property) {
        return featureOverrides.stream()
                .filter(config -> config.label().equals(label) && config.properties().contains(property))
                .findFirst()
                .orElse(null);
    }

    @Override
    public String toString() {
        return "ElementConfig{" +
                "classLabels=" + classLabels +
                ", tfIdfFeatures=" + tfIdfFeatures +
                ", datetimeFeatures=" + datetimeFeatures +
                ", word2VecFeatures=" + word2VecFeatures +
                ", numericalBucketFeatures=" + numericalBucketFeatures +
                ", featureOverrides=" + featureOverrides +
                '}';
    }
}
4,352
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/SupportedFastTextLanguages.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; public enum SupportedFastTextLanguages { en, zh, hi, es, fr }
4,353
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/NodeTaskTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; public enum NodeTaskTypeV2 { classification, regression }
4,354
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/SbertConfig.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.propertygraph.Label;

/**
 * Immutable specification of an SBERT text feature: the element label and
 * property it applies to, plus a name.
 */
public class SbertConfig {

    private final Label label;
    private final String property;
    private final String name;

    public SbertConfig(Label label, String property, String name) {
        this.label = label;
        this.property = property;
        this.name = name;
    }

    public Label label() {
        return label;
    }

    public String property() {
        return property;
    }

    public String name() {
        return name;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("SbertConfig{");
        sb.append("label=").append(label);
        sb.append(", property='").append(property).append('\'');
        sb.append(", name='").append(name).append('\'');
        sb.append('}');
        return sb.toString();
    }
}
4,355
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/EdgeTaskTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.propertygraph.Label; import org.apache.commons.lang.StringUtils; public enum EdgeTaskTypeV2 { classification, regression, link_prediction { @Override public void validate(String property, Label label){ // Do nothing } }; public void validate(String property, Label label) { ParsingContext context = new ParsingContext(String.format("edge %s specification", name())).withLabel(label); if (StringUtils.isEmpty(property)){ throw new IllegalArgumentException(String.format("Missing or empty 'property' field for %s.", context)); } } }
4,356
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/TaskTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; public enum TaskTypeV2 { link_prediction, node_classification, node_regression, edge_classification, edge_regression }
4,357
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/FastTextConfig.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config;

import com.amazonaws.services.neptune.propertygraph.Label;

import java.util.Optional;

/**
 * Immutable specification of a FastText text feature: the element label and
 * property it applies to, the language code, and an optional maximum length.
 */
public class FastTextConfig {

    private final Label label;
    private final String property;
    private final String language;
    private final Integer maxLength;

    public FastTextConfig(Label label, String property, String language, Integer maxLength) {
        this.label = label;
        this.property = property;
        this.language = language;
        this.maxLength = maxLength;
    }

    public Label label() {
        return label;
    }

    public String property() {
        return property;
    }

    public String language() {
        return language;
    }

    /** Empty when no maximum length was configured (maxLength was null). */
    public Optional<Integer> maxLength() {
        return Optional.ofNullable(maxLength);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("FastTextConfig{");
        sb.append("label=").append(label);
        sb.append(", property='").append(property).append('\'');
        sb.append(", language=").append(language);
        sb.append(", maxLength=").append(maxLength);
        sb.append('}');
        return sb.toString();
    }
}
4,358
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/config/FeatureEncodingFlag.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.config; public enum FeatureEncodingFlag { none, auto }
4,359
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseFastTextLanguage.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.SupportedFastTextLanguages; import com.fasterxml.jackson.databind.JsonNode; import java.util.Arrays; public class ParseFastTextLanguage { private final JsonNode json; private final ParsingContext context; public ParseFastTextLanguage(JsonNode json, ParsingContext context) { this.json = json; this.context = context; } public String parseLanguage() { if (json.has("language")) { if (json.get("language").isTextual()) { return json.get("language").textValue(); } } throw ErrorMessageHelper.errorParsingField( "language", context, "one of the following values: " + ErrorMessageHelper.quoteList(Arrays.asList(SupportedFastTextLanguages.values()))); } }
4,360
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseMaxFeaturesV2.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

    http://www.apache.org/licenses/LICENSE-2.0

or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.JsonNode;

/**
 * Parses the mandatory integer "max_features" field of a TF-IDF feature definition.
 */
public class ParseMaxFeaturesV2 {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseMaxFeaturesV2(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * @return the integer value of "max_features"
     * @throws IllegalArgumentException if the field is missing or not an int
     */
    public int parseMaxFeatures() {
        // path() yields MissingNode when absent, for which isInt() is false.
        JsonNode maxFeaturesNode = json.path("max_features");
        if (maxFeaturesNode.isInt()) {
            return maxFeaturesNode.asInt();
        }
        throw ErrorMessageHelper.errorParsingField("max_features", context, "an integer value");
    }
}
4,361
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseSbertTypeName.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.FeatureTypeV2; import com.fasterxml.jackson.databind.JsonNode; import java.util.Arrays; public class ParseSbertTypeName { private final JsonNode json; private final ParsingContext context; public ParseSbertTypeName(JsonNode json, ParsingContext context) { this.json = json; this.context = context; } public String parseTypeName() { if (json.has("type") && json.get("type").isTextual()) { return json.get("type").textValue(); } throw ErrorMessageHelper.errorParsingField( "type", context, "one of the following values: " + ErrorMessageHelper.quoteList(Arrays.asList( FeatureTypeV2.text_sbert.name(), FeatureTypeV2.text_sbert128.name(), FeatureTypeV2.text_sbert512.name() ))); } }
4,362
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseLabelsV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.*; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.EdgeTaskTypeV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.LabelConfigV2; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.RdfTaskTypeV2; import com.amazonaws.services.neptune.propertygraph.Label; import com.fasterxml.jackson.databind.JsonNode; import java.util.ArrayList; import java.util.Collection; public class ParseLabelsV2 { private final Collection<JsonNode> config; private final Collection<Double> defaultSplitRates; private final NeptuneMLSourceDataModel dataModel; public ParseLabelsV2(Collection<JsonNode> config, Collection<Double> defaultSplitRates, NeptuneMLSourceDataModel dataModel) { this.config = config; this.defaultSplitRates = defaultSplitRates; this.dataModel = dataModel; } public Collection<LabelConfigV2> parseNodeClassLabels() { Collection<LabelConfigV2> nodeClassLabels = new ArrayList<>(); for (JsonNode json : config) { if (dataModel.isRdfLinkPrediction(json)) { ParsingContext context = new ParsingContext("edge"); String subject = new ParseSubject(json, context).parseSubject(); String 
predicate = dataModel.parseProperty(json, context, null); String object = new ParseObject(json, context).parseObject(); Collection<Double> splitRates = new ParseSplitRate(json, defaultSplitRates, context).parseSplitRates(); nodeClassLabels.add(new LabelConfigV2(null, RdfTaskTypeV2.link_prediction.name(), predicate, subject, object, splitRates, null)); } else if (isNodeClass(json)) { ParsingContext context = new ParsingContext(String.format("node %s", dataModel.nodeTypeName().toLowerCase())); Label nodeType = new ParseNodeType(json, context).parseNodeType(); String property = dataModel.parseProperty(json, context, nodeType); ParsingContext propertyContext = context.withLabel(nodeType).withProperty(property); String taskType = dataModel.parseTaskType(json, propertyContext, nodeType, property); Separator separator = new ParseSeparator(json).parseSeparator(); Collection<Double> splitRates = new ParseSplitRate(json, defaultSplitRates, propertyContext).parseSplitRates(); nodeClassLabels.add(new LabelConfigV2(nodeType, taskType, property, null, null, splitRates, separator)); } } return nodeClassLabels; } private boolean isNodeClass(JsonNode json) { return json.has("node"); } private boolean isEdgeClass(JsonNode json) { return json.has("edge"); } public void validate() { for (JsonNode json : config) { if (!dataModel.isRdfLinkPrediction(json)) { if (!isNodeClass(json) && !isEdgeClass(json)) { throw new IllegalArgumentException(String.format("Illegal target element. 
Expected %s field.", dataModel.labelFields())); } } } } public Collection<LabelConfigV2> parseEdgeClassLabels() { Collection<LabelConfigV2> edgeClassLabels = new ArrayList<>(); for (JsonNode json : config) { if (isEdgeClass(json)) { ParsingContext context = new ParsingContext("edge label"); Label edgeType = new ParseEdgeType(json, context).parseEdgeType(); String property = new ParseProperty(json, context.withLabel(edgeType)).parseNullableSingleProperty(); ParsingContext propertyContext = context.withLabel(edgeType).withProperty(property); EdgeTaskTypeV2 taskType = new ParseEdgeTaskTypeV2(json, propertyContext).parseTaskType(); taskType.validate(property, edgeType); Separator separator = new ParseSeparator(json).parseSeparator(); Collection<Double> splitRates = new ParseSplitRate(json, defaultSplitRates, propertyContext).parseSplitRates(); edgeClassLabels.add(new LabelConfigV2(edgeType, taskType.name(), property, null, null, splitRates, separator)); } } return edgeClassLabels; } }
4,363
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseTaskTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.TaskTypeV2; import com.fasterxml.jackson.databind.JsonNode; import java.util.ArrayList; import java.util.Collection; public class ParseTaskTypeV2 { private final JsonNode json; private final ParsingContext context; public ParseTaskTypeV2(JsonNode json, ParsingContext context) { this.json = json; this.context = context; } public TaskTypeV2 parseTaskType() { if (json.has("task_type")) { String taskType = json.get("task_type").textValue(); try { return TaskTypeV2.valueOf(taskType); } catch (IllegalArgumentException e) { throw ErrorMessageHelper.invalidFieldValue("task_type", taskType, context, taskTypeNames()); } } throw ErrorMessageHelper.errorParsingField("task_type", context, "one of the following values: " + ErrorMessageHelper.quoteList(taskTypeNames())); } private Collection<String> taskTypeNames() { Collection<String> results = new ArrayList<>(); for (TaskTypeV2 taskTypeV2 : TaskTypeV2.values()) { results.add(taskTypeV2.name()); } return results; } }
4,364
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseBucketCountV2.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

    http://www.apache.org/licenses/LICENSE-2.0

or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.JsonNode;

/**
 * Parses the bucket count of a bucket_numerical feature. The canonical field
 * name is "bucket_cnt"; "num_buckets" is accepted as an alternative spelling.
 */
public class ParseBucketCountV2 {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseBucketCountV2(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * @return the integer bucket count, preferring "bucket_cnt" over "num_buckets"
     * @throws IllegalArgumentException if neither field is present as an int
     */
    public int parseBucketCount() {
        JsonNode bucketCnt = json.path("bucket_cnt");
        if (bucketCnt.isInt()) {
            return bucketCnt.asInt();
        }
        JsonNode numBuckets = json.path("num_buckets");
        if (numBuckets.isInt()) {
            return numBuckets.asInt();
        }
        // "an integer value" for consistency with ParseMinDfV2/ParseMaxFeaturesV2
        // (previously just "an integer"). The error names the canonical field only.
        throw ErrorMessageHelper.errorParsingField("bucket_cnt", context, "an integer value");
    }
}
4,365
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseFeatureTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.FeatureTypeV2; import com.fasterxml.jackson.databind.JsonNode; import java.util.Arrays; public class ParseFeatureTypeV2 { private final JsonNode json; private final ParsingContext context; public ParseFeatureTypeV2(JsonNode json, ParsingContext context) { this.json = json; this.context = context; } public FeatureTypeV2 parseFeatureType() { if (json.has("type") && json.get("type").isTextual()) { String type = json.get("type").textValue(); if (type.equals("numerical") || type.equals("category") || type.equals("auto") || type.equals("none")) { return FeatureTypeV2.valueOf(type); } else { throw ErrorMessageHelper.invalidFieldValue("type", type, context, Arrays.asList("numerical", "category", "auto", "none")); } } throw ErrorMessageHelper.errorParsingField("type", context, "one of the following values: 'numerical', 'category', 'auto', 'none'"); } }
4,366
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseMinDfV2.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

    http://www.apache.org/licenses/LICENSE-2.0

or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.JsonNode;

/**
 * Parses the mandatory integer "min_df" (minimum document frequency) field of
 * a TF-IDF feature definition.
 */
public class ParseMinDfV2 {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseMinDfV2(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * @return the integer value of "min_df"
     * @throws IllegalArgumentException if the field is missing or not an int
     */
    public int parseMinDf() {
        // path() yields MissingNode when absent, for which isInt() is false.
        JsonNode minDfNode = json.path("min_df");
        if (minDfNode.isInt()) {
            return minDfNode.asInt();
        }
        throw ErrorMessageHelper.errorParsingField("min_df", context, "an integer value");
    }
}
4,367
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseDatetimePartsV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.DatetimePartV2; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; public class ParseDatetimePartsV2 { private final JsonNode json; private final ParsingContext context; public ParseDatetimePartsV2(JsonNode json, ParsingContext context) { this.json = json; this.context = context; } public Collection<DatetimePartV2> parseDatetimeParts() { if (json.has("datetime_parts")) { if (json.get("datetime_parts").isArray()) { ArrayNode datetimeParts = (ArrayNode) json.get("datetime_parts"); Collection<DatetimePartV2> results = new ArrayList<>(); for (JsonNode datetimePart : datetimeParts) { String value = datetimePart.textValue(); try { results.add(DatetimePartV2.valueOf(value)); } catch (IllegalArgumentException e) { throw ErrorMessageHelper.invalidFieldValue("datetime_parts", value, context, datetimePartNames()); } } return results.isEmpty() ? 
Arrays.asList(DatetimePartV2.values()) : results; } else { throw ErrorMessageHelper.errorParsingField("datetime_parts", context, "an array value"); } } else { return Arrays.asList(DatetimePartV2.values()); } } private Collection<String> datetimePartNames() { Collection<String> results = new ArrayList<>(); for (DatetimePartV2 value : DatetimePartV2.values()) { results.add(value.name()); } return results; } }
4,368
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseRdfTaskType.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.RdfTaskTypeV2; import com.fasterxml.jackson.databind.JsonNode; import java.util.Arrays; public class ParseRdfTaskType { private final JsonNode json; private final ParsingContext context; public ParseRdfTaskType(JsonNode json, ParsingContext context) { this.json = json; this.context = context; } public RdfTaskTypeV2 parseTaskType() { if (json.has("type") && json.get("type").isTextual()) { String type = json.get("type").textValue(); try { return RdfTaskTypeV2.valueOf(type); } catch (IllegalArgumentException e) { throw ErrorMessageHelper.invalidFieldValue( "type", type, context, ErrorMessageHelper.enumNames(Arrays.asList(RdfTaskTypeV2.values()))); } } throw ErrorMessageHelper.errorParsingField( "type", context, "one of the following values: " + ErrorMessageHelper.quoteList(Arrays.asList(RdfTaskTypeV2.values()))); } }
4,369
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseFeaturesV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.*; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.*; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.*; import com.amazonaws.services.neptune.propertygraph.Label; import com.fasterxml.jackson.databind.JsonNode; import java.util.ArrayList; import java.util.Collection; public class ParseFeaturesV2 { private final Collection<JsonNode> features; public ParseFeaturesV2(Collection<JsonNode> features) { this.features = features; } public interface ElementFeatureFilter { boolean isCorrectType(JsonNode json); } public interface LabelSupplier { Label getLabel(JsonNode json, ParsingContext context); } public static ElementFeatureFilter NodeFeatureFilter = json -> json.has("node") && json.has("type"); public static ElementFeatureFilter EdgeFeatureFilter = json -> json.has("edge") && json.has("type"); public static LabelSupplier NodeLabelSupplier = (json, context) -> new ParseNodeType(json, context).parseNodeType(); public static LabelSupplier EdgeLabelSupplier = (json, context) -> new ParseEdgeType(json, context).parseEdgeType(); public void validate() { for (JsonNode feature : features) { if (!isNoneFeature(feature) && !isTfIdfFeature(feature) && !isDatetimeFeature(feature) && !isAutoFeature(feature) && !isWord2VecFeature(feature) && 
!isFastTextFeature(feature) && !isSbertTextFeature(feature) && !isNumericalBucketFeature(feature) && !isNodeFeatureOverride(feature) && !isEdgeFeatureOverride(feature)) { if (feature.has("type")) { String featureType = feature.get("type").textValue(); throw new IllegalArgumentException( String.format("Illegal feature type: '%s'. Supported values are: %s.", featureType, ErrorMessageHelper.quoteList(FeatureTypeV2.publicFormattedNames()))); } } } } public Collection<NoneFeatureConfig> parseNoneFeatures(ElementFeatureFilter filter, LabelSupplier supplier) { Collection<NoneFeatureConfig> noneFeatures = new ArrayList<>(); for (JsonNode json : features) { if (filter.isCorrectType(json) && isNoneFeature(json)) { ParsingContext context = new ParsingContext(FeatureTypeV2.none.name() + " feature"); Label label = supplier.getLabel(json, context); String property = new ParseProperty(json, context.withLabel(label)).parseSingleProperty(); NoneFeatureConfig config = new NoneFeatureConfig(label, property); noneFeatures.add(config); } } return noneFeatures; } public Collection<TfIdfConfigV2> parseTfIdfFeatures(ElementFeatureFilter filter, LabelSupplier supplier) { Collection<TfIdfConfigV2> tfIdfFeatures = new ArrayList<>(); for (JsonNode json : features) { if (filter.isCorrectType(json) && isTfIdfFeature(json)) { ParsingContext context = new ParsingContext(FeatureTypeV2.text_tfidf.name() + " feature"); Label label = supplier.getLabel(json, context); String property = new ParseProperty(json, context.withLabel(label)).parseSingleProperty(); ParsingContext propertyContext = context.withLabel(label).withProperty(property); Range ngramRange = new ParseRange(json, "ngram_range", propertyContext).parseRange(); int minDf = new ParseMinDfV2(json, propertyContext).parseMinDf(); int maxFeatures = new ParseMaxFeaturesV2(json, propertyContext).parseMaxFeatures(); TfIdfConfigV2 config = new TfIdfConfigV2(label, property, ngramRange, minDf, maxFeatures); tfIdfFeatures.add(config); } } return 
tfIdfFeatures; } public Collection<DatetimeConfigV2> parseDatetimeFeatures(ElementFeatureFilter filter, LabelSupplier supplier) { Collection<DatetimeConfigV2> datetimeFeatures = new ArrayList<>(); for (JsonNode json : features) { if (filter.isCorrectType(json) && isDatetimeFeature(json)) { ParsingContext context = new ParsingContext(FeatureTypeV2.datetime.name() + " feature"); Label label = supplier.getLabel(json, context); String property = new ParseProperty(json, context.withLabel(label)).parseSingleProperty(); Collection<DatetimePartV2> datetimeParts = new ParseDatetimePartsV2(json, context.withLabel(label).withProperty(property)).parseDatetimeParts(); DatetimeConfigV2 config = new DatetimeConfigV2(label, property, datetimeParts); datetimeFeatures.add(config); } } return datetimeFeatures; } public Collection<Word2VecConfig> parseWord2VecFeatures(ElementFeatureFilter filter, LabelSupplier supplier) { Collection<Word2VecConfig> word2VecFeatures = new ArrayList<>(); for (JsonNode json : features) { if (filter.isCorrectType(json) && isWord2VecFeature(json)) { ParsingContext context = new ParsingContext(FeatureTypeV2.text_word2vec.name() + " feature"); Label label = supplier.getLabel(json, context); String property = new ParseProperty(json, context.withLabel(label)).parseSingleProperty(); Collection<String> language = new ParseWord2VecLanguage(json).parseLanguage(); Word2VecConfig config = new Word2VecConfig(label, property, language); word2VecFeatures.add(config); } } return word2VecFeatures; } public Collection<SbertConfig> parseSbertFeatures(ElementFeatureFilter filter, LabelSupplier supplier) { Collection<SbertConfig> sbertConfigs = new ArrayList<>(); for (JsonNode json : features) { if (filter.isCorrectType(json) && isSbertTextFeature(json)) { ParsingContext context = new ParsingContext(FeatureTypeV2.text_sbert.name() + " feature"); Label label = supplier.getLabel(json, context); String property = new ParseProperty(json, 
context.withLabel(label)).parseSingleProperty(); String name = new ParseSbertTypeName(json, context).parseTypeName(); SbertConfig config = new SbertConfig(label, property, name); sbertConfigs.add(config); } } return sbertConfigs; } public Collection<FastTextConfig> parseFastTextFeatures(ElementFeatureFilter filter, LabelSupplier supplier) { Collection<FastTextConfig> fastTextFeatures = new ArrayList<>(); for (JsonNode json : features) { if (filter.isCorrectType(json) && isFastTextFeature(json)) { ParsingContext context = new ParsingContext(FeatureTypeV2.text_fasttext.name() + " feature"); Label label = supplier.getLabel(json, context); String property = new ParseProperty(json, context.withLabel(label)).parseSingleProperty(); String language = new ParseFastTextLanguage(json, context).parseLanguage(); Integer maxLength = new ParseMaxLength(json, context).parseMaxLength(); FastTextConfig config = new FastTextConfig(label, property, language, maxLength); fastTextFeatures.add(config); } } return fastTextFeatures; } public Collection<NumericalBucketFeatureConfigV2> parseNumericalBucketFeatures(ElementFeatureFilter filter, LabelSupplier supplier) { Collection<NumericalBucketFeatureConfigV2> numericalBucketFeatures = new ArrayList<>(); for (JsonNode json : features) { if (filter.isCorrectType(json) && isNumericalBucketFeature(json)) { ParsingContext context = new ParsingContext(FeatureTypeV2.bucket_numerical.name() + " feature"); Label label = supplier.getLabel(json, context); FeatureTypeV2.bucket_numerical.validateOverride(json, context); String property = new ParseProperty(json, context.withLabel(label)).parseSingleProperty(); ParsingContext propertyContext = context.withLabel(label).withProperty(property); Range range = new ParseRange(json, "range", propertyContext).parseRange(); int bucketCount = new ParseBucketCountV2(json, propertyContext).parseBucketCount(); int slideWindowSize = new ParseSlideWindowSize(json, propertyContext).parseSlideWindowSize(); ImputerTypeV2 
imputerType = new ParseImputerTypeV2(json, propertyContext).parseImputerType();
                NumericalBucketFeatureConfigV2 config = new NumericalBucketFeatureConfigV2(
                        label, property, range, bucketCount, slideWindowSize, imputerType);
                numericalBucketFeatures.add(config);
            }
        }
        return numericalBucketFeatures;
    }

    /**
     * Parses every node feature override entry in {@code features} into a
     * {@link FeatureOverrideConfigV2}. Entries that are not node feature
     * overrides (see {@link #isNodeFeatureOverride(JsonNode)}) are skipped.
     */
    public Collection<FeatureOverrideConfigV2> parseNodeFeatureOverrides() {
        Collection<FeatureOverrideConfigV2> featureOverrides = new ArrayList<>();
        for (JsonNode json : features) {
            if (isNodeFeatureOverride(json)) {
                ParsingContext context = new ParsingContext("node feature");
                Label nodeType = new ParseNodeType(json, context).parseNodeType();
                Collection<String> properties = new ParseProperty(json, context.withLabel(nodeType)).parseMultipleProperties();
                // Context carrying label + properties, used so parse errors identify the entry precisely.
                ParsingContext propertiesContext = context.withLabel(nodeType).withProperties(properties);
                FeatureTypeV2 type = new ParseFeatureTypeV2(json, propertiesContext).parseFeatureType();
                type.validateOverride(json, context);
                Norm norm = new ParseNorm(json, propertiesContext).parseNorm();
                Separator separator = new ParseSeparator(json).parseSeparator();
                ImputerTypeV2 imputerType = new ParseImputerTypeV2(json, propertiesContext).parseImputerType();
                FeatureOverrideConfigV2 config = new FeatureOverrideConfigV2(nodeType, properties, type, norm, separator, imputerType);
                featureOverrides.add(config);
            }
        }
        return featureOverrides;
    }

    /**
     * Parses every edge feature override entry in {@code features} into a
     * {@link FeatureOverrideConfigV2}. Entries that are not edge feature
     * overrides (see {@link #isEdgeFeatureOverride(JsonNode)}) are skipped.
     */
    public Collection<FeatureOverrideConfigV2> parseEdgeFeatureOverrides() {
        Collection<FeatureOverrideConfigV2> featureOverrides = new ArrayList<>();
        for (JsonNode node : features) {
            if (isEdgeFeatureOverride(node)) {
                ParsingContext context = new ParsingContext("edge feature");
                Label edgeType = new ParseEdgeType(node, context).parseEdgeType();
                Collection<String> properties = new ParseProperty(node, context.withLabel(edgeType)).parseMultipleProperties();
                ParsingContext propertiesContext = context.withLabel(edgeType).withProperties(properties);
                FeatureTypeV2 type = new ParseFeatureTypeV2(node, propertiesContext).parseFeatureType();
                type.validateOverride(node, context);
                Norm norm = new ParseNorm(node, propertiesContext).parseNorm();
                Separator separator = new ParseSeparator(node).parseSeparator();
                // Fix: use propertiesContext here (as the node override path does) so that
                // imputer parse errors name the edge type and properties; previously the
                // label-less 'context' was passed, producing less specific error messages.
                ImputerTypeV2 imputerType = new ParseImputerTypeV2(node, propertiesContext).parseImputerType();
                FeatureOverrideConfigV2 config = new FeatureOverrideConfigV2(edgeType, properties, type, norm, separator, imputerType);
                featureOverrides.add(config);
            }
        }
        return featureOverrides;
    }

    // --- Predicates classifying a raw feature JSON entry by its "type" field ---

    private boolean isNoneFeature(JsonNode node) { return isNoneFeatureType(node.get("type").textValue()); }

    private boolean isTfIdfFeature(JsonNode node) { return isTfIdfType(node.get("type").textValue()); }

    private boolean isDatetimeFeature(JsonNode node) { return isDatetimeType(node.get("type").textValue()); }

    private boolean isAutoFeature(JsonNode node) { return isAutoType(node.get("type").textValue()); }

    private boolean isWord2VecFeature(JsonNode node) { return isWord2VecType(node.get("type").textValue()); }

    private boolean isFastTextFeature(JsonNode node) { return isFastTextType(node.get("type").textValue()); }

    private boolean isSbertTextFeature(JsonNode node) { return isSbertTextType(node.get("type").textValue()); }

    private boolean isNumericalBucketFeature(JsonNode node) { return isBucketNumericalType(node.get("type").textValue()); }

    // A node feature override must target a node and use one of the overridable types.
    private boolean isNodeFeatureOverride(JsonNode node) {
        if (isNodeFeature(node)) {
            String type = node.get("type").textValue();
            return (isNumericalType(type) || isCategoricalType(type) || isAutoType(type) || isNoneType(type));
        }
        return false;
    }

    // Edge overrides support a narrower set of types than node overrides.
    private boolean isEdgeFeatureOverride(JsonNode node) {
        if (isEdgeFeature(node)) {
            String type = node.get("type").textValue();
            return (isNumericalType(type) || isCategoricalType(type));
        }
        return false;
    }

    private boolean isNodeFeature(JsonNode node) { return node.has("node") && node.has("type"); }

    private boolean isEdgeFeature(JsonNode node) { return node.has("edge") && node.has("type"); }

    // --- Predicates matching a raw "type" string against FeatureTypeV2 names ---

    private boolean isNoneFeatureType(String type) { return isOfType(FeatureTypeV2.none, type); }

    private boolean isTfIdfType(String type) { return isOfType(FeatureTypeV2.text_tfidf, type); }

    private boolean isDatetimeType(String type) { return isOfType(FeatureTypeV2.datetime, type); }

    private boolean isAutoType(String type) { return isOfType(FeatureTypeV2.auto, type); }

    private boolean isWord2VecType(String type) { return isOfType(FeatureTypeV2.text_word2vec, type); }

    private boolean isFastTextType(String type) { return isOfType(FeatureTypeV2.text_fasttext, type); }

    private boolean isSbertTextType(String type) {
        return isOfType(FeatureTypeV2.text_sbert, type) ||
                isOfType(FeatureTypeV2.text_sbert128, type) ||
                isOfType(FeatureTypeV2.text_sbert512, type);
    }

    private boolean isBucketNumericalType(String type) { return isOfType(FeatureTypeV2.bucket_numerical, type); }

    private boolean isCategoricalType(String type) { return isOfType(FeatureTypeV2.category, type); }

    private boolean isNumericalType(String type) { return isOfType(FeatureTypeV2.numerical, type); }

    private boolean isNoneType(String type) { return isOfType(FeatureTypeV2.none, type); }

    // True when s matches any of the feature type's accepted spellings.
    private boolean isOfType(FeatureTypeV2 featureTypeV2, String s) {
        for (String validName : featureTypeV2.validNames()) {
            if (validName.equals(s)) {
                return true;
            }
        }
        return false;
    }
}
4,370
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseMaxLength.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.fasterxml.jackson.databind.JsonNode;

/**
 * Reads the optional {@code max_length} field of a feature-config JSON entry.
 */
public class ParseMaxLength {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseMaxLength(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * Returns the {@code max_length} value, or {@code null} when the field is absent.
     *
     * @throws IllegalArgumentException if the field is present but not an integer
     */
    public Integer parseMaxLength() {
        if (!json.has("max_length")) {
            return null;
        }
        JsonNode field = json.path("max_length");
        if (!field.isInt()) {
            throw ErrorMessageHelper.errorParsingField("max_length", context, "an integer value");
        }
        return field.asInt();
    }
}
4,371
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseImputerTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper; import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext; import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.ImputerTypeV2; import com.fasterxml.jackson.databind.JsonNode; public class ParseImputerTypeV2 { private final JsonNode json; private final ParsingContext context; public ParseImputerTypeV2(JsonNode json, ParsingContext context) { this.json = json; this.context = context; } public ImputerTypeV2 parseImputerType() { if (json.has("imputer")) { String imputerType = json.get("imputer").textValue(); try { return ImputerTypeV2.fromString(imputerType); } catch (IllegalArgumentException e) { throw ErrorMessageHelper.invalidFieldValue("imputer", imputerType, context, ImputerTypeV2.publicFormattedNames()); } } return ImputerTypeV2.none; } }
4,372
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseEdgeTaskTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.EdgeTaskTypeV2;
import com.fasterxml.jackson.databind.JsonNode;

import java.util.Arrays;

/**
 * Reads the mandatory {@code type} field of an edge-task JSON entry.
 */
public class ParseEdgeTaskTypeV2 {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseEdgeTaskTypeV2(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * Returns the edge task type named by the {@code type} field.
     *
     * @throws IllegalArgumentException if the field is missing, non-textual, or not a valid enum name
     */
    public EdgeTaskTypeV2 parseTaskType() {
        JsonNode typeNode = json.get("type");
        if (typeNode == null || !typeNode.isTextual()) {
            throw ErrorMessageHelper.errorParsingField(
                    "type",
                    context,
                    "one of the following values: " +
                            ErrorMessageHelper.quoteList(Arrays.asList(EdgeTaskTypeV2.values())));
        }
        String type = typeNode.textValue();
        try {
            return EdgeTaskTypeV2.valueOf(type);
        } catch (IllegalArgumentException e) {
            throw ErrorMessageHelper.invalidFieldValue(
                    "type",
                    type,
                    context,
                    ErrorMessageHelper.enumNames(Arrays.asList(EdgeTaskTypeV2.values())));
        }
    }
}
4,373
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/v2/parsing/ParseNodeTaskTypeV2.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.v2.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ErrorMessageHelper;
import com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing.ParsingContext;
import com.amazonaws.services.neptune.profiles.neptune_ml.v2.config.NodeTaskTypeV2;
import com.fasterxml.jackson.databind.JsonNode;

import java.util.Arrays;

/**
 * Reads the mandatory {@code type} field of a node-task JSON entry.
 */
public class ParseNodeTaskTypeV2 {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseNodeTaskTypeV2(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * Returns the node task type named by the {@code type} field.
     *
     * @throws IllegalArgumentException if the field is missing, non-textual, or not a valid enum name
     */
    public NodeTaskTypeV2 parseTaskType() {
        JsonNode typeNode = json.get("type");
        if (typeNode == null || !typeNode.isTextual()) {
            throw ErrorMessageHelper.errorParsingField(
                    "type",
                    context,
                    "one of the following values: " +
                            ErrorMessageHelper.quoteList(Arrays.asList(NodeTaskTypeV2.values())));
        }
        String type = typeNode.textValue();
        try {
            return NodeTaskTypeV2.valueOf(type);
        } catch (IllegalArgumentException e) {
            throw ErrorMessageHelper.invalidFieldValue(
                    "type",
                    type,
                    context,
                    ErrorMessageHelper.enumNames(Arrays.asList(NodeTaskTypeV2.values())));
        }
    }
}
4,374
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/PropertyName.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common;

import com.amazonaws.services.neptune.propertygraph.io.PrinterOptions;
import com.amazonaws.services.neptune.propertygraph.schema.PropertySchema;

/**
 * Strategy for rendering a property's name, either escaped for output
 * (per the supplied printer options) or in its raw, unescaped form.
 */
public interface PropertyName {

    /** Returns the property name escaped according to {@code printerOptions}. */
    String escaped(PropertySchema propertySchema, PrinterOptions printerOptions);

    /** Returns the raw, unescaped property name. */
    String unescaped(PropertySchema propertySchema);
}
4,375
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/config/Separator.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.config;

import com.fasterxml.jackson.core.JsonGenerator;
import org.apache.commons.lang.StringUtils;

import java.io.IOException;

/**
 * Optional separator for multi-value properties. When no explicit separator is
 * configured, a default of {@code ";"} is emitted for multi-value fields only.
 */
public class Separator {

    private static final String DEFAULT_SEPARATOR = ";";

    private final String separator;

    public Separator() {
        this(null);
    }

    public Separator(String separator) {
        this.separator = separator;
    }

    /**
     * Writes a {@code separator} field to {@code generator}: the configured value
     * when present, otherwise the default — but only for multi-value properties.
     */
    public void writeTo(JsonGenerator generator, boolean isMultiValue) throws IOException {
        if (StringUtils.isNotEmpty(separator)) {
            generator.writeStringField("separator", separator);
            return;
        }
        if (isMultiValue) {
            generator.writeStringField("separator", DEFAULT_SEPARATOR);
        }
    }
}
4,376
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/config/Norm.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.config;

import com.fasterxml.jackson.core.JsonGenerator;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;

/**
 * Normalization strategies for numerical features. Each constant has a public,
 * formatted name (e.g. {@code "min-max"}) used in configuration JSON.
 */
public enum Norm {
    none("none"),
    min_max("min-max"),
    standard("standard");

    private final String formattedName;

    Norm(String formattedName) {
        this.formattedName = formattedName;
    }

    /** The public, configuration-facing name of this norm. */
    public String formattedName() {
        return formattedName;
    }

    /** Writes this norm as a {@code norm} field on {@code generator}. */
    public void addTo(JsonGenerator generator) throws IOException {
        generator.writeStringField("norm", formattedName());
    }

    @Override
    public String toString() {
        return formattedName();
    }

    /** Returns whether {@code s} is a recognized formatted norm name. */
    public static boolean isValid(String s) {
        for (Norm candidate : values()) {
            if (candidate.formattedName().equals(s)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns the norm whose formatted name equals {@code s}.
     *
     * @throws IllegalArgumentException if no norm matches
     */
    public static Norm fromString(String s) {
        for (Norm candidate : values()) {
            if (candidate.formattedName().equals(s)) {
                return candidate;
            }
        }
        throw new IllegalArgumentException("Invalid norm value: " + s);
    }

    /** All formatted norm names, in declaration order. */
    public static Collection<String> formattedNames() {
        Collection<String> results = new ArrayList<>();
        for (Norm candidate : values()) {
            results.add(candidate.formattedName());
        }
        return results;
    }
}
4,377
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/config/Word2VecConfig.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.config;

import com.amazonaws.services.neptune.propertygraph.Label;

import java.util.Collection;

/**
 * Immutable configuration for a word2vec text feature: the labelled element,
 * the property to encode, and the language model(s) to use.
 */
public class Word2VecConfig {

    private final Label label;
    private final String property;
    private final Collection<String> languages;

    public Word2VecConfig(Label label, String property, Collection<String> languages) {
        this.label = label;
        this.property = property;
        this.languages = languages;
    }

    public Label label() {
        return label;
    }

    public String property() {
        return property;
    }

    public Collection<String> languages() {
        return languages;
    }

    @Override
    public String toString() {
        return String.format("Word2VecConfig{label=%s, property='%s', languages=%s}",
                label, property, languages);
    }
}
4,378
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/config/Range.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.config;

import com.amazonaws.services.neptune.propertygraph.schema.DataType;

/**
 * A numeric range. Both endpoints are converted to their broadest common
 * numeric type, and swapped if supplied out of order, so that
 * {@code low <= high} always holds.
 */
public class Range {

    private final Object low;
    private final Object high;

    /**
     * @throws IllegalArgumentException if either endpoint is non-numeric
     */
    public Range(Object low, Object high) {
        DataType lowType = DataType.dataTypeFor(low.getClass());
        DataType highType = DataType.dataTypeFor(high.getClass());

        if (!lowType.isNumeric() || !highType.isNumeric()) {
            throw new IllegalArgumentException("Low and high values must be numeric");
        }

        DataType broadest = DataType.getBroadestType(lowType, highType);
        Object convertedHigh = broadest.convert(high);
        Object convertedLow = broadest.convert(low);

        // Normalize ordering: the larger converted value becomes 'high'.
        if (broadest.compare(convertedHigh, convertedLow) >= 0) {
            this.high = convertedHigh;
            this.low = convertedLow;
        } else {
            this.high = convertedLow;
            this.low = convertedHigh;
        }
    }

    public Object low() {
        return low;
    }

    public Object high() {
        return high;
    }

    @Override
    public String toString() {
        return String.format("Range{low=%s, high=%s}", low, high);
    }
}
4,379
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/config/SupportedWord2VecLanguages.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.config;

/**
 * Language models supported for word2vec features. Currently only the
 * spaCy English large web model is supported.
 */
public enum SupportedWord2VecLanguages {
    en_core_web_lg
}
4,380
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseProperty.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

/**
 * Reads property name(s) from a feature/task JSON entry. The field names
 * ("property"/"properties" vs. their RDF equivalents) are supplied by the
 * source data model.
 */
public class ParseProperty {

    private final JsonNode json;
    private final ParsingContext context;
    private final NeptuneMLSourceDataModel dataModel;

    public ParseProperty(JsonNode json, ParsingContext context, NeptuneMLSourceDataModel dataModel) {
        this.json = json;
        this.context = context;
        this.dataModel = dataModel;
    }

    public ParseProperty(JsonNode json, ParsingContext context) {
        this(json, context, NeptuneMLSourceDataModel.PropertyGraph);
    }

    /**
     * Returns the single property name.
     *
     * @throws IllegalArgumentException if the field is missing or non-textual
     */
    public String parseSingleProperty() {
        String fieldName = dataModel.nodeAttributeNameSingular().toLowerCase();
        JsonNode field = json.get(fieldName);
        if (field == null || !field.isTextual()) {
            throw ErrorMessageHelper.errorParsingField(fieldName, context,
                    String.format("a '%s' field with a string value", fieldName));
        }
        return field.textValue();
    }

    /** Returns the single property name, or an empty string if absent/non-textual. */
    public String parseNullableSingleProperty() {
        String fieldName = dataModel.nodeAttributeNameSingular().toLowerCase();
        JsonNode field = json.get(fieldName);
        if (field != null && field.isTextual()) {
            return field.textValue();
        }
        return "";
    }

    /**
     * Returns one or more property names: either a single textual field
     * (singular name) or an array field (plural name).
     *
     * @throws IllegalArgumentException if neither form is present
     */
    public Collection<String> parseMultipleProperties() {
        String singularField = dataModel.nodeAttributeNameSingular().toLowerCase();
        String pluralField = dataModel.nodeAttributeNamePlural().toLowerCase();

        if (json.has(singularField) && json.get(singularField).isTextual()) {
            return Collections.singletonList(json.get(singularField).textValue());
        }

        if (json.has(pluralField) && json.get(pluralField).isArray()) {
            Collection<String> results = new ArrayList<>();
            for (JsonNode element : (ArrayNode) json.get(pluralField)) {
                results.add(element.textValue());
            }
            return results;
        }

        throw new IllegalArgumentException(String.format(
                "Expected a '%s' field with a string value, or a '%s' field with an array value for %s.",
                singularField, pluralField, context));
    }
}
4,381
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseEdgeType.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;

import java.util.*;

/**
 * Reads an {@code edge} field of the form {@code [fromLabel(s), edgeLabel, toLabel(s)]}
 * into a {@link Label}. The first and third entries may be strings or arrays of strings.
 */
public class ParseEdgeType {

    private final JsonNode json;
    private final ParsingContext parsingContext;

    public ParseEdgeType(JsonNode json, ParsingContext parsingContext) {
        this.json = json;
        this.parsingContext = parsingContext;
    }

    /**
     * Returns the edge label described by the {@code edge} array.
     *
     * @throws IllegalArgumentException if the field is missing, not an array, or not of length 3
     */
    public Label parseEdgeType() {
        if (!json.has("edge") || !json.path("edge").isArray()) {
            throw error();
        }
        ArrayNode edge = (ArrayNode) json.get("edge");
        if (edge.size() != 3) {
            throw error();
        }

        List<String> fromLabels = getLabels(edge.get(0));
        String edgeLabel = edge.get(1).textValue();
        List<String> toLabels = getLabels(edge.get(2));

        // Single from/to labels use the simpler Label constructor; otherwise pass lists.
        return (fromLabels.size() == 1 && toLabels.size() == 1)
                ? new Label(edgeLabel, fromLabels.get(0), toLabels.get(0))
                : new Label(edgeLabel, fromLabels, toLabels);
    }

    // Accepts a string (single label) or an array of strings; anything else yields no labels.
    private List<String> getLabels(JsonNode jsonNode) {
        if (jsonNode.isTextual()) {
            return Collections.singletonList(jsonNode.textValue());
        }
        if (jsonNode.isArray()) {
            List<String> labels = new ArrayList<>();
            for (JsonNode element : jsonNode) {
                labels.add(element.textValue());
            }
            return labels;
        }
        return Collections.emptyList();
    }

    private IllegalArgumentException error() {
        return ErrorMessageHelper.errorParsingField("edge", parsingContext, "an array with 3 values");
    }
}
4,382
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseSplitRate.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Optional;

/**
 * Reads the optional {@code split_rate} field: an array of exactly 3 doubles
 * that must sum to 1.0. Falls back to the supplied defaults when absent.
 */
public class ParseSplitRate {

    // Tolerance for the sum-to-1.0 check; double addition is inexact
    // (e.g. 0.7 + 0.2 + 0.1 == 0.9999999999999999).
    private static final double SUM_TOLERANCE = 1e-9;

    private final JsonNode json;
    private final Collection<Double> defaultSplitRates;
    private final ParsingContext context;

    public ParseSplitRate(JsonNode json, Collection<Double> defaultSplitRates, ParsingContext context) {
        this.json = json;
        this.defaultSplitRates = defaultSplitRates;
        this.context = context;
    }

    /**
     * Returns the configured split rates, or {@code defaultSplitRates} when the
     * field is absent.
     *
     * @throws IllegalArgumentException if the field is present but is not an array
     *                                  of 3 doubles summing to 1.0
     */
    public Collection<Double> parseSplitRates() {
        if (!json.has("split_rate")) {
            return defaultSplitRates;
        }
        if (!json.get("split_rate").isArray()) {
            throw error();
        }
        ArrayNode splitRatesArray = (ArrayNode) json.get("split_rate");
        if (splitRatesArray.size() != 3) {
            throw error();
        }
        Collection<Double> splitRates = new ArrayList<>();
        for (JsonNode jsonNode : splitRatesArray) {
            if (!jsonNode.isDouble()) {
                throw error();
            }
            splitRates.add(jsonNode.asDouble());
        }
        Optional<Double> sum = splitRates.stream().reduce(Double::sum);
        // FIX: the previous exact comparison (sum != 1.0) rejected valid inputs such
        // as [0.7, 0.2, 0.1] due to floating-point rounding; compare within tolerance.
        if (Math.abs(sum.orElse(0.0) - 1.0) > SUM_TOLERANCE) {
            throw error();
        }
        return splitRates;
    }

    private IllegalArgumentException error() {
        return ErrorMessageHelper.errorParsingField("split_rate", context,
                "an array with 3 double values that add up to 1.0");
    }
}
4,383
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseSubject.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.fasterxml.jackson.databind.JsonNode;

/**
 * Reads the optional {@code subject} field of a JSON entry.
 */
public class ParseSubject {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseSubject(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * Returns the {@code subject} value, or {@code null} when the field is absent.
     *
     * @throws IllegalArgumentException if the field is present but not textual
     */
    public String parseSubject() {
        if (!json.has("subject")) {
            return null;
        }
        JsonNode subjectNode = json.get("subject");
        if (!subjectNode.isTextual()) {
            throw error();
        }
        return subjectNode.textValue();
    }

    private IllegalArgumentException error() {
        return ErrorMessageHelper.errorParsingField("subject", context, "a text value");
    }
}
4,384
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseSeparator.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Separator;
import com.fasterxml.jackson.databind.JsonNode;

/**
 * Reads the optional {@code separator} field of a JSON entry.
 */
public class ParseSeparator {

    private final JsonNode json;

    public ParseSeparator(JsonNode json) {
        this.json = json;
    }

    /**
     * Returns a {@link Separator} wrapping the configured value, or a default
     * (empty) separator when the field is absent.
     */
    public Separator parseSeparator() {
        if (!json.has("separator")) {
            return new Separator();
        }
        return new Separator(json.get("separator").textValue());
    }
}
4,385
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseObject.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.fasterxml.jackson.databind.JsonNode;

/**
 * Reads the optional {@code object} field of a JSON entry.
 */
public class ParseObject {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseObject(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * Returns the {@code object} value, or {@code null} when the field is absent.
     *
     * @throws IllegalArgumentException if the field is present but not textual
     */
    public String parseObject() {
        if (!json.has("object")) {
            return null;
        }
        JsonNode objectNode = json.get("object");
        if (!objectNode.isTextual()) {
            throw error();
        }
        return objectNode.textValue();
    }

    private IllegalArgumentException error() {
        return ErrorMessageHelper.errorParsingField("object", context, "a text value");
    }
}
4,386
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseNorm.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Norm;
import com.fasterxml.jackson.databind.JsonNode;

/**
 * Reads the optional {@code norm} field of a feature-config JSON entry.
 */
public class ParseNorm {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseNorm(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * Returns the configured norm, defaulting to {@link Norm#min_max} when the
     * field is absent.
     *
     * @throws IllegalArgumentException if the value is not a recognized norm name
     */
    public Norm parseNorm() {
        if (!json.has("norm")) {
            return Norm.min_max;
        }
        String value = json.get("norm").textValue();
        if (!Norm.isValid(value)) {
            throw ErrorMessageHelper.invalidFieldValue("norm", value, context, Norm.formattedNames());
        }
        return Norm.fromString(value);
    }
}
4,387
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseWord2VecLanguage.java
/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.SupportedWord2VecLanguages;
import com.fasterxml.jackson.databind.JsonNode;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Reads the optional {@code language} field of a word2vec feature entry. The
 * field may be a single string or an array of strings; when absent (or empty),
 * the default language model is returned.
 */
public class ParseWord2VecLanguage {

    private final JsonNode json;

    public ParseWord2VecLanguage(JsonNode json) {
        this.json = json;
    }

    /** Returns the configured language(s), defaulting to en_core_web_lg. */
    public Collection<String> parseLanguage() {
        Collection<String> languages = new ArrayList<>();
        if (json.has("language")) {
            JsonNode languageNode = json.get("language");
            if (languageNode.isArray()) {
                for (JsonNode element : languageNode) {
                    languages.add(element.textValue());
                }
            } else if (languageNode.isTextual()) {
                languages.add(languageNode.textValue());
            }
        }
        if (languages.isEmpty()) {
            languages.add(SupportedWord2VecLanguages.en_core_web_lg.name());
        }
        return languages;
    }
}
4,388
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseSlideWindowSize.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.fasterxml.jackson.databind.JsonNode;

/**
 * Parses the optional {@code slide_window_size} field of a JSON config node.
 */
public class ParseSlideWindowSize {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseSlideWindowSize(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * Returns the integer value of {@code slide_window_size}, or {@code 0} when the
     * field is absent.
     *
     * @throws IllegalArgumentException if the field is present but not an integer
     */
    public int parseSlideWindowSize() {
        if (!json.has("slide_window_size")) {
            return 0;
        }
        JsonNode sizeNode = json.path("slide_window_size");
        if (!sizeNode.isInt()) {
            throw ErrorMessageHelper.errorParsingField("slide_window_size", context, "an integer");
        }
        return sizeNode.asInt();
    }
}
4,389
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ErrorMessageHelper.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Helpers for building consistent {@link IllegalArgumentException}s and quoted value
 * lists used in parsing error messages.
 */
public class ErrorMessageHelper {

    /**
     * Builds an exception reporting an invalid field value, listing the valid alternatives.
     */
    public static IllegalArgumentException invalidFieldValue(String fieldName,
                                                             String value,
                                                             ParsingContext context,
                                                             Collection<String> validValues) {
        String message = String.format("Invalid '%s' value for %s: '%s'. Valid values are: %s.",
                fieldName, context, value, ErrorMessageHelper.quoteList(validValues));
        return new IllegalArgumentException(message);
    }

    /**
     * Builds an exception reporting that a field could not be parsed as the expected type.
     */
    public static IllegalArgumentException errorParsingField(String fieldName, ParsingContext context, String expected) {
        String message = String.format("Error parsing '%s' field for %s. Expected %s.", fieldName, context, expected);
        return new IllegalArgumentException(message);
    }

    /**
     * Returns the values single-quoted and comma-separated, e.g. {@code 'a', 'b'}.
     */
    public static String quoteList(Collection<String> values) {
        return values.stream()
                .map(value -> String.format("'%s'", value))
                .collect(Collectors.joining(", "));
    }

    /**
     * Returns the enum constant names single-quoted and comma-separated.
     */
    public static String quoteList(List<Enum<?>> enums) {
        return quoteList(enumNames(enums));
    }

    /**
     * Returns the {@code name()} of each enum constant, in order.
     */
    public static Collection<String> enumNames(List<Enum<?>> enums) {
        return enums.stream().map(Enum::name).collect(Collectors.toList());
    }
}
4,390
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseNodeType.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.amazonaws.services.neptune.propertygraph.Label;
import com.fasterxml.jackson.databind.JsonNode;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Parses the required {@code node} field of a JSON config node into a {@link Label}.
 * The field may be a single text value or an array of text values.
 */
public class ParseNodeType {

    private final JsonNode json;
    private final ParsingContext context;

    public ParseNodeType(JsonNode json, ParsingContext context) {
        this.json = json;
        this.context = context;
    }

    /**
     * Returns the {@link Label} described by the {@code node} field.
     *
     * @throws IllegalArgumentException if the field is absent, or is neither text nor an array
     */
    public Label parseNodeType() {
        // path() yields a "missing" node when absent; it is neither textual nor an array,
        // so the absent case falls through to error() just like a wrongly-typed value.
        JsonNode nodeField = json.path("node");
        if (nodeField.isTextual()) {
            return new Label(nodeField.textValue());
        }
        if (nodeField.isArray()) {
            Collection<String> labels = new ArrayList<>();
            for (JsonNode element : nodeField) {
                labels.add(element.textValue());
            }
            return new Label(labels);
        }
        throw error();
    }

    // Single error shape shared by every failure path.
    private IllegalArgumentException error() {
        return ErrorMessageHelper.errorParsingField("node", context, "a text value or array of text values");
    }
}
4,391
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParsingContext.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMLSourceDataModel;
import com.amazonaws.services.neptune.propertygraph.Label;
import org.apache.commons.lang.StringUtils;

import java.util.Collection;
import java.util.Collections;

/**
 * Immutable description of where in a configuration document parsing is taking place,
 * used to build human-readable error messages. The {@code with*} methods return new
 * instances with additional detail attached.
 */
public class ParsingContext {

    private final String description;
    private final Label label;
    private final Collection<String> properties;
    private final NeptuneMLSourceDataModel dataModel;

    private ParsingContext(String description, Label label, Collection<String> properties, NeptuneMLSourceDataModel dataModel) {
        this.description = description;
        this.label = label;
        this.properties = properties;
        this.dataModel = dataModel;
    }

    public ParsingContext(String description, NeptuneMLSourceDataModel dataModel) {
        this(description, null, Collections.emptyList(), dataModel);
    }

    public ParsingContext(String description) {
        this(description, null, Collections.emptyList(), NeptuneMLSourceDataModel.PropertyGraph);
    }

    /** Returns a copy of this context with the given label attached. */
    public ParsingContext withLabel(Label label) {
        return new ParsingContext(description, label, properties, dataModel);
    }

    /** Returns a copy of this context with the given properties attached. */
    public ParsingContext withProperties(Collection<String> properties) {
        return new ParsingContext(description, label, properties, dataModel);
    }

    /**
     * Returns a copy of this context with the single given property attached,
     * or this context unchanged when the property name is empty.
     */
    public ParsingContext withProperty(String property) {
        if (StringUtils.isEmpty(property)) {
            return this;
        }
        return new ParsingContext(description, label, Collections.singleton(property), dataModel);
    }

    @Override
    public String toString() {
        // Most-specific rendering first: label plus exactly one property,
        // then label plus several properties, then label alone.
        if (label == null) {
            return description;
        }
        if (properties.size() == 1) {
            return String.format("%s (%s: %s, %s: %s)",
                    description,
                    dataModel.nodeTypeName(),
                    label.allLabelsAsArrayString(),
                    dataModel.nodeAttributeNameSingular(),
                    properties.iterator().next());
        }
        if (!properties.isEmpty()) {
            return String.format("%s (%s: %s, %s: [%s])",
                    description,
                    dataModel.nodeTypeName(),
                    label.allLabelsAsArrayString(),
                    dataModel.nodeAttributeNamePlural(),
                    String.join(", ", properties));
        }
        return String.format("%s (%s: %s)",
                description,
                dataModel.nodeTypeName(),
                label.allLabelsAsArrayString());
    }
}
4,392
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/profiles/neptune_ml/common/parsing/ParseRange.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.profiles.neptune_ml.common.parsing;

import com.amazonaws.services.neptune.profiles.neptune_ml.common.config.Range;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;

/**
 * Parses a required two-element numeric array field of a JSON config node into a {@link Range}.
 */
public class ParseRange {

    private final JsonNode json;
    private final String fieldName;
    private final ParsingContext context;

    public ParseRange(JsonNode json, String fieldName, ParsingContext context) {
        this.json = json;
        this.fieldName = fieldName;
        this.context = context;
    }

    /**
     * Returns the {@link Range} described by the field.
     *
     * @throws IllegalArgumentException if the field is absent, not an array, not exactly
     *                                  two elements, or contains non-numeric elements
     */
    public Range parseRange() {
        if (!json.has(fieldName) || !json.path(fieldName).isArray()) {
            throw error();
        }
        ArrayNode values = (ArrayNode) json.path(fieldName);
        boolean wrongShape = values.size() != 2
                || !values.get(0).isNumber()
                || !values.get(1).isNumber();
        if (wrongShape) {
            throw error();
        }
        return new Range(values.get(0).numberValue(), values.get(1).numberValue());
    }

    // Single error shape shared by every failure path.
    private IllegalArgumentException error() {
        return ErrorMessageHelper.errorParsingField(fieldName, context, "an array with 2 numeric values");
    }
}
4,393
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/export/EndpointValidator.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.export;

import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Normalizes endpoint strings by stripping any protocol prefix and port suffix,
 * logging a warning whenever something is removed.
 */
public class EndpointValidator {

    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(EndpointValidator.class);

    /** Returns a new collection containing the normalized form of each endpoint, in order. */
    public static Collection<String> validate(Collection<String> endpoints) {
        Collection<String> normalized = new ArrayList<>();
        for (String endpoint : endpoints) {
            normalized.add(validate(endpoint));
        }
        return normalized;
    }

    /** Returns the endpoint with any ws/wss/http/https protocol and any {@code :port} removed. */
    public static String validate(String endpoint) {
        boolean hasProtocol = endpoint.startsWith("ws://")
                || endpoint.startsWith("wss://")
                || endpoint.startsWith("http://")
                || endpoint.startsWith("https://");
        if (hasProtocol) {
            logger.warn("Endpoint cannot contain protocol. Removing protocol: {}", endpoint);
            endpoint = endpoint.substring(endpoint.indexOf("//") + 2);
        }
        int portSeparator = endpoint.indexOf(":");
        if (portSeparator >= 0) {
            logger.warn("Endpoint cannot contain port. Removing port: {}", endpoint);
            endpoint = endpoint.substring(0, portSeparator);
        }
        return endpoint;
    }
}
4,394
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/export/FeatureToggles.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.export;

import java.util.Collection;

/**
 * Wraps the set of {@link FeatureToggle}s enabled for an export run and answers
 * membership queries against it.
 */
public class FeatureToggles {

    private final Collection<FeatureToggle> features;

    public FeatureToggles(Collection<FeatureToggle> features) {
        this.features = features;
    }

    /** Returns {@code true} when the given feature toggle is enabled. */
    public boolean containsFeature(FeatureToggle feature) {
        return features.contains(feature);
    }
}
4,395
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/export/Logger.java
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.export;

/**
 * Minimal logging sink: implementations receive one message string per call.
 */
public interface Logger {

    /** Records a single log message. */
    void log(String s);
}
4,396
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/export/NeptuneExportEventHandler.java
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://www.apache.org/licenses/LICENSE-2.0 or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.amazonaws.services.neptune.export; import com.amazonaws.services.neptune.cluster.Cluster; import com.amazonaws.services.neptune.io.Directories; import com.amazonaws.services.neptune.propertygraph.ExportStats; import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema; public interface NeptuneExportEventHandler { NeptuneExportEventHandler NULL_EVENT_HANDLER = new NeptuneExportEventHandler() { @Override public void onError() { // Do nothing } @Override public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster) throws Exception { //Do nothing } @Override public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception { //Do nothing } }; void onError(); void onExportComplete(Directories directories, ExportStats stats, Cluster cluster) throws Exception; void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception; }
4,397
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/export/NeptuneExportService.java
/*
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.export;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.neptune.profiles.incremental_export.IncrementalExportEventHandler;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMachineLearningExportEventHandlerV1;
import com.amazonaws.services.neptune.profiles.neptune_ml.NeptuneMachineLearningExportEventHandlerV2;
import com.amazonaws.services.neptune.util.EnvironmentVariableUtils;
import com.amazonaws.services.neptune.util.S3ObjectInfo;
import com.amazonaws.services.neptune.util.TransferManagerWrapper;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.Tag;
import com.amazonaws.services.s3.transfer.Download;
import com.amazonaws.services.s3.transfer.TransferManager;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.util.*;

/**
 * Orchestrates a single neptune-export run on behalf of the export service: rewrites the
 * incoming command-line args, downloads config/queries files from S3, wires up event
 * handlers (S3 upload, Neptune ML profiles, incremental export), runs the export, and
 * returns the S3 location of the results.
 */
public class NeptuneExportService {

    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(NeptuneExportService.class);

    // Tag applied to S3 objects written by this service.
    public static final List<Tag> NEPTUNE_EXPORT_TAGS = Collections.singletonList(new Tag("application", "neptune-export"));
    public static final String NEPTUNE_ML_PROFILE_NAME = "neptune_ml";
    public static final String INCREMENTAL_EXPORT_PROFILE_NAME = "incremental_export";
    // Floor for the post-buffer file descriptor budget (see comment in execute()).
    public static final int MAX_FILE_DESCRIPTOR_COUNT = 9000;

    private final String cmd;
    private final String localOutputPath;
    private final boolean cleanOutputPath;
    private final String outputS3Path;
    private final boolean createExportSubdirectory;
    private final boolean overwriteExisting;
    private final boolean uploadToS3OnError;
    private final String configFileS3Path;
    private final String queriesFileS3Path;
    private final String completionFileS3Path;
    private final ObjectNode completionFilePayload;
    private final ObjectNode additionalParams;
    private final int maxConcurrency;
    private final String s3Region;
    private final int maxFileDescriptorCount;
    private final String sseKmsKeyId;
    private final AWSCredentialsProvider s3CredentialsProvider;

    /**
     * @param cmd                      full neptune-export command line to execute
     * @param localOutputPath          local scratch directory for downloads and export output
     * @param cleanOutputPath          whether to delete the scratch directory before running
     * @param outputS3Path             S3 destination for export results
     * @param createExportSubdirectory whether results go into a per-run subdirectory
     * @param overwriteExisting        whether existing S3 objects at the destination may be overwritten
     * @param uploadToS3OnError        whether partial results are uploaded when the export fails
     * @param configFileS3Path         optional S3 location of a config file (overrides -c/--config-file)
     * @param queriesFileS3Path        optional S3 location of a queries file (overrides --queries)
     * @param completionFileS3Path     optional S3 location for a completion marker file
     * @param completionFilePayload    extra JSON written into the completion file
     * @param additionalParams         profile-specific parameters (e.g. "neptune_ml" node)
     * @param maxConcurrency           clone-cluster max concurrency (only applied when > 0)
     * @param s3Region                 region for S3 transfers
     * @param maxFileDescriptorCount   nofile ulimit reported by the container
     * @param sseKmsKeyId              optional KMS key for server-side encryption of uploads
     * @param s3CredentialsProvider    credentials used for S3 transfers
     */
    public NeptuneExportService(String cmd,
                                String localOutputPath,
                                boolean cleanOutputPath,
                                String outputS3Path,
                                boolean createExportSubdirectory,
                                boolean overwriteExisting,
                                boolean uploadToS3OnError,
                                String configFileS3Path,
                                String queriesFileS3Path,
                                String completionFileS3Path,
                                ObjectNode completionFilePayload,
                                ObjectNode additionalParams,
                                int maxConcurrency,
                                String s3Region,
                                int maxFileDescriptorCount,
                                String sseKmsKeyId,
                                AWSCredentialsProvider s3CredentialsProvider) {
        this.cmd = cmd;
        this.localOutputPath = localOutputPath;
        this.cleanOutputPath = cleanOutputPath;
        this.outputS3Path = outputS3Path;
        this.createExportSubdirectory = createExportSubdirectory;
        this.overwriteExisting = overwriteExisting;
        this.uploadToS3OnError = uploadToS3OnError;
        this.configFileS3Path = configFileS3Path;
        this.queriesFileS3Path = queriesFileS3Path;
        this.completionFileS3Path = completionFileS3Path;
        this.completionFilePayload = completionFilePayload;
        this.additionalParams = additionalParams;
        this.maxConcurrency = maxConcurrency;
        this.s3Region = s3Region;
        this.maxFileDescriptorCount = maxFileDescriptorCount;
        this.sseKmsKeyId = sseKmsKeyId;
        this.s3CredentialsProvider = s3CredentialsProvider;
    }

    /**
     * Runs the export end to end and returns the S3 location of the uploaded results.
     *
     * @throws IOException      if clearing the local scratch directory fails
     * @throws RuntimeException if the incoming command line cannot be parsed
     */
    public S3ObjectInfo execute() throws IOException {

        // Phase 1: rewrite the incoming command line.
        Args args;
        try {
            args = new Args(cmd);

            // Config/queries options will be re-added later pointing at locally downloaded copies.
            if (StringUtils.isNotEmpty(configFileS3Path)) {
                args.removeOptions("-c", "--config-file");
            }
            if (StringUtils.isNotEmpty(queriesFileS3Path)) {
                args.removeOptions("--queries");
            }

            if (args.contains("create-pg-config") ||
                    args.contains("export-pg") ||
                    args.contains("export-pg-from-config") ||
                    args.contains("export-pg-from-queries") ||
                    args.contains("export-rdf")) {

                // Force output into the service-managed scratch directory.
                args.removeOptions("-d", "--dir");
                args.addOption("-d", new File(localOutputPath, "output").getAbsolutePath());

                if (maxConcurrency > 0 && !args.contains("--clone-cluster-max-concurrency")) {
                    args.addOption("--clone-cluster-max-concurrency", String.valueOf(maxConcurrency));
                }

                // Correlate cloned clusters with the AWS Batch job when one is available.
                if (!args.contains("--clone-cluster-correlation-id")){
                    String correlationId = EnvironmentVariableUtils.getOptionalEnv("AWS_BATCH_JOB_ID", null);
                    if (StringUtils.isNotEmpty(correlationId)){
                        args.addOption("--clone-cluster-correlation-id", correlationId);
                    }
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

        // Phase 2: prepare the local scratch area and download config/queries files.
        try (TransferManagerWrapper transferManager = new TransferManagerWrapper(s3Region, s3CredentialsProvider)) {
            if (cleanOutputPath) {
                clearTempFiles();
            }
            if (StringUtils.isNotEmpty(configFileS3Path)) {
                updateArgs(args, "--config-file", downloadFile(transferManager.get(), configFileS3Path));
            }
            if (StringUtils.isNotEmpty(queriesFileS3Path)) {
                updateArgs(args, "--queries", downloadFile(transferManager.get(), queriesFileS3Path));
            }
        }

        // Supplying neptune_ml parameters implies the neptune_ml profile.
        if (additionalParams.has(NEPTUNE_ML_PROFILE_NAME) && (!args.contains("--profile", NEPTUNE_ML_PROFILE_NAME))) {
            args.addOption("--profile", NEPTUNE_ML_PROFILE_NAME);
        }

        Collection<String> profiles = args.getOptionValues("--profile");

        // Fail fast before exporting if we may not overwrite and the destination is occupied.
        if (!createExportSubdirectory && !overwriteExisting) {
            checkS3OutputIsEmpty();
        }

        // Phase 3: assemble event handlers.
        EventHandlerCollection eventHandlerCollection = new EventHandlerCollection();

        Collection<CompletionFileWriter> completionFileWriters = new ArrayList<>();

        ExportToS3NeptuneExportEventHandler.S3UploadParams s3UploadParams =
                new ExportToS3NeptuneExportEventHandler.S3UploadParams()
                        .setCreateExportSubdirectory(createExportSubdirectory)
                        .setOverwriteExisting(overwriteExisting);

        ExportToS3NeptuneExportEventHandler exportToS3EventHandler = new ExportToS3NeptuneExportEventHandler(
                localOutputPath,
                outputS3Path,
                s3Region,
                completionFileS3Path,
                completionFilePayload,
                uploadToS3OnError,
                s3UploadParams,
                profiles,
                completionFileWriters,
                sseKmsKeyId,
                s3CredentialsProvider);

        eventHandlerCollection.addHandler(exportToS3EventHandler);

        if (profiles.contains(NEPTUNE_ML_PROFILE_NAME)) {
            JsonNode neptuneMlNode = additionalParams.path(NEPTUNE_ML_PROFILE_NAME);
            // NOTE(review): useV2 is computed but never read — only the useV1 flag selects
            // the handler (anything non-v1 falls through to V2). Confirm whether useV2 can
            // be removed or was meant to participate in the branch.
            boolean useV2 = args.contains("--feature-toggle", FeatureToggle.NeptuneML_V2.name()) ||
                    (neptuneMlNode.has("version") && neptuneMlNode.get("version").textValue().startsWith("v2."));
            boolean useV1 = (neptuneMlNode.has("version") && neptuneMlNode.get("version").textValue().startsWith("v1."));
            if (useV1) {
                NeptuneMachineLearningExportEventHandlerV1 neptuneMlEventHandler =
                        new NeptuneMachineLearningExportEventHandlerV1(
                                outputS3Path,
                                s3Region,
                                createExportSubdirectory,
                                additionalParams,
                                args,
                                profiles,
                                sseKmsKeyId,
                                s3CredentialsProvider);
                eventHandlerCollection.addHandler(neptuneMlEventHandler);
            } else {
                NeptuneMachineLearningExportEventHandlerV2 neptuneMlEventHandler =
                        new NeptuneMachineLearningExportEventHandlerV2(
                                outputS3Path,
                                s3Region,
                                createExportSubdirectory,
                                additionalParams,
                                args,
                                profiles,
                                sseKmsKeyId,
                                s3CredentialsProvider);
                eventHandlerCollection.addHandler(neptuneMlEventHandler);
            }
        }

        if (profiles.contains(INCREMENTAL_EXPORT_PROFILE_NAME)) {
            IncrementalExportEventHandler incrementalExportEventHandler = new IncrementalExportEventHandler(additionalParams);
            completionFileWriters.add(incrementalExportEventHandler);
            eventHandlerCollection.addHandler(incrementalExportEventHandler);
        }

        /*
         * We are removing a buffer of 1000 for maxFileDescriptorCount used at
         * com.amazonaws.services.neptune.propertygraph.io.LabelWriters#put
         * since the value received from neptune-export service is set as the `nofile` ulimit in the AWS Batch
         * container properties and there might be other processes on the container having open files.
         * This ensures we close the leastRecentlyAccessed files before exceeding the hard limit for `nofile` ulimit.
         */
        final int maxFileDescriptorCountAfterRemovingBuffer = Math.max(maxFileDescriptorCount - 1000, MAX_FILE_DESCRIPTOR_COUNT);

        // Phase 4: run the export.
        eventHandlerCollection.onBeforeExport(args, s3UploadParams);

        logger.info("Args after service init: {}", String.join(" ", args.values()));

        new NeptuneExportRunner(args.values(), eventHandlerCollection, false, maxFileDescriptorCountAfterRemovingBuffer).run();

        return exportToS3EventHandler.result();
    }

    /**
     * Throws if the S3 destination prefix already contains at least one object.
     */
    private void checkS3OutputIsEmpty() {
        // NOTE(review): this uses the default S3 client, ignoring the s3Region and
        // s3CredentialsProvider fields that every other S3 call in this class uses —
        // confirm whether this check should also honor them.
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();

        S3ObjectInfo s3ObjectInfo = new S3ObjectInfo(outputS3Path);
        // maxKeys = 1: we only need to know whether anything exists under the prefix.
        ObjectListing listing = s3.listObjects(
                new ListObjectsRequest(
                        s3ObjectInfo.bucket(),
                        s3ObjectInfo.key(),
                        null,
                        null,
                        1));

        if (!listing.getObjectSummaries().isEmpty()) {
            throw new IllegalStateException(String.format("S3 destination contains existing objects: %s. Set 'overwriteExisting' parameter to 'true' to allow overwriting existing objects.", outputS3Path));
        }
    }

    /**
     * Deletes the local scratch directory if it exists.
     */
    private void clearTempFiles() throws IOException {
        File directory = new File(localOutputPath);
        if (directory.exists() && directory.isDirectory()) {
            FileUtils.deleteDirectory(directory);
        }
    }

    /**
     * Adds {@code option} to {@code args} with the given value, skipping null values
     * (e.g. when a download returned nothing).
     */
    private void updateArgs(Args args, String option, Object value) {
        if (value != null) {
            args.addOption(option, value.toString());
        }
    }

    /**
     * Downloads an S3 object into the local scratch directory and returns the local file,
     * or null when the S3 path is empty. Re-interrupts the thread if the download is
     * interrupted (the possibly-partial file is still returned).
     */
    private File downloadFile(TransferManager transferManager, String s3Path) {
        if (StringUtils.isEmpty(s3Path)) {
            return null;
        }

        S3ObjectInfo configFileS3ObjectInfo = new S3ObjectInfo(s3Path);
        File file = configFileS3ObjectInfo.createDownloadFile(localOutputPath);

        logger.info("Bucket: " + configFileS3ObjectInfo.bucket());
        logger.info("Key : " + configFileS3ObjectInfo.key());
        logger.info("File : " + file);

        Download download = transferManager.download(
                configFileS3ObjectInfo.bucket(),
                configFileS3ObjectInfo.key(),
                file);
        try {
            download.waitForCompletion();
        } catch (InterruptedException e) {
            logger.warn(e.getMessage());
            Thread.currentThread().interrupt();
        }

        return file.getAbsoluteFile();
    }
}
4,398
0
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune
Create_ds/neptune-export/src/main/java/com/amazonaws/services/neptune/export/EventHandlerCollection.java
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at
    http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/

package com.amazonaws.services.neptune.export;

import com.amazonaws.services.neptune.cluster.Cluster;
import com.amazonaws.services.neptune.io.Directories;
import com.amazonaws.services.neptune.propertygraph.ExportStats;
import com.amazonaws.services.neptune.propertygraph.schema.GraphSchema;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Composite event handler: fans each export/service event out to every registered
 * handler. Per-handler exceptions are logged and do not stop delivery to the remaining
 * handlers; onExportComplete/onBeforeExport throw a single summary RuntimeException
 * afterwards if any handler failed.
 */
public class EventHandlerCollection implements NeptuneExportServiceEventHandler {

    // All registered handlers, in registration order.
    private final List<NeptuneExportEventHandler> exportHandlers = new ArrayList<>();
    // Subset of exportHandlers that also implement the service-level interface.
    private final List<NeptuneExportServiceEventHandler> serviceHandlers = new ArrayList<>();

    private static final org.slf4j.Logger logger = LoggerFactory.getLogger(EventHandlerCollection.class);

    /**
     * Registers a handler. Handlers that also implement NeptuneExportServiceEventHandler
     * additionally receive onBeforeExport callbacks.
     */
    public <T extends NeptuneExportEventHandler> void addHandler(T handler){
        exportHandlers.add(handler);
        if (NeptuneExportServiceEventHandler.class.isAssignableFrom(handler.getClass())){
            serviceHandlers.add((NeptuneExportServiceEventHandler) handler);
        }
    }

    /**
     * Notifies every handler of an export error, in registration order.
     * Failures here are only warned about — no summary exception is thrown.
     */
    @Override
    public void onError() {
        for (NeptuneExportEventHandler handler : exportHandlers) {
            try {
                handler.onError();
            } catch (Exception e) {
                logger.warn("Error while handling export error with {}", handler.getClass().getSimpleName(), e);
            }
        }
    }

    /**
     * Notifies every handler of successful completion.
     *
     * @throws RuntimeException if any handler threw (all handlers are still invoked first)
     */
    @Override
    public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster) throws Exception {
        boolean error = false;
        // Iterates in REVERSE registration order — presumably so completion runs as a
        // teardown, last-registered handler first. TODO confirm this ordering is relied on.
        for (int i = exportHandlers.size(); i-- > 0; ) {
            NeptuneExportEventHandler handler = exportHandlers.get(i);
            try {
                handler.onExportComplete(directories, stats, cluster);
            } catch (Exception e) {
                error = true;
                logger.error("Error while executing {}", handler.getClass().getSimpleName(), e);
            }
        }
        if (error){
            throw new RuntimeException("One or more errors occurred while executing onExportComplete event handlers. See the logs for details.");
        }
    }

    /**
     * Notifies every handler of successful completion, including the captured graph schema.
     *
     * @throws RuntimeException if any handler threw (all handlers are still invoked first)
     */
    @Override
    public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception {
        boolean error = false;
        // Same reverse-order iteration as the overload above.
        for (int i = exportHandlers.size(); i-- > 0; ) {
            NeptuneExportEventHandler handler = exportHandlers.get(i);
            try {
                handler.onExportComplete(directories, stats, cluster, graphSchema);
            } catch (Exception e) {
                error = true;
                logger.error("Error while executing {}", handler.getClass().getSimpleName(), e);
            }
        }
        if (error){
            throw new RuntimeException("One or more errors occurred while executing onExportComplete event handlers. See the logs for details.");
        }
    }

    /**
     * Notifies service-level handlers before the export starts, in registration order.
     *
     * @throws RuntimeException if any handler threw (all handlers are still invoked first)
     */
    @Override
    public void onBeforeExport(Args args, ExportToS3NeptuneExportEventHandler.S3UploadParams s3UploadParams) {
        boolean error = false;
        for (NeptuneExportServiceEventHandler handler : serviceHandlers) {
            try {
                handler.onBeforeExport(args, s3UploadParams);
            } catch (Exception e) {
                error = true;
                logger.error("Error while executing {}", handler.getClass().getSimpleName(), e);
            }
        }
        if (error){
            throw new RuntimeException("One or more errors occurred while executing onBeforeExport event handlers. See the logs for details.");
        }
    }
}
4,399