index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/AbstractModel.java | package com.airbnb.aerosolve.core.models;
import com.airbnb.aerosolve.core.DebugScoreRecord;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.MulticlassScoringResult;
import com.airbnb.aerosolve.core.util.FloatVector;
import lombok.Getter;
import lombok.Setter;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Created by hector_yee on 8/25/14.
* Base class for models
*/
public abstract class AbstractModel implements Model, Serializable {
  private static final long serialVersionUID = -5011350794437028492L;

  // Calibration parameters consumed by scoreProbability().
  @Getter @Setter
  protected double offset = 0.0;
  @Getter @Setter
  protected double slope = 1.0;

  /**
   * Scores a single item. The transforms are assumed to have already been
   * applied to the context, item and combined item.
   */
  abstract public float scoreItem(FeatureVector combinedItem);

  /**
   * Debug-scores a single item, appending a human-readable explanation of
   * how the model arrived at the score to {@code builder}.
   */
  abstract public float debugScoreItem(FeatureVector combinedItem,
                                       StringBuilder builder);

  /** Returns the individual per-feature score components for debugging. */
  abstract public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem);

  /** Loads the model body from a buffered stream; the header has already been parsed. */
  abstract protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException;

  /** Serializes the model to the writer. */
  abstract public void save(BufferedWriter writer) throws IOException;

  /** Returns the calibrated probability 1 / (1 + exp(-(offset + slope * score))). */
  public double scoreProbability(float score) {
    return scoreProbability(score, offset, slope);
  }

  /** Static form of the sigmoid calibration used by {@link #scoreProbability(float)}. */
  public static double scoreProbability(float score, double offset, double slope) {
    return 1.0 / (1.0 + Math.exp(-(offset + slope * score)));
  }

  /**
   * Optional hook for multi-class classifiers. The base implementation trips
   * an assert (when assertions are enabled) and returns an empty result.
   */
  public ArrayList<MulticlassScoringResult> scoreItemMulticlass(FeatureVector combinedItem) {
    assert(false);
    return new ArrayList<MulticlassScoringResult>();
  }

  /**
   * Populates multiclass result probabilities using a softmax over the scores
   * by default. Models whose scores are already probabilities (e.g. random
   * forests) may override this.
   */
  public void scoreToProbability(ArrayList<MulticlassScoringResult> results) {
    final int n = results.size();
    FloatVector rawScores = new FloatVector(n);
    for (int idx = 0; idx < n; idx++) {
      rawScores.values[idx] = (float) results.get(idx).score;
    }
    rawScores.softmax();
    for (int idx = 0; idx < n; idx++) {
      results.get(idx).probability = rawScores.values[idx];
    }
  }

  /** Optional hook implemented by online-updatable models, e.g. Spline, RBF. */
  public void onlineUpdate(float grad, float learningRate, Map<String, Map<String, Double>> flatFeatures) {
    assert(false);
  }

  /**
   * Performs one FOBOS (forward-backward splitting) update step.
   * http://papers.nips.cc/paper/3793-efficient-learning-using-forward-backward-splitting.pdf
   */
  public static float fobosUpdate(
      float currWeight,
      float gradient,
      float learningRate,
      float l1Reg,
      float l2Reg,
      float ssg) {
    final float etaT = learningRate / (float) Math.sqrt(ssg);
    final float etaTHalf = learningRate / (float) Math.sqrt(ssg + 0.5);
    // FOBOS l2 regularization.
    final float wt = (currWeight - gradient * etaT) / (1.0f + l2Reg * etaTHalf);
    // FOBOS l1 regularization: soft-threshold the weight toward zero.
    final float sign = wt > 0.0 ? 1.0f : -1.0f;
    final float step = (float) Math.max(0.0, Math.abs(wt) - l1Reg * etaTHalf);
    return sign * step;
  }
}
| 7,200 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/LinearModel.java | package com.airbnb.aerosolve.core.models;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.util.*;
import java.util.Map.Entry;
import com.airbnb.aerosolve.core.DebugScoreRecord;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.util.Util;
import com.google.common.hash.HashCode;
import lombok.Getter;
import lombok.Setter;
import org.apache.http.annotation.NotThreadSafe;
/**
* A linear model backed by a hash map.
*/
@NotThreadSafe
public class LinearModel extends AbstractModel {
  // family -> (feature name -> weight)
  @Getter @Setter
  protected Map<String, Map<String, Float>> weights;

  /**
   * Scores a single item by summing the weights of every present string
   * feature. Returns 0 when the item has no string features or the model
   * has no weights.
   */
  @Override
  public float scoreItem(FeatureVector combinedItem) {
    Map<String, Set<String>> stringFeatures = combinedItem.getStringFeatures();
    if (stringFeatures == null || weights == null) {
      return 0.0f;
    }
    float sum = 0.0f;
    for (Entry<String, Set<String>> entry : stringFeatures.entrySet()) {
      Map<String, Float> inner = weights.get(entry.getKey());
      if (inner == null) {
        continue;
      }
      for (String value : entry.getValue()) {
        Float weight = inner.get(value);
        if (weight != null) {
          sum += weight;
        }
      }
    }
    return sum;
  }

  /** Orders entries by descending absolute value (largest magnitude first). */
  public static class EntryComparator implements Comparator<Entry<String, Float>> {
    @Override
    public int compare(Entry<String, Float> e1, Entry<String, Float> e2) {
      // Reversed operand order yields descending |value|.
      return Float.compare(Math.abs(e2.getValue()), Math.abs(e1.getValue()));
    }
  }

  /**
   * Debug scores a single item, appending per-family totals and the top 15
   * weighted features to {@code builder}. Returns the total score.
   */
  @Override
  public float debugScoreItem(FeatureVector combinedItem,
                              StringBuilder builder) {
    Map<String, Set<String>> stringFeatures = combinedItem.getStringFeatures();
    if (stringFeatures == null || weights == null) {
      return 0.0f;
    }
    float sum = 0.0f;
    PriorityQueue<Entry<String, Float>> scores = new PriorityQueue<>(100, new EntryComparator());
    Map<String, Float> familyScores = new HashMap<>();
    for (Entry<String, Set<String>> entry : stringFeatures.entrySet()) {
      String family = entry.getKey();
      // Hoisted out of the value loop: the family lookup is loop invariant.
      // Also removed an unused Util.getHashCode(family, value) computation
      // whose result was never read.
      Map<String, Float> inner = weights.get(family);
      if (inner == null) {
        continue;
      }
      for (String value : entry.getValue()) {
        Float weight = inner.get(value);
        if (weight == null) {
          continue;
        }
        String str = family + ':' + value + " = " + weight + '\n';
        familyScores.merge(family, weight, Float::sum);
        scores.add(new AbstractMap.SimpleEntry<String, Float>(str, weight));
        sum += weight;
      }
    }
    builder.append("Scores by family ===>\n");
    if (!familyScores.isEmpty()) {
      PriorityQueue<Entry<String, Float>> familyPQ = new PriorityQueue<>(10, new EntryComparator());
      familyPQ.addAll(familyScores.entrySet());
      while (!familyPQ.isEmpty()) {
        Entry<String, Float> entry = familyPQ.poll();
        builder.append(entry.getKey() + " = " + entry.getValue() + '\n');
      }
    }
    builder.append("Top 15 scores ===>\n");
    if (!scores.isEmpty()) {
      int count = 0;
      float subsum = 0.0f;
      while (!scores.isEmpty()) {
        Entry<String, Float> entry = scores.poll();
        builder.append(entry.getKey());
        subsum += entry.getValue();
        count++;
        if (count >= 15) {
          // Summarize everything below the top 15.
          builder.append("Leftover = " + (sum - subsum) + '\n');
          break;
        }
      }
    }
    builder.append("Total = " + sum + '\n');
    return sum;
  }

  /**
   * Returns one DebugScoreRecord per present feature that has a weight.
   * The feature value is reported as 1.0 since string features are binary.
   */
  @Override
  public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
    // The linear model takes only string features.
    Map<String, Set<String>> stringFeatures = combinedItem.getStringFeatures();
    List<DebugScoreRecord> scoreRecordsList = new ArrayList<>();
    if (stringFeatures == null || weights == null) {
      return scoreRecordsList;
    }
    for (Entry<String, Set<String>> entry : stringFeatures.entrySet()) {
      String family = entry.getKey();
      Map<String, Float> inner = weights.get(family);
      if (inner == null) continue;
      for (String value : entry.getValue()) {
        Float weight = inner.get(value);
        if (weight == null) continue;
        DebugScoreRecord record = new DebugScoreRecord();
        record.setFeatureFamily(family);
        record.setFeatureName(value);
        // 1.0 if the string feature exists, 0.0 otherwise.
        record.setFeatureValue(1.0);
        record.setFeatureWeight(weight);
        scoreRecordsList.add(record);
      }
    }
    return scoreRecordsList;
  }

  /** Loads the model body: one ModelRecord per feature weight. */
  @Override
  protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
    long rows = header.getNumRecords();
    // Very old models did not set slope and offset so check first.
    if (header.isSetSlope()) {
      slope = header.getSlope();
    }
    if (header.isSetOffset()) {
      offset = header.getOffset();
    }
    weights = new HashMap<>();
    for (long i = 0; i < rows; i++) {
      ModelRecord record = Util.decodeModel(reader.readLine());
      Map<String, Float> inner =
          weights.computeIfAbsent(record.getFeatureFamily(), k -> new HashMap<>());
      inner.put(record.getFeatureName(), (float) record.getFeatureWeight());
    }
  }

  /** Serializes the model: a header record followed by one record per weight. */
  @Override
  public void save(BufferedWriter writer) throws IOException {
    ModelHeader header = new ModelHeader();
    header.setModelType("linear");
    header.setSlope(slope);
    header.setOffset(offset);
    // Count records from the map sizes instead of iterating every entry.
    long count = 0;
    for (Map.Entry<String, Map<String, Float>> familyMap : weights.entrySet()) {
      count += familyMap.getValue().size();
    }
    header.setNumRecords(count);
    ModelRecord headerRec = new ModelRecord();
    headerRec.setModelHeader(header);
    writer.write(Util.encode(headerRec));
    writer.newLine();
    for (Map.Entry<String, Map<String, Float>> familyMap : weights.entrySet()) {
      for (Map.Entry<String, Float> feature : familyMap.getValue().entrySet()) {
        ModelRecord record = new ModelRecord();
        record.setFeatureFamily(familyMap.getKey());
        record.setFeatureName(feature.getKey());
        record.setFeatureWeight(feature.getValue());
        writer.write(Util.encode(record));
        writer.newLine();
      }
    }
    writer.flush();
  }
}
| 7,201 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/KernelModel.java | package com.airbnb.aerosolve.core.models;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Serializable;
import java.util.Map;
import java.util.List;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.PriorityQueue;
import java.util.AbstractMap;
import com.airbnb.aerosolve.core.DictionaryRecord;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.DebugScoreRecord;
import com.airbnb.aerosolve.core.FunctionForm;
import com.airbnb.aerosolve.core.util.Util;
import com.airbnb.aerosolve.core.util.StringDictionary;
import com.airbnb.aerosolve.core.util.FloatVector;
import com.airbnb.aerosolve.core.util.SupportVector;
import lombok.Getter;
import lombok.Setter;
// A kernel machine with arbitrary kernels. Different support vectors can have different kernels.
// The conversion from sparse features to dense features is done by dictionary lookup. Also since
// non-linear kernels are used there is no need to cross features, the feature interactions are done by
// considering kernel responses to the support vectors. Try to keep features under a thousand.
public class KernelModel extends AbstractModel {
  private static final long serialVersionUID = 7651061358422885397L;

  // Maps sparse feature names onto dense vector indices.
  @Getter @Setter
  StringDictionary dictionary;
  // The support vectors; each one may carry its own kernel.
  @Getter @Setter
  List<SupportVector> supportVectors;

  public KernelModel() {
    dictionary = new StringDictionary();
    supportVectors = new ArrayList<>();
  }

  /** Scores an item as the sum of the weighted kernel responses of all support vectors. */
  @Override
  public float scoreItem(FeatureVector combinedItem) {
    Map<String, Map<String, Double>> flat = Util.flattenFeature(combinedItem);
    FloatVector dense = dictionary.makeVectorFromSparseFloats(flat);
    float total = 0.0f;
    for (SupportVector sv : supportVectors) {
      total += sv.evaluate(dense);
    }
    return total;
  }

  /** Debug scoring is not implemented for kernel models; always returns 0. */
  @Override
  public float debugScoreItem(FeatureVector combinedItem,
                              StringBuilder builder) {
    return 0.0f;
  }

  /** (TODO) implement debugScoreComponents; currently returns an empty list. */
  @Override
  public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
    return new ArrayList<>();
  }

  /** Online SGD step: shifts every support vector's weight by its (unweighted) response. */
  @Override
  public void onlineUpdate(float grad, float learningRate, Map<String, Map<String, Double>> flatFeatures) {
    FloatVector dense = dictionary.makeVectorFromSparseFloats(flatFeatures);
    float scaledGrad = - learningRate * grad;
    for (SupportVector sv : supportVectors) {
      float response = sv.evaluateUnweighted(dense);
      sv.setWeight(sv.getWeight() + scaledGrad * response);
    }
  }

  /** Writes the header (including the dictionary) followed by one record per support vector. */
  @Override
  public void save(BufferedWriter writer) throws IOException {
    ModelHeader header = new ModelHeader();
    header.setModelType("kernel");
    header.setDictionary(dictionary.getDictionary());
    header.setNumRecords((long) supportVectors.size());
    ModelRecord headerRec = new ModelRecord();
    headerRec.setModelHeader(header);
    writer.write(Util.encode(headerRec));
    writer.newLine();
    for (SupportVector sv : supportVectors) {
      writer.write(Util.encode(sv.toModelRecord()));
      writer.newLine();
    }
    writer.flush();
  }

  /** Restores the dictionary from the header and reads one support vector per record. */
  @Override
  protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
    dictionary = new StringDictionary(header.getDictionary());
    supportVectors = new ArrayList<>();
    long rows = header.getNumRecords();
    for (long row = 0; row < rows; row++) {
      ModelRecord record = Util.decodeModel(reader.readLine());
      supportVectors.add(new SupportVector(record));
    }
  }
}
| 7,202 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/KDTreeModel.java | package com.airbnb.aerosolve.core.models;
import com.airbnb.aerosolve.core.KDTreeNode;
import com.airbnb.aerosolve.core.util.Util;
import com.google.common.base.Optional;
import lombok.Getter;
import org.apache.commons.codec.binary.Base64;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
import static com.airbnb.aerosolve.core.KDTreeNodeType.LEAF;
// A specialized 2D kd-tree that supports point and box queries.
public class KDTreeModel implements Serializable {
  private static final long serialVersionUID = -2884260218927875695L;
  private static final Logger log = LoggerFactory.getLogger(KDTreeModel.class);

  @Getter
  private KDTreeNode[] nodes;

  public KDTreeModel(KDTreeNode[] nodes) {
    this.nodes = nodes;
  }

  public KDTreeModel(List<KDTreeNode> nodeList) {
    nodes = new KDTreeNode[nodeList.size()];
    nodeList.toArray(nodes);
  }

  /** Returns the index of the leaf containing the point, or -1 if the tree is empty. */
  public int leaf(double x, double y) {
    if (nodes == null) return -1;
    int currIdx = 0;
    while (true) {
      int nextIdx = next(currIdx, x, y);
      if (nextIdx == -1) {
        return currIdx;
      }
      currIdx = nextIdx;
    }
  }

  public KDTreeNode getNode(int id) {
    return nodes[id];
  }

  /** Returns the indices of all nodes traversed to reach the leaf containing the point. */
  public ArrayList<Integer> query(double x, double y) {
    ArrayList<Integer> idx = new ArrayList<>();
    if (nodes == null) return idx;
    int currIdx = 0;
    while (true) {
      idx.add(currIdx);
      int nextIdx = next(currIdx, x, y);
      if (nextIdx == -1) {
        return idx;
      }
      currIdx = nextIdx;
    }
  }

  /** Returns the child index the point descends into, or -1 at a leaf. */
  private int next(int currIdx, double x, double y) {
    KDTreeNode node = nodes[currIdx];
    int nextIndex = -1;
    switch (node.nodeType) {
      case X_SPLIT:
        nextIndex = (x < node.splitValue) ? node.leftChild : node.rightChild;
        break;
      case Y_SPLIT:
        nextIndex = (y < node.splitValue) ? node.leftChild : node.rightChild;
        break;
      default:
        assert (node.nodeType == LEAF);
        break;
    }
    return nextIndex;
  }

  /** Returns the indices of all nodes whose region overlaps the box. */
  public ArrayList<Integer> queryBox(double minX, double minY, double maxX, double maxY) {
    ArrayList<Integer> idx = new ArrayList<>();
    if (nodes == null) return idx;
    Stack<Integer> stack = new Stack<Integer>();
    stack.push(0);
    while (!stack.isEmpty()) {
      int currIdx = stack.pop();
      idx.add(currIdx);
      KDTreeNode node = nodes[currIdx];
      switch (node.nodeType) {
        case X_SPLIT: {
          if (minX < node.splitValue) {
            stack.push(node.leftChild);
          }
          if (maxX >= node.splitValue) {
            stack.push(node.rightChild);
          }
        }
        break;
        case Y_SPLIT: {
          if (minY < node.splitValue) {
            stack.push(node.leftChild);
          }
          if (maxY >= node.splitValue) {
            stack.push(node.rightChild);
          }
        }
        // FIX: the original omitted this break, falling through into LEAF.
        // Harmless today only because LEAF immediately breaks, but it is a
        // latent fall-through hazard and inconsistent with the X_SPLIT case.
        break;
        case LEAF:
          break;
      }
    }
    return idx;
  }

  /** Reads a KDTreeModel from a gzipped stream; absent when the stream holds no nodes. */
  public static Optional<KDTreeModel> readFromGzippedStream(InputStream inputStream) {
    List<KDTreeNode> nodes = Util.readFromGzippedStream(KDTreeNode.class, inputStream);
    if (!nodes.isEmpty()) {
      return Optional.of(new KDTreeModel(nodes));
    } else {
      return Optional.absent();
    }
  }

  /** Reads a KDTreeModel from a gzipped classpath resource, logging on failure. */
  public static Optional<KDTreeModel> readFromGzippedResource(String name) {
    InputStream inputStream = java.lang.ClassLoader.getSystemResourceAsStream(name);
    Optional<KDTreeModel> modelOptional = readFromGzippedStream(inputStream);
    if (!modelOptional.isPresent()) {
      log.error("Could not load resource named " + name);
    }
    return modelOptional;
  }

  /** Reads a KDTreeModel from a base64-encoded gzipped string. */
  public static Optional<KDTreeModel> readFromGzippedBase64String(String encoded) {
    byte[] decoded = Base64.decodeBase64(encoded);
    InputStream stream = new ByteArrayInputStream(decoded);
    return readFromGzippedStream(stream);
  }

  // Strips nodes for queries. To save space we keep only the fields
  // needed for traversal (type, split value, children).
  public static KDTreeNode stripNode(KDTreeNode node) {
    KDTreeNode newNode = new KDTreeNode();
    if (node.isSetNodeType()) {
      newNode.setNodeType(node.nodeType);
    }
    if (node.isSetSplitValue()) {
      newNode.setSplitValue(node.splitValue);
    }
    if (node.isSetLeftChild()) {
      newNode.setLeftChild(node.leftChild);
    }
    if (node.isSetRightChild()) {
      newNode.setRightChild(node.rightChild);
    }
    return newNode;
  }
}
| 7,203 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/SplineModel.java | package com.airbnb.aerosolve.core.models;
import com.airbnb.aerosolve.core.DebugScoreRecord;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.function.Spline;
import com.airbnb.aerosolve.core.util.Util;
import lombok.Getter;
import lombok.Setter;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Serializable;
import java.util.*;
// A linear piecewise spline based model with a spline per feature.
// See http://en.wikipedia.org/wiki/Generalized_additive_model
/*
@deprecated Use AdditiveModel
*/
@Deprecated
public class SplineModel extends AbstractModel {
  private static final long serialVersionUID = 3651061358422885397L;

  // Number of bins used by every spline; set at training init or model load time.
  @Getter @Setter
  private int numBins;

  // family -> (feature name -> the per-feature spline).
  @Getter @Setter
  private Map<String, Map<String, WeightSpline>> weightSpline;

  @Getter @Setter
  // Cap on the L_infinity norm of the spline. Defaults to 0 which is no cap.
  private float splineNormCap;

  /** A piecewise-linear spline plus direct access to its backing weight array. */
  public static class WeightSpline implements Serializable {
    private static final long serialVersionUID = -2884260218927875694L;

    public WeightSpline() {
    }

    public WeightSpline(float minVal, float maxVal, int numBins) {
      splineWeights = new float[numBins];
      spline = new Spline(minVal, maxVal, splineWeights);
    }

    // Resamples the spline to a new number of bins and refreshes the weight array.
    public void resample(int newBins) {
      spline.resample(newBins);
      splineWeights = spline.getWeights();
    }

    public Spline spline;
    // Backing weight array; shared with the Spline above.
    public float[] splineWeights;

    // Sum of absolute weights.
    public float L1Norm() {
      float sum = 0.0f;
      for (int i = 0; i < splineWeights.length; i++) {
        sum += Math.abs(splineWeights[i]);
      }
      return sum;
    }

    // Largest absolute weight.
    public float LInfinityNorm() {
      float best = 0.0f;
      for (int i = 0; i < splineWeights.length; i++) {
        best = Math.max(best, Math.abs(splineWeights[i]));
      }
      return best;
    }

    // Rescales all weights so the L_infinity norm does not exceed cap.
    // A cap of zero or less means "no cap".
    public void LInfinityCap(float cap) {
      if (cap <= 0.0f) return;
      float currentNorm = this.LInfinityNorm();
      if (currentNorm > cap) {
        float scale = cap / currentNorm;
        for (int i = 0; i < splineWeights.length; i++) {
          splineWeights[i] *= scale;
        }
      }
    }
  }

  public SplineModel() {
  }

  // Initializes the model for training with the given number of bins per spline.
  public void initForTraining(int numBins) {
    this.numBins = numBins;
    weightSpline = new HashMap<>();
  }

  // Scores an item as the sum of every matching feature's spline evaluation.
  @Override
  public float scoreItem(FeatureVector combinedItem) {
    Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
    return scoreFlatFeatures(flatFeatures);
  }

  // Debug scores a single item, appending the top contributions to builder.
  @Override
  public float debugScoreItem(FeatureVector combinedItem,
      StringBuilder builder) {
    Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
    float sum = 0.0f;
    // Queue ordered by descending absolute contribution (reuses LinearModel's comparator).
    PriorityQueue<Map.Entry<String, Float>> scores =
        new PriorityQueue<>(100, new LinearModel.EntryComparator());
    for (Map.Entry<String, Map<String, Double>> featureFamily : flatFeatures.entrySet()) {
      Map<String, WeightSpline> familyWeightMap = weightSpline.get(featureFamily.getKey());
      if (familyWeightMap == null) continue;
      for (Map.Entry<String, Double> feature : featureFamily.getValue().entrySet()) {
        WeightSpline ws = familyWeightMap.get(feature.getKey());
        if (ws == null) continue;
        float val = feature.getValue().floatValue();
        float subscore = ws.spline.evaluate(val);
        sum += subscore;
        String str = featureFamily.getKey() + ":" + feature.getKey() + "=" + val
            + " = " + subscore + "<br>\n";
        scores.add(new AbstractMap.SimpleEntry<String, Float>(str, subscore));
      }
    }
    final int MAX_COUNT = 100;
    builder.append("Top scores ===>\n");
    if (!scores.isEmpty()) {
      int count = 0;
      float subsum = 0.0f;
      while (!scores.isEmpty()) {
        Map.Entry<String, Float> entry = scores.poll();
        builder.append(entry.getKey());
        float val = entry.getValue();
        subsum += val;
        count = count + 1;
        if (count >= MAX_COUNT) {
          // Summarize everything below the top MAX_COUNT contributions.
          builder.append("Leftover = " + (sum - subsum) + '\n');
          break;
        }
      }
    }
    builder.append("Total = " + sum + '\n');
    return sum;
  }

  // Returns one DebugScoreRecord per feature that has a spline; the evaluated
  // spline value is reported as the feature weight.
  @Override
  public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
    Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
    List<DebugScoreRecord> scoreRecordsList = new ArrayList<>();
    for (Map.Entry<String, Map<String, Double>> featureFamily : flatFeatures.entrySet()) {
      Map<String, WeightSpline> familyWeightMap = weightSpline.get(featureFamily.getKey());
      if (familyWeightMap == null) continue;
      for (Map.Entry<String, Double> feature : featureFamily.getValue().entrySet()) {
        WeightSpline ws = familyWeightMap.get(feature.getKey());
        if (ws == null) continue;
        float val = feature.getValue().floatValue();
        float weight = ws.spline.evaluate(val);
        DebugScoreRecord record = new DebugScoreRecord();
        record.setFeatureFamily(featureFamily.getKey());
        record.setFeatureName(feature.getKey());
        record.setFeatureValue(val);
        record.setFeatureWeight(weight);
        scoreRecordsList.add(record);
      }
    }
    return scoreRecordsList;
  }

  // Updates the gradient: applies one step to every spline matched by the input features.
  public void update(float grad,
      float learningRate,
      Map<String, Map<String, Double>> flatFeatures) {
    for (Map.Entry<String, Map<String, Double>> featureFamily : flatFeatures.entrySet()) {
      Map<String, WeightSpline> familyWeightMap = weightSpline.get(featureFamily.getKey());
      if (familyWeightMap == null) continue;
      for (Map.Entry<String, Double> feature : featureFamily.getValue().entrySet()) {
        WeightSpline ws = familyWeightMap.get(feature.getKey());
        if (ws == null) continue;
        float val = feature.getValue().floatValue();
        updateWeightSpline(val, grad, learningRate,ws);
      }
    }
  }

  // Online-training hook; simply delegates to update().
  @Override
  public void onlineUpdate(float grad, float learningRate, Map<String, Map<String, Double>> flatFeatures) {
    update(grad, learningRate, flatFeatures);
  }

  // Adds a new spline for the (family, feature) pair over [minVal, maxVal].
  public void addSpline(String family, String feature, float minVal, float maxVal, Boolean overwrite) {
    // if overwrite=true, we overwrite an existing spline, otherwise we don't modify an existing spline
    Map<String, WeightSpline> featFamily = weightSpline.get(family);
    if (featFamily == null) {
      featFamily = new HashMap<>();
      weightSpline.put(family, featFamily);
    }
    if (overwrite || !featFamily.containsKey(feature)) {
      if (maxVal <= minVal) {
        // Guarantee a non-degenerate range.
        maxVal = minVal + 1.0f;
      }
      WeightSpline ws = new WeightSpline(minVal, maxVal, numBins);
      featFamily.put(feature, ws);
    }
  }

  // Applies one gradient step to the spline at val, then re-applies the L_infinity cap.
  private void updateWeightSpline(float val,
      float grad,
      float learningRate,
      WeightSpline ws) {
    ws.spline.update(-grad * learningRate, val);
    ws.LInfinityCap(splineNormCap);
  }

  // Sums spline evaluations across all features present in both input and model.
  public float scoreFlatFeatures(Map<String, Map<String, Double>> flatFeatures) {
    float sum = 0.0f;
    for (Map.Entry<String, Map<String, Double>> featureFamily : flatFeatures.entrySet()) {
      Map<String, WeightSpline> familyWeightMap = weightSpline.get(featureFamily.getKey());
      if (familyWeightMap == null)
        continue;
      for (Map.Entry<String, Double> feature : featureFamily.getValue().entrySet()) {
        WeightSpline ws = familyWeightMap.get(feature.getKey());
        if (ws == null)
          continue;
        float val = feature.getValue().floatValue();
        sum += ws.spline.evaluate(val);
      }
    }
    return sum;
  }

  // Writes a header record followed by one record per spline (weights, min, max).
  @Override
  public void save(BufferedWriter writer) throws IOException {
    ModelHeader header = new ModelHeader();
    header.setModelType("spline");
    // The number of bins is persisted in the header's numHidden field.
    header.setNumHidden(numBins);
    header.setSlope(slope);
    header.setOffset(offset);
    long count = 0;
    for (Map.Entry<String, Map<String, WeightSpline>> familyMap : weightSpline.entrySet()) {
      for (Map.Entry<String, WeightSpline> feature : familyMap.getValue().entrySet()) {
        count++;
      }
    }
    header.setNumRecords(count);
    ModelRecord headerRec = new ModelRecord();
    headerRec.setModelHeader(header);
    writer.write(Util.encode(headerRec));
    writer.newLine();
    for (Map.Entry<String, Map<String, WeightSpline>> familyMap : weightSpline.entrySet()) {
      for (Map.Entry<String, WeightSpline> feature : familyMap.getValue().entrySet()) {
        ModelRecord record = new ModelRecord();
        record.setFeatureFamily(familyMap.getKey());
        record.setFeatureName(feature.getKey());
        ArrayList<Double> arrayList = new ArrayList<Double>();
        for (int i = 0; i < feature.getValue().splineWeights.length; i++) {
          arrayList.add((double) feature.getValue().splineWeights[i]);
        }
        record.setWeightVector(arrayList);
        record.setMinVal(feature.getValue().spline.getMinVal());
        record.setMaxVal(feature.getValue().spline.getMaxVal());
        writer.write(Util.encode(record));
        writer.newLine();
      }
    }
    writer.flush();
  }

  // Loads model from a buffered stream; numBins comes from the header's numHidden.
  @Override
  protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
    long rows = header.getNumRecords();
    numBins = header.getNumHidden();
    slope = header.getSlope();
    offset = header.getOffset();
    weightSpline = new HashMap<>();
    for (long i = 0; i < rows; i++) {
      String line = reader.readLine();
      ModelRecord record = Util.decodeModel(line);
      String family = record.getFeatureFamily();
      String name = record.getFeatureName();
      Map<String, WeightSpline> inner = weightSpline.get(family);
      if (inner == null) {
        inner = new HashMap<>();
        weightSpline.put(family, inner);
      }
      float minVal = (float) record.getMinVal();
      float maxVal = (float) record.getMaxVal();
      WeightSpline vec = new WeightSpline(minVal, maxVal, numBins);
      for (int j = 0; j < numBins; j++) {
        vec.splineWeights[j] = record.getWeightVector().get(j).floatValue();
      }
      inner.put(name, vec);
    }
  }
}
| 7,204 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/MaxoutModel.java | package com.airbnb.aerosolve.core.models;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Serializable;
import java.util.*;
import com.airbnb.aerosolve.core.DebugScoreRecord;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.util.Util;
import com.airbnb.aerosolve.core.util.FloatVector;
import lombok.Getter;
import lombok.Setter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// A 2 layer maxout unit that can represent functions using difference
// of piecewise linear convex functions.
// http://arxiv.org/abs/1302.4389
public class MaxoutModel extends AbstractModel {
private static final long serialVersionUID = -849900702679383422L;
@Getter @Setter
private int numHidden;
@Getter @Setter
private Map<String, Map<String, WeightVector>> weightVector;
private WeightVector bias;
  /**
   * Per-feature weight vector plus the bookkeeping needed for
   * sum-of-squared-gradient scaling and momentum updates.
   */
  public static class WeightVector implements Serializable {
    private static final long serialVersionUID = -2698305146144718441L;

    WeightVector() {
    }

    /**
     * @param scale    multiplier applied to feature values for this vector
     * @param dim      number of hidden units
     * @param gaussian if true, initialize the weights with Gaussian noise,
     *                 otherwise with zeros
     */
    WeightVector(float scale, int dim, boolean gaussian) {
      this.scale = scale;
      if (gaussian) {
        weights = FloatVector.getGaussianVector(dim);
      } else {
        weights = new FloatVector(dim);
      }
      ssg = new FloatVector(dim);
      prevStep = new FloatVector(dim);
    }

    public FloatVector weights;
    // Sum of squared gradients.
    public FloatVector ssg;
    // Previous step, used for momentum.
    public FloatVector prevStep;
    public float scale;
  }
  // Default constructor; fields are populated later, e.g. via initForTraining.
  public MaxoutModel() {
  }
  /**
   * Prepares the model for training with {@code numHidden} hidden units and
   * registers the bias as a special $SPECIAL/$BIAS weight vector.
   */
  public void initForTraining(int numHidden) {
    this.numHidden = numHidden;
    weightVector = new HashMap<>();
    // The bias has unit scale and zero-initialized weights.
    bias = new WeightVector(1.0f, numHidden, false);
    Map<String, WeightVector> special = new HashMap<>();
    weightVector.put("$SPECIAL", special);
    special.put("$BIAS", bias);
  }
  /** Scores an item by flattening its features and delegating to scoreFlatFeatures. */
  @Override
  public float scoreItem(FeatureVector combinedItem) {
    Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
    return scoreFlatFeatures(flatFeatures);
  }
  /**
   * Debug-scores an item: computes the same max-minus-min hidden-unit score
   * while appending the bias contribution and each feature's contribution
   * (value * (maxUnitWeight - minUnitWeight), scaled) to {@code builder}.
   */
  @Override
  public float debugScoreItem(FeatureVector combinedItem,
      StringBuilder builder) {
    Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
    FloatVector response = getResponse(flatFeatures);
    FloatVector.MinMaxResult result = response.getMinMaxResult();
    // Maxout score: difference between the strongest and weakest hidden unit.
    float sum = result.maxValue - result.minValue;
    // Queue ordered by descending absolute contribution.
    PriorityQueue<Map.Entry<String, Float>> scores =
        new PriorityQueue<>(100, new LinearModel.EntryComparator());
    float[] biasWt = bias.weights.getValues();
    float biasScore = biasWt[result.maxIndex] - biasWt[result.minIndex];
    scores.add(new AbstractMap.SimpleEntry<String, Float>(
        "bias = " + biasScore + " <br>\n",
        biasScore));
    for (Map.Entry<String, Map<String, Double>> featureFamily : flatFeatures.entrySet()) {
      Map<String, WeightVector> familyWeightMap = weightVector.get(featureFamily.getKey());
      if (familyWeightMap == null) continue;
      for (Map.Entry<String, Double> feature : featureFamily.getValue().entrySet()) {
        WeightVector weightVec = familyWeightMap.get(feature.getKey());
        if (weightVec == null) continue;
        float val = feature.getValue().floatValue();
        float[] wt = weightVec.weights.getValues();
        // Scaled weight of this feature at the max unit (p) and min unit (n).
        float p = wt[result.maxIndex] * weightVec.scale;
        float n = wt[result.minIndex] * weightVec.scale;
        float subscore = val * (p - n);
        String str = featureFamily.getKey() + ":" + feature.getKey() + "=" + val
            + " * (" + p + " - " + n + ") = " + subscore + "<br>\n";
        scores.add(new AbstractMap.SimpleEntry<String, Float>(str, subscore));
      }
    }
    builder.append("Top 15 scores ===>\n");
    if (!scores.isEmpty()) {
      int count = 0;
      float subsum = 0.0f;
      while (!scores.isEmpty()) {
        Map.Entry<String, Float> entry = scores.poll();
        builder.append(entry.getKey());
        float val = entry.getValue();
        subsum += val;
        count = count + 1;
        if (count >= 15) {
          // Summarize everything below the top 15.
          builder.append("Leftover = " + (sum - subsum) + '\n');
          break;
        }
      }
    }
    builder.append("Total = " + sum + '\n');
    return sum;
  }
  /** (TODO) implement debugScoreComponents; currently returns an empty list. */
  @Override
  public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
    // (TODO) implement debugScoreComponents
    List<DebugScoreRecord> scoreRecordsList = new ArrayList<>();
    return scoreRecordsList;
  }
  // Adds a new weight vector for (family, feature) with the specified scale,
  // initialized with Gaussian noise.
  public void addVector(String family, String feature, float scale) {
    Map<String, WeightVector> featFamily = weightVector.get(family);
    if (featFamily == null) {
      featFamily = new HashMap<>();
      weightVector.put(family, featFamily);
    }
    WeightVector vec = new WeightVector(scale, numHidden, true);
    featFamily.put(feature, vec);
  }
  /**
   * Updates the gradient: applies one step to every weight vector matched by
   * the input features, then to the bias. Only the max and min hidden units
   * (identified by {@code result}) are touched.
   */
  public void update(float grad,
      float learningRate,
      float l1Reg,
      float l2Reg,
      float momentum,
      FloatVector.MinMaxResult result,
      Map<String, Map<String, Double>> flatFeatures) {
    for (Map.Entry<String, Map<String, Double>> featureFamily : flatFeatures.entrySet()) {
      Map<String, WeightVector> familyWeightMap = weightVector.get(featureFamily.getKey());
      if (familyWeightMap == null) continue;
      for (Map.Entry<String, Double> feature : featureFamily.getValue().entrySet()) {
        WeightVector weightVec = familyWeightMap.get(feature.getKey());
        if (weightVec == null) continue;
        // Scale the feature value by the vector's scale before updating.
        float val = feature.getValue().floatValue() * weightVec.scale;
        updateWeightVector(result.minIndex,
            result.maxIndex,
            val,
            grad,
            learningRate,
            l1Reg,
            l2Reg,
            momentum,
            weightVec);
      }
    }
    // The bias participates with an implicit feature value of 1.0.
    updateWeightVector(result.minIndex,
        result.maxIndex,
        1.0f,
        grad,
        learningRate,
        l1Reg,
        l2Reg,
        momentum,
        bias);
  }
  // Applies one gradient step to the hidden units at minIndex and maxIndex.
  // Positive gradient direction is applied at maxIndex and the negated
  // gradient at minIndex, consistent with the maxout score being
  // (max response - min response). The statement order matters: ssg is
  // accumulated before the fobos step, and each unit's weight is read before
  // being overwritten. Note minIndex and maxIndex may refer to the same unit,
  // in which case ssg for that unit is accumulated twice — presumably
  // intentional; confirm before restructuring.
  // fobosUpdate is defined elsewhere in this class (not visible here);
  // assumed to return the regularized updated weight — TODO confirm.
  private void updateWeightVector(int minIndex,
                                  int maxIndex,
                                  float val,
                                  float grad,
                                  float learningRate,
                                  float l1Reg,
                                  float l2Reg,
                                  float momentum,
                                  WeightVector weightVec) {
    float[] ssg = weightVec.ssg.getValues();
    float[] wt = weightVec.weights.getValues();
    float[] prev = weightVec.prevStep.getValues();
    // Accumulate squared gradients (Adagrad-style scaling inside fobosUpdate —
    // TODO confirm against fobosUpdate's definition).
    ssg[maxIndex] += grad * grad;
    ssg[minIndex] += grad * grad;
    float newMax = fobosUpdate(wt[maxIndex],
                               grad * val,
                               learningRate,
                               l1Reg, l2Reg, ssg[maxIndex]);
    float stepMax = newMax - wt[maxIndex];
    if (newMax == 0.0f) {
      // Regularization clipped the weight to zero; also clear momentum so the
      // next step does not resurrect it.
      wt[maxIndex] = 0.0f;
      prev[maxIndex] = 0.0f;
    } else {
      // Momentum: add a fraction of the previous step on top of this step.
      wt[maxIndex] = wt[maxIndex] + stepMax + momentum * prev[maxIndex];
      prev[maxIndex] = stepMax;
    }
    // Mirror update for the min unit with the gradient sign flipped.
    float newMin = fobosUpdate(wt[minIndex],
                               -grad * val,
                               learningRate,
                               l1Reg, l2Reg, ssg[minIndex]);
    float stepMin = newMin - wt[minIndex];
    if (newMin == 0.0f) {
      wt[minIndex] = 0.0f;
      prev[minIndex] = 0.0f;
    } else {
      wt[minIndex] = wt[minIndex] + stepMin + momentum * prev[minIndex];
      prev[minIndex] = stepMin;
    }
  }
public float scoreFlatFeatures(Map<String, Map<String, Double>> flatFeatures) {
FloatVector response = getResponse(flatFeatures);
FloatVector.MinMaxResult result = response.getMinMaxResult();
return result.maxValue - result.minValue;
}
public FloatVector getResponse(Map<String, Map<String, Double>> flatFeatures) {
FloatVector sum = new FloatVector(numHidden);
for (Map.Entry<String, Map<String, Double>> entry : flatFeatures.entrySet()) {
Map<String, WeightVector> family = weightVector.get(entry.getKey());
if (family != null) {
for (Map.Entry<String, Double> feature : entry.getValue().entrySet()) {
WeightVector hidden = family.get(feature.getKey());
if (hidden != null) {
sum.multiplyAdd(feature.getValue().floatValue() * hidden.scale, hidden.weights);
}
}
}
}
sum.add(bias.weights);
return sum;
}
public void save(BufferedWriter writer) throws IOException {
ModelHeader header = new ModelHeader();
header.setModelType("maxout");
header.setNumHidden(numHidden);
long count = 0;
for (Map.Entry<String, Map<String, WeightVector>> familyMap : weightVector.entrySet()) {
for (Map.Entry<String, WeightVector> feature : familyMap.getValue().entrySet()) {
count++;
}
}
header.setNumRecords(count);
ModelRecord headerRec = new ModelRecord();
headerRec.setModelHeader(header);
writer.write(Util.encode(headerRec));
writer.newLine();
for (Map.Entry<String, Map<String, WeightVector>> familyMap : weightVector.entrySet()) {
for (Map.Entry<String, WeightVector> feature : familyMap.getValue().entrySet()) {
ModelRecord record = new ModelRecord();
record.setFeatureFamily(familyMap.getKey());
record.setFeatureName(feature.getKey());
ArrayList<Double> arrayList = new ArrayList<Double>();
for (int i = 0; i < feature.getValue().weights.values.length; i++) {
arrayList.add((double) feature.getValue().weights.values[i]);
}
record.setWeightVector(arrayList);
record.setScale(feature.getValue().scale);
writer.write(Util.encode(record));
writer.newLine();
}
}
writer.flush();
}
@Override
protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
long rows = header.getNumRecords();
numHidden = header.getNumHidden();
weightVector = new HashMap<>();
for (long i = 0; i < rows; i++) {
String line = reader.readLine();
ModelRecord record = Util.decodeModel(line);
String family = record.getFeatureFamily();
String name = record.getFeatureName();
Map<String, WeightVector> inner = weightVector.get(family);
if (inner == null) {
inner = new HashMap<>();
weightVector.put(family, inner);
}
WeightVector vec = new WeightVector();
vec.scale = (float) record.getScale();
vec.weights = new FloatVector(numHidden);
for (int j = 0; j < numHidden; j++) {
vec.weights.values[j] = record.getWeightVector().get(j).floatValue();
}
inner.put(name, vec);
}
Map<String, WeightVector> special = weightVector.get("$SPECIAL");
assert(special != null);
bias = special.get("$BIAS");
assert(bias != null);
}
} | 7,205 |
package com.airbnb.aerosolve.core.models;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.lang.StringBuilder;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import com.airbnb.aerosolve.core.*;
import com.airbnb.aerosolve.core.util.Util;
import lombok.Getter;
import lombok.Setter;
// A simple decision tree model.
//
// The tree is a flat array of ModelRecord "stumps". Internal nodes have both
// leftChild and rightChild set and route on (featureFamily, featureName,
// threshold); leaves carry a scalar featureWeight and/or a labelDistribution
// for multiclass scoring.
public class DecisionTreeModel extends AbstractModel {
  private static final long serialVersionUID = 3651061358422885379L;

  // Flat node storage; index 0 is the root, parents reference children by index.
  @Getter @Setter
  protected ArrayList<ModelRecord> stumps;

  public DecisionTreeModel() {
  }

  @Override
  public float scoreItem(FeatureVector combinedItem) {
    Map<String, Map<String, Double>> floatFeatures = Util.flattenFeature(combinedItem);
    return scoreFlattenedFeature(floatFeatures);
  }

  @Override
  public ArrayList<MulticlassScoringResult> scoreItemMulticlass(FeatureVector combinedItem) {
    Map<String, Map<String, Double>> floatFeatures = Util.flattenFeature(combinedItem);
    return scoreFlattenedFeatureMulticlass(floatFeatures);
  }

  // Returns the weight of the leaf the features route to, or 0 for an empty tree.
  public float scoreFlattenedFeature(Map<String, Map<String, Double>> floatFeatures) {
    int leaf = getLeafIndex(floatFeatures);
    if (leaf < 0) {
      return 0.0f;
    }
    return (float) stumps.get(leaf).featureWeight;
  }

  // Returns one scoring result per label in the leaf's label distribution;
  // empty when the tree is empty or the leaf carries no distribution.
  public ArrayList<MulticlassScoringResult> scoreFlattenedFeatureMulticlass(Map<String, Map<String, Double>> floatFeatures) {
    ArrayList<MulticlassScoringResult> results = new ArrayList<>();
    int leaf = getLeafIndex(floatFeatures);
    if (leaf < 0) {
      return results;
    }
    ModelRecord stump = stumps.get(leaf);
    if (stump.labelDistribution == null) {
      return results;
    }
    for (Map.Entry<String, Double> entry : stump.labelDistribution.entrySet()) {
      MulticlassScoringResult result = new MulticlassScoringResult();
      result.setLabel(entry.getKey());
      result.setScore(entry.getValue());
      results.add(result);
    }
    return results;
  }

  // Walks from the root until a node without both children is reached.
  // Returns the leaf index, or -1 for an empty tree.
  public int getLeafIndex(Map<String, Map<String, Double>> floatFeatures) {
    if (stumps.isEmpty()) {
      return -1;
    }
    int index = 0;
    while (true) {
      ModelRecord stump = stumps.get(index);
      if (!stump.isSetLeftChild() || !stump.isSetRightChild()) {
        break;
      }
      // A true stump response routes right, false routes left.
      boolean response = BoostedStumpsModel.getStumpResponse(stump, floatFeatures);
      index = response ? stump.rightChild : stump.leftChild;
    }
    return index;
  }

  @Override
  // Decision trees don't usually have debuggable components.
  public float debugScoreItem(FeatureVector combinedItem,
                              StringBuilder builder) {
    return 0.0f;
  }

  @Override
  // Decision trees don't usually have debuggable components.
  public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
    return new ArrayList<>();
  }

  // Writes a "decision_tree" header record followed by one record per stump.
  public void save(BufferedWriter writer) throws IOException {
    ModelHeader header = new ModelHeader();
    header.setModelType("decision_tree");
    header.setNumRecords(stumps.size());
    ModelRecord headerRec = new ModelRecord();
    headerRec.setModelHeader(header);
    writer.write(Util.encode(headerRec));
    writer.newLine();
    for (ModelRecord rec : stumps) {
      writer.write(Util.encode(rec));
      writer.newLine();
    }
    writer.flush();
  }

  @Override
  protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
    long rows = header.getNumRecords();
    stumps = new ArrayList<>();
    for (long i = 0; i < rows; i++) {
      stumps.add(Util.decodeModel(reader.readLine()));
    }
  }

  /*
   * Returns a debuggable single tree in graphviz DOT format
   */
  public String toDot() {
    StringBuilder sb = new StringBuilder();
    sb.append("digraph g {\n");
    sb.append("graph [ rankdir = \"LR\" ]\n");
    for (int i = 0; i < stumps.size(); i++) {
      ModelRecord stump = stumps.get(i);
      if (stump.isSetLeftChild()) {
        sb.append(String.format("\"node%d\" [\n", i));
        double thresh = stump.threshold;
        // Fixed label: the original ran "equal" straight into the threshold
        // value ("greater than or equal%f").
        sb.append(String.format(
            "label = \"<f0> %s:%s | <f1> less than %f | <f2> greater than or equal %f\";\n",
            stump.featureFamily,
            stump.featureName,
            thresh,
            thresh));
        sb.append("shape = \"record\";\n");
        sb.append("];\n");
      } else {
        sb.append(String.format("\"node%d\" [\n", i));
        if (stump.labelDistribution != null) {
          // Plain append — no format arguments needed here.
          sb.append("label = \"<f0> ");
          for (Map.Entry<String, Double> entry : stump.labelDistribution.entrySet()) {
            sb.append(String.format("%s : %f ", entry.getKey(), entry.getValue()));
          }
          sb.append(" \";\n");
        } else {
          sb.append(String.format("label = \"<f0> Weight %f\";\n", stump.featureWeight));
        }
        sb.append("shape = \"record\";\n");
        sb.append("];\n");
      }
    }
    // Emit the edges in a second pass so edge ids are stable.
    int count = 0;
    for (int i = 0; i < stumps.size(); i++) {
      ModelRecord stump = stumps.get(i);
      if (stump.isSetLeftChild()) {
        sb.append(String.format("\"node%d\":f1 -> \"node%d\":f0 [ id = %d ];\n", i, stump.leftChild, count));
        count = count + 1;
        sb.append(String.format("\"node%d\":f2 -> \"node%d\":f0 [id = %d];\n", i, stump.rightChild, count));
        count = count + 1;
      }
    }
    sb.append("}\n");
    return sb.toString();
  }

  // Returns the transform config in human readable form.
  public String toHumanReadableTransform() {
    StringBuilder sb = new StringBuilder();
    sb.append(" nodes: [\n");
    for (int i = 0; i < stumps.size(); i++) {
      ModelRecord stump = stumps.get(i);
      sb.append(" \"");
      if (stump.isSetLeftChild()) {
        // Parent node, node id, family, name, threshold, left, right
        sb.append(
            String.format("P,%d,%s,%s,%f,%d,%d", i,
                stump.featureFamily,
                stump.featureName,
                stump.threshold,
                stump.leftChild, stump.rightChild));
      } else {
        // Leaf node, node id, feature weight, human readable leaf name.
        sb.append(String.format("L,%d,%f,LEAF_%d", i, stump.featureWeight, i));
      }
      sb.append("\"\n");
    }
    sb.append(" ]\n");
    return sb.toString();
  }

  // Constructs a tree from human readable transform list.
  // Each row is either "P,id,family,name,threshold,left,right" or
  // "L,id,weight,leafName" (the format produced by toHumanReadableTransform).
  public static DecisionTreeModel fromHumanReadableTransform(List<String> rows) {
    DecisionTreeModel tree = new DecisionTreeModel();
    ArrayList<ModelRecord> records = new ArrayList<>();
    tree.setStumps(records);
    for (String row : rows) {
      ModelRecord rec = new ModelRecord();
      records.add(rec);
      String[] token = row.split(",");
      if (token[0].contains("P")) {
        // Parent node
        rec.setFeatureFamily(token[2]);
        rec.setFeatureName(token[3]);
        rec.setThreshold(Double.parseDouble(token[4]));
        rec.setLeftChild(Integer.parseInt(token[5]));
        rec.setRightChild(Integer.parseInt(token[6]));
      } else {
        // Leaf node: the human readable leaf name is kept as the feature name.
        rec.setFeatureName(token[3]);
        rec.setFeatureWeight(Double.parseDouble(token[2]));
      }
    }
    return tree;
  }
}
| 7,206 |
package com.airbnb.aerosolve.core.models;
import com.airbnb.aerosolve.core.NDTreeNode;
import com.airbnb.aerosolve.core.util.Util;
import com.google.common.base.Optional;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.binary.Base64;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Stack;
/*
 N-Dimensional KDTreeModel.

 The tree is a flat array of NDTreeNode; index 0 is the root and internal
 nodes reference their children by array index. A node whose axisIndex is
 LEAF is a leaf.
*/
@Slf4j
public class NDTreeModel implements Serializable {
  private static final long serialVersionUID = -2884260218927875615L;
  // Sentinel axis index marking a leaf node.
  public static final int LEAF = -1;

  @Getter
  private final NDTreeNode[] nodes;

  // One more than the largest axis index referenced by any node.
  @Getter
  private final int dimension;

  public NDTreeModel(NDTreeNode[] nodes) {
    this.nodes = nodes;
    int max = 0;
    for (NDTreeNode node : nodes) {
      max = Math.max(max, node.axisIndex);
    }
    dimension = max + 1;
  }

  public static NDTreeModel getModelWithSplitValueInChildrenNodes(NDTreeNode[] nodes) {
    updateWithSplitValue(nodes);
    return new NDTreeModel(nodes);
  }

  /*
   if min != max, use parent's split value as left child's max
   and right child's min so that left and right share same nodes
  */
  public static void updateWithSplitValue(NDTreeNode[] nodes) {
    if (nodes == null || nodes.length <= 1) return;
    preOrderTraversal(nodes, nodes[0], null);
  }

  // Recursively tightens each child's bounding box on the parent's split axis.
  private static void preOrderTraversal(
      NDTreeNode[] nodes, NDTreeNode node, NDTreeNode parent) {
    if (parent != null) {
      List<Double> minList = node.getMin();
      List<Double> maxList = node.getMax();
      int axis = parent.axisIndex;
      double min = minList.get(axis);
      double max = maxList.get(axis);
      if (min != max) {
        // Left children inherit the split as their max, right children as their min.
        if (node == nodes[parent.getLeftChild()]) {
          maxList.set(axis, parent.splitValue);
        } else {
          minList.set(axis, parent.splitValue);
        }
      }
    }
    if (node.getLeftChild() > 0) {
      preOrderTraversal(nodes, nodes[node.getLeftChild()], node);
    }
    if (node.getRightChild() > 0) {
      preOrderTraversal(nodes, nodes[node.getRightChild()], node);
    }
  }

  public NDTreeModel(List<NDTreeNode> nodeList) {
    this(nodeList.toArray(new NDTreeNode[nodeList.size()]));
  }

  public static boolean isLeaf(NDTreeNode node) {
    return node.getAxisIndex() == NDTreeModel.LEAF;
  }

  // Returns the index of the leaf containing the point, or -1 for an empty tree.
  public int leaf(float ... coordinates) {
    if (nodes == null || nodes.length == 0) return -1;
    return binarySearch(nodes, coordinates, 0);
  }

  // Returns the indice of leaf containing the point.
  public <T extends Number> int leaf(List<T> coordinates) {
    if (nodes == null || nodes.length == 0) return -1;
    return binarySearch(nodes, coordinates, 0);
  }

  public NDTreeNode getNode(int id) {
    return nodes[id];
  }

  // Returns the indices of nodes traversed to get to the leaf containing the point.
  public List<Integer> query(List<Float> coordinates) {
    // Fixed: raw Collections.EMPTY_LIST replaced with the type-safe factory.
    if (nodes == null) return Collections.emptyList();
    return query(nodes, coordinates, 0);
  }

  public List<Integer> query(float ... coordinates) {
    if (nodes == null) return Collections.emptyList();
    return query(nodes, coordinates, 0);
  }

  // Returns the indices of all node overlapping the box
  public List<Integer> queryBox(List<Double> min, List<Double> max) {
    if (nodes == null) return Collections.emptyList();
    List<Integer> idx = new ArrayList<>();
    assert (min.size() == max.size());
    Stack<Integer> stack = new Stack<>();
    stack.push(0);
    while (!stack.isEmpty()) {
      int currIdx = stack.pop();
      idx.add(currIdx);
      NDTreeNode node = nodes[currIdx];
      int index = node.axisIndex;
      if (index > LEAF) {
        // Descend into every child whose half-space intersects the box.
        if (min.get(index) < node.splitValue) {
          stack.push(node.leftChild);
        }
        if (max.get(index) >= node.splitValue) {
          stack.push(node.rightChild);
        }
      }
    }
    return idx;
  }

  public static Optional<NDTreeModel> readFromGzippedStream(InputStream inputStream) {
    List<NDTreeNode> nodes = Util.readFromGzippedStream(NDTreeNode.class, inputStream);
    if (!nodes.isEmpty()) {
      return Optional.of(new NDTreeModel(nodes));
    } else {
      return Optional.absent();
    }
  }

  public static Optional<NDTreeModel> readFromGzippedResource(String name) {
    InputStream inputStream = java.lang.ClassLoader.getSystemResourceAsStream(name);
    Optional<NDTreeModel> modelOptional = readFromGzippedStream(inputStream);
    if (!modelOptional.isPresent()) {
      log.error("Could not load resource named " + name);
    }
    return modelOptional;
  }

  public static Optional<NDTreeModel> readFromGzippedBase64String(String encoded) {
    byte[] decoded = Base64.decodeBase64(encoded);
    InputStream stream = new ByteArrayInputStream(decoded);
    return readFromGzippedStream(stream);
  }

  // Follows child pointers until a leaf is reached; returns the leaf index.
  private static int binarySearch(NDTreeNode[] a, Object key, int currIdx) {
    while (true) {
      int nextIdx = next(a[currIdx], key);
      if (nextIdx == -1) {
        return currIdx;
      } else {
        currIdx = nextIdx;
      }
    }
  }

  // Like binarySearch but records every node index visited along the way.
  private static List<Integer> query(NDTreeNode[] a, Object key, int currIdx) {
    List<Integer> idx = new ArrayList<>();
    while (true) {
      idx.add(currIdx);
      int nextIdx = next(a[currIdx], key);
      if (nextIdx == -1) {
        return idx;
      } else {
        currIdx = nextIdx;
      }
    }
  }

  // TODO use https://github.com/facebook/swift
  // Returns the child index to descend into, or -1 when node is a leaf.
  private static int next(NDTreeNode node, Object key) {
    int index = node.axisIndex;
    if (index == NDTreeModel.LEAF) {
      // leaf
      return -1;
    } else {
      if (key instanceof float[]) {
        float[] coordinates = (float[]) key;
        return nextChild(node, coordinates[index]);
      } else if (key instanceof double[]) {
        double[] coordinates = (double[]) key;
        return nextChild(node, (float) coordinates[index]);
      } else if (key instanceof List) {
        Number x = (Number) ((List) key).get(index);
        return nextChild(node, x);
      } else {
        throw new RuntimeException("obj " + key + " not supported");
      }
    }
  }

  private static int nextChild(NDTreeNode node, float value) {
    if (value < node.splitValue) {
      return node.leftChild;
    } else {
      return node.rightChild;
    }
  }

  private static int nextChild(NDTreeNode node, Number value) {
    if (value.doubleValue() < node.splitValue) {
      return node.leftChild;
    } else {
      return node.rightChild;
    }
  }
}
| 7,207 |
package com.airbnb.aerosolve.core.models;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Serializable;
import java.util.*;
import com.airbnb.aerosolve.core.DebugScoreRecord;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.LabelDictionaryEntry;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.MulticlassScoringResult;
import com.airbnb.aerosolve.core.util.Util;
import com.airbnb.aerosolve.core.util.FloatVector;
import lombok.Getter;
import lombok.Setter;
// A full rank linear model that supports multi-class classification.
// The class vector Y = W' X where X is the feature vector.
// It is full rank because the matrix W is num-features by num-labels in dimension.
// Use a low rank model if you want better generalization.
public class FullRankLinearModel extends AbstractModel {
private static final long serialVersionUID = -849900702679383420L;
@Getter @Setter
private Map<String, Map<String, FloatVector>> weightVector;
@Getter @Setter
private ArrayList<LabelDictionaryEntry> labelDictionary;
@Getter @Setter
private Map<String, Integer> labelToIndex;
public FullRankLinearModel() {
weightVector = new HashMap<>();
labelDictionary = new ArrayList<>();
}
// In the binary case this is just the score for class 0.
// Ideally use a binary model for binary classification.
@Override
public float scoreItem(FeatureVector combinedItem) {
// Not supported.
assert(false);
Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
FloatVector sum = scoreFlatFeature(flatFeatures);
return sum.values[0];
}
@Override
public float debugScoreItem(FeatureVector combinedItem,
StringBuilder builder) {
// TODO(hector_yee) : implement debug.
return scoreItem(combinedItem);
}
@Override
public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
List<DebugScoreRecord> scoreRecordsList = new ArrayList<>();
int dim = labelDictionary.size();
for (Map.Entry<String, Map<String, Double>> entry : flatFeatures.entrySet()) {
String familyKey = entry.getKey();
Map<String, FloatVector> family = weightVector.get(familyKey);
if (family != null) {
for (Map.Entry<String, Double> feature : entry.getValue().entrySet()) {
String featureKey = feature.getKey();
FloatVector featureWeights = family.get(featureKey);
float val = feature.getValue().floatValue();
if (featureWeights != null) {
for (int i = 0; i < dim; i++) {
DebugScoreRecord record = new DebugScoreRecord();
record.setFeatureFamily(familyKey);
record.setFeatureName(featureKey);
record.setFeatureValue(val);
record.setFeatureWeight(featureWeights.get(i));
record.setLabel(labelDictionary.get(i).label);
scoreRecordsList.add(record);
}
}
}
}
}
return scoreRecordsList;
}
public ArrayList<MulticlassScoringResult> scoreItemMulticlass(FeatureVector combinedItem) {
ArrayList<MulticlassScoringResult> results = new ArrayList<>();
Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
FloatVector sum = scoreFlatFeature(flatFeatures);
for (int i = 0; i < labelDictionary.size(); i++) {
MulticlassScoringResult result = new MulticlassScoringResult();
result.setLabel(labelDictionary.get(i).getLabel());
result.setScore(sum.values[i]);
results.add(result);
}
return results;
}
public FloatVector scoreFlatFeature(Map<String, Map<String, Double>> flatFeatures) {
int dim = labelDictionary.size();
FloatVector sum = new FloatVector(dim);
for (Map.Entry<String, Map<String, Double>> entry : flatFeatures.entrySet()) {
Map<String, FloatVector> family = weightVector.get(entry.getKey());
if (family != null) {
for (Map.Entry<String, Double> feature : entry.getValue().entrySet()) {
FloatVector vec = family.get(feature.getKey());
if (vec != null) {
sum.multiplyAdd(feature.getValue().floatValue(), vec);
}
}
}
}
return sum;
}
public void buildLabelToIndex() {
labelToIndex = new HashMap<>();
for (int i = 0; i < labelDictionary.size(); i++) {
labelToIndex.put(labelDictionary.get(i).label, i);
}
}
public void save(BufferedWriter writer) throws IOException {
ModelHeader header = new ModelHeader();
header.setModelType("full_rank_linear");
long count = 0;
for (Map.Entry<String, Map<String, FloatVector>> familyMap : weightVector.entrySet()) {
count += familyMap.getValue().entrySet().size();
}
header.setNumRecords(count);
header.setLabelDictionary(labelDictionary);
ModelRecord headerRec = new ModelRecord();
headerRec.setModelHeader(header);
writer.write(Util.encode(headerRec));
writer.newLine();
for (Map.Entry<String, Map<String, FloatVector>> familyMap : weightVector.entrySet()) {
for (Map.Entry<String, FloatVector> feature : familyMap.getValue().entrySet()) {
ModelRecord record = new ModelRecord();
record.setFeatureFamily(familyMap.getKey());
record.setFeatureName(feature.getKey());
ArrayList<Double> arrayList = new ArrayList<Double>();
for (int i = 0; i < feature.getValue().values.length; i++) {
arrayList.add((double) feature.getValue().values[i]);
}
record.setWeightVector(arrayList);
writer.write(Util.encode(record));
writer.newLine();
}
}
writer.flush();
}
@Override
protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
long rows = header.getNumRecords();
labelDictionary = new ArrayList<>();
for (LabelDictionaryEntry entry : header.getLabelDictionary()) {
labelDictionary.add(entry);
}
buildLabelToIndex();
weightVector = new HashMap<>();
for (long i = 0; i < rows; i++) {
String line = reader.readLine();
ModelRecord record = Util.decodeModel(line);
String family = record.getFeatureFamily();
String name = record.getFeatureName();
Map<String, FloatVector> inner = weightVector.get(family);
if (inner == null) {
inner = new HashMap<>();
weightVector.put(family, inner);
}
FloatVector vec = new FloatVector(record.getWeightVector().size());
for (int j = 0; j < record.getWeightVector().size(); j++) {
vec.values[j] = record.getWeightVector().get(j).floatValue();
}
inner.put(name, vec);
}
}
} | 7,208 |
package com.airbnb.aerosolve.core.models;
import com.airbnb.aerosolve.core.DebugScoreRecord;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.features.SparseLabeledPoint;
import com.airbnb.aerosolve.core.function.AbstractFunction;
import com.airbnb.aerosolve.core.function.Function;
import com.airbnb.aerosolve.core.util.Util;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Random;
import static com.airbnb.aerosolve.core.function.FunctionUtil.toFloat;
/**
* A generalized additive model with a parametric function per feature. See
* http://en.wikipedia.org/wiki/Generalized_additive_model
*
* Aside from common functionality AdditiveModel has special optimization that uses an indexer for
* features. This allows efficient storage of sparse feature vector such that they could be persist
* into memory or disk if desired. This is achieved by calling `generateFeatureIndexer` function
* after model has been initialized with all feature weights, which arranges all known features into
* an array. All feature vector can now be flatten into a sparse vector according to index of
* feature in corresponding feature indexer. Subsequent model update can then be made via array
* lookup instead of nested map lookup. This is done via calling `generateWeightVector` to populate
* the array of function corresponding to the feature.
*/
@Slf4j
public class AdditiveModel extends AbstractModel implements Cloneable {
public static final String DENSE_FAMILY = "dense";
@Getter
@Setter
private Map<String, Map<String, Function>> weights = new HashMap<>();
// only MultiDimensionSpline using denseWeights
// whole dense features belongs to feature family DENSE_FAMILY
private Map<String, Function> denseWeights;
private Map<String, Function> getOrCreateDenseWeights() {
if (denseWeights == null) {
denseWeights = weights.get(DENSE_FAMILY);
if (denseWeights == null) {
denseWeights = weights.put(DENSE_FAMILY, new HashMap<>());
}
}
return denseWeights;
}
// featureIndexer maps features to a unique consecutive ascending index
// mapping training data via this index before shuffling can save a lot of time
@Getter
private Map<String, Map<String, Integer>> featureIndexer = new HashMap<>();
// weightVector takes the index generated above and construct an indexed weight function vector
@Getter
private Function[] weightVector = new Function[0];
/**
* Generate the feature indexer which maps each feature to a unique integer index
*
* @apiNote Subsequent `generateWeightVector` calls will use this index. This index does not
* automatically update when features are added or removed.
*/
public AdditiveModel generateFeatureIndexer() {
featureIndexer.clear();
int count = 0;
for (Map.Entry<String, Map<String, Function>> family : weights.entrySet()) {
String familyName = family.getKey();
Map<String, Integer> featureIndex = new HashMap<>();
featureIndexer.put(familyName, featureIndex);
for (Map.Entry<String, Function> feature : family.getValue().entrySet()) {
featureIndex.put(feature.getKey(), count++);
}
}
// (re)initialize the weight vector to be populated
weightVector = new Function[count];
return this;
}
/**
* Populate the weight vector with index weight function according to feature indexer
*
* @apiNote `generateFeatureIndexer` must be called before this function if there is any feature
* set modification No error/boundary checking is performed in this function. This function is
* automatically called during `clone`.
*/
public AdditiveModel generateWeightVector() {
for (Map.Entry<String, Map<String, Integer>> family : featureIndexer.entrySet()) {
String familyName = family.getKey();
for (Map.Entry<String, Integer> feature : family.getValue().entrySet()) {
weightVector[feature.getValue()] = weights.get(familyName).get(feature.getKey());
}
}
return this;
}
@Override
public float scoreItem(FeatureVector combinedItem) {
Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
return scoreFlatFeatures(flatFeatures) + scoreDenseFeatures(combinedItem.getDenseFeatures());
}
public float scoreDenseFeatures(Map<String, List<Double>> denseFeatures) {
float sum = 0;
if (denseFeatures != null && !denseFeatures.isEmpty()) {
Map<String, Function> denseWeights = getOrCreateDenseWeights();
for (Map.Entry<String, List<Double>> feature : denseFeatures.entrySet()) {
String featureName = feature.getKey();
Function fun = denseWeights.get(featureName);
if (fun == null) continue;
sum += fun.evaluate(toFloat(feature.getValue()));
}
}
return sum;
}
@Override
public float debugScoreItem(FeatureVector combinedItem,
StringBuilder builder) {
Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
float sum = 0.0f;
// order by the absolute value
PriorityQueue<Map.Entry<String, Float>> scores =
new PriorityQueue<>(100, new LinearModel.EntryComparator());
for (Map.Entry<String, Map<String, Double>> featureFamily : flatFeatures.entrySet()) {
Map<String, Function> familyWeightMap = weights.get(featureFamily.getKey());
if (familyWeightMap == null)
continue;
for (Map.Entry<String, Double> feature : featureFamily.getValue().entrySet()) {
Function func = familyWeightMap.get(feature.getKey());
if (func == null)
continue;
float val = feature.getValue().floatValue();
float subScore = func.evaluate(val);
sum += subScore;
String str = featureFamily.getKey() + ":" + feature.getKey() + "=" + val
+ " = " + subScore + "<br>\n";
scores.add(new AbstractMap.SimpleEntry<>(str, subScore));
}
}
Map<String, List<Double>> denseFeatures = combinedItem.getDenseFeatures();
if (denseFeatures != null) {
assert (denseWeights != null);
for (Map.Entry<String, List<Double>> feature : denseFeatures.entrySet()) {
String featureName = feature.getKey();
Function fun = denseWeights.get(featureName);
float[] val = toFloat(feature.getValue());
float subScore = fun.evaluate(val);
sum += subScore;
String str = DENSE_FAMILY + ":" + featureName + "=" + val
+ " = " + subScore + "<br>\n";
scores.add(new AbstractMap.SimpleEntry<>(str, subScore));
}
}
final int MAX_COUNT = 100;
builder.append("Top scores ===>\n");
if (!scores.isEmpty()) {
int count = 0;
float subsum = 0.0f;
while (!scores.isEmpty()) {
Map.Entry<String, Float> entry = scores.poll();
builder.append(entry.getKey());
float val = entry.getValue();
subsum += val;
count = count + 1;
if (count >= MAX_COUNT) {
builder.append("Leftover = " + (sum - subsum) + '\n');
break;
}
}
}
builder.append("Total = " + sum + '\n');
return sum;
}
@Override
public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
List<DebugScoreRecord> scoreRecordsList = new ArrayList<>();
for (Map.Entry<String, Map<String, Double>> featureFamily : flatFeatures.entrySet()) {
Map<String, Function> familyWeightMap = weights.get(featureFamily.getKey());
if (familyWeightMap == null) continue;
for (Map.Entry<String, Double> feature : featureFamily.getValue().entrySet()) {
Function func = familyWeightMap.get(feature.getKey());
if (func == null) continue;
float val = feature.getValue().floatValue();
float weight = func.evaluate(val);
DebugScoreRecord record = new DebugScoreRecord();
record.setFeatureFamily(featureFamily.getKey());
record.setFeatureName(feature.getKey());
record.setFeatureValue(val);
record.setFeatureWeight(weight);
scoreRecordsList.add(record);
}
}
Map<String, List<Double>> denseFeatures = combinedItem.getDenseFeatures();
if (denseFeatures != null) {
Map<String, Function> denseWeights = getOrCreateDenseWeights();
for (Map.Entry<String, List<Double>> feature : denseFeatures.entrySet()) {
String featureName = feature.getKey();
Function fun = denseWeights.get(featureName);
float[] val = toFloat(feature.getValue());
float weight = fun.evaluate(val);
DebugScoreRecord record = new DebugScoreRecord();
record.setFeatureFamily(DENSE_FAMILY);
record.setFeatureName(feature.getKey());
record.setDenseFeatureValue(feature.getValue());
record.setFeatureWeight(weight);
scoreRecordsList.add(record);
}
}
return scoreRecordsList;
}
@Override
protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
  // Reads one encoded ModelRecord per line; the header tells us how many follow.
  long rows = header.getNumRecords();
  slope = header.getSlope();
  offset = header.getOffset();
  weights = new HashMap<>();
  for (long i = 0; i < rows; i++) {
    String line = reader.readLine();
    ModelRecord record = Util.decodeModel(line);
    String family = record.getFeatureFamily();
    String name = record.getFeatureName();
    // computeIfAbsent replaces the get / null-check / put dance.
    weights.computeIfAbsent(family, k -> new HashMap<>())
        .put(name, AbstractFunction.buildFunction(record));
  }
}
@Override
public void save(BufferedWriter writer) throws IOException {
  // Header record first: model type, calibration parameters, total record count.
  ModelHeader header = new ModelHeader();
  header.setModelType("additive");
  header.setSlope(slope);
  header.setOffset(offset);
  long count = 0;
  for (Map<String, Function> family : weights.values()) {
    count += family.size();
  }
  header.setNumRecords(count);
  ModelRecord headerRec = new ModelRecord();
  headerRec.setModelHeader(header);
  writer.write(Util.encode(headerRec));
  writer.newLine();
  // Then one encoded record per (family, feature) function.
  for (Map.Entry<String, Map<String, Function>> familyEntry : weights.entrySet()) {
    String familyName = familyEntry.getKey();
    for (Map.Entry<String, Function> featureEntry : familyEntry.getValue().entrySet()) {
      writer.write(Util.encode(featureEntry.getValue().toModelRecord(familyName, featureEntry.getKey())));
      writer.newLine();
    }
  }
  writer.flush();
}
/**
 * Sums the learned function responses over every (family, feature) pair that
 * appears both in the input and in the model weights.
 */
public float scoreFlatFeatures(Map<String, Map<String, Double>> flatFeatures) {
  float total = 0.0f;
  for (Map.Entry<String, Map<String, Double>> familyEntry : flatFeatures.entrySet()) {
    Map<String, Function> familyWeights = weights.get(familyEntry.getKey());
    if (familyWeights == null) {
      // not important families/features are removed from model
      log.debug("miss featureFamily {}", familyEntry.getKey());
      continue;
    }
    for (Map.Entry<String, Double> featureEntry : familyEntry.getValue().entrySet()) {
      Function func = familyWeights.get(featureEntry.getKey());
      if (func != null) {
        total += func.evaluate(featureEntry.getValue().floatValue());
      }
    }
  }
  return total;
}
/**
 * Scores an indexed point against the flat weight vector. When dropout is
 * positive each term is kept with probability (1 - dropout); the RNG is only
 * consulted when dropout is enabled, exactly as during training updates.
 */
public float scoreFeatures(SparseLabeledPoint point, double dropout, Random rand) {
  float prediction = 0;
  int sparseCount = point.indices.length;
  for (int i = 0; i < sparseCount; i++) {
    if (dropout > 0 && rand.nextDouble() <= dropout) {
      continue; // dropped term
    }
    prediction += weightVector[point.indices[i]].evaluate(point.values[i]);
  }
  int denseCount = point.denseIndices.length;
  for (int i = 0; i < denseCount; i++) {
    if (dropout > 0 && rand.nextDouble() <= dropout) {
      continue; // dropped term
    }
    prediction += weightVector[point.denseIndices[i]].evaluate(point.denseValues[i]);
  }
  return prediction;
}
/**
 * Returns the weight map for the given feature family, creating and
 * registering an empty one if it does not exist yet.
 */
public Map<String, Function> getOrCreateFeatureFamily(String featureFamily) {
  // computeIfAbsent is the idiomatic form of get / null-check / put.
  return weights.computeIfAbsent(featureFamily, k -> new HashMap<>());
}
/**
 * Registers a function for (featureFamily, featureName).
 *
 * @param overwrite when false, an existing function for the feature is kept
 * @throws IllegalArgumentException if function is null. (IllegalArgumentException
 *         is a RuntimeException subclass, so callers catching the old
 *         RuntimeException are unaffected.)
 */
public void addFunction(String featureFamily, String featureName,
                        Function function, boolean overwrite) {
  if (function == null) {
    throw new IllegalArgumentException(featureFamily + " " + featureName + " function null");
  }
  Map<String, Function> featFamily = getOrCreateFeatureFamily(featureFamily);
  if (overwrite || !featFamily.containsKey(featureName)) {
    featFamily.put(featureName, function);
  }
}
/**
 * Applies one gradient step to every function touched by the point, with
 * optional dropout. Term selection mirrors scoreFeatures: the RNG is only
 * consulted when dropout is enabled.
 */
public void update(float gradWithLearningRate, SparseLabeledPoint point, double dropout, Random rand) {
  for (int i = 0; i < point.indices.length; i++) {
    if (dropout > 0 && rand.nextDouble() <= dropout) {
      continue; // dropped term
    }
    Function function = weightVector[point.indices[i]];
    function.update(-gradWithLearningRate, point.values[i]);
  }
  for (int i = 0; i < point.denseIndices.length; i++) {
    if (dropout > 0 && rand.nextDouble() <= dropout) {
      continue; // dropped term
    }
    Function function = weightVector[point.denseIndices[i]];
    function.update(-gradWithLearningRate, point.denseValues[i]);
  }
}
@Override
public AdditiveModel clone() throws CloneNotSupportedException {
  // Shallow clone first, then replace mutable state with deep copies so the
  // clone can be trained independently of the original.
  AdditiveModel copy = (AdditiveModel) super.clone();
  // deep copy weights
  Map<String, Map<String, Function>> newWeights = new HashMap<>();
  weights.forEach((k, v) -> newWeights.put(k, copyFeatures(v)));
  copy.weights = newWeights;
  copy.denseWeights = copyFeatures(denseWeights);
  // regenerate weight vector
  // The flat weightVector must reference the cloned Function objects, so it is
  // reallocated (same length) and rebuilt by generateWeightVector rather than copied.
  copy.weightVector = new Function[copy.weightVector.length];
  return copy.generateWeightVector();
}
/**
 * Deep-copies a feature-name -> Function map; returns null for null input.
 *
 * @throws RuntimeException wrapping CloneNotSupportedException if any function
 *         cannot be cloned. Previously the exception was only printed and the
 *         feature silently dropped, which produced a corrupted partial clone.
 */
private Map<String, Function> copyFeatures(Map<String, Function> featureMap) {
  if (featureMap == null) return null;
  Map<String, Function> newFeatureMap = new HashMap<>();
  featureMap.forEach((feature, function) -> {
    try {
      newFeatureMap.put(feature, function.clone());
    } catch (CloneNotSupportedException e) {
      // forEach lambdas cannot propagate checked exceptions, so wrap and rethrow
      // instead of swallowing: a half-cloned model must not be returned silently.
      throw new RuntimeException("Unable to clone function for feature " + feature, e);
    }
  });
  return newFeatureMap;
}
}
| 7,209 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/MlpModel.java | package com.airbnb.aerosolve.core.models;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.util.*;
import com.airbnb.aerosolve.core.DebugScoreRecord;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.FunctionForm;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.util.Util;
import com.airbnb.aerosolve.core.util.FloatVector;
import lombok.Getter;
import lombok.Setter;
/**
* Multilayer perceptron (MLP) model https://en.wikipedia.org/wiki/Multilayer_perceptron
* The current implementation is for the case where there is only one output node.
*/
public class MlpModel extends AbstractModel {
  private static final long serialVersionUID = -6870862764598907090L;

  // weights that define the projection from input layer (layer0) to the first hidden layer
  // or the output layer
  // Outer key: feature family; inner key: feature name. Each FloatVector holds
  // one weight per node of the first layer after the input.
  @Getter
  @Setter
  private Map<String, Map<String, FloatVector>> inputLayerWeights;

  // if there is hidden layer, this defines the projection
  // from one hidden layer to the next hidden layer or output layer
  // map from hidden layer id to hidden layer weights, id starts from 0
  @Getter
  @Setter
  private Map<Integer, ArrayList<FloatVector>> hiddenLayerWeights;

  // map from layer Id to bias applied on each node in the layer
  @Getter
  @Setter
  private Map<Integer, FloatVector> bias;

  // Activation function per post-input layer; parallel to the bias map.
  @Getter
  @Setter
  private ArrayList<FunctionForm> activationFunction;

  // number of layers (excluding input layer and output layer)
  @Getter
  @Setter
  private int numHiddenLayers;

  // number of nodes for each hidden layer and output layer (does not include input layer)
  @Getter
  @Setter
  private ArrayList<Integer> layerNodeNumber;

  // Scratch activations per layer, reused across forward passes.
  // NOTE(review): reusing this buffer makes scoring stateful — confirm model
  // instances are not shared across concurrently-scoring threads.
  @Getter
  @Setter
  private Map<Integer, FloatVector> layerActivations;

  // Default constructor used by the loader; all state is filled in loadInternal.
  public MlpModel() {
    layerNodeNumber = new ArrayList<>();
    inputLayerWeights = new HashMap<>();
    hiddenLayerWeights = new HashMap<>();
    layerActivations = new HashMap<>();
    bias = new HashMap<>();
    activationFunction = new ArrayList<>();
  }

  public MlpModel(ArrayList<FunctionForm> activation, ArrayList<Integer> nodeNumbers) {
    // n is the number of hidden layers (including output layer, excluding input layer)
    // activation specifies activation function
    // nodeNumbers: specifies number of nodes in each hidden layer
    numHiddenLayers = nodeNumbers.size() - 1; // excluding output layer
    activationFunction = activation;
    layerNodeNumber = nodeNumbers;
    assert(activation.size() == numHiddenLayers + 1);
    inputLayerWeights = new HashMap<>();
    hiddenLayerWeights = new HashMap<>();
    // bias including the bias added at the output layer
    bias = new HashMap<>();
    layerActivations = new HashMap<>();
    for (int i = 0; i <= numHiddenLayers; i++) {
      int nodeNum = nodeNumbers.get(i);
      if (i < numHiddenLayers) {
        // new ArrayList<>(nodeNum) only presizes capacity; weights are added later.
        hiddenLayerWeights.put(i, new ArrayList<>(nodeNum));
      }
      bias.put(i, new FloatVector(nodeNum));
      layerActivations.put(i, new FloatVector(nodeNum));
    }
  }

  // Scores by running a full forward pass (no dropout) over the flattened features.
  @Override
  public float scoreItem(FeatureVector combinedItem) {
    Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
    return forwardPropagation(flatFeatures);
  }

  // Forward pass without dropout; returns the single output node's activation.
  public float forwardPropagation(Map<String, Map<String, Double>> flatFeatures) {
    projectInputLayer(flatFeatures, 0.0);
    for (int i = 0; i < numHiddenLayers; i++) {
      projectHiddenLayer(i, 0.0);
    }
    return layerActivations.get(numHiddenLayers).get(0);
  }

  public float forwardPropagationWithDropout(Map<String, Map<String, Double>> flatFeatures, Double dropout) {
    // reference: George E. Dahl et al. "IMPROVING DEEP NEURAL NETWORKS FOR LVCSR USING RECTIFIED LINEAR UNITS AND DROPOUT"
    // scale the input to a node by 1/(1-dropout), so that we don't need to rescale model weights after training
    // make sure the value is between 0 and 1
    // NOTE(review): asserts are no-ops unless -ea is enabled.
    assert(dropout > 0.0);
    assert(dropout < 1.0);
    projectInputLayer(flatFeatures, dropout);
    for (int i = 0; i < numHiddenLayers; i++) {
      projectHiddenLayer(i, dropout);
    }
    return layerActivations.get(numHiddenLayers).get(0);
  }

  public FloatVector projectInputLayer(Map<String, Map<String, Double>> flatFeatures, Double dropout) {
    // compute the projection from input feature space to the first hidden layer or
    // output layer if there is no hidden layer
    // output: fvProjection is a float vector representing the activation at the first layer after input layer
    int outputNodeNum = layerNodeNumber.get(0);
    FloatVector fvProjection = layerActivations.get(0);
    if (fvProjection == null) {
      fvProjection = new FloatVector(outputNodeNum);
      layerActivations.put(0, fvProjection);
    } else {
      // recompute activation every time we do forward propagation
      fvProjection.setConstant(0.0f);
    }
    for (Map.Entry<String, Map<String, Double>> entry : flatFeatures.entrySet()) {
      Map<String, FloatVector> family = inputLayerWeights.get(entry.getKey());
      if (family != null) {
        for (Map.Entry<String, Double> feature : entry.getValue().entrySet()) {
          FloatVector vec = family.get(feature.getKey());
          if (vec != null) {
            // Randomly drop this input term with probability `dropout`
            // (Math.random(), so dropout here is not seedable/deterministic).
            if (dropout > 0.0 && Math.random() < dropout) continue;
            fvProjection.multiplyAdd(feature.getValue().floatValue(), vec);
          }
        }
      }
    }
    if (dropout > 0.0 && dropout < 1.0) {
      // Inverted-dropout rescale so expected activation matches inference time.
      fvProjection.scale(1.0f / (1.0f - dropout.floatValue()));
    }
    // add bias for the first hidden layer or output layer
    fvProjection.add(bias.get(0));
    applyActivation(fvProjection, activationFunction.get(0));
    return fvProjection;
  }

  // Projects activations of layer hiddenLayerId into layer hiddenLayerId + 1,
  // applying optional dropout on the input nodes, then bias and activation.
  public FloatVector projectHiddenLayer(int hiddenLayerId, Double dropout) {
    int outputLayerId = hiddenLayerId + 1;
    int outputDim = layerNodeNumber.get(outputLayerId);
    FloatVector output = layerActivations.get(outputLayerId);
    if (output == null) {
      output = new FloatVector(outputDim);
      layerActivations.put(outputLayerId, output);
    } else {
      // reset the reused scratch buffer before accumulating
      output.setConstant(0.0f);
    }
    FloatVector input = layerActivations.get(hiddenLayerId);
    ArrayList<FloatVector> weights = hiddenLayerWeights.get(hiddenLayerId);
    for (int i = 0; i < input.length(); i++) {
      // drop the whole input node with probability `dropout`
      if (dropout > 0.0 && Math.random() < dropout) continue;
      output.multiplyAdd(input.get(i), weights.get(i));
    }
    if (dropout > 0.0 && dropout < 1.0) {
      // inverted-dropout rescale, same as projectInputLayer
      output.scale(1.0f / (1.0f - dropout.floatValue()));
    }
    output.multiplyAdd(1.0f, bias.get(outputLayerId));
    applyActivation(output, activationFunction.get(outputLayerId));
    return output;
  }

  // Applies the layer's nonlinearity in place; unknown forms fall back to sigmoid.
  private void applyActivation(FloatVector input, FunctionForm func) {
    switch (func) {
      case SIGMOID: {
        input.sigmoid();
        break;
      }
      case RELU: {
        input.rectify();
        break;
      }
      case TANH: {
        input.tanh();
        break;
      }
      case IDENTITY: {
        // no-op: linear output
        break;
      }
      default: {
        // set sigmoid activation as default
        input.sigmoid();
      }
    }
  }

  @Override
  public float debugScoreItem(FeatureVector combinedItem,
                              StringBuilder builder) {
    // TODO(peng): implement debug
    return scoreItem(combinedItem);
  }

  @Override
  public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
    // TODO(peng): implement debugScoreComponents
    return new ArrayList<>();
  }

  // Serialization layout (must mirror loadInternal exactly):
  //   header, then input-layer weights (one record per feature),
  //   then per-layer bias + activation (one record per layer),
  //   then hidden-layer weights (one record per layer+node).
  public void save(BufferedWriter writer) throws IOException {
    ModelHeader header = new ModelHeader();
    header.setModelType("multilayer_perceptron");
    header.setNumHiddenLayers(numHiddenLayers);
    ArrayList<Integer> nodeNum = new ArrayList<>();
    for (int i = 0; i < numHiddenLayers + 1; i++) {
      // this includes the number of node at the output layer
      nodeNum.add(layerNodeNumber.get(i));
    }
    header.setNumberHiddenNodes(nodeNum);
    long count = 0;
    for (Map.Entry<String, Map<String, FloatVector>> familyMap : inputLayerWeights.entrySet()) {
      count += familyMap.getValue().entrySet().size();
    }
    // number of record for the input layer weights
    header.setNumRecords(count);
    ModelRecord headerRec = new ModelRecord();
    headerRec.setModelHeader(header);
    writer.write(Util.encode(headerRec));
    writer.newLine();
    // save the input layer weight, one record per feature
    for (Map.Entry<String, Map<String, FloatVector>> familyMap : inputLayerWeights.entrySet()) {
      for (Map.Entry<String, FloatVector> feature : familyMap.getValue().entrySet()) {
        ModelRecord record = new ModelRecord();
        record.setFeatureFamily(familyMap.getKey());
        record.setFeatureName(feature.getKey());
        ArrayList<Double> arrayList = new ArrayList<>();
        for (int i = 0; i < feature.getValue().length(); i++) {
          arrayList.add((double) feature.getValue().values[i]);
        }
        record.setWeightVector(arrayList);
        writer.write(Util.encode(record));
        writer.newLine();
      }
    }
    // save the bias for each layer after input layer, one record per layer
    for (int i = 0; i < numHiddenLayers + 1; i++) {
      ArrayList<Double> arrayList = new ArrayList<>();
      FloatVector layerBias = bias.get(i);
      int n = layerBias.length();
      ModelRecord record = new ModelRecord();
      for (int j = 0; j < n; j++) {
        arrayList.add((double) layerBias.get(j));
      }
      record.setWeightVector(arrayList);
      // activation function rides along with the bias record for the layer
      record.setFunctionForm(activationFunction.get(i));
      writer.write(Util.encode(record));
      writer.newLine();
    }
    // save the hiddenLayerWeights, one record per (layer + node)
    for (int i = 0; i < numHiddenLayers; i++) {
      ArrayList<FloatVector> weights = hiddenLayerWeights.get(i);
      for (int j = 0; j < layerNodeNumber.get(i); j++) {
        FloatVector w = weights.get(j);
        ModelRecord record = new ModelRecord();
        ArrayList<Double> arrayList = new ArrayList<>();
        for (int k = 0; k < w.length(); k++) {
          arrayList.add((double) w.get(k));
        }
        record.setWeightVector(arrayList);
        writer.write(Util.encode(record));
        writer.newLine();
      }
    }
    writer.flush();
  }

  // Mirror of save: reads input-layer weights, then per-layer bias/activation,
  // then hidden-layer weights, in that exact order.
  @Override
  protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
    numHiddenLayers = header.getNumHiddenLayers();
    List<Integer> hiddenNodeNumber = header.getNumberHiddenNodes();
    for (int i = 0; i < hiddenNodeNumber.size(); i++) {
      layerNodeNumber.add(hiddenNodeNumber.get(i));
    }
    // load input layer weights
    long rows = header.getNumRecords();
    for (int i = 0; i < rows; i++) {
      String line = reader.readLine();
      ModelRecord record = Util.decodeModel(line);
      String family = record.getFeatureFamily();
      String name = record.getFeatureName();
      Map<String, FloatVector> inner = inputLayerWeights.get(family);
      if (inner == null) {
        inner = new HashMap<>();
        inputLayerWeights.put(family, inner);
      }
      FloatVector vec = new FloatVector(record.getWeightVector().size());
      for (int j = 0; j < record.getWeightVector().size(); j++) {
        vec.values[j] = record.getWeightVector().get(j).floatValue();
      }
      inner.put(name, vec);
    }
    // load bias and activation function
    for (int i = 0; i < numHiddenLayers + 1; i++) {
      String line = reader.readLine();
      ModelRecord record = Util.decodeModel(line);
      List<Double> arrayList = record.getWeightVector();
      FloatVector layerBias = new FloatVector(arrayList.size());
      for (int j = 0; j < arrayList.size(); j++) {
        layerBias.set(j, arrayList.get(j).floatValue());
      }
      bias.put(i, layerBias);
      activationFunction.add(record.getFunctionForm());
    }
    // load the hiddenLayerWeights, one record per (layer + node)
    for (int i = 0; i < numHiddenLayers; i++) {
      ArrayList<FloatVector> weights = new ArrayList<>();
      for (int j = 0; j < layerNodeNumber.get(i); j++) {
        String line = reader.readLine();
        ModelRecord record = Util.decodeModel(line);
        List<Double> arrayList = record.getWeightVector();
        FloatVector w = new FloatVector(arrayList.size());
        for (int k = 0; k < arrayList.size(); k++) {
          w.set(k, arrayList.get(k).floatValue());
        }
        weights.add(w);
      }
      hiddenLayerWeights.put(i, weights);
    }
  }
}
| 7,210 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/ForestModel.java | package com.airbnb.aerosolve.core.models;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.lang.StringBuilder;
import java.util.Map;
import java.util.List;
import java.util.HashMap;
import java.util.ArrayList;
import com.airbnb.aerosolve.core.*;
import com.airbnb.aerosolve.core.util.Util;
import lombok.Getter;
import lombok.Setter;
// A tree forest model.
// A tree forest model.
// Scores are the SUM over member trees (not the average) so a trainer may
// boost the trees jointly.
public class ForestModel extends AbstractModel {
  private static final long serialVersionUID = 3651061358422885378L;

  // Member decision trees; serialized back-to-back after the forest header.
  @Getter @Setter
  protected ArrayList<DecisionTreeModel> trees;

  public ForestModel() {
  }

  @Override
  public float scoreItem(FeatureVector combinedItem) {
    Map<String, Map<String, Double>> floatFeatures = Util.flattenFeature(combinedItem);
    float sum = 0.0f;
    // Note: we sum instead of average so that the trainer has the option of boosting the
    // trees together.
    for (int i = 0; i < trees.size(); i++) {
      sum += trees.get(i).scoreFlattenedFeature(floatFeatures);
    }
    return sum;
  }

  @Override
  public ArrayList<MulticlassScoringResult> scoreItemMulticlass(FeatureVector combinedItem) {
    // Accumulates per-label scores across all member trees.
    HashMap<String, Double> map = new HashMap<>();
    Map<String, Map<String, Double>> floatFeatures = Util.flattenFeature(combinedItem);
    // Note: we sum instead of average so that the trainer has the option of boosting the
    // trees together.
    for (int i = 0; i < trees.size(); i++) {
      ArrayList<MulticlassScoringResult> tmp = trees.get(i).scoreFlattenedFeatureMulticlass(
          floatFeatures);
      for (MulticlassScoringResult result : tmp) {
        Double v = map.get(result.label);
        if (v == null) {
          map.put(result.label, result.score);
        } else {
          map.put(result.label, v + result.score);
        }
      }
    }
    // Repackage the accumulated totals as scoring results
    // (iteration order of the HashMap, i.e. label order, is unspecified).
    ArrayList<MulticlassScoringResult> results = new ArrayList<>();
    for (Map.Entry<String, Double> entry : map.entrySet()) {
      MulticlassScoringResult result = new MulticlassScoringResult();
      result.setLabel(entry.getKey());
      result.setScore(entry.getValue());
      results.add(result);
    }
    return results;
  }

  @Override
  // Forests don't usually have debuggable components.
  public float debugScoreItem(FeatureVector combinedItem,
                              StringBuilder builder) {
    return 0.0f;
  }

  @Override
  // Forests don't usually have debuggable components.
  public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
    List<DebugScoreRecord> scoreRecordsList = new ArrayList<>();
    return scoreRecordsList;
  }

  // Serialization: forest header (type, tree count, calibration), then each
  // tree serialized in full by DecisionTreeModel.save.
  public void save(BufferedWriter writer) throws IOException {
    ModelHeader header = new ModelHeader();
    header.setModelType("forest");
    long count = trees.size();
    header.setNumRecords(count);
    header.setSlope(slope);
    header.setOffset(offset);
    ModelRecord headerRec = new ModelRecord();
    headerRec.setModelHeader(header);
    writer.write(Util.encode(headerRec));
    writer.newLine();
    for (DecisionTreeModel tree : trees) {
      tree.save(writer);
    }
    writer.flush();
  }

  // Mirror of save: each tree is preceded by its own header record, which is
  // decoded here and handed to the tree's loadInternal.
  @Override
  protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
    long numTrees = header.getNumRecords();
    slope = header.getSlope();
    offset = header.getOffset();
    trees = new ArrayList<>();
    for (long i = 0; i < numTrees; i++) {
      String line = reader.readLine();
      ModelRecord record = Util.decodeModel(line);
      DecisionTreeModel tree = new DecisionTreeModel();
      tree.loadInternal(record.getModelHeader(), reader);
      trees.add(tree);
    }
  }
}
| 7,211 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/BoostedStumpsModel.java | package com.airbnb.aerosolve.core.models;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.DebugScoreRecord;
import com.airbnb.aerosolve.core.util.Util;
import lombok.Getter;
import lombok.Setter;
// A simple boosted decision stump model that only operates on float features.
// A simple boosted decision stump model that only operates on float features.
// The model score is the sum of featureWeight over all stumps whose threshold
// test fires for the given feature vector.
public class BoostedStumpsModel extends AbstractModel {
  private static final long serialVersionUID = 3651061358422885377L;

  // One record per stump: (featureFamily, featureName, threshold, featureWeight).
  @Getter @Setter
  protected List<ModelRecord> stumps;

  public BoostedStumpsModel() {
  }

  /**
   * Returns true if the stump's feature is present and its value is >= the
   * stump threshold, false otherwise. A missing family or feature maps to
   * false (the left branch).
   */
  public static boolean getStumpResponse(ModelRecord stump,
                                         Map<String, Map<String, Double>> floatFeatures) {
    Map<String, Double> feat = floatFeatures.get(stump.featureFamily);
    // missing feature corresponding to false (left branch)
    if (feat == null) {
      return false;
    }
    Double val = feat.get(stump.featureName);
    if (val == null) {
      return false;
    }
    // Idiom: return the comparison directly instead of if/else true/false.
    return val >= stump.getThreshold();
  }

  // Sums the weights of all firing stumps.
  @Override
  public float scoreItem(FeatureVector combinedItem) {
    float sum = 0.0f;
    Map<String, Map<String, Double>> floatFeatures = Util.flattenFeature(combinedItem);
    for (ModelRecord stump : stumps) {
      if (getStumpResponse(stump, floatFeatures)) {
        sum += stump.featureWeight;
      }
    }
    return sum;
  }

  /**
   * Same score as scoreItem, appending one human-readable explanation line per
   * firing stump to builder.
   */
  @Override
  public float debugScoreItem(FeatureVector combinedItem,
                              StringBuilder builder) {
    float sum = 0.0f;
    Map<String, Map<String, Double>> floatFeatures = Util.flattenFeature(combinedItem);
    for (ModelRecord stump : stumps) {
      if (getStumpResponse(stump, floatFeatures)) {
        String output = stump.featureFamily + ':' + stump.getFeatureName();
        Double threshold = stump.threshold;
        Double weight = stump.featureWeight;
        builder.append(output);
        // BUGFIX: terminate each explanation with a newline; previously all
        // firing stumps ran together on a single line.
        builder.append(" >= " + threshold.toString() + " ==> " + weight.toString() + '\n');
        sum += stump.featureWeight;
      }
    }
    return sum;
  }

  // One DebugScoreRecord per firing stump; non-firing stumps are omitted.
  @Override
  public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
    List<DebugScoreRecord> scoreRecordsList = new ArrayList<>();
    Map<String, Map<String, Double>> floatFeatures = Util.flattenFeature(combinedItem);
    for (ModelRecord stump : stumps) {
      if (getStumpResponse(stump, floatFeatures)) {
        DebugScoreRecord record = new DebugScoreRecord();
        record.setFeatureFamily(stump.featureFamily);
        record.setFeatureName(stump.featureName);
        // Safe lookup: getStumpResponse == true implies both map levels exist.
        record.setFeatureValue(floatFeatures.get(stump.featureFamily).get(stump.featureName));
        record.setFeatureWeight(stump.featureWeight);
        scoreRecordsList.add(record);
      }
    }
    return scoreRecordsList;
  }

  // Serialization: header record (type + stump count) followed by one encoded
  // record per stump, in list order.
  public void save(BufferedWriter writer) throws IOException {
    ModelHeader header = new ModelHeader();
    header.setModelType("boosted_stumps");
    long count = stumps.size();
    header.setNumRecords(count);
    ModelRecord headerRec = new ModelRecord();
    headerRec.setModelHeader(header);
    writer.write(Util.encode(headerRec));
    writer.newLine();
    for (ModelRecord rec : stumps) {
      writer.write(Util.encode(rec));
      writer.newLine();
    }
    writer.flush();
  }

  // Mirror of save: reads numRecords stump records after the header.
  @Override
  protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
    long rows = header.getNumRecords();
    stumps = new ArrayList<>();
    for (long i = 0; i < rows; i++) {
      String line = reader.readLine();
      ModelRecord record = Util.decodeModel(line);
      stumps.add(record);
    }
  }
}
| 7,212 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/LowRankLinearModel.java | package com.airbnb.aerosolve.core.models;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Serializable;
import java.util.*;
import com.airbnb.aerosolve.core.DebugScoreRecord;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.LabelDictionaryEntry;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.MulticlassScoringResult;
import com.airbnb.aerosolve.core.util.Util;
import com.airbnb.aerosolve.core.util.FloatVector;
import lombok.Getter;
import lombok.Setter;
// A low rank linear model that supports multi-class classification.
// The class vector y = W' * V * x where x is d-dim feature vector.
// Suppose we have Y different labels and the D is the dimension of the joint feature-label space
// V: D-by-d matrix, mapping from feature space to the joint embedding
// W: D-by-Y matrix, mapping from label space to the joint embedding
// Reference: Jason Weston et al. "WSABIE: Scaling Up To Large Vocabulary Image Annotation", IJCAI 2011.
public class LowRankLinearModel extends AbstractModel {
  static final long serialVersionUID = -8894096678183767660L;

  // featureWeightVector represents the projection from feature space to embedding
  // Map feature family name, feature name to a column in V
  // each FloatVector in the map is a D-dim vector
  @Getter
  @Setter
  private Map<String, Map<String, FloatVector>> featureWeightVector;

  // labelWeightVector represents the projection from label space to embedding
  // Map label to a row in W, each FloatVector in the map is a D-dim vector
  @Getter
  @Setter
  private Map<String, FloatVector> labelWeightVector;

  // Ordered label dictionary; index i here is the index used in score vectors.
  @Getter
  @Setter
  private ArrayList<LabelDictionaryEntry> labelDictionary;

  // Reverse lookup from label string to its index in labelDictionary;
  // built by buildLabelToIndex().
  @Getter
  @Setter
  private Map<String, Integer> labelToIndex;

  // size of the embedding
  @Getter
  @Setter
  private int embeddingDimension;

  public LowRankLinearModel() {
    featureWeightVector = new HashMap<>();
    labelWeightVector = new HashMap<>();
    labelDictionary = new ArrayList<>();
  }

  // In the binary case this is just the score for class 0.
  // Ideally use a binary model for binary classification.
  @Override
  public float scoreItem(FeatureVector combinedItem) {
    // Not supported.
    // NOTE(review): assert is a no-op unless -ea is set, so in production this
    // falls through and returns the class-0 score anyway.
    assert (false);
    Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
    FloatVector sum = scoreFlatFeature(flatFeatures);
    return sum.values[0];
  }

  @Override
  public float debugScoreItem(FeatureVector combinedItem,
                              StringBuilder builder) {
    // TODO(peng) : implement debug.
    return scoreItem(combinedItem);
  }

  @Override
  public List<DebugScoreRecord> debugScoreComponents(FeatureVector combinedItem) {
    // TODO(peng): implement debugScoreComponents
    List<DebugScoreRecord> scoreRecordsList = new ArrayList<>();
    return scoreRecordsList;
  }

  // Returns one scoring result per dictionary label, in dictionary order.
  public ArrayList<MulticlassScoringResult> scoreItemMulticlass(FeatureVector combinedItem) {
    ArrayList<MulticlassScoringResult> results = new ArrayList<>();
    Map<String, Map<String, Double>> flatFeatures = Util.flattenFeature(combinedItem);
    FloatVector sum = scoreFlatFeature(flatFeatures);
    for (int i = 0; i < labelDictionary.size(); i++) {
      MulticlassScoringResult result = new MulticlassScoringResult();
      result.setLabel(labelDictionary.get(i).getLabel());
      result.setScore(sum.values[i]);
      results.add(result);
    }
    return results;
  }

  // Full scoring pipeline: x -> V*x (embedding) -> W'*(V*x) (label scores).
  public FloatVector scoreFlatFeature(Map<String, Map<String, Double>> flatFeatures) {
    FloatVector fvProjection = projectFeatureToEmbedding(flatFeatures);
    return projectEmbeddingToLabel(fvProjection);
  }

  public FloatVector projectFeatureToEmbedding(Map<String, Map<String, Double>> flatFeatures) {
    FloatVector fvProjection = new FloatVector(embeddingDimension);
    // compute the projection from feature space to D-dim joint space
    // (features absent from the model are silently skipped)
    for (Map.Entry<String, Map<String, Double>> entry : flatFeatures.entrySet()) {
      Map<String, FloatVector> family = featureWeightVector.get(entry.getKey());
      if (family != null) {
        for (Map.Entry<String, Double> feature : entry.getValue().entrySet()) {
          FloatVector vec = family.get(feature.getKey());
          if (vec != null) {
            fvProjection.multiplyAdd(feature.getValue().floatValue(), vec);
          }
        }
      }
    }
    return fvProjection;
  }

  public FloatVector projectEmbeddingToLabel(FloatVector fvProjection) {
    int dim = labelDictionary.size();
    FloatVector sum = new FloatVector(dim);
    // compute the projection from D-dim joint space to label space
    // (labels without a weight vector keep a score of 0)
    for (int i = 0; i < dim; i++) {
      String labelKey = labelDictionary.get(i).getLabel();
      FloatVector labelVector = labelWeightVector.get(labelKey);
      if (labelVector != null) {
        float val = labelVector.dot(fvProjection);
        sum.set(i, val);
      }
    }
    return sum;
  }

  // Rebuilds the label -> index map from the current labelDictionary.
  public void buildLabelToIndex() {
    labelToIndex = new HashMap<>();
    for (int i = 0; i < labelDictionary.size(); i++) {
      String labelKey = labelDictionary.get(i).label;
      labelToIndex.put(labelKey, i);
    }
  }

  // Serialization: the label dictionary and label embedding ride in the header;
  // the body is one record per (family, feature) weight vector.
  public void save(BufferedWriter writer) throws IOException {
    ModelHeader header = new ModelHeader();
    header.setModelType("low_rank_linear");
    long count = 0;
    for (Map.Entry<String, Map<String, FloatVector>> familyMap : featureWeightVector.entrySet()) {
      count += familyMap.getValue().entrySet().size();
    }
    header.setNumRecords(count);
    header.setLabelDictionary(labelDictionary);
    Map<String, java.util.List<Double>> labelEmbedding = new HashMap<>();
    for (Map.Entry<String, FloatVector> labelRepresentation : labelWeightVector.entrySet()) {
      float[] values = labelRepresentation.getValue().getValues();
      ArrayList<Double> arrayList = new ArrayList<>();
      for (int i = 0; i < embeddingDimension; i++) {
        arrayList.add((double) values[i]);
      }
      labelEmbedding.put(labelRepresentation.getKey(), arrayList);
    }
    header.setLabelEmbedding(labelEmbedding);
    ModelRecord headerRec = new ModelRecord();
    headerRec.setModelHeader(header);
    writer.write(Util.encode(headerRec));
    writer.newLine();
    for (Map.Entry<String, Map<String, FloatVector>> familyMap : featureWeightVector.entrySet()) {
      for (Map.Entry<String, FloatVector> feature : familyMap.getValue().entrySet()) {
        ModelRecord record = new ModelRecord();
        record.setFeatureFamily(familyMap.getKey());
        record.setFeatureName(feature.getKey());
        ArrayList<Double> arrayList = new ArrayList<>();
        for (int i = 0; i < feature.getValue().values.length; i++) {
          arrayList.add((double) feature.getValue().values[i]);
        }
        record.setWeightVector(arrayList);
        writer.write(Util.encode(record));
        writer.newLine();
      }
    }
    writer.flush();
  }

  // Mirror of save: restores dictionary + label embedding from the header,
  // then reads numRecords feature weight records.
  @Override
  protected void loadInternal(ModelHeader header, BufferedReader reader) throws IOException {
    long rows = header.getNumRecords();
    labelDictionary = new ArrayList<>();
    for (LabelDictionaryEntry entry : header.getLabelDictionary()) {
      labelDictionary.add(entry);
    }
    buildLabelToIndex();
    labelWeightVector = new HashMap<>();
    // Embedding dimension is inferred from any one label's embedding vector.
    embeddingDimension = header.getLabelEmbedding().entrySet().iterator().next().getValue().size();
    for (Map.Entry<String, java.util.List<Double>> labelRepresentation : header.getLabelEmbedding().entrySet()) {
      java.util.List<Double> values = labelRepresentation.getValue();
      String labelKey = labelRepresentation.getKey();
      FloatVector labelWeight = new FloatVector(embeddingDimension);
      for (int i = 0; i < embeddingDimension; i++) {
        labelWeight.set(i, values.get(i).floatValue());
      }
      labelWeightVector.put(labelKey, labelWeight);
    }
    featureWeightVector = new HashMap<>();
    for (long i = 0; i < rows; i++) {
      String line = reader.readLine();
      ModelRecord record = Util.decodeModel(line);
      String family = record.getFeatureFamily();
      String name = record.getFeatureName();
      Map<String, FloatVector> inner = featureWeightVector.get(family);
      if (inner == null) {
        inner = new HashMap<>();
        featureWeightVector.put(family, inner);
      }
      FloatVector vec = new FloatVector(record.getWeightVector().size());
      for (int j = 0; j < record.getWeightVector().size(); j++) {
        vec.values[j] = record.getWeightVector().get(j).floatValue();
      }
      inner.put(name, vec);
    }
  }
}
| 7,213 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/Model.java | package com.airbnb.aerosolve.core.models;
import com.airbnb.aerosolve.core.FeatureVector;
/**
* Created by hector_yee on 8/25/14.
* Base class for models
*/
interface Model {
  // Scores a single item. The transforms should already have been applied to
  // the context and item and combined item.
  float scoreItem(FeatureVector combinedItem);

  // Debug scores a single item. These are explanations for why a model
  // came up with the score. Implementations append a human-readable
  // explanation to builder.
  float debugScoreItem(FeatureVector combinedItem,
                       StringBuilder builder);
}
| 7,214 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/models/ModelFactory.java | package com.airbnb.aerosolve.core.models;
import com.airbnb.aerosolve.core.ModelHeader;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.util.Util;
import com.google.common.base.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
/**
 * Factory for {@link AbstractModel} instances, constructed either from a
 * registered short type name or by decoding a serialized model header.
 */
public final class ModelFactory {
  private static final Logger log = LoggerFactory.getLogger(ModelFactory.class);

  // Static factory only; never instantiated.
  private ModelFactory() {
  }

  /**
   * Creates an empty model from a short type name (e.g. "linear", "spline").
   * Names that are not registered below are treated as fully qualified class
   * names and instantiated reflectively via their no-arg constructor.
   *
   * @throws RuntimeException wrapping the reflective failure for unknown names
   */
  public static AbstractModel createByName(String name) {
    switch (name) {
      case "linear": return new LinearModel();
      case "maxout": return new MaxoutModel();
      case "spline": return new SplineModel();
      case "boosted_stumps": return new BoostedStumpsModel();
      case "decision_tree": return new DecisionTreeModel();
      case "forest": return new ForestModel();
      case "additive": return new AdditiveModel();
      case "kernel" : return new KernelModel();
      case "full_rank_linear" : return new FullRankLinearModel();
      case "low_rank_linear" : return new LowRankLinearModel();
      case "multilayer_perceptron" : return new MlpModel();
      default:
        log.info("Attempting to initialize " + name);
        try {
          // getDeclaredConstructor().newInstance() replaces the deprecated
          // Class.newInstance(), which propagated checked exceptions unwrapped.
          return (AbstractModel) Class.forName(name).getDeclaredConstructor().newInstance();
        } catch (Exception e) {
          log.error("Unable to initialize model by class name of " + name);
          throw new RuntimeException(e);
        }
    }
  }

  /**
   * Reads a model header line from the reader and loads the corresponding
   * model body. Returns absent if the header line cannot be decoded or
   * carries no model header.
   *
   * @throws IOException if reading from the stream fails
   */
  public static Optional<AbstractModel> createFromReader(BufferedReader reader) throws IOException {
    Optional<AbstractModel> model = Optional.absent();
    String headerLine = reader.readLine();
    ModelRecord record = Util.decodeModel(headerLine);
    if (record == null) {
      log.error("Could not decode header " + headerLine);
      return model;
    }
    ModelHeader header = record.getModelHeader();
    if (header != null) {
      AbstractModel result = createByName(header.getModelType());
      if (result != null) {
        result.loadInternal(header, reader);
        model = Optional.of(result);
      }
    }
    return model;
  }
}
| 7,215 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/function/Spline.java | package com.airbnb.aerosolve.core.function;
import com.airbnb.aerosolve.core.FunctionForm;
import com.airbnb.aerosolve.core.ModelRecord;
import com.google.common.primitives.Floats;
import java.util.ArrayList;
import java.util.List;
// A piecewise linear spline implementation supporting updates.
/**
 * A piecewise linear spline over [minVal, maxVal] with evenly spaced knots,
 * supporting in-place additive updates.
 */
public class Spline extends AbstractFunction {
  private static final long serialVersionUID = 5166347177557768302L;

  // Number of knots (control points).
  private int numBins;
  // 1 / (maxVal - minVal): maps x into the unit interval.
  private float scale;
  // Width of a single bin in x units.
  private float binSize;
  // 1 / binSize: maps an in-bin offset to a [0, 1] interpolation parameter.
  private float binScale;

  public Spline(float minVal, float maxVal, float[] weights) {
    setupSpline(minVal, maxVal, weights);
  }

  public Spline(float minVal, float maxVal, int numBins) {
    // Guard against a degenerate (empty) range by widening it to one unit.
    if (maxVal <= minVal) {
      maxVal = minVal + 1.0f;
    }
    setupSpline(minVal, maxVal, new float[numBins]);
  }

  /*
    Generates new weights[] from numBins
  */
  public float[] weightsByNumBins(int numBins) {
    if (numBins == this.numBins) {
      return weights;
    } else {
      return newWeights(numBins);
    }
  }

  // Resamples the current spline at numBins evenly spaced x positions.
  private float[] newWeights(int numBins) {
    assert (numBins != this.numBins);
    float[] newWeights = new float[numBins];
    float scale = 1.0f / (numBins - 1.0f);
    float diff = maxVal - minVal;
    for (int i = 0; i < numBins; i++) {
      float t = i * scale;
      float x = diff * t + minVal;
      newWeights[i] = evaluate(x);
    }
    return newWeights;
  }

  // A constructor from model record
  public Spline(ModelRecord record) {
    List<Double> weightVec = record.getWeightVector();
    float[] weights = new float[weightVec.size()];
    for (int j = 0; j < weights.length; j++) {
      weights[j] = weightVec.get(j).floatValue();
    }
    // Delegate to setupSpline rather than duplicating the scale/binSize math.
    setupSpline((float) record.getMinVal(), (float) record.getMaxVal(), weights);
  }

  // Initializes all derived fields from the range and knot weights.
  private void setupSpline(float minVal, float maxVal, float[] weights) {
    this.weights = weights;
    this.numBins = weights.length;
    this.minVal = minVal;
    this.maxVal = maxVal;
    // Clamp the range width to avoid division by zero for degenerate splines.
    float diff = Math.max(maxVal - minVal, 1e-10f);
    this.scale = 1.0f / diff;
    this.binSize = diff / (numBins - 1.0f);
    this.binScale = 1.0f / binSize;
  }

  /**
   * Sums scale * weights over all given splines (each resampled to numBins)
   * into a new Spline over this spline's [minVal, maxVal] range.
   */
  @Override
  public Function aggregate(Iterable<Function> functions, float scale, int numBins) {
    float[] aggWeights = new float[numBins];
    for (Function fun : functions) {
      Spline spline = (Spline) fun;
      float[] w = spline.weightsByNumBins(numBins);
      for (int i = 0; i < numBins; i++) {
        aggWeights[i] += scale * w[i];
      }
    }
    return new Spline(minVal, maxVal, aggWeights);
  }

  /** Piecewise linear interpolation between the two knots bracketing x[0]. */
  @Override
  public float evaluate(float... x) {
    int bin = getBin(x[0]);
    if (bin == numBins - 1) {
      return weights[numBins - 1];
    }
    float t = getBinT(x[0], bin);
    t = Math.max(0.0f, Math.min(1.0f, t));
    float result = (1.0f - t) * weights[bin] + t * weights[bin + 1];
    return result;
  }

  /** Distributes delta onto the two knots bracketing values[0], linearly in t. */
  @Override
  public void update(float delta, float... values) {
    float x = values[0];
    int bin = getBin(x);
    if (bin == numBins - 1) {
      weights[numBins - 1] += delta;
    } else {
      float t = getBinT(x, bin);
      t = Math.max(0.0f, Math.min(1.0f, t));
      weights[bin] += (1.0f - t) * delta;
      weights[bin + 1] += t * delta;
    }
  }

  @Override
  public ModelRecord toModelRecord(String featureFamily, String featureName) {
    ModelRecord record = new ModelRecord();
    record.setFunctionForm(FunctionForm.Spline);
    record.setFeatureFamily(featureFamily);
    record.setFeatureName(featureName);
    ArrayList<Double> arrayList = new ArrayList<Double>();
    for (int i = 0; i < weights.length; i++) {
      arrayList.add((double) weights[i]);
    }
    record.setWeightVector(arrayList);
    record.setMinVal(minVal);
    record.setMaxVal(maxVal);
    return record;
  }

  @Override
  public void resample(int newBins) {
    if (newBins != numBins) {
      setupSpline(minVal, maxVal, newWeights(newBins));
    }
  }

  // Returns the lower bound bin
  public int getBin(float x) {
    int bin = (int) Math.floor((x - minVal) * scale * (numBins - 1));
    bin = Math.max(0, Math.min(numBins - 1, bin));
    return bin;
  }

  // Returns the t value in the bin (0, 1)
  public float getBinT(float x, int bin) {
    float lowerX = bin * binSize + minVal;
    float t = (x - lowerX) * binScale;
    t = Math.max(0.0f, Math.min(1.0f, t));
    return t;
  }

  /** Sum of absolute knot weights. */
  public float L1Norm() {
    float sum = 0.0f;
    for (int i = 0; i < weights.length; i++) {
      sum += Math.abs(weights[i]);
    }
    return sum;
  }

  /** Largest absolute knot weight. */
  @Override
  public float LInfinityNorm() {
    return Math.max(Floats.max(weights), Math.abs(Floats.min(weights)));
  }

  /** Rescales all knots so the L-infinity norm does not exceed cap (no-op for cap <= 0). */
  @Override
  public void LInfinityCap(float cap) {
    if (cap <= 0.0f) return;
    float currentNorm = this.LInfinityNorm();
    if (currentNorm > cap) {
      float scale = cap / currentNorm;
      for (int i = 0; i < weights.length; i++) {
        weights[i] *= scale;
      }
    }
  }

  /** Initializes knots along the line from params[0] (at minVal) to params[1] (at maxVal). */
  @Override
  public void setPriors(float[] params) {
    float start = params[0];
    float end = params[1];
    // fit a line based on the input starting weight and ending weight
    for (int i = 0; i < numBins; i++) {
      float t = i / (numBins - 1.0f);
      weights[i] = ((1.0f - t) * start + t * end);
    }
  }

  @Override
  public double smooth(double tolerance, boolean toleranceIsPercentage) {
    return FunctionUtil.smooth(tolerance, toleranceIsPercentage, weights);
  }

  @Override
  public Spline clone() throws CloneNotSupportedException {
    Spline copy = (Spline) super.clone();
    copy.weights = weights.clone();
    return copy;
  }
}
| 7,216 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/function/Linear.java | package com.airbnb.aerosolve.core.function;
import com.airbnb.aerosolve.core.FunctionForm;
import com.airbnb.aerosolve.core.ModelRecord;
import java.util.ArrayList;
import java.util.List;
/**
* Linear function f(x) = weights[1]*x+weights[0]
*/
public class Linear extends AbstractFunction {
  // weights[0] holds the offset, weights[1] the slope.

  /** Copy constructor: clones the weights and the [minVal, maxVal] range. */
  public Linear(Linear other) {
    weights = other.weights.clone();
    minVal = other.getMinVal();
    maxVal = other.getMaxVal();
  }

  public Linear(float minVal, float maxVal) {
    this(minVal, maxVal, new float[2]);
  }

  public Linear(float minVal, float maxVal, float[] weights) {
    this.weights = weights;
    this.minVal = minVal;
    this.maxVal = maxVal;
  }

  /** Restores offset and slope from a serialized record, tolerating short weight vectors. */
  public Linear(ModelRecord record) {
    weights = new float[2];
    List<Double> serialized = record.getWeightVector();
    int limit = Math.min(serialized.size(), 2);
    for (int idx = 0; idx < limit; idx++) {
      weights[idx] = serialized.get(idx).floatValue();
    }
    minVal = (float) record.getMinVal();
    maxVal = (float) record.getMaxVal();
  }

  /** Returns a new Linear whose weights are the scaled sum over all inputs. */
  @Override
  public Function aggregate(Iterable<Function> functions, float scale, int numBins) {
    float[] combined = new float[weights.length];
    for (Function fn : functions) {
      float[] w = ((Linear) fn).weights;
      for (int idx = 0; idx < combined.length; idx++) {
        combined[idx] += scale * w[idx];
      }
    }
    return new Linear(minVal, maxVal, combined);
  }

  @Override
  public void update(float delta, float ... values) {
    weights[0] += delta;
    weights[1] += delta * normalization(values[0]);
  }

  @Override
  public void setPriors(float[] params) {
    weights[0] = params[0];
    weights[1] = params[1];
  }

  /** f(x) = offset + slope * normalized(x). */
  @Override
  public float evaluate(float ... x) {
    return weights[0] + weights[1] * normalization(x[0]);
  }

  @Override
  public ModelRecord toModelRecord(String featureFamily, String featureName) {
    ModelRecord record = new ModelRecord();
    record.setFunctionForm(FunctionForm.Linear);
    record.setFeatureFamily(featureFamily);
    record.setFeatureName(featureName);
    record.setMinVal(minVal);
    record.setMaxVal(maxVal);
    List<Double> weightVector = new ArrayList<Double>();
    weightVector.add((double) weights[0]);
    weightVector.add((double) weights[1]);
    record.setWeightVector(weightVector);
    return record;
  }

  /** Shrinks both weights so the largest endpoint response stays within cap (no-op for cap <= 0). */
  @Override
  public void LInfinityCap(float cap) {
    if (cap > 0.0f) {
      float norm = this.LInfinityNorm();
      if (norm > cap) {
        float shrink = cap / norm;
        for (int idx = 0; idx < weights.length; idx++) {
          weights[idx] *= shrink;
        }
      }
    }
  }

  /** Largest absolute response over the normalized domain endpoints (t = 0 and t = 1). */
  @Override
  public float LInfinityNorm() {
    float atStart = weights[0];
    float atEnd = weights[0] + weights[1];
    return Math.max(Math.abs(atStart), Math.abs(atEnd));
  }

  // Maps x into [0, 1] over [minVal, maxVal]; degenerate non-zero range falls
  // back to x / maxVal, anything else passes x through unchanged.
  private float normalization(float x) {
    if (minVal < maxVal) {
      return (x - minVal) / (maxVal - minVal);
    }
    if (minVal == maxVal && maxVal != 0) {
      return x / maxVal;
    }
    return x;
  }

  /** A line has no bins; nothing to resample. */
  @Override
  public void resample(int newBins) {
  }

  /** A line is already smooth. */
  @Override
  public double smooth(double tolerance, boolean toleranceIsPercentage) {
    return 0;
  }

  @Override
  public Linear clone() throws CloneNotSupportedException {
    Linear copy = (Linear) super.clone();
    copy.weights = weights.clone();
    return copy;
  }
}
| 7,217 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/function/Point.java | package com.airbnb.aerosolve.core.function;
import com.airbnb.aerosolve.core.FunctionForm;
import com.airbnb.aerosolve.core.ModelRecord;
import java.util.ArrayList;
import java.util.List;
/**
* A point function y(x) = w where w is a constant. This usually represents one-hot feature.
*/
public class Point implements Function {
  // The single constant returned for every input.
  private float weight;

  public Point() { }

  public Point(float weight) {
    this.weight = weight;
  }

  /** Restores the constant from the first entry of the serialized weight vector. */
  public Point(ModelRecord record) {
    weight = record.getWeightVector().get(0).floatValue();
  }

  /** Returns a new Point whose constant is the scaled sum over all inputs. */
  @Override
  public Function aggregate(Iterable<Function> functions, float scale, int numBins) {
    float total = 0;
    for (Function fn : functions) {
      total += scale * ((Point) fn).weight;
    }
    return new Point(total);
  }

  @Override
  public float evaluate(float... x) {
    return weight;
  }

  @Override
  public float evaluate(List<Double> values) {
    return weight;
  }

  @Override
  public void update(float delta, float... values) {
    weight += delta;
  }

  @Override
  public void update(float delta, List<Double> values) {
    weight += delta;
  }

  @Override
  public ModelRecord toModelRecord(String featureFamily, String featureName) {
    ModelRecord record = new ModelRecord();
    record.setFunctionForm(FunctionForm.Point);
    record.setFeatureFamily(featureFamily);
    record.setFeatureName(featureName);
    List<Double> weightVector = new ArrayList<Double>();
    weightVector.add((double) weight);
    record.setWeightVector(weightVector);
    return record;
  }

  @Override
  public void setPriors(float[] params) {
    weight = params[0];
  }

  /** Rescales the constant so its magnitude does not exceed cap (no-op for cap <= 0). */
  @Override
  public void LInfinityCap(float cap) {
    if (cap > 0.0f) {
      float norm = this.LInfinityNorm();
      if (norm > cap) {
        weight *= cap / norm;
      }
    }
  }

  @Override
  public float LInfinityNorm() {
    return Math.abs(weight);
  }

  /** A point has no bins; resampling is a no-op. */
  @Override
  public void resample(int newBins) {
  }

  /** Nothing to smooth for a constant. */
  @Override
  public double smooth(double tolerance, boolean toleranceIsPercentage) {
    return 0;
  }

  @Override
  public Function clone() throws CloneNotSupportedException {
    return new Point(weight);
  }
}
| 7,218 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/function/FunctionUtil.java | package com.airbnb.aerosolve.core.function;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import java.util.Arrays;
import java.util.List;
@Slf4j
public class FunctionUtil {
  /**
   * Fits a 6-coefficient curve (cubic Bezier basis plus start/end dirac terms)
   * to the samples in {@code data} via cyclic coordinate descent.
   */
  public static float[] fitPolynomial(float[] data) {
    int numCoeff = 6;
    int iterations = numCoeff * 4;
    float[] initial = new float[numCoeff];
    float[] initialStep = new float[numCoeff];
    Arrays.fill(initialStep, 1.0f);
    return optimize(1.0 / 512.0, iterations, initial, initialStep,
        new ImmutablePair<Float, Float>(-10.0f, 10.0f), data);
  }

  /**
   * Evaluates the fitted curve at each sample position and returns the mean
   * absolute error over the non-zero entries of {@code data}.
   *
   * @param overwrite when true, {@code data} is replaced in place by the
   *        fitted curve values
   */
  // NOTE(review): if every data[i] is 0.0 then count stays 0 and err / count is
  // NaN — callers appear to pass non-trivial weight arrays; confirm.
  public static float evaluatePolynomial(float[] coeff, float[] data, boolean overwrite) {
    int len = data.length;
    float err = 0;
    long count = 0;
    for (int i = 0; i < len; i++) {
      // t runs 0 -> 1 across the array; tinv = 1 - t.
      float t = (float) i / (len - 1);
      float tinv = 1 - t;
      // Dirac terms (coeff[0], coeff[1]) let the fit absorb endpoint discontinuities.
      float diracStart = (i == 0) ? coeff[0] : 0;
      float diracEnd = (i == len - 1) ? coeff[1] : 0;
      // Cubic Bezier basis weighted by coeff[2..5].
      double eval = coeff[2] * tinv * tinv * tinv +
          coeff[3] * 3.0 * tinv * tinv * t +
          coeff[4] * 3.0 * tinv * t * t +
          coeff[5] * t * t * t +
          diracStart +
          diracEnd;
      if (data[i] != 0.0) {
        err += Math.abs(eval - data[i]);
        count++;
      }
      if (overwrite) {
        data[i] = (float) eval;
      }
    }
    return err / count;
  }

  // CyclicCoordinateDescent
  /**
   * Minimizes {@link #evaluatePolynomial} error by cycling over coefficients.
   * Each coordinate is probed one step left and right, the step halving until
   * it drops below {@code tolerance}; moves are clamped to {@code bounds}.
   */
  public static float[] optimize(double tolerance, int iterations,
                                 float[] initial, float[] initialStep,
                                 Pair<Float, Float> bounds, float[] data) {
    float[] best = initial;
    float bestF = evaluatePolynomial(best, data, false);
    int maxDim = initial.length;
    for (int i = 0; i < iterations; ++i) {
      for (int dim = 0; dim < maxDim; ++dim) {
        float step = initialStep[dim];
        while (step > tolerance) {
          float[] left = best.clone();
          left[dim] = Math.max(bounds.getLeft(), best[dim] - step);
          float leftF = evaluatePolynomial(left, data, false);
          float[] right = best.clone();
          right[dim] = Math.min(bounds.getRight(), best[dim] + step);
          float rightF = evaluatePolynomial(right, data, false);
          // Keep whichever probe improves the error; ties keep the current best.
          if (leftF < bestF) {
            best = left;
            bestF = leftF;
          }
          if (rightF < bestF) {
            best = right;
            bestF = rightF;
          }
          step *= 0.5;
        }
      }
    }
    return best;
  }

  /** Narrowing copy of a boxed Double list into a primitive float array. */
  public static float[] toFloat(List<Double> list) {
    float[] result = new float[list.size()];
    for (int i = 0; i < result.length; i++) {
      result[i] = list.get(i).floatValue();
    }
    return result;
  }

  /**
   * Fits a smooth curve to {@code weights} and, when the fit error is within
   * tolerance, overwrites {@code weights} in place with the fitted values.
   *
   * @param tolerance if fitted array's deviation from weights is less than tolerance
   *        use the fitted, otherwise keep original weights
   * @param toleranceIsPercentage when true, tolerance is interpreted relative to
   *        the mean absolute weight, and the returned error is normalized the same way
   * @param weights the curve you want to smooth (possibly modified in place)
   * @return the fit error (normalized when toleranceIsPercentage is true)
   */
  public static double smooth(double tolerance, boolean toleranceIsPercentage, float[] weights) {
    // TODO use apache math's PolynomialCurveFitter
    float[] best = FunctionUtil.fitPolynomial(weights);
    double errAndCoeff = FunctionUtil.evaluatePolynomial(best, weights, false);
    if (toleranceIsPercentage) {
      double absMean = getAbsMean(weights);
      return smoothInternal(errAndCoeff, tolerance * absMean, best, weights) / absMean;
    } else {
      return smoothInternal(errAndCoeff, tolerance, best, weights);
    }
  }

  // Applies the fitted curve (overwriting weights) only when the error is
  // within tolerance; always returns the raw error.
  private static double smoothInternal(
      double errAndCoeff, double tolerance, float[] best, float[] weights) {
    if (errAndCoeff < tolerance) {
      FunctionUtil.evaluatePolynomial(best, weights, true);
    }
    return errAndCoeff;
  }

  /** Mean of absolute values; used to scale percentage-based tolerances. */
  public static double getAbsMean(float[] weights) {
    double sum = 0;
    for (float f : weights) {
      sum += Math.abs(f);
    }
    return sum / weights.length;
  }
}
| 7,219 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/function/Zero.java | package com.airbnb.aerosolve.core.function;
import com.airbnb.aerosolve.core.ModelRecord;
import java.util.List;
/**
* This is a special case of a point where no contribution whatsoever is provided by this feature.
* This is intended to mark feature as deleted for future deletion. This is needed so we avoid
* re-indexing of the feature space for both model and data points. The behavior is simply no-op for
* most operations and always outputs 0 for scoring. Those features with such function should be
* deleted before final model persistence onto the disk.
*/
public class Zero implements Function {
  /** Aggregation of zeros is still zero; reuse this instance. */
  @Override
  public Function aggregate(Iterable<Function> functions, float scale, int numBins) {
    return this;
  }

  /** Always contributes nothing to the score. */
  @Override
  public float evaluate(float... x) {
    return 0;
  }

  @Override
  public float evaluate(List<Double> values) {
    return 0;
  }

  /** Updates are deliberately discarded; the feature is slated for deletion. */
  @Override
  public void update(float delta, float... values) {
  }

  @Override
  public void update(float delta, List<Double> values) {
  }

  /** Intentionally fails: Zero marks a deleted feature and must never be serialized. */
  @Override
  public ModelRecord toModelRecord(String featureFamily, String featureName) {
    throw new IllegalAccessError("Zero point should never be persisted. Please delete or skip this feature instead.");
  }

  /** No state to initialize. */
  @Override
  public void setPriors(float[] params) {
  }

  /** Norm is always zero, so capping is a no-op. */
  @Override
  public void LInfinityCap(float cap) {
  }

  @Override
  public float LInfinityNorm() {
    return 0;
  }

  /** No bins to resample. */
  @Override
  public void resample(int newBins) {
  }

  /** Nothing to smooth. */
  @Override
  public double smooth(double tolerance, boolean toleranceIsPercentage) {
    return 0;
  }

  /** Stateless, so the shared instance serves as its own clone. */
  @Override
  public Function clone() throws CloneNotSupportedException {
    return this;
  }
}
| 7,220 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/function/MultiDimensionSpline.java | package com.airbnb.aerosolve.core.function;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.FunctionForm;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.NDTreeNode;
import com.airbnb.aerosolve.core.models.NDTreeModel;
import lombok.extern.slf4j.Slf4j;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
public class MultiDimensionSpline implements Function {
  private static final long serialVersionUID = 5166347177557769302L;
  // Partitions the input space; each leaf cell has a set of corner points.
  private final NDTreeModel ndTreeModel;
  // NDTree leaf maps to spline point
  private final Map<Integer, List<MultiDimensionPoint>> weights;
  // All distinct corner points; shared between adjacent leaf cells.
  private final List<MultiDimensionPoint> points;

  public MultiDimensionSpline(NDTreeNode[] nodes) {
    this(new NDTreeModel(nodes));
  }

  public MultiDimensionSpline(NDTreeModel ndTreeModel) {
    this.ndTreeModel = ndTreeModel;
    // pointsMap de-duplicates corner points shared by neighbouring leaves.
    Map<List<Float>, MultiDimensionPoint> pointsMap = new HashMap<>();
    weights = new HashMap<>();

    NDTreeNode[] nodes = ndTreeModel.getNodes();
    for (int i = 0; i < nodes.length; i++) {
      NDTreeNode node = nodes[i];
      if (node.getAxisIndex() == NDTreeModel.LEAF) {
        List<MultiDimensionPoint> list = MultiDimensionPoint.getCombinationWithoutDuplication(
            node.getMin(), node.getMax(), pointsMap);
        if (list != null && !list.isEmpty()) {
          weights.put(i, list);
        } else {
          log.info("leaf node return no MultiDimensionPoint {}", node);
        }
      }
    }
    points = new ArrayList<>(pointsMap.values());
    if (canDoSmooth()) {
      // sort 1D case for smooth,
      // default MultiDimensionPoint Comparator compares weight
      // so we need a new Comparator for compare coordinates
      Collections.sort(points, MultiDimensionPoint.get1DCoordinateComparator());
    }
  }

  public MultiDimensionSpline(ModelRecord record) {
    this(new NDTreeModel(record.getNdtreeModel()), record.getWeightVector());
  }

  public MultiDimensionSpline(NDTreeModel ndTreeModel, List<Double> weights) {
    this(ndTreeModel);
    updateWeights(weights);
  }

  /** Debug string of the leaf -> corner-point mapping. */
  public String getWeightsString() {
    return weights.values().toString();
  }

  // Spline is multi scale, so it needs numBins
  // MultiDimensionSpline does not support multi scale.
  /** Sums scale * point weights over all given splines into THIS spline (mutates and returns this). */
  @Override
  public Function aggregate(Iterable<Function> functions, float scale, int numBins) {
    // functions size == 1/scale
    int length = points.size();
    float[] aggWeights = new float[length];
    for (Function fun: functions) {
      MultiDimensionSpline spline = (MultiDimensionSpline) fun;
      for (int i = 0; i < length; i++) {
        aggWeights[i] += scale * spline.points.get(i).getWeight();
      }
    }
    for (int i = 0; i < length; i++) {
      points.get(i).setWeight(aggWeights[i]);
    }
    return this;
  }

  /** Interpolates over the corner points of the leaf cell containing the coordinates. */
  @Override
  public float evaluate(float ... coordinates) {
    List<MultiDimensionPoint> list = getNearbyPoints(coordinates);
    double[] distance = new double[list.size()];
    double sum = 0;
    for (int i = 0; i < list.size(); i++) {
      MultiDimensionPoint point = list.get(i);
      distance[i] = point.getDistance(coordinates);
      sum += distance[i];
    }
    return score(list, distance, sum);
  }

  @Override
  public float evaluate(List<Double> coordinates) {
    List<MultiDimensionPoint> list = getNearbyPoints(coordinates);
    double[] distance = new double[list.size()];
    double sum = 0;
    for (int i = 0; i < list.size(); i++) {
      MultiDimensionPoint point = list.get(i);
      distance[i] = point.getDistance(coordinates);
      sum += distance[i];
    }
    return score(list, distance, sum);
  }

  // NOTE(review): each corner is weighted by its OWN distance fraction
  // (distance[i] / sum), i.e. farther corners receive larger shares. Classic
  // inverse-distance interpolation would use (sum - distance[i]). update()
  // below uses the same convention, so trained models are self-consistent —
  // confirm intent before "fixing", as changing it would break saved models.
  private static float score(List<MultiDimensionPoint> list, double[] distance, double sum) {
    if (sum == 0) {
      // only one point and input is at the point
      assert (list.size() == 1);
      return (float) list.get(0).getWeight();
    } else {
      float score = 0;
      for (int i = 0; i < list.size(); i++) {
        MultiDimensionPoint point = list.get(i);
        score += point.getWeight() * (distance[i] / sum);
      }
      return score;
    }
  }

  /** Distributes delta onto the corner points of the containing leaf cell. */
  @Override
  public void update(float delta, float ... values) {
    List<MultiDimensionPoint> list = getNearbyPoints(values);
    double[] distance = new double[list.size()];
    double sum = 0;
    for (int i = 0; i < list.size(); i++) {
      MultiDimensionPoint point = list.get(i);
      distance[i] = point.getDistance(values);
      sum += distance[i];
    }
    update(delta, list, distance, sum);
  }

  @Override
  public void update(float delta, List<Double> values){
    List<MultiDimensionPoint> list = getNearbyPoints(values);
    double[] distance = new double[list.size()];
    double sum = 0;
    for (int i = 0; i < list.size(); i++) {
      MultiDimensionPoint point = list.get(i);
      distance[i] = point.getDistance(values);
      sum += distance[i];
    }
    update(delta, list, distance, sum);
  }

  // Same distance-proportional weighting convention as score() above.
  private static void update(float delta, List<MultiDimensionPoint> list, double[] distance, double sum) {
    if (sum == 0) {
      // only one point and input is at the point
      assert (list.size() == 1);
      list.get(0).updateWeight(delta);
    } else {
      for (int i = 0; i < list.size(); i++) {
        MultiDimensionPoint point = list.get(i);
        point.updateWeight(delta * (distance[i] / sum));
      }
    }
  }

  @Override
  public ModelRecord toModelRecord(String featureFamily, String featureName) {
    ModelRecord record = new ModelRecord();
    record.setFunctionForm(FunctionForm.MultiDimensionSpline);
    record.setFeatureFamily(featureFamily);
    record.setFeatureName(featureName);
    record.setWeightVector(getWeightsFromList());
    record.setNdtreeModel(Arrays.asList(ndTreeModel.getNodes()));
    // Use first coordinates as x for now
    record.setMinVal(points.get(0).getCoordinates().get(0));
    record.setMaxVal(points.get(points.size()-1).getCoordinates().get(0));
    return record;
  }

  // Snapshot of the point weights in `points` order (matches updateWeights ordering).
  private List<Double> getWeightsFromList() {
    List<Double> weights = new ArrayList<>(points.size());
    weights.addAll(points.stream().map(
        MultiDimensionPoint::getWeight).collect(Collectors.toList()));
    return weights;
  }

  private void updateWeights(List<Double> weights) {
    assert (weights.size() == points.size());
    for (int i = 0; i < points.size(); i++) {
      MultiDimensionPoint p = points.get(i);
      p.setWeight(weights.get(i));
    }
  }

  private void updateWeights(float[] weights) {
    for (int i = 0; i < points.size(); i++) {
      MultiDimensionPoint p = points.get(i);
      p.setWeight(weights[i]);
    }
  }

  /** Widening copy of a boxed Float list to boxed Doubles. */
  public static List<Double> toDouble(List<Float> list) {
    List<Double> r = new ArrayList<>(list.size());
    for (Float f: list) {
      r.add(f.doubleValue());
    }
    return r;
  }

  @Override public void setPriors(float[] params) {
    assert (params.length == points.size());
    for (int i = 0; i < points.size(); i++) {
      MultiDimensionPoint p = points.get(i);
      p.setWeight(params[i]);
    }
  }

  /** Rescales all point weights so the L-infinity norm does not exceed cap (no-op for cap <= 0). */
  @Override
  public void LInfinityCap(float cap) {
    if (cap <= 0.0f) return;
    float currentNorm = LInfinityNorm();
    if (currentNorm > cap) {
      float scale = cap / currentNorm;
      for (int i = 0; i < points.size(); i++) {
        points.get(i).scaleWeight(scale);
      }
    }
  }

  // Largest absolute weight: max/min by natural order (weight), one of which
  // must carry the extreme magnitude.
  @Override
  public float LInfinityNorm() {
    return (float) Math.max(Collections.max(points).getWeight(),
        Math.abs(Collections.min(points).getWeight()));
  }

  private List<MultiDimensionPoint> getNearbyPoints(float ... coordinates) {
    int index = ndTreeModel.leaf(coordinates);
    assert (index != -1 && weights.containsKey(index));
    return weights.get(index);
  }

  private List<MultiDimensionPoint> getNearbyPoints(List<Double> coordinates) {
    int index = ndTreeModel.leaf(coordinates);
    assert (index != -1 && weights.containsKey(index));
    return weights.get(index);
  }

  /** Bin count is fixed by the NDTree structure; resampling is a no-op. */
  @Override
  public void resample(int newBins) {
  }

  @Override
  public double smooth(double tolerance, boolean toleranceIsPercentage) {
    if (!canDoSmooth()) return 0;
    // FunctionUtil.smooth may overwrite this local copy in place when the fit
    // is accepted; the copy is then written back to the points.
    float[] weights = getWeights();
    double err = FunctionUtil.smooth(tolerance, toleranceIsPercentage, weights);
    if (err < tolerance) {
      updateWeights(weights);
    }
    return err;
  }

  private float[] getWeights() {
    float[] weights = new float[points.size()];
    for (int i = 0; i < points.size(); i++) {
      MultiDimensionPoint p = points.get(i);
      weights[i] = (float) p.getWeight();
    }
    return weights;
  }

  // NOTE(review): unlike Spline.clone(), this does NOT copy the point weights —
  // the clone starts from a fresh (zero-weight) spline over the same tree.
  // Confirm whether that is intended before relying on clone().
  @Override
  public MultiDimensionSpline clone() throws CloneNotSupportedException {
    return new MultiDimensionSpline(this.ndTreeModel);
  }

  // Smoothing is only defined for the 1D case.
  private boolean canDoSmooth() {
    return ndTreeModel.getDimension() == 1;
  }

  /*
    This drop out is specific for MultiDimensionSpline
  */
  public static Map<String, List<Double>> featureDropout(
      FeatureVector featureVector,
      double dropout) {
    Map<String, List<Double>> denseFeatures = featureVector.getDenseFeatures();
    if (denseFeatures == null) return Collections.EMPTY_MAP; // NOTE(review): raw type; Collections.emptyMap() would be type-safe
    Map<String, List<Double>> out = new HashMap<>();
    for (Map.Entry<String, List<Double>> feature : denseFeatures.entrySet()) {
      // Each feature is kept independently with probability (1 - dropout).
      if (Math.random() < dropout) continue;
      out.put(feature.getKey(), feature.getValue());
    }
    return out;
  }
}
| 7,221 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/function/AbstractFunction.java | package com.airbnb.aerosolve.core.function;
import com.airbnb.aerosolve.core.FunctionForm;
import com.airbnb.aerosolve.core.ModelRecord;
import lombok.Getter;
import java.util.Arrays;
import java.util.List;
/**
* Base class for functions
*/
public abstract class AbstractFunction implements Function {
  // Control-point weights of the function.
  @Getter
  protected float[] weights;
  // Lower bound of the function's domain.
  @Getter
  protected float minVal;
  // Upper bound of the function's domain.
  @Getter
  protected float maxVal;

  @Override
  public String toString() {
    return String.format("minVal=%f, maxVal=%f, weights=%s",
        minVal, maxVal, Arrays.toString(weights));
  }

  /** List-valued evaluation is optional; subclasses that support it override. */
  @Override
  public float evaluate(List<Double> values) {
    throw new RuntimeException("method not implemented");
  }

  @Override
  public void update(float delta, List<Double> values){
    throw new RuntimeException("method not implemented");
  }

  /**
   * Reflectively instantiates the concrete Function subclass named by the
   * record's FunctionForm (e.g. Spline, Linear, Point) using its
   * ModelRecord constructor.
   *
   * @throws RuntimeException wrapping the underlying reflective failure
   */
  public static Function buildFunction(ModelRecord record) {
    FunctionForm funcForm = record.getFunctionForm();
    try {
      return (Function) Class.forName("com.airbnb.aerosolve.core.function." +
          funcForm.name()).getDeclaredConstructor(ModelRecord.class).newInstance(record);
    } catch (Exception e) {
      // Chain the cause instead of printStackTrace() + dropping it.
      throw new RuntimeException("unable to decode " + funcForm.name(), e);
    }
  }

  @Override
  public AbstractFunction clone() throws CloneNotSupportedException {
    return (AbstractFunction) super.clone();
  }
}
| 7,222 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/function/MultiDimensionPoint.java | package com.airbnb.aerosolve.core.function;
import com.airbnb.aerosolve.core.util.Util;
import lombok.Getter;
import lombok.Setter;
import java.io.Serializable;
import java.util.*;
/*
represent a point in multi dimension space for Function
*/
/**
 * A weighted point in multi-dimensional feature space.
 *
 * <p>Note: {@link #compareTo} orders by weight (used for min/max weight
 * lookups) while {@link #equals} compares coordinates, so the ordering is
 * deliberately inconsistent with equals — do not use this class in sorted
 * sets/maps.
 */
public class MultiDimensionPoint implements Comparable<MultiDimensionPoint>, Serializable {
  private static final long serialVersionUID = 2166347177557769302L;
  // Location of this point in feature space.
  @Getter // TODO change FloatVector to List<T> and use FloatVector here.
  private List<Float> coordinates;
  // Learned weight carried at this location.
  @Getter @Setter
  private double weight;

  public MultiDimensionPoint(List<Float> coordinates) {
    this.coordinates = coordinates;
  }

  public void updateWeight(double delta) {
    weight += delta;
  }

  public void scaleWeight(float scale) {
    weight *= scale;
  }

  /*
   Generate combination coordinates from min and max list,
   Create new points if the coordinate is not in points map
   if it is in point map, reuse it.
   return all MultiDimensionPoint from the combination
   TODO FIX IT points is Float and min/max is Double, should be same, but due to
   all other Function and models use float. so points is Float.
   and thrift is use Double so it is List<Double>
  */
  public static List<MultiDimensionPoint> getCombinationWithoutDuplication(
      List<Double> min, List<Double> max, Map<List<Float>, MultiDimensionPoint> points) {
    List<List<Float>> keys = getCombination(min, max);
    List<MultiDimensionPoint> result = new ArrayList<>();
    for (List<Float> key: keys) {
      // Reuse a shared corner point if a neighbouring cell already created it.
      result.add(points.computeIfAbsent(key, MultiDimensionPoint::new));
    }
    return result;
  }

  /**
   * Enumerates all 2^d corner coordinates of the axis-aligned box defined by
   * min and max (duplicates collapse when min[j] == max[j]).
   */
  public static List<List<Float>> getCombination(List<Double> min, List<Double> max) {
    Set<List<Float>> set = new HashSet<>();
    assert (min.size() == max.size());
    int coordinateSize = min.size();
    int keySize = 1 << coordinateSize;
    for (int i = 0; i < keySize; ++i) {
      // Bit j of i selects max (1) or min (0) for dimension j.
      int k = i;
      List<Float> r = new ArrayList<>();
      for (int j = 0; j < coordinateSize; ++j) {
        if ((k & 1) == 1) {
          r.add(max.get(j).floatValue());
        } else {
          r.add(min.get(j).floatValue());
        }
        k >>= 1;
      }
      set.add(r);
    }
    return new ArrayList<>(set);
  }

  /** Equality is by coordinates only; weight is ignored. */
  @Override
  public boolean equals(Object aThat){
    if (this == aThat) return true;
    if (!(aThat instanceof MultiDimensionPoint)) {
      return false;
    }
    MultiDimensionPoint point = (MultiDimensionPoint) aThat;
    return coordinates.equals(point.coordinates);
  }

  @Override
  public int hashCode(){
    return coordinates.hashCode();
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    for (Float d: coordinates) {
      sb.append(d);
      sb.append(" ");
    }
    sb.append(" w ");
    sb.append(weight);
    return sb.toString();
  }

  public float getDistance(float[] coordinates) {
    return Util.euclideanDistance(coordinates, this.coordinates);
  }

  public float getDistance(List<Double> coordinates) {
    return Util.euclideanDistance(coordinates, this.coordinates);
  }

  @Override // used in LInfinityNorm
  public int compareTo(MultiDimensionPoint o) {
    if (this == o) return 0;
    // Double.compare gives a proper total order (chained </> comparisons
    // silently treat NaN as equal to everything, breaking transitivity).
    return Double.compare(this.weight, o.weight);
  }

  /*
   only support 1D for now
  */
  public static Comparator<MultiDimensionPoint> get1DCoordinateComparator() {
    return (a, b) -> a.coordinates.get(0).compareTo(b.coordinates.get(0));
  }
}
| 7,223 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/function/Function.java | package com.airbnb.aerosolve.core.function;
import com.airbnb.aerosolve.core.ModelRecord;
import java.io.Serializable;
import java.util.List;
public interface Function extends Serializable, Cloneable {
  // TODO rename numBins to something else, since it's a Spline specific thing
  /** Combines many copies of this function (e.g. from distributed training) into one, scaling each by {@code scale}. */
  Function aggregate(Iterable<Function> functions, float scale, int numBins);

  /** Evaluates the function at the given coordinate(s). */
  float evaluate(float ... x);
  // TODO change all float to double
  float evaluate(List<Double> values);

  /** Applies an additive update of {@code delta} at the given coordinate(s). */
  void update(float delta, float ... values);
  void update(float delta, List<Double> values);

  /** Serializes this function into a ModelRecord for the given feature. */
  ModelRecord toModelRecord(String featureFamily, String featureName);

  /** Initializes weights from prior parameters (meaning is function-specific). */
  void setPriors(float[] params);

  /** Caps the L-infinity norm at {@code cap} by rescaling; no-op when cap <= 0. */
  void LInfinityCap(float cap);
  float LInfinityNorm();

  /** Re-buckets the function to {@code newBins} bins where supported; otherwise a no-op. */
  void resample(int newBins);
  // for function not support smooth, just return 0
  // for function support smooth, return errAndCoeff
  double smooth(double tolerance, boolean toleranceIsPercentage);
  Function clone() throws CloneNotSupportedException;
}
| 7,224 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/BucketFloatTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.TransformUtil;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Buckets float features and places them in a new float column.
 * For each input feature v, emits a feature named "key[bucket]=quantized(v)"
 * whose value is the residual v - quantized(v).
 */
public class BucketFloatTransform implements Transform {
  private String fieldName1;
  private double bucket;
  private String outputName;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    bucket = config.getDouble(key + ".bucket");
    outputName = config.getString(key + ".output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> input = floatFeatures.get(fieldName1);
    if (input == null || input.isEmpty()) {
      return;
    }
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    for (Entry<String, Double> entry : input.entrySet()) {
      Double quantized = TransformUtil.quantize(entry.getValue(), bucket);
      // Feature name carries the bucketed value; feature value is the residual.
      output.put(entry.getKey() + '[' + bucket + "]=" + quantized,
                 entry.getValue() - quantized);
    }
  }
}
| 7,225 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/MultiscaleGridQuantizeTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.TransformUtil;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.*;
/**
 * Created by hector_yee on 8/25/14.
 * Quantizes the (value1, value2) pair from the float family "field1" at every
 * bucket size in "buckets" and writes one grid-cell string feature per bucket
 * into the string family "output".
 */
public class MultiscaleGridQuantizeTransform implements Transform {
  private String fieldName1;
  private List<Double> buckets;
  private String outputName;
  private String value1;
  private String value2;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    buckets = config.getDoubleList(key + ".buckets");
    outputName = config.getString(key + ".output");
    value1 = config.getString(key + ".value1");
    value2 = config.getString(key + ".value2");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> family = floatFeatures.get(fieldName1);
    if (family == null) {
      return;
    }
    Double first = family.get(value1);
    Double second = family.get(value2);
    // Both coordinates must be present to place the point on the grid.
    if (first == null || second == null) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Set<String> output =
        Util.getOrCreateStringFeature(outputName, featureVector.getStringFeatures());
    transformFeature(first, second, buckets, output);
  }

  // Emits one grid-cell feature per bucket size.
  public static void transformFeature(double v1, double v2, List<Double> buckets, Set<String> output) {
    for (Double bucket : buckets) {
      transformFeature(v1, v2, bucket, output);
    }
  }

  // Emits "[bucket]=(q1,q2)" where q1/q2 are v1/v2 quantized at the bucket size.
  public static void transformFeature(double v1, double v2, double bucket, Set<String> output) {
    double q1 = TransformUtil.quantize(v1, bucket);
    double q2 = TransformUtil.quantize(v2, bucket);
    output.add("[" + bucket + "]=(" + q1 + ',' + q2 + ')');
  }
}
| 7,226 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/CoalesceFloatTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.transforms.types.FloatTransform;
import java.util.List;
import java.util.Map;
import com.typesafe.config.Config;
/**
 * Fills in a default value for the float features named in "keys" that are
 * missing from the input; present features are left untouched.
 */
public class CoalesceFloatTransform extends FloatTransform {
  private List<String> keys;
  private double value;

  @Override
  public void init(Config config, String key) {
    keys = config.getStringList(key + ".keys");
    value = config.getDouble(key + ".value");
  }

  @Override
  public void output(Map<String, Double> input, Map<String, Double> output) {
    for (String featureName : keys) {
      // Only write the default when the feature is absent.
      if (input.get(featureName) == null) {
        output.put(featureName, value);
      }
    }
  }
}
| 7,227 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/DivideTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
 * For each key k in "keys":
 *   output[k + "-d-" + key2] = field1[k] / (field2[key2] + constant)
 * No-op when either input family, field2[key2], or the whole float feature
 * map is missing, or when the denominator is zero.
 */
public class DivideTransform implements Transform {
  private String fieldName1;   // numerator feature family
  private String fieldName2;   // denominator feature family
  private List<String> keys;   // numerator feature names
  private String key2;         // denominator feature name
  private String outputName;   // output feature family
  private Double constant;     // added to the denominator before dividing

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    fieldName2 = config.getString(key + ".field2");
    keys = config.getStringList(key + ".keys");
    key2 = config.getString(key + ".key2");
    constant = config.getDouble((key + ".constant"));
    outputName = config.getString(key + ".output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    Map<String, Double> feature2 = floatFeatures.get(fieldName2);
    if (feature2 == null) {
      return;
    }
    Double div = feature2.get(key2);
    if (div == null) {
      return;
    }
    double denominator = constant + div;
    // Guard against a zero denominator: dividing by it would emit
    // Infinity/NaN feature values. This mirrors MathFloatTransform, which
    // also refuses to emit non-finite results.
    if (denominator == 0.0) {
      return;
    }
    double scale = 1.0 / denominator;
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    for (String key : keys) {
      // Single map lookup instead of containsKey + get.
      Double val = feature1.get(key);
      if (val != null) {
        output.put(key + "-d-" + key2, val * scale);
      }
    }
  }
}
| 7,228 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/CustomMultiscaleQuantizeTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.TransformUtil;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.*;
import java.util.Map.Entry;
/**
 * Quantize the floatFeature named in "field1" with buckets in "bucket" before placing
 * it in the stringFeature named "output".
 * "field1" specifies feature family name.
 * If "select_features" is specified, we only transform features in the select_features list.
 * If "exclude_features" is specified, we transform features that are not in the exclude_features list.
 * If both "select_features" and "exclude_features" are specified, we transform features that are in
 * "select_features" list and not in "exclude_features" list.
 */
public class CustomMultiscaleQuantizeTransform implements Transform {
  private String fieldName1;
  private List<Double> buckets;
  private String outputName;
  private List<String> excludeFeatures;
  private List<String> selectFeatures;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    buckets = config.getDoubleList(key + ".buckets");
    outputName = config.getString(key + ".output");
    if (config.hasPath(key + ".exclude_features")) {
      excludeFeatures = config.getStringList(key + ".exclude_features");
    }
    if (config.hasPath(key + ".select_features")) {
      selectFeatures = config.getStringList(key + ".select_features");
    }
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> family = floatFeatures.get(fieldName1);
    if (family == null) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Set<String> output =
        Util.getOrCreateStringFeature(outputName, featureVector.getStringFeatures());
    for (Entry<String, Double> feature : family.entrySet()) {
      if (shouldTransform(feature.getKey())) {
        transformAndAddFeature(buckets, feature.getKey(), feature.getValue(), output);
      }
    }
  }

  // A feature is transformed unless excluded, and (when a select list exists)
  // only if selected.
  private boolean shouldTransform(String featureName) {
    if (excludeFeatures != null && excludeFeatures.contains(featureName)) {
      return false;
    }
    return selectFeatures == null || selectFeatures.contains(featureName);
  }

  // Emits "name=0" for zero values, otherwise one quantized feature per bucket.
  public static void transformAndAddFeature(List<Double> buckets,
                                            String featureName,
                                            Double featureValue,
                                            Set<String> output) {
    if (featureValue == 0.0) {
      output.add(featureName + "=0");
      return;
    }
    for (double bucket : buckets) {
      double quantized = TransformUtil.quantize(featureValue, bucket);
      output.add(featureName + '[' + bucket + "]=" + quantized);
    }
  }
}
| 7,229 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/CutFloatTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.List;
import java.util.Map;
/*
 Removes features larger than upperBound or smaller than lowerBound.
 When "output" names a different family than "field1", in-range values are
 copied there instead and the input family is left untouched.
*/
public class CutFloatTransform implements Transform {
private String fieldName1;
private List<String> keys;
private double lowerBound;
private double upperBound;
private String outputName; // output family name, if not specified, output to fieldName1
@Override
public void configure(Config config, String key) {
fieldName1 = config.getString(key + ".field1");
keys = config.getStringList(key + ".keys");
// Missing bounds default to the widest possible range.
if (config.hasPath(key + ".lower_bound")) {
lowerBound = config.getDouble(key + ".lower_bound");
} else {
lowerBound = -Double.MAX_VALUE;
}
if (config.hasPath(key + ".upper_bound")) {
upperBound = config.getDouble(key + ".upper_bound");
} else {
upperBound = Double.MAX_VALUE;
}
if (config.hasPath(key + ".output")) {
outputName = config.getString(key + ".output");
} else {
outputName = fieldName1;
}
}
@Override
public void doTransform(FeatureVector featureVector) {
Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
if (floatFeatures == null) {
return;
}
Map<String, Double> feature1 = floatFeatures.get(fieldName1);
if (feature1 == null) {
return;
}
// When outputName == fieldName1, feature2 aliases feature1 and the
// transform works in place; otherwise it copies into a separate family.
Map<String, Double> feature2 = Util.getOrCreateFloatFeature(outputName, floatFeatures);
for (String key : keys) {
Double v = feature1.get(key);
if (v != null) {
if (v > upperBound || v < lowerBound) {
// In-place mode: drop the out-of-range feature. In copy mode it is
// simply not copied.
if (feature2 == feature1) {
feature2.remove(key);
}
} else if (feature2 != feature1) {
// Copy mode: keep the in-range value in the output family.
feature2.put(key, v);
}
}
}
}
}
| 7,230 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/DeleteStringFeatureFamilyTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.typesafe.config.Config;
/**
 * Deletes entire string feature families.
 * "fields" specifies the list of string feature family names to be deleted.
 * No-op when the feature vector has no string features or no fields were
 * configured.
 */
public class DeleteStringFeatureFamilyTransform implements Transform {
  private List<String> fieldNames;

  @Override
  public void configure(Config config, String key) {
    fieldNames = config.getStringList(key + ".fields");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    if (stringFeatures == null || fieldNames == null) {
      return;
    }
    for (String fieldName : fieldNames) {
      // Map.remove is already a no-op for absent keys, so no get() probe is
      // needed first (the original did a redundant double lookup).
      stringFeatures.remove(fieldName);
    }
  }
}
| 7,231 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/SelfCrossTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.HashSet;
import java.util.Set;
import java.util.Map;
/**
 * Takes the self cross product of the string family named in "field1" and
 * places it in the string family named in "output"; each unordered pair
 * {s1, s2} with s1 lexically before s2 is emitted once as "s1^s2".
 */
public class SelfCrossTransform implements Transform {
  private String fieldName1;
  private String outputName;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    outputName = config.getString(key + ".output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    if (stringFeatures == null) {
      return;
    }
    Set<String> input = stringFeatures.get(fieldName1);
    if (input == null) {
      return;
    }
    selfCross(input, Util.getOrCreateStringFeature(outputName, stringFeatures));
  }

  public static void selfCross(Set<String> set1, Set<String> output) {
    for (String left : set1) {
      for (String right : set1) {
        // Emit each unordered pair exactly once by requiring left < right.
        if (left.compareTo(right) < 0) {
          output.add(left + '^' + right);
        }
      }
    }
  }
}
| 7,232 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/Transform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.typesafe.config.Config;
import java.io.Serializable;
import java.util.stream.Stream;
/**
 * Created by hector_yee on 8/25/14.
 * Base class for feature transforms.
 */
public interface Transform extends Serializable {
/**
 * Configure the transform from the supplied config and key. <p> This is where initialization
 * should take place. Ideally we want this to be a constructor instead or use a builder pattern.
 */
void configure(Config config, String key);
/**
 * Apply this transform to a single feature vector, mutating it in place.
 */
void doTransform(FeatureVector featureVector);
/**
 * Applies this transform to a series of feature vectors.
 *
 * @implNote this function can be overridden if the transform can be applied much more efficiently
 * in (small) batches. If such an implementation exists, one would typically override the single
 * feature vector implementation with the following instead:
 * <pre> <code>
 * @Override
 * public void doTransform(FeatureVector featureVector) {
 *   doTransform(Stream.of(featureVector));
 * }
 * </code> </pre>
 */
default void doTransform(Iterable<FeatureVector> featureVectors) {
featureVectors.forEach(this::doTransform);
}
}
| 7,233 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/FloatFamilyCrossToTwoDDenseTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
 * cross one float family with the other and output a 2D dense feature.
 * if fieldsName2 missing, this is a self cross
 * self cross uses key's alphabetical order to determine order
 * so you can add 1_ 2_ in front of features to manipulate order.
 */
public class FloatFamilyCrossToTwoDDenseTransform implements Transform {
private String fieldName1;
private String fieldName2;
@Override
public void configure(Config config, String key) {
fieldName1 = config.getString(key + ".field1");
// field2 is optional; when absent, doTransform performs a self cross.
String path = key + ".field2";
if (config.hasPath(path)) {
fieldName2 = config.getString(path);
}
}
@Override
public void doTransform(FeatureVector featureVector) {
Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
if (floatFeatures == null) {
return;
}
Map<String, Double> map1 = floatFeatures.get(fieldName1);
if (map1 == null || map1.isEmpty()) return;
if (fieldName2 != null) {
Map<String, Double> map2 = floatFeatures.get(fieldName2);
if (map2 == null || map2.isEmpty()) return;
cross(map1, map2, featureVector);
} else {
selfCross(map1, featureVector);
}
}
// Crosses every unordered pair within one family; the dense feature name and
// value order follow the keys' alphabetical order so results are stable
// regardless of map iteration order.
private void selfCross(Map<String, Double> map1, FeatureVector featureVector) {
if (map1.size() <= 1) return;
List<Map.Entry<String, Double>> list = new ArrayList<>(map1.size());
list.addAll(map1.entrySet());
for (int i = 0; i < list.size(); ++i) {
for (int j = i+1; j < list.size(); ++j) {
Map.Entry<String, Double> a = list.get(i);
Map.Entry<String, Double> b = list.get(j);
String key1 = a.getKey();
String key2 = b.getKey();
// use key's alphabetical order to determine order
if (key1.compareTo(key2) < 0) {
Util.setDenseFeature(featureVector,
a.getKey() + "^" + b.getKey(),
Arrays.asList(a.getValue(), b.getValue()));
} else {
Util.setDenseFeature(featureVector,
b.getKey() + "^" + a.getKey(),
Arrays.asList(b.getValue(), a.getValue()));
}
}
}
}
// Full cross product of two distinct families; map1's key/value always comes
// first in the dense feature name and value list.
static void cross(
Map<String, Double> map1,
Map<String, Double> map2,
FeatureVector featureVector) {
for (Map.Entry<String, Double> a : map1.entrySet()) {
for (Map.Entry<String, Double> b : map2.entrySet()) {
Util.setDenseFeature(featureVector,
a.getKey() + "^" + b.getKey(),
Arrays.asList(a.getValue(), b.getValue()));
}
}
}
}
| 7,234 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/DeleteStringFeatureColumnTransform.java | package com.airbnb.aerosolve.core.transforms;
// TODO: remove this once all configs have migrated over to the new transform names
// Backward-compatibility alias: behaves exactly like DeleteStringFeatureFamilyTransform.
public class DeleteStringFeatureColumnTransform extends DeleteStringFeatureFamilyTransform {}
| 7,235 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/ReplaceAllStringsTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.transforms.types.StringTransform;
import java.util.List;
import java.util.Map;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigObject;
/**
 * Replaces all substrings that match a given regex with a replacement string.
 * "field1" specifies the key of the feature.
 * "replacements" specifies a list of pairs (or maps) of regexes and their
 * replacements; replacements are applied in the order listed.
 */
public class ReplaceAllStringsTransform extends StringTransform {
  private List<? extends ConfigObject> replacements;

  @Override
  public void init(Config config, String key) {
    replacements = config.getObjectList(key + ".replacements");
  }

  @Override
  public String processString(String rawString) {
    if (rawString == null) {
      return null;
    }
    String result = rawString;
    for (ConfigObject replacementObject : replacements) {
      // Each config object maps regex -> replacement; apply every entry.
      for (Map.Entry<String, Object> entry : replacementObject.unwrapped().entrySet()) {
        result = result.replaceAll(entry.getKey(), (String) entry.getValue());
      }
    }
    return result;
  }
}
| 7,236 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/NormalizeFloatTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.typesafe.config.Config;
import java.util.Map;
// L2-normalizes the float feature family named in "field1", in place.
// Leaves the family untouched when its L2 norm is zero.
public class NormalizeFloatTransform implements Transform {
  private String fieldName1;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> family = floatFeatures.get(fieldName1);
    if (family == null) {
      return;
    }
    double sumSquares = 0.0;
    for (Double value : family.values()) {
      sumSquares += value * value;
    }
    if (sumSquares > 0.0) {
      double inverseNorm = 1.0 / Math.sqrt(sumSquares);
      for (Map.Entry<String, Double> entry : family.entrySet()) {
        entry.setValue(entry.getValue() * inverseNorm);
      }
    }
  }
}
| 7,237 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/DeleteFloatFeatureTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.typesafe.config.Config;
import java.util.List;
import java.util.Map;
/**
 * Deletes the float features named in "keys" from the family "field1".
 */
public class DeleteFloatFeatureTransform implements Transform {
  private String fieldName1;
  private List<String> keys;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    keys = config.getStringList(key + ".keys");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> family = floatFeatures.get(fieldName1);
    if (family == null) {
      return;
    }
    // Map.remove is a no-op for absent keys.
    keys.forEach(family::remove);
  }
}
| 7,238 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/SubtractTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
/**
 * For each key k in "keys":
 *   output[k + "-" + key2] = field1[k] - field2[key2]
 * No-op when either input family or field2[key2] is missing.
 */
public class SubtractTransform implements Transform {
  private String fieldName1;   // minuend feature family
  private String fieldName2;   // subtrahend feature family
  private List<String> keys;   // minuend feature names
  private String key2;         // subtrahend feature name
  private String outputName;   // output feature family

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    fieldName2 = config.getString(key + ".field2");
    keys = config.getStringList(key + ".keys");
    key2 = config.getString(key + ".key2");
    outputName = config.getString(key + ".output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    Map<String, Double> feature2 = floatFeatures.get(fieldName2);
    if (feature2 == null) {
      return;
    }
    Double sub = feature2.get(key2);
    if (sub == null) {
      return;
    }
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    for (String key : keys) {
      // Single map lookup instead of the former containsKey + get + null check.
      Double val = feature1.get(key);
      if (val != null) {
        output.put(key + '-' + key2, val - sub);
      }
    }
  }
}
| 7,239 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/MathFloatTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import java.util.List;
import java.util.Map;
import java.util.function.DoubleFunction;
/**
 * Applies a named Math function to the float features named by "keys" in
 * family "field1", writing results into family "output" under the same keys.
 * fieldName1: feature family name
 * keys: feature names
 * outputName: output feature family name (feature names stay the same)
 * function: one of "sin", "cos", "log10", "log", "abs"
 */
public class MathFloatTransform implements Transform {
  private String fieldName1;   // feature family name
  private List<String> keys;   // feature names; null when ".keys" is absent from config
  private String outputName;   // output feature family name
  private String functionName; // name of the Math function to apply
  private Optional<DoubleFunction<Double>> func;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    if (config.hasPath(key + ".keys")) {
      keys = config.getStringList(key + ".keys");
    }
    outputName = config.getString(key + ".output");
    functionName = config.getString(key + ".function");
    func = getFunction();
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    // keys stays null when the ".keys" config path is missing; bail out
    // instead of throwing a NullPointerException (previous behavior).
    if (keys == null || keys.isEmpty()) {
      return;
    }
    // Unknown function name: nothing to do.
    if (!func.isPresent()) {
      return;
    }
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    for (String key : keys) {
      Double v = feature1.get(key);
      if (v != null) {
        Double result = func.get().apply(v);
        // Skip NaN/Infinity results (e.g. log of a non-positive value); the
        // key is simply not written to the output family.
        if (!result.isNaN() && !result.isInfinite()) {
          output.put(key, result);
        }
      }
    }
  }

  // Maps the configured function name to its implementation; absent for
  // unsupported names, which makes doTransform a no-op.
  private Optional<DoubleFunction<Double>> getFunction() {
    switch (functionName) {
      case "sin":
        return Optional.of(Math::sin);
      case "cos":
        return Optional.of(Math::cos);
      case "log10":
        return Optional.of(Math::log10);
      case "log":
        return Optional.of(Math::log);
      case "abs":
        return Optional.of(Math::abs);
    }
    return Optional.<DoubleFunction<Double>>absent();
  }
}
| 7,240 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/Transformer.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.Example;
import com.airbnb.aerosolve.core.FeatureVector;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;
import com.typesafe.config.Config;
/**
 * Holds the three configured transforms of a model (context, item, combined)
 * and applies them to Examples: transforms the context and items separately,
 * copies the context's features into every item, then applies the combined
 * transform to each merged item.
 */
public class Transformer implements Serializable {
private static final long serialVersionUID = 1569952057032186608L;
// The transforms to be applied to the context, item and combined
// (context | item) respectively.
private final Transform contextTransform;
private final Transform itemTransform;
private final Transform combinedTransform;
public Transformer(Config config, String key) {
// Configures the model transforms.
// context_transform : name of ListTransform to apply to context
// item_transform : name of ListTransform to apply to each item
// combined_transform : name of ListTransform to apply to each (item context) pair
String contextTransformName = config.getString(key + ".context_transform");
contextTransform = TransformFactory.createTransform(config, contextTransformName);
String itemTransformName = config.getString(key + ".item_transform");
itemTransform = TransformFactory.createTransform(config, itemTransformName);
String combinedTransformName = config.getString(key + ".combined_transform");
combinedTransform = TransformFactory.createTransform(config, combinedTransformName);
}
// Helper functions for transforming context, items or combined feature vectors.
// Each is a no-op when either the transform or the input is null.
public void transformContext(FeatureVector context) {
if (contextTransform != null && context != null) {
contextTransform.doTransform(context);
}
}
public void transformItem(FeatureVector item) {
if (itemTransform != null && item != null) {
itemTransform.doTransform(item);
}
}
public void transformItems(List<FeatureVector> items) {
if (items != null) {
items.forEach(this::transformItem);
}
}
/**
 * Apply combined transform to a (already context-combined) feature vector
 */
public void transformCombined(FeatureVector combined) {
if (combinedTransform != null && combined != null) {
combinedTransform.doTransform(combined);
}
}
/**
 * Apply combined transform to a stream of (already context-combined) feature vector
 */
public void transformCombined(Iterable<FeatureVector> combined) {
if (combinedTransform != null && combined != null) {
combinedTransform.doTransform(combined);
}
}
/**
 * In place apply all the transforms to the context and items,
 * add context to examples,
 * and apply the combined transform to now combined examples.
 */
public void combineContextAndItems(Example examples) {
transformContext(examples.context);
transformItems(examples.example);
addContextToItemsAndTransform(examples);
}
/**
 * Adds the context to items and applies the combined transform
 */
public void addContextToItemsAndTransform(Example examples) {
addContextToItems(examples);
transformCombined(examples.example);
}
/**
 * Adds the context's features to examples' features.
 * Context features are deep-copied into each item, so later mutation of one
 * item's features does not affect the others.
 */
public void addContextToItems(Example examples) {
Map<String, Set<String>> contextStringFeatures = null;
Map<String, Map<String, Double>> contextFloatFeatures = null;
Map<String, List<Double>> contextDenseFeatures = null;
if (examples.context != null) {
if (examples.context.stringFeatures != null) {
contextStringFeatures = examples.context.getStringFeatures();
}
if (examples.context.floatFeatures != null) {
contextFloatFeatures = examples.context.getFloatFeatures();
}
if (examples.context.denseFeatures != null) {
contextDenseFeatures = examples.context.getDenseFeatures();
}
}
for (FeatureVector item : examples.example) {
addContextToItem(contextStringFeatures, contextFloatFeatures, contextDenseFeatures, item);
}
}
/**
 * Adds context features to an individual feature vector.
 * Note: a context family with the same name as an existing item family
 * replaces the item's family wholesale.
 */
private void addContextToItem(Map<String, Set<String>> contextStringFeatures,
Map<String, Map<String, Double>> contextFloatFeatures,
Map<String, List<Double>> contextDenseFeatures,
FeatureVector item) {
if (contextStringFeatures != null) {
if (item.getStringFeatures() == null) {
item.setStringFeatures(new HashMap<>());
}
Map<String, Set<String>> itemStringFeatures = item.getStringFeatures();
for (Map.Entry<String, Set<String>> stringFeature : contextStringFeatures.entrySet()) {
// Copy so items never share mutable sets with the context.
Set<String> stringFeatureValueCopy = new HashSet<>(stringFeature.getValue());
itemStringFeatures.put(stringFeature.getKey(), stringFeatureValueCopy);
}
}
if (contextFloatFeatures != null) {
if (item.getFloatFeatures() == null) {
item.setFloatFeatures(new HashMap<>());
}
Map<String, Map<String, Double>> itemFloatFeatures = item.getFloatFeatures();
for (Map.Entry<String, Map<String, Double>> floatFeature : contextFloatFeatures.entrySet()) {
Map<String, Double> floatFeatureValueCopy = new HashMap<>(floatFeature.getValue());
itemFloatFeatures.put(floatFeature.getKey(), floatFeatureValueCopy);
}
}
if (contextDenseFeatures != null) {
if (item.getDenseFeatures() == null) {
item.setDenseFeatures(new HashMap<>());
}
Map<String, List<Double>> itemDenseFeatures = item.getDenseFeatures();
for (Map.Entry<String, List<Double>> denseFeature : contextDenseFeatures.entrySet()) {
List<Double> denseFeatureValueCopy = new ArrayList<>(denseFeature.getValue());
itemDenseFeatures.put(denseFeature.getKey(), denseFeatureValueCopy);
}
}
}
}
| 7,241 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/MultiscaleQuantizeTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.TransformUtil;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.*;
import java.util.Map.Entry;
/**
* Created by hector_yee on 8/25/14.
* Quantizes the floatFeature named in "field1" with buckets in "bucket" before placing
* it in the stringFeature named "output"
*/
public class MultiscaleQuantizeTransform implements Transform {
  // Float feature family whose values are quantized.
  private String inputFamily;
  // One output token is emitted per bucket size in this list.
  private List<Double> buckets;
  // String feature family receiving the quantized tokens.
  private String outputFamily;

  @Override
  public void configure(Config config, String key) {
    inputFamily = config.getString(key + ".field1");
    buckets = config.getDoubleList(key + ".buckets");
    outputFamily = config.getString(key + ".output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> input = floatFeatures.get(inputFamily);
    if (input == null) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Set<String> output =
        Util.getOrCreateStringFeature(outputFamily, featureVector.getStringFeatures());
    for (Entry<String, Double> entry : input.entrySet()) {
      transformAndAddFeature(buckets, entry.getKey(), entry.getValue(), output);
    }
  }

  /**
   * Emits one quantized token per bucket size for the given feature value,
   * or the single token "name=0" when the value is exactly zero.
   */
  public static void transformAndAddFeature(List<Double> buckets,
                                            String featureName,
                                            Double featureValue,
                                            Set<String> output) {
    if (featureValue == 0.0) {
      output.add(featureName + "=0");
      return;
    }
    for (double bucket : buckets) {
      output.add(featureName + '[' + bucket + "]=" + TransformUtil.quantize(featureValue, bucket));
    }
  }
}
| 7,242 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/WtaTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.*;
/**
* A transform that applies the winner takes all hash to
* A set of dense feature families and emits tokens to a string feature family.
* See "The power of comparative reasoning"
* http://research.google.com/pubs/pub37298.html
* For ease of use we use the recommended window size of 4 features
* to generate 2-bit tokens
* and pack each word with num_tokens_per_word of these.
*/
public class WtaTransform implements Transform {
  // Dense feature families used to construct the hash.
  private List<String> fieldNames;
  // String feature family that receives the emitted words.
  private String outputName;
  // Seed for the deterministic permutation generator (same seed => same hash).
  private int seed;
  private int numWordsPerFeature;
  private int numTokensPerWord;
  // Recommended window size from the WTA paper; yields 2-bit tokens.
  private final byte windowSize = 4;
  @Override
  public void configure(Config config, String key) {
    // What fields to use to construct the hash.
    fieldNames = config.getStringList(key + ".field_names");
    // Name of field to output to.
    outputName = config.getString(key + ".output");
    // The seed of the random number generator.
    seed = config.getInt(key + ".seed");
    // The number of words per feature.
    numWordsPerFeature = config.getInt(key + ".num_words_per_feature");
    // The number of tokens per word.
    numTokensPerWord = config.getInt(key + ".num_tokens_per_word");
    // Each token occupies 2 bits, so at most 16 tokens fit in one int word.
    assert(numTokensPerWord <= 16);
  }
  // Generates a permutation of the array and appends it
  // to a given deque.
  private void generatePermutation(int size,
                                   Random rnd,
                                   Deque<Integer> dq) {
    dq.clear();
    int[] permutation = new int[size];
    for (int i = 0; i < size; i++) {
      permutation[i] = i;
    }
    // Random swap shuffle driven by the seeded RNG.
    for (int i = 0; i < size; i++) {
      int other = rnd.nextInt(size);
      int tmp = permutation[i];
      permutation[i] = permutation[other];
      permutation[other] = tmp;
    }
    for (int i = 0; i < size; i++) {
      dq.add(permutation[i]);
    }
  }
  // Consumes windowSize indices from the deque and returns the 0..3 index of
  // the largest feature value among them.
  // NOTE(review): when fewer than windowSize indices remain, a fresh
  // permutation replaces (discards) the remainder. If the dense feature has
  // fewer than windowSize elements, pollFirst() can return null and NPE —
  // this assumes feature.size() >= windowSize; confirm with callers.
  private int getToken(Deque<Integer> dq,
                       List<Double> feature,
                       Random rnd) {
    if (dq.size() < windowSize) {
      generatePermutation(feature.size(), rnd, dq);
    }
    byte largest = 0;
    double largestValue = feature.get(dq.pollFirst());
    for (byte i = 1; i < windowSize; i++) {
      double value = feature.get(dq.pollFirst());
      if (value > largestValue) {
        largestValue = value;
        largest = i;
      }
    }
    return largest;
  }
  // Packs numTokensPerWord 2-bit tokens into a single int "word".
  private int getWord(Deque<Integer> dq,
                      List<Double> feature,
                      Random rnd) {
    int result = 0;
    for (int i = 0; i < numTokensPerWord; i++) {
      result |= getToken(dq, feature, rnd) << 2 * i;
    }
    return result;
  }
  // Returns the "words" for a feature.
  // Each word is emitted as "featureName" + wordIndex + ':' + packed value.
  private void getWordsForFeature(Set<String> output,
                                  String featureName,
                                  Map<String, List<Double>> denseFeatures) {
    List<Double> feature = denseFeatures.get(featureName);
    if (feature == null) {
      return;
    }
    assert (feature instanceof ArrayList);
    // RNG is re-seeded per feature so each feature's hash is independent of
    // iteration order and reproducible across runs.
    Random rnd = new Random(seed);
    Deque<Integer> dq = new ArrayDeque<>();
    for (int i = 0; i < numWordsPerFeature; i++) {
      String word = featureName + i + ':' + getWord(dq, feature, rnd);
      output.add(word);
    }
  }
  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, List<Double>> denseFeatures = featureVector.getDenseFeatures();
    if (denseFeatures == null) {
      return;
    }
    Set<String> output = new HashSet<>();
    for (String featureName : fieldNames) {
      getWordsForFeature(output, featureName, denseFeatures);
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    // Note: replaces any existing set in the output family.
    stringFeatures.put(outputName, output);
  }
}
| 7,243 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/CrossTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import lombok.extern.slf4j.Slf4j;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Created by hector_yee on 8/25/14.
* Takes the cross product of stringFeatures named in field1 and field2
* and places it in a stringFeature with family name specified in output.
*/
@Slf4j
public class CrossTransform implements Transform {
  private String fieldName1;
  private String fieldName2;
  private String outputName;
  // Optional whitelists restricting which values of each family are crossed.
  private Set<String> keys1;
  private Set<String> keys2;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    fieldName2 = config.getString(key + ".field2");
    outputName = config.getString(key + ".output");
    String key1Name = key + ".keys1";
    String key2Name = key + ".keys2";
    if (config.hasPath(key1Name)) {
      keys1 = new HashSet<>(config.getStringList(key1Name));
    }
    if (config.hasPath(key2Name)) {
      keys2 = new HashSet<>(config.getStringList(key2Name));
    }
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    if (stringFeatures == null) return;
    Set<String> set1 = stringFeatures.get(fieldName1);
    if (set1 == null || set1.isEmpty()) return;
    Set<String> set2 = stringFeatures.get(fieldName2);
    if (set2 == null || set2.isEmpty()) return;
    // Resolve the whitelists before touching the output family so that we do
    // not leave an empty output set behind when there is nothing to cross
    // (previously the output set was created before these checks).
    Set<String> localKeys1 = (keys1 == null) ? set1 : Util.getIntersection(keys1, set1);
    if (localKeys1.isEmpty()) return;
    Set<String> localKeys2 = (keys2 == null) ? set2 : Util.getIntersection(keys2, set2);
    if (localKeys2.isEmpty()) return;
    Set<String> output = stringFeatures.get(outputName);
    if (output == null) {
      output = new HashSet<>();
      stringFeatures.put(outputName, output);
    }
    cross(localKeys1, localKeys2, output);
  }

  /**
   * Adds the full cross product of set1 x set2 to output, one token per
   * pair, formatted as "s1^s2".
   */
  public static void cross(Set<String> set1, Set<String> set2, Set<String> output) {
    for (String s1 : set1) {
      String prefix = s1 + '^';
      for (String s2 : set2) {
        output.add(prefix + s2);
      }
    }
  }
}
| 7,244 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/KdtreeContinuousTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.airbnb.aerosolve.core.models.KDTreeModel;
import com.airbnb.aerosolve.core.KDTreeNode;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* Inputs = fieldName1 (value1, value2)
* Outputs = list of kdtree nodes and the distance from the split
* This is the continuous version of the kd-tree transform and encodes
* the distance from each splitting plane to the point being queried.
* One can think of this as a tree kernel transform of a point.
*/
public class KdtreeContinuousTransform implements Transform {
  private String fieldName1;
  // Names of the two float features (the x and y coordinates) within fieldName1.
  private String value1;
  private String value2;
  private String outputName;
  // Cap on the number of kd-tree nodes emitted per query point.
  private Integer maxCount;
  // Absent when the base64-encoded model payload fails to decode.
  private Optional<KDTreeModel> modelOptional;
  private static final Logger log = LoggerFactory.getLogger(KdtreeContinuousTransform.class);
  /**
   * Reads field1/value1/value2 (the 2-D point), the output family, max_count
   * and the gzipped base64 kd-tree model from config.
   */
  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    value1 = config.getString(key + ".value1");
    value2 = config.getString(key + ".value2");
    outputName = config.getString(key + ".output");
    maxCount = config.getInt(key + ".max_count");
    String modelEncoded = config.getString(key + ".model_base64");
    modelOptional = KDTreeModel.readFromGzippedBase64String(modelEncoded);
    if (!modelOptional.isPresent()) {
      log.error("Could not load KDTree from encoded field");
    }
  }
  /**
   * Looks up the point (value1, value2) in float family field1, queries the
   * kd-tree, and writes one float feature per emitted node: the node id
   * mapped to the signed distance from that node's splitting plane.
   */
  @Override
  public void doTransform(FeatureVector featureVector) {
    if (!modelOptional.isPresent()) {
      return;
    }
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    Double v1 = feature1.get(value1);
    Double v2 = feature1.get(value2);
    if (v1 == null || v2 == null) {
      return;
    }
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    ArrayList<Integer> result = modelOptional.get().query(v1, v2);
    // Emit at most maxCount nodes, taken from the tail of the query result
    // (presumably the deepest nodes on the query path — confirm against
    // KDTreeModel.query's ordering).
    int count = Math.min(result.size(), maxCount);
    KDTreeNode[] nodes = modelOptional.get().getNodes();
    for (int i = 0; i < count; i++) {
      Integer res = result.get(result.size() - 1 - i);
      double split = nodes[res].getSplitValue();
      switch (nodes[res].getNodeType()) {
        case X_SPLIT: {
          // Signed horizontal distance from the node's splitting plane.
          output.put(res.toString(), v1 - split);
        }
        break;
        case Y_SPLIT: {
          // Signed vertical distance from the node's splitting plane.
          output.put(res.toString(), v2 - split);
        }
        break;
        // Other node types (e.g. leaves) emit nothing.
      }
    }
  }
}
| 7,245 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/DefaultStringTokenizerTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.Map;
import java.util.Set;
/**
* Tokenizes and counts strings using a regex and optionally generates bigrams from the tokens
* "field1" specifies the key of the feature
* "regex" specifies the regex used to tokenize
* "generateBigrams" specifies whether bigrams should also be generated
*/
public class DefaultStringTokenizerTransform implements Transform {
  // String family holding the raw strings to tokenize.
  private String fieldName1;
  // Regex handed to the tokenizer.
  private String regex;
  // Float family receiving unigram token counts.
  private String outputName;
  private boolean generateBigrams;
  // Float family receiving bigram counts; only set when generateBigrams.
  private String bigramsOutputName;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    regex = config.getString(key + ".regex");
    outputName = config.getString(key + ".output");
    // Bigram generation is opt-in; absent config means disabled.
    generateBigrams =
        config.hasPath(key + ".generate_bigrams") && config.getBoolean(key + ".generate_bigrams");
    if (generateBigrams) {
      bigramsOutputName = config.getString(key + ".bigrams_output");
    }
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    if (stringFeatures == null) {
      return;
    }
    Set<String> rawStrings = stringFeatures.get(fieldName1);
    if (rawStrings == null) {
      return;
    }
    Util.optionallyCreateFloatFeatures(featureVector);
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    // Unigram counts always go to the primary output family.
    Map<String, Double> unigrams = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    NgramTransform.generateOutputTokens(rawStrings, regex, unigrams, 1, 1);
    if (generateBigrams) {
      Map<String, Double> bigrams =
          Util.getOrCreateFloatFeature(bigramsOutputName, floatFeatures);
      NgramTransform.generateOutputTokens(rawStrings, regex, bigrams, 2, 2);
    }
  }
}
| 7,246 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/DateDiffTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
* output = date_diff(field1, field2)
* get the date difference between dates in features of key "field1" and
* dates in features of key "field2"
*/
public class DateDiffTransform implements Transform {
  private String fieldName1;
  private String fieldName2;
  private String outputName;
  // Expected format of the incoming date strings. A SimpleDateFormat is
  // created per doTransform call because SimpleDateFormat is not thread-safe;
  // the previous shared static instance could silently corrupt parses when
  // the same transform ran on multiple threads.
  private static final String DATE_FORMAT = "yyyy-MM-dd";

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    fieldName2 = config.getString(key + ".field2");
    outputName = config.getString(key + ".output");
  }

  /**
   * For every (end, start) pair drawn from families field1 x field2, emits
   * the float feature "end-m-start" -> whole days between the two dates
   * (truncated toward zero).
   */
  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (stringFeatures == null || floatFeatures == null) {
      return;
    }
    Set<String> feature1 = stringFeatures.get(fieldName1);
    Set<String> feature2 = stringFeatures.get(fieldName2);
    if (feature1 == null || feature2 == null) {
      return;
    }
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    SimpleDateFormat format = new SimpleDateFormat(DATE_FORMAT);
    try {
      for (String endDateStr : feature1) {
        Date endDate = format.parse(endDateStr);
        for (String startDateStr : feature2) {
          Date startDate = format.parse(startDateStr);
          long diff = endDate.getTime() - startDate.getTime();
          long diffDays = TimeUnit.DAYS.convert(diff, TimeUnit.MILLISECONDS);
          output.put(endDateStr + "-m-" + startDateStr, (double) diffDays);
        }
      }
    } catch (ParseException e) {
      // Preserve the original best-effort behavior: report and keep any
      // pairs emitted before the malformed date was encountered.
      e.printStackTrace();
    }
  }
}
| 7,247 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/CustomLinearLogQuantizeTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.TransformUtil;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigObject;
import com.typesafe.config.ConfigValue;
import java.util.*;
import java.util.Map.Entry;
/**
* A custom quantizer that quantizes features based on upper limits and bucket sizes from config
* "field1" specifies feature family name.
* If "select_features" is specified, we only transform features in the select_features list.
* If "exclude_features" is specified, we transform features that are not in the exclude_features list.
* If both "select_features" and "exclude_features" are specified, we transform features that are in
* "select_features" list and not in "exclude_features" list.
*/
public class CustomLinearLogQuantizeTransform implements Transform {
  private String fieldName1;
  private String outputName;
  // Sorted map: bucket upper limit -> quantization step used below that limit.
  private TreeMap<Double, Double> limitBucketPairsMap;
  // Largest configured limit; values at or above it are clamped to it.
  private double upperLimit;
  private List<String> excludeFeatures;
  private List<String> selectFeatures;
  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    outputName = config.getString(key + ".output");
    if (config.hasPath(key + ".exclude_features")) {
      excludeFeatures = config.getStringList(key + ".exclude_features");
    }
    if (config.hasPath(key + ".select_features")) {
      selectFeatures = config.getStringList(key + ".select_features");
    }
    limitBucketPairsMap =
        parseTokensOutOfLimitBucketPairs(config.getObjectList(key + ".limit_bucket"));
    upperLimit = limitBucketPairsMap.lastKey();
  }
  // Flattens the "limit_bucket" config list into a sorted limit -> bucket map.
  // NOTE(review): only the first entry of each config object is read; any
  // additional keys in the same object are silently ignored — confirm config
  // objects are always single-entry.
  private static TreeMap<Double, Double> parseTokensOutOfLimitBucketPairs(
      List<? extends ConfigObject> pairs) {
    TreeMap<Double, Double> parsedTokensMap = new TreeMap<>();
    for (ConfigObject configObject : pairs) {
      List<Entry<String, ConfigValue>> entries = new ArrayList<>(configObject.entrySet());
      parsedTokensMap.put(Double.parseDouble(entries.get(0).getKey()),
          Double.parseDouble(entries.get(0).getValue().unwrapped().toString()));
    }
    return parsedTokensMap;
  }
  // Builds "featureName=<quantized>" into sb (reused across calls) and
  // returns the resulting token.
  private String transformFeature(String featureName,
                                  double featureValue,
                                  StringBuilder sb) {
    sb.setLength(0);
    sb.append(featureName);
    // Quantize the magnitude; the sign is re-applied to the token below.
    boolean isValueNegative = false;
    if (featureValue < 0.0) {
      isValueNegative = true;
      featureValue = -featureValue;
    }
    // Magnitudes below 0.01 all collapse to the single token "name=0.0".
    if (featureValue < 1e-2) {
      sb.append("=0.0");
    } else {
      double limit;
      double bucket;
      if (featureValue >= upperLimit) {
        featureValue = upperLimit;
        bucket = limitBucketPairsMap.get(upperLimit);
      } else {
        // NOTE(review): higherKey is strictly greater, so a value exactly
        // equal to an interior limit is quantized with the NEXT bucket's
        // step rather than its own — confirm this boundary is intended
        // (ceilingKey would keep it in its own bucket).
        limit = limitBucketPairsMap.higherKey(featureValue);
        bucket = limitBucketPairsMap.get(limit);
      }
      // Scale by 1000 and truncate so the token keeps at most 3 decimals.
      Double val = TransformUtil.quantize(featureValue, bucket) * 1000;
      sb.append('=');
      if (isValueNegative) {
        sb.append('-');
      }
      sb.append(val.intValue()/1000.0);
    }
    return sb.toString();
  }
  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null || feature1.isEmpty()) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    Set<String> output = Util.getOrCreateStringFeature(outputName, stringFeatures);
    StringBuilder sb = new StringBuilder();
    for (Entry<String, Double> feature : feature1.entrySet()) {
      // A feature is transformed when it is not excluded and (if a select
      // list exists) it is selected.
      if ((excludeFeatures == null || !excludeFeatures.contains(feature.getKey())) &&
          (selectFeatures == null || selectFeatures.contains(feature.getKey()))) {
        String transformedFeature = transformFeature(feature.getKey(),
            feature.getValue(),
            sb);
        output.add(transformedFeature);
      }
    }
  }
}
| 7,248 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/LinearLogQuantizeTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.TransformUtil;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.Map;
import java.util.Map.Entry;
/**
* A quantizer that starts out with linearly space buckets that get coarser and coarser
* and eventually transitions to log buckets.
*/
public class LinearLogQuantizeTransform implements Transform {
  private String fieldName1;
  private String outputName;
  // Upper limit of each bucket to check if feature value falls in the bucket.
  // These were previously static (shared by every instance and thread,
  // together with a shared static StringBuilder) which made the class
  // thread-unsafe and let instances clobber each other's configuration.
  // They are now per-instance, and the builder is passed in explicitly.
  private List<Double> limits;
  // Step size used for quantization, for the corresponding limit.
  private List<Double> stepSizes;
  // Limit beyond which quantized value would be rounded to integer (ignoring decimals).
  private double integerRoundingLimit;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    outputName = config.getString(key + ".output");
    limits = new ArrayList<>();
    stepSizes = new ArrayList<>();
    limits.add(1.0);
    stepSizes.add(1.0 / 32.0);
    limits.add(10.0);
    stepSizes.add(0.125);
    limits.add(25.0);
    stepSizes.add(0.25);
    limits.add(50.0);
    stepSizes.add(5.0);
    limits.add(100.0);
    stepSizes.add(10.0);
    limits.add(400.0);
    stepSizes.add(25.0);
    limits.add(2000.0);
    stepSizes.add(100.0);
    limits.add(10000.0);
    stepSizes.add(250.0);
    integerRoundingLimit = 25.0;
  }

  // Appends the quantized value to sb when featureValue fits under limit.
  // Returns true when this bucket handled the value.
  private static boolean checkAndQuantize(StringBuilder sb,
                                          Double featureValue,
                                          double limit,
                                          double stepSize,
                                          boolean integerRounding) {
    if (featureValue <= limit) {
      if (!integerRounding) {
        sb.append(TransformUtil.quantize(featureValue, stepSize));
      } else {
        sb.append(TransformUtil.quantize(featureValue, stepSize).intValue());
      }
      return true;
    }
    return false;
  }

  // Builds "featureName=<quantized>": linear buckets up to the largest
  // limit, and power-of-two log buckets beyond it. Magnitudes below 0.01
  // collapse to "=0"; negative values keep a leading '-'.
  private String logQuantize(StringBuilder sb, String featureName, double featureValue) {
    sb.setLength(0);
    sb.append(featureName);
    sb.append('=');
    Double dbl = featureValue;
    if (dbl < 0.0) {
      sb.append('-');
      dbl = -dbl;
    }
    // At every stage we quantize roughly to a precision 10% of the magnitude.
    if (dbl < 1e-2) {
      sb.append('0');
    } else {
      boolean isQuantized = false;
      for (int i = 0; i < limits.size(); i++) {
        Double limit = limits.get(i);
        Double stepSize = stepSizes.get(i);
        if (limit > integerRoundingLimit) {
          isQuantized = checkAndQuantize(sb, dbl, limit, stepSize, true);
        } else {
          isQuantized = checkAndQuantize(sb, dbl, limit, stepSize, false);
        }
        if (isQuantized) {
          break;
        }
      }
      if (!isQuantized) {
        // Beyond the largest limit: round down to a power of two.
        Double exp = Math.log(dbl) / Math.log(2.0);
        Long val = 1L << exp.intValue();
        sb.append(val);
      }
    }
    return sb.toString();
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null || feature1.isEmpty()) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    Set<String> output = Util.getOrCreateStringFeature(outputName, stringFeatures);
    // One builder per call keeps doTransform re-entrant and thread-safe.
    StringBuilder sb = new StringBuilder();
    for (Entry<String, Double> feature : feature1.entrySet()) {
      output.add(logQuantize(sb, feature.getKey(), feature.getValue()));
    }
  }
}
| 7,249 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/MoveFloatToStringTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.TransformUtil;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.Iterator;
import java.util.Set;
import java.util.Map;
import java.util.Map.Entry;
import java.util.List;
/**
* Moves named fields from one family to another. If keys are not specified, all keys are moved
* from the float family. Features are capped via a `cap` config, which defaults to 1e10, to avoid
* exploding string features. The original float feature is removed but can be overridden using
* `keep` boolean config.
*/
public class MoveFloatToStringTransform implements Transform {
  // Source float family.
  private String fieldName1;
  // Quantization step used to build the string token.
  private double bucket;
  // Destination string family.
  private String outputName;
  // When null, every key in the source family is moved.
  private List<String> keys;
  // Values are clamped to this before quantization (defaults to 1e10) so
  // huge floats cannot explode into unbounded string features.
  private double cap;
  // When true the original float feature is retained after the move.
  private boolean keep;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    bucket = config.getDouble(key + ".bucket");
    outputName = config.getString(key + ".output");
    if (config.hasPath(key + ".keys")) {
      keys = config.getStringList(key + ".keys");
    }
    cap = config.hasPath(key + ".cap") ? config.getDouble(key + ".cap") : 1e10;
    keep = config.hasPath(key + ".keep") && config.getBoolean(key + ".keep");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> source = floatFeatures.get(fieldName1);
    if (source == null || source.isEmpty()) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Set<String> output =
        Util.getOrCreateStringFeature(outputName, featureVector.getStringFeatures());
    if (keys != null) {
      for (String k : keys) {
        moveFloat(source, output, k, cap, bucket);
        if (!keep) {
          source.remove(k);
        }
      }
    } else {
      // Explicit iterator so entries can be removed safely mid-loop.
      Iterator<Entry<String, Double>> it = source.entrySet().iterator();
      while (it.hasNext()) {
        moveFloat(source, output, it.next().getKey(), cap, bucket);
        if (!keep) {
          it.remove();
        }
      }
    }
  }

  /**
   * Quantizes feature1[key] (capped at cap) and adds "key=value" to output.
   * No-op when the key is absent.
   */
  public static void moveFloat(
      Map<String, Double> feature1,
      Set<String> output,
      String key,
      double cap,
      double bucket) {
    if (!feature1.containsKey(key)) {
      return;
    }
    Double capped = Math.min(feature1.get(key), cap);
    output.add(key + '=' + TransformUtil.quantize(capped, bucket));
  }
}
| 7,250 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/DeleteStringFeatureTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.typesafe.config.Config;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Deletes from the string family "field1" every feature whose value starts
 * with one of the configured "keys" prefixes.
 */
public class DeleteStringFeatureTransform implements Transform {
  private String fieldName1;
  // Prefixes; any feature starting with one of these is removed.
  private List<String> keys;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    keys = config.getStringList(key + ".keys");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    if (stringFeatures == null) {
      return;
    }
    Set<String> feature1 = stringFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    // Single pass with removeIf instead of collecting matches into a
    // temporary list and deleting them in a second pass.
    feature1.removeIf(feat -> {
      for (String prefix : keys) {
        if (feat.startsWith(prefix)) {
          return true;
        }
      }
      return false;
    });
  }
}
| 7,251 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/FloatToStringTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import lombok.extern.slf4j.Slf4j;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* Similar to MoveFloatToStringTransform, however, just move defined float value into String Feature
* not using bucket. This is used when there are certain number of incorrect data,
* i.e. x = 0 doesn't mean it is worse than x = 0.00001, it just somewhere in the pipeline
* make null = 0, so before we fixed the pipeline, convert it to string feature.
*/
@Slf4j
public class FloatToStringTransform implements Transform {
  private String fieldName;
  // When null, every key in the source float family is considered.
  private Collection<String> keys;
  // Sentinel float values that should be moved to the string family verbatim.
  private Set<Double> values;
  private String stringOutputName;

  @Override
  public void configure(Config config, String key) {
    fieldName = config.getString(key + ".field1");
    if (config.hasPath(key + ".keys")) {
      keys = config.getStringList(key + ".keys");
    }
    values = new HashSet<>(config.getDoubleList(key + ".values"));
    stringOutputName = config.getString(key + ".string_output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    // Use the accessor, consistent with the other transforms in this file,
    // instead of reaching into the public field.
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null || floatFeatures.isEmpty()) {
      return;
    }
    Map<String, Double> input = floatFeatures.get(fieldName);
    if (input == null || input.isEmpty()) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    Set<String> stringOutput = Util.getOrCreateStringFeature(stringOutputName, stringFeatures);
    // Copy the key set when iterating over the map's own keys: the helper
    // removes matching entries, and removing through the live keySet view
    // while iterating it would throw ConcurrentModificationException.
    Collection<String> localKeys = (keys == null) ? new HashSet<>(input.keySet()) : keys;
    log.debug("k {} {}", localKeys, input);
    for (String key : localKeys) {
      moveFloatToString(
          input, key, values, stringOutput);
    }
  }

  /**
   * Moves input[key] into stringOutput as "key=value" (removing it from
   * input) when its value is one of the configured sentinel values.
   */
  private static void moveFloatToString(
      Map<String, Double> input,
      String key, Set<Double> values,
      Set<String> stringOutput) {
    if (input.containsKey(key)) {
      Double inputFloatValue = input.get(key);
      if (values.contains(inputFloatValue)) {
        String movedFloat = key + "=" + inputFloatValue;
        stringOutput.add(movedFloat);
        input.remove(key);
      }
    }
  }
}
| 7,252 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/ConvertStringCaseTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.transforms.types.StringTransform;
import com.typesafe.config.Config;
/**
* Converts strings to either all lowercase or all uppercase
* "field1" specifies the key of the feature
* "convert_to_uppercase" converts strings to uppercase if true, otherwise converts to lowercase
* "output" optionally specifies the key of the output feature, if it is not given the transform
* overwrites / replaces the input feature
*/
public class ConvertStringCaseTransform extends StringTransform {
  // True => uppercase the input; false => lowercase it.
  private boolean convertToUppercase;

  @Override
  public void init(Config config, String key) {
    convertToUppercase = config.getBoolean(key + ".convert_to_uppercase");
  }

  /**
   * Returns the input converted to upper or lower case; null passes through.
   * NOTE(review): toUpperCase()/toLowerCase() use the JVM default locale, so
   * results can differ across machines (e.g. Turkish dotted/dotless i).
   * Behavior is intentionally unchanged here.
   */
  @Override
  public String processString(String rawString) {
    if (rawString == null) {
      return null;
    }
    return convertToUppercase ? rawString.toUpperCase() : rawString.toLowerCase();
  }
}
| 7,253 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/CustomRangeQuantizeTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.typesafe.config.Config;
/**
* A custom quantizer that quantizes features based on the specified range in the config. "field1":
* specifies feature family name If "select_features" is specified, we only transform features in
* the select_features list, otherwise, we transform all features in the feature family thresholds:
* specifies how do we quantize the feature, for example: if we set thresholds: [0.1, 0.5] the
* transformer will bucketize features into three buckets: the first with feature value <= 0.1, the
* second with features value > 0.1 and <=0.5; and the third with value > 0.5. The quantized
* features are put under a new feature family specified by "output"
* Note that the transformer assumes thresholds are in ascending order
*/
public class CustomRangeQuantizeTransform implements Transform {
  private String fieldName1;
  // Bucket boundaries, assumed to be in ascending order.
  private List<Double> thresholds;
  private String outputName;
  // When non-null, only feature names in this list are transformed.
  private List<String> selectFeatures;

  /**
   * Emits one bucket token for featureValue: "name<=min" below the first
   * threshold, "name>max" above the last, or "lo<name<=hi" for the interior
   * bucket it falls into.
   */
  private static void getQuantizedFeatures(List<Double> thresholds,
                                           String featureName,
                                           Double featureValue,
                                           Set<String> output) {
    double tMin = thresholds.get(0);
    double tMax = thresholds.get(thresholds.size() - 1);
    if (featureValue <= tMin) {
      output.add(featureName + "<=" + Double.toString(tMin));
    } else if (featureValue > tMax) {
      output.add(featureName + ">" + Double.toString(tMax));
    } else {
      for (int i = 0; i + 1 < thresholds.size(); i++) {
        double lo = thresholds.get(i);
        double hi = thresholds.get(i + 1);
        if (featureValue > lo && featureValue <= hi) {
          output.add(Double.toString(lo) + "<" + featureName + "<=" + Double.toString(hi));
          break;
        }
      }
    }
  }

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    thresholds = config.getDoubleList(key + ".thresholds");
    outputName = config.getString(key + ".output");
    if (config.hasPath(key + ".select_features")) {
      selectFeatures = config.getStringList(key + ".select_features");
    }
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Set<String> output =
        Util.getOrCreateStringFeature(outputName, featureVector.getStringFeatures());
    for (Map.Entry<String, Double> feature : feature1.entrySet()) {
      if (selectFeatures == null || selectFeatures.contains(feature.getKey())) {
        getQuantizedFeatures(thresholds, feature.getKey(), feature.getValue(), output);
      }
    }
  }
}
| 7,254 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/DecisionTreeTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.ModelRecord;
import com.airbnb.aerosolve.core.models.DecisionTreeModel;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.io.Serializable;
import java.util.*;
import java.util.Map.Entry;
/**
* Applies a decision tree transform to existing float features.
* Emits the binary leaf features to the string family output_leaves
* Emits the score to the float family output_score
* Use tree.toHumanReadableTransform to generate the nodes list.
*/
public class DecisionTreeTransform implements Transform {
  private String outputLeaves;
  private String outputScoreFamily;
  private String outputScoreName;
  private DecisionTreeModel tree;

  @Override
  public void configure(Config config, String key) {
    // Leaf indicator features go to a string family; the leaf score to a float family.
    outputLeaves = config.getString(key + ".output_leaves");
    outputScoreFamily = config.getString(key + ".output_score_family");
    outputScoreName = config.getString(key + ".output_score_name");
    tree = DecisionTreeModel.fromHumanReadableTransform(config.getStringList(key + ".nodes"));
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Set<String> leafOutput =
        Util.getOrCreateStringFeature(outputLeaves, featureVector.getStringFeatures());
    Map<String, Double> scoreOutput =
        Util.getOrCreateFloatFeature(outputScoreFamily, floatFeatures);
    // Locate the leaf this vector falls into and emit its name and weight.
    ModelRecord leaf = tree.getStumps().get(tree.getLeafIndex(floatFeatures));
    leafOutput.add(leaf.featureName);
    scoreOutput.put(outputScoreName, leaf.featureWeight);
  }
}
| 7,255 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/ProductTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.HashMap;
/**
* Computes the polynomial product of all values in field1
* i.e. prod_i 1 + x_i
* and places the result in outputName
*/
public class ProductTransform implements Transform {
  private String fieldName1;
  private List<String> keys;
  private String outputName;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    outputName = config.getString(key + ".output");
    keys = config.getStringList(key + ".keys");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    // Primitive accumulator: the original used a boxed Double, which
    // re-boxed the product on every loop iteration.
    double prod = 1.0;
    for (String key : keys) {
      Double dbl = feature1.get(key);
      if (dbl != null) {
        // Missing keys are skipped, i.e. contribute a factor of 1.
        prod *= 1.0 + dbl;
      }
    }
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    output.put("*", prod);
  }
}
| 7,256 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/NearestTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.HashMap;
import java.util.Set;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
/**
* output = nearest of (field1, field2.key)
*/
public class NearestTransform implements Transform {
  private String fieldName1;
  private String fieldName2;
  private String key2;
  private String outputName;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    fieldName2 = config.getString(key + ".field2");
    key2 = config.getString(key + ".key");
    outputName = config.getString(key + ".output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    Map<String, Double> feature2 = floatFeatures.get(fieldName2);
    if (feature2 == null) {
      return;
    }
    Double sub = feature2.get(key2);
    if (sub == null) {
      return;
    }
    // Bug fix: ensure the string-feature map exists *before* fetching it.
    // Previously optionallyCreateStringFeatures was called after
    // getOrCreateStringFeature, which NPEs when stringFeatures is null.
    Util.optionallyCreateStringFeatures(featureVector);
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    Set<String> output = Util.getOrCreateStringFeature(outputName, stringFeatures);
    // Find the key in feature1 whose value is closest to feature2[key2].
    String nearest = "nothing";
    double bestDist = 1e10;
    for (Entry<String, Double> f1 : feature1.entrySet()) {
      double dist = Math.abs(f1.getValue() - sub);
      if (dist < bestDist) {
        nearest = f1.getKey();
        bestDist = dist;
      }
    }
    output.add(key2 + "~=" + nearest);
  }
}
| 7,257 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/CapFloatTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.transforms.types.FloatTransform;
import com.typesafe.config.Config;
import java.util.List;
import java.util.Map;
public class CapFloatTransform extends FloatTransform {
  private List<String> keys;
  private double lowerBound;
  private double upperBound;

  @Override
  public void init(Config config, String key) {
    keys = config.getStringList(key + ".keys");
    lowerBound = config.getDouble(key + ".lower_bound");
    upperBound = config.getDouble(key + ".upper_bound");
  }

  @Override
  public void output(Map<String, Double> input, Map<String, Double> output) {
    // Clamp each configured key into [lowerBound, upperBound]; absent keys are skipped.
    for (String name : keys) {
      Double value = input.get(name);
      if (value == null) {
        continue;
      }
      double clamped = value;
      if (clamped < lowerBound) {
        clamped = lowerBound;
      }
      if (clamped > upperBound) {
        clamped = upperBound;
      }
      output.put(name, clamped);
    }
  }
}
| 7,258 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/TransformFactory.java | package com.airbnb.aerosolve.core.transforms;
import com.google.common.base.CaseFormat;
import com.typesafe.config.Config;
/**
* Created by hector_yee on 8/25/14.
*/
public class TransformFactory {
  /**
   * Instantiates and configures the transform named by the config entry
   * "&lt;key&gt;.transform". The lower_underscore name is converted to UpperCamel
   * and resolved to com.airbnb.aerosolve.core.transforms.&lt;Name&gt;Transform.
   *
   * @return the configured transform, or null if config/key is null or no
   *         transform name is present.
   * @throws RuntimeException wrapping any reflection or construction failure.
   */
  public static Transform createTransform(Config config, String key) {
    if (config == null || key == null) {
      return null;
    }
    String transformName = config.getString(key + ".transform");
    if (transformName == null) {
      return null;
    }
    String name = CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, transformName);
    Transform result;
    try {
      // getDeclaredConstructor().newInstance() replaces the deprecated
      // Class.newInstance(), which rethrows checked constructor exceptions
      // without declaring them.
      result = (Transform) Class
          .forName("com.airbnb.aerosolve.core.transforms." + name + "Transform")
          .getDeclaredConstructor()
          .newInstance();
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
    result.configure(config, key);
    return result;
  }
}
| 7,259 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/FloatCrossFloatTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.TransformUtil;
import com.airbnb.aerosolve.core.util.Util;
import java.util.Map;
import com.typesafe.config.Config;
/**
* Takes the floats in fieldName1, quantizes them into buckets, converts them to strings, then
* crosses them with the floats in fieldName2 and then stores the result in a new float feature
* output specified by outputName.
*/
public class FloatCrossFloatTransform implements Transform {
  private String fieldName1;
  private double bucket;
  private double cap;
  private String fieldName2;
  private String outputName;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    bucket = config.getDouble(key + ".bucket");
    if (config.hasPath(key + ".cap")) {
      cap = config.getDouble(key + ".cap");
    } else {
      cap = 1e10;  // effectively uncapped by default
    }
    fieldName2 = config.getString(key + ".field2");
    outputName = config.getString(key + ".output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    // Use the accessor instead of direct field access, for consistency
    // with every other transform in this package.
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null || floatFeatures.isEmpty()) {
      return;
    }
    Map<String, Double> map1 = floatFeatures.get(fieldName1);
    if (map1 == null || map1.isEmpty()) {
      return;
    }
    Map<String, Double> map2 = floatFeatures.get(fieldName2);
    if (map2 == null || map2.isEmpty()) {
      return;
    }
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    for (Map.Entry<String, Double> entry1 : map1.entrySet()) {
      String float1Key = entry1.getKey();
      // Cap before quantizing so large outliers collapse into one bucket.
      double float1Value = Math.min(entry1.getValue(), cap);
      Double float1Quantized = TransformUtil.quantize(float1Value, bucket);
      for (Map.Entry<String, Double> entry2 : map2.entrySet()) {
        // Output key: "<f1>=<quantized f1>^<f2>", value: raw f2 value.
        String outputKey = float1Key + "=" + float1Quantized + "^" + entry2.getKey();
        output.put(outputKey, entry2.getValue());
      }
    }
  }
}
| 7,260 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/MultiscaleMoveFloatToStringTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.TransformUtil;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.Set;
import java.util.Map;
import java.util.List;
/**
* Moves named fields from one family to another.
*/
public class MultiscaleMoveFloatToStringTransform implements Transform {
  private String fieldName1;
  private List<Double> buckets;
  private String outputName;
  private List<String> keys;
  private double cap;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    buckets = config.getDoubleList(key + ".buckets");
    outputName = config.getString(key + ".output");
    keys = config.getStringList(key + ".keys");
    // Probe for the optional cap with hasPath (consistent with
    // FloatCrossFloatTransform) rather than catching the exception a
    // missing config entry throws.
    if (config.hasPath(key + ".cap")) {
      cap = config.getDouble(key + ".cap");
    } else {
      cap = 1e10;
    }
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    Set<String> output = Util.getOrCreateStringFeature(outputName, stringFeatures);
    for (String key : keys) {
      // Single map lookup instead of containsKey + get; also skips a
      // null value instead of NPE-ing on the cap comparison.
      Double dbl = feature1.get(key);
      if (dbl == null) {
        continue;
      }
      if (dbl > cap) {
        dbl = cap;
      }
      // Emit one quantized string feature per bucket scale, then move the
      // source feature out of the float family.
      for (Double bucket : buckets) {
        Double quantized = TransformUtil.quantize(dbl, bucket);
        output.add(key + '[' + bucket + "]=" + quantized);
      }
      feature1.remove(key);
    }
  }
}
| 7,261 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/MultiscaleGridContinuousTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.*;
import java.util.Map.Entry;
/**
* Quantizes the floatFeature named in "field1" with buckets in "bucket" before placing
* it in the floatFeature named "output" subtracting the origin of the box.
*/
public class MultiscaleGridContinuousTransform implements Transform {
  private String fieldName1;
  private List<Double> buckets;
  private String outputName;
  private String value1;
  private String value2;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    buckets = config.getDoubleList(key + ".buckets");
    outputName = config.getString(key + ".output");
    value1 = config.getString(key + ".value1");
    value2 = config.getString(key + ".value2");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    Double v1 = feature1.get(value1);
    Double v2 = feature1.get(value2);
    if (v1 == null || v2 == null) {
      return;
    }
    transformFeature(v1, v2, Util.getOrCreateFloatFeature(outputName, floatFeatures));
  }

  // Emits one pair of residual features for every configured bucket size.
  public void transformFeature(double v1,
                               double v2,
                               Map<String, Double> output) {
    for (Double bucket : buckets) {
      transformFeature(v1, v2, bucket, output);
    }
  }

  // Snaps (v1, v2) onto a grid of the given bucket size and stores each
  // coordinate's offset from the grid cell's origin. Note the quantization
  // truncates toward zero (int conversion), not a floor, so negative
  // values snap toward the origin.
  public static void transformFeature(double v1,
                                      double v2,
                                      double bucket,
                                      Map<String, Double> output) {
    double q1 = bucket * (int) (v1 / bucket);
    double q2 = bucket * (int) (v2 / bucket);
    String cell = "[" + bucket + "]=(" + q1 + ',' + q2 + ')';
    output.put(cell + "@1", v1 - q1);
    output.put(cell + "@2", v2 - q2);
  }
}
| 7,262 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/QuantizeTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.Set;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
/**
* Created by hector_yee on 8/25/14.
* Multiplies the floatFeature named in "field1" with "scale" before placing
* it in the stringFeature named "output"
*/
public class QuantizeTransform implements Transform {
  private String fieldName1;
  private double scale;
  private String outputName;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    scale = config.getDouble(key + ".scale");
    outputName = config.getString(key + ".output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null || feature1.isEmpty()) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Set<String> output =
        Util.getOrCreateStringFeature(outputName, featureVector.getStringFeatures());
    for (Entry<String, Double> entry : feature1.entrySet()) {
      transformAndAddFeature(scale, entry.getKey(), entry.getValue(), output);
    }
  }

  // Scales the value, truncates toward zero, and emits "name=intPart".
  public static void transformAndAddFeature(Double scale,
                                            String featureName,
                                            Double featureValue,
                                            Set<String> output) {
    int quantized = (int) (featureValue * scale);
    output.add(featureName + '=' + quantized);
  }
}
| 7,263 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/StuffIdTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.Map;
/**
* id = fieldName1.key1
* feature value = fieldName2.key2
* output[ fieldname2 @ id ] = feature value
* This transform is useful for making cross products of categorical features
* e.g. leaf_id (say 123) and a continuous variable e.g. searches_at_leaf (say 4.0)
* and making a new feature searches_at_leaf @ 123 = 4.0
* The original searches_at_leaf feature can compare quantities at a global level
* say searches in one market vs another market.
* On the other hand searches_at_leaf @ 123 can tell you how the model changes
* for searches at a particular place changing from day to day.
*/
public class StuffIdTransform implements Transform {
  private String fieldName1;
  private String fieldName2;
  private String key1;
  private String key2;
  private String outputName;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    fieldName2 = config.getString(key + ".field2");
    key1 = config.getString(key + ".key1");
    key2 = config.getString(key + ".key2");
    outputName = config.getString(key + ".output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> idFamily = floatFeatures.get(fieldName1);
    Map<String, Double> valueFamily = floatFeatures.get(fieldName2);
    if (idFamily == null || valueFamily == null) {
      return;
    }
    Double id = idFamily.get(key1);
    Double value = valueFamily.get(key2);
    if (id == null || value == null) {
      return;
    }
    // Cross the categorical id (field1.key1, truncated to a long) with the
    // continuous value (field2.key2): output["<key2>@<id>"] = value.
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    output.put(key2 + '@' + id.longValue(), value);
  }
}
| 7,264 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/NormalizeUtf8Transform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.transforms.types.StringTransform;
import java.text.Normalizer;
import com.typesafe.config.Config;
/**
* Normalizes strings to UTF-8 NFC, NFD, NFKC or NFKD form (NFD by default)
* "field1" specifies the key of the feature
* "normalization_form" optionally specifies whether to use NFC, NFD, NFKC or NFKD form
* "output" optionally specifies the key of the output feature, if it is not given the transform
* overwrites / replaces the input feature
*/
public class NormalizeUtf8Transform extends StringTransform {
  public static final Normalizer.Form DEFAULT_NORMALIZATION_FORM = Normalizer.Form.NFD;

  private Normalizer.Form normalizationForm;

  @Override
  public void init(Config config, String key) {
    String formName = DEFAULT_NORMALIZATION_FORM.name();
    if (config.hasPath(key + ".normalization_form")) {
      formName = config.getString(key + ".normalization_form");
    }
    // Resolve the configured name case-insensitively (NFC/NFD/NFKC/NFKD);
    // anything unrecognized falls back to the default form.
    try {
      normalizationForm = Normalizer.Form.valueOf(formName.toUpperCase());
    } catch (IllegalArgumentException e) {
      normalizationForm = DEFAULT_NORMALIZATION_FORM;
    }
  }

  @Override
  public String processString(String rawString) {
    return rawString == null ? null : Normalizer.normalize(rawString, normalizationForm);
  }
}
| 7,265 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/NgramTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.transforms.Transform;
import com.airbnb.aerosolve.core.util.Util;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.typesafe.config.Config;
/**
* Tokenizes strings using a regex and generates ngrams from the tokens
* "field1" specifies the key of the feature
* "regex" specifies the regex used to tokenize
* "n" specifies the size of the ngrams
* "min_n" optional parameter, if specified ngrams from min_n (inclusive) to n (inclusive) will
* be generated and placed in the output
*/
public class NgramTransform implements Transform {
  public static final String BIGRAM_SEPARATOR = " ";

  private String fieldName1;
  private String regex;
  private int n;
  private int minN;
  private String outputName;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    regex = config.getString(key + ".regex");
    n = config.getInt(key + ".n");
    outputName = config.getString(key + ".output");
    if (config.hasPath(key + ".min_n")) {
      minN = config.getInt(key + ".min_n");
    } else {
      minN = n;  // default: only generate ngrams of exactly size n
    }
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    if (stringFeatures == null) {
      return;
    }
    Set<String> input = stringFeatures.get(fieldName1);
    if (input == null) {
      return;
    }
    Util.optionallyCreateFloatFeatures(featureVector);
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    generateOutputTokens(input, regex, output, minN, n);
  }

  /**
   * Tokenizes every string in input with regex and accumulates counts of
   * all ngrams of size minN..n (inclusive) into output.
   */
  public static void generateOutputTokens(
      Set<String> input,
      String regex,
      Map<String, Double> output,
      int minN,
      int n) {
    for (String rawString : input) {
      if (rawString == null) continue;
      List<String> cleanedTokens = generateCleanedTokens(rawString, regex);
      if (cleanedTokens.size() < minN) {
        continue;  // too few tokens for even the smallest ngram size
      }
      for (int i = minN; i <= n; ++i) {
        for (String ngram : generateNgrams(cleanedTokens, i)) {
          incrementOutput(ngram, output);
        }
      }
    }
  }

  /** Splits rawString on regex and drops null/empty tokens. */
  public static List<String> generateCleanedTokens(String rawString, String regex) {
    ArrayList<String> cleanedTokens = new ArrayList<>();
    if (rawString == null) {
      return cleanedTokens;
    }
    for (String token : rawString.split(regex)) {
      if (token != null && token.length() > 0) {
        cleanedTokens.add(token);
      }
    }
    return cleanedTokens;
  }

  /** Returns all contiguous ngrams of the given size, in order. */
  public static List<String> generateNgrams(List<String> tokens, int n) {
    if (n < 1 || tokens == null) {
      return new ArrayList<>();
    }
    // ArrayList instead of LinkedList: the result size is known up front
    // and access is purely sequential.
    List<String> ngrams = new ArrayList<>(Math.max(0, tokens.size() - n + 1));
    for (int i = 0; i <= (tokens.size() - n); ++i) {
      ngrams.add(concatenate(tokens, i, (i + n)));
    }
    return ngrams;
  }

  /** Joins tokens[start, end) with BIGRAM_SEPARATOR. */
  private static String concatenate(List<String> tokens, int start, int end) {
    StringBuilder sb = new StringBuilder();
    for (int i = start; i < end; ++i) {
      if (i > start) {
        sb.append(BIGRAM_SEPARATOR);
      }
      sb.append(tokens.get(i));
    }
    return sb.toString();
  }

  /** Increments the count for key in output; single map lookup per call. */
  private static void incrementOutput(String key, Map<String, Double> output) {
    if (key == null || output == null) {
      return;
    }
    Double count = output.get(key);
    output.put(key, count == null ? 1.0 : count + 1.0);
  }
}
| 7,266 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/DeleteFloatFeatureFamilyTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import com.typesafe.config.Config;
/**
* "fields" specifies a list of float feature families to be deleted
*/
public class DeleteFloatFeatureFamilyTransform implements Transform {
  private List<String> fieldNames;

  @Override
  public void configure(Config config, String key) {
    fieldNames = config.getStringList(key + ".fields");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null || fieldNames == null) {
      return;
    }
    for (String fieldName : fieldNames) {
      // Map.remove is a no-op for absent keys, so the previous
      // get-then-check-then-remove double lookup was redundant.
      floatFeatures.remove(fieldName);
    }
  }
}
| 7,267 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/FloatToDenseTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/*
Turn several float features into one dense feature, feature number must > 1
1. IF all float features are null, create a string feature,
with family name string_output, feature name output^null
2. IF only one float feature is not null, create a float feature
with family name same as family of the only not null float feature
3. Other cases create dense features
both 2 and 3, feature name: output^key keys.
*/
public class FloatToDenseTransform implements Transform {
  private List<String> fields;
  private List<String> keys;
  // Rough per-feature-name length estimate used to presize the name builder.
  private static final int featureAVGSize = 16;

  @Override
  public void configure(Config config, String key) {
    keys = config.getStringList(key + ".keys");
    fields = config.getStringList(key + ".fields");
    if (fields.size() != keys.size() || fields.size() <= 1) {
      // Bug fix: String.format uses %d placeholders; the previous
      // SLF4J-style "{}" placeholders were emitted literally and the
      // sizes never appeared in the message.
      String msg = String.format(
          "fields size %d keys size %d", fields.size(), keys.size());
      throw new RuntimeException(msg);
    }
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    int size = fields.size();
    StringBuilder sb = new StringBuilder((size + 1) * featureAVGSize);
    List<Double> output = new ArrayList<>(size);
    for (int i = 0; i < size; ++i) {
      Map<String, Double> family = floatFeatures.get(fields.get(i));
      if (family == null) {
        return;  // all-or-nothing: bail if any configured family is missing
      }
      Double feature = family.get(keys.get(i));
      if (feature == null) {
        return;  // bail if any configured key is missing
      }
      if (i > 0) {
        sb.append('^');
      }
      sb.append(keys.get(i));
      output.add(feature);
    }
    // Dense feature name is "key0^key1^...^keyN".
    Util.setDenseFeature(featureVector, sb.toString(), output);
  }
}
| 7,268 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/FloatLabelTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import java.util.Map;
import com.typesafe.config.Config;
/**
* Convert a Float feature to binary LABEL based on threshold value. This is intended for
* binary classification where LABEL is either -1 or 1. MergeStrategy describe how existing
* LABEL is treated.
*
* The purpose of MergeStrategy is to help combining float features into LABEL. For example,
* using OVERRIDE_NEGATIVE is equivalent to LABEL with feature1 >= threshold1 or feature2 >= threshold2,
* using OVERRIDE_POSITIVE is equivalent to LABEL with feature1 >= threshold1 and feature2 >= threshold2.
*/
public class FloatLabelTransform implements Transform {
  enum MergeStrategy {
    OVERRIDE, // override existing label
    OVERRIDE_NEGATIVE, // override negative label but keep positive label
    OVERRIDE_POSITIVE, // override positive label but keep negative label
    SKIP // preserve existing label (only replace if observation is un-labeled)
  }

  private String fieldName1;
  private String key1;
  private double threshold;
  private MergeStrategy mergeStrategy;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    key1 = config.getString(key + ".key1");
    threshold = config.getDouble(key + ".threshold");
    mergeStrategy = MergeStrategy.valueOf(config.getString(key + ".merge").toUpperCase());
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Double> labelFeature =
        Util.getOrCreateFloatFeature("LABEL", featureVector.floatFeatures);
    Double existing = labelFeature.get("");
    if (mergeStrategy == MergeStrategy.SKIP && existing != null) {
      return;  // SKIP never touches an already-labeled observation
    }
    Map<String, Double> floatFeature = featureVector.floatFeatures.get(fieldName1);
    if (floatFeature == null) {
      return;
    }
    Double featureValue = floatFeature.get(key1);
    if (featureValue == null) {
      return;
    }
    // Binary label: +1 at or above the threshold, -1 below it.
    double newLabel = featureValue >= threshold ? 1 : -1;
    if (shouldWrite(existing, newLabel)) {
      labelFeature.put("", newLabel);
    }
  }

  // Decides whether newLabel replaces the existing label under the merge strategy.
  private boolean shouldWrite(Double existing, double newLabel) {
    if (existing == null) {
      return true;  // a missing label is always filled in
    }
    if (mergeStrategy == MergeStrategy.OVERRIDE) {
      return true;
    }
    if (mergeStrategy == MergeStrategy.OVERRIDE_NEGATIVE) {
      return existing < 0 && newLabel > 0;
    }
    if (mergeStrategy == MergeStrategy.OVERRIDE_POSITIVE) {
      return existing > 0 && newLabel < 0;
    }
    return false;
  }
}
| 7,269 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/KdtreeTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.airbnb.aerosolve.core.models.KDTreeModel;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* Inputs = fieldName1 (value1, value2)
* Outputs = list of kdtree nodes
*/
public class KdtreeTransform implements Transform {
  private String fieldName1;
  private String value1;
  private String value2;
  private String outputName;
  private Integer maxCount;
  private Optional<KDTreeModel> modelOptional;

  private static final Logger log = LoggerFactory.getLogger(KdtreeTransform.class);

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    value1 = config.getString(key + ".value1");
    value2 = config.getString(key + ".value2");
    outputName = config.getString(key + ".output");
    maxCount = config.getInt(key + ".max_count");
    // The KDTree model is shipped inline in the config as gzipped base64.
    modelOptional =
        KDTreeModel.readFromGzippedBase64String(config.getString(key + ".model_base64"));
    if (!modelOptional.isPresent()) {
      log.error("Could not load KDTree from encoded field");
    }
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    if (!modelOptional.isPresent()) {
      return;  // model failed to load at configure time; nothing to do
    }
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> feature1 = floatFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    Double v1 = feature1.get(value1);
    Double v2 = feature1.get(value2);
    if (v1 == null || v2 == null) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Set<String> output =
        Util.getOrCreateStringFeature(outputName, featureVector.getStringFeatures());
    // Emit up to maxCount node ids, taking from the end of the query result.
    ArrayList<Integer> result = modelOptional.get().query(v1, v2);
    int count = Math.min(result.size(), maxCount);
    for (int i = 0; i < count; i++) {
      output.add(result.get(result.size() - 1 - i).toString());
    }
  }
}
| 7,270 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/ListTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.typesafe.config.Config;
import java.util.ArrayList;
import java.util.List;
/**
* Created by hector_yee on 8/25/14.
* A transform that accepts a list of other transforms and applies them as a group
* in the order specified by the list.
*/
public class ListTransform implements Transform {
  private List<Transform> transforms;

  @Override
  public void configure(Config config, String key) {
    // Build the child transforms in the order given by the config list;
    // unknown/null transforms are silently dropped.
    transforms = new ArrayList<>();
    for (String transformKey : config.getStringList(key + ".transforms")) {
      Transform child = TransformFactory.createTransform(config, transformKey);
      if (child != null) {
        transforms.add(child);
      }
    }
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    // Apply every configured transform to the single vector, in list order.
    for (Transform transform : transforms) {
      transform.doTransform(featureVector);
    }
  }

  @Override
  public void doTransform(Iterable<FeatureVector> featureVectors) {
    // Each transform consumes the whole iterable before the next one runs.
    for (Transform transform : transforms) {
      transform.doTransform(featureVectors);
    }
  }
}
| 7,271 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/DateValTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.text.SimpleDateFormat;
import java.util.*;
import java.text.ParseException;
/**
 * Extracts a numeric date component from "yyyy-MM-dd" date strings.
 * "field1" specifies the string-feature family holding the date strings.
 * "date_type" selects the component: day_of_month, day_of_week, day_of_year,
 * year, or month (emitted 1-based).
 * "output" names the float-feature family receiving dateStr -> component.
 */
public class DateValTransform implements Transform {
  protected String fieldName1;
  protected String dateType;
  protected String outputName;
  // Retained for compatibility with any subclass that references it, but no
  // longer used internally: SimpleDateFormat is NOT thread-safe, and sharing
  // this static instance across concurrently-running transforms can corrupt
  // parse results.
  protected final static SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    dateType = config.getString(key + ".date_type");
    outputName = config.getString(key + ".output");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    if (stringFeatures == null) {
      return;
    }
    Set<String> feature1 = stringFeatures.get(fieldName1);
    if (feature1 == null) {
      return;
    }
    Util.optionallyCreateFloatFeatures(featureVector);
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
    // Per-invocation formatter: SimpleDateFormat keeps mutable parse state,
    // so a shared instance is unsafe under concurrent doTransform calls.
    SimpleDateFormat localFormat = new SimpleDateFormat("yyyy-MM-dd");
    for (String dateStr : feature1) {
      try {
        Date date = localFormat.parse(dateStr);
        Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        double dateVal;
        switch (dateType) {
          case "day_of_month":
            dateVal = cal.get(Calendar.DAY_OF_MONTH);
            break;
          case "day_of_week":
            dateVal = cal.get(Calendar.DAY_OF_WEEK);
            break;
          case "day_of_year":
            dateVal = cal.get(Calendar.DAY_OF_YEAR);
            break;
          case "year":
            dateVal = cal.get(Calendar.YEAR);
            break;
          case "month":
            // Calendar.MONTH is 0-based; emit the conventional 1-12 range.
            dateVal = cal.get(Calendar.MONTH) + 1;
            break;
          default:
            // Unknown date_type: abort the transform (original behavior).
            return;
        }
        output.put(dateStr, dateVal);
      } catch (ParseException e) {
        // Skip unparseable dates rather than failing the whole transform.
        e.printStackTrace();
      }
    }
  }
}
| 7,272 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/StumpTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.io.Serializable;
import java.util.*;
import java.util.Map.Entry;
/**
 * Applies boosted stump transform to float features and places them in string feature output.
 * The format for a stump is: feature family, feature name, threshold, descriptive name.
 * For each configured stump, if the float feature's value is >= the threshold,
 * the stump's descriptive name is added to the output string family.
 * You can obtain the stumps from a BoostedStump model in spark shell using
 * val model = sc.textFile(name).map(Util.decodeModel).take(10).map(x =>
 * "%s,%s,%f".format(x.featureFamily,x.featureName,x.threshold)).foreach(println)
 */
public class StumpTransform implements Transform {
  private String outputName;

  // Static nested class: a non-static inner class would capture a hidden
  // reference to the enclosing transform, which breaks/bloats serialization.
  private static class StumpDescription implements Serializable {
    public StumpDescription(String featureName, Double threshold, String descriptiveName) {
      this.featureName = featureName;
      this.threshold = threshold;
      this.descriptiveName = descriptiveName;
    }
    // Immutable description of a single stump.
    public final String featureName;
    public final Double threshold;
    public final String descriptiveName;
  }

  // Feature family -> stumps defined on features of that family.
  private Map<String, List<StumpDescription>> thresholds;

  @Override
  public void configure(Config config, String key) {
    outputName = config.getString(key + ".output");
    thresholds = new HashMap<>();
    List<String> stumps = config.getStringList(key + ".stumps");
    for (String stump : stumps) {
      String[] tokens = stump.split(",");
      // Malformed entries (wrong token count) are silently skipped.
      if (tokens.length == 4) {
        String family = tokens[0];
        String featureName = tokens[1];
        Double threshold = Double.parseDouble(tokens[2]);
        String descriptiveName = tokens[3];
        List<StumpDescription> featureList = thresholds.get(family);
        if (featureList == null) {
          featureList = new ArrayList<>();
          thresholds.put(family, featureList);
        }
        featureList.add(new StumpDescription(featureName, threshold, descriptiveName));
      }
    }
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Util.optionallyCreateStringFeatures(featureVector);
    Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
    Set<String> output = Util.getOrCreateStringFeature(outputName, stringFeatures);
    for (Entry<String, List<StumpDescription>> stumpFamily : thresholds.entrySet()) {
      Map<String, Double> feature = floatFeatures.get(stumpFamily.getKey());
      if (feature == null) continue;
      for (StumpDescription desc : stumpFamily.getValue()) {
        Double value = feature.get(desc.featureName);
        // Fire the stump when the feature meets or exceeds its threshold.
        if (value != null && value >= desc.threshold) {
          output.add(desc.descriptiveName);
        }
      }
    }
  }
}
| 7,273 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/StringCrossFloatTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.features.Features;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.*;
/*
if no keys1/keys2 provided, then cross whole family.
otherwise cross features.
keys1 is string features, and keys2 is float features.
to cross RAW string features, put Features.RAW in keys1
*/
// Crosses a string feature family (field1) with a float feature family
// (field2), emitting "stringValue^floatKey" -> floatValue into the output
// float family. Optional keys1/keys2 restrict the cross to a subset of each
// family; if absent the whole family is crossed.
public class StringCrossFloatTransform implements Transform {
private String fieldName1;
// optional
private Set<String> keys1;
private String fieldName2;
// optional
private Set<String> keys2;
private String outputName;
@Override
public void configure(Config config, String key) {
fieldName1 = config.getString(key + ".field1");
fieldName2 = config.getString(key + ".field2");
outputName = config.getString(key + ".output");
if (config.hasPath(key + ".keys1")) {
keys1 = new HashSet<>(config.getStringList(key + ".keys1"));
}
if (config.hasPath(key + ".keys2")) {
keys2 = new HashSet<>(config.getStringList(key + ".keys2"));
}
}
@Override
public void doTransform(FeatureVector featureVector) {
Map<String, Set<String>> stringFeatures = featureVector.stringFeatures;
Map<String, Map<String, Double>> floatFeatures = featureVector.floatFeatures;
// Both families must be present and non-empty for a cross to make sense.
if (stringFeatures == null || stringFeatures.isEmpty()) return;
if (floatFeatures == null || floatFeatures.isEmpty()) return;
Set<String> list1 = stringFeatures.get(fieldName1);
if (list1 == null || list1.isEmpty()) return;
Map<String, Double> list2 = floatFeatures.get(fieldName2);
if (list2 == null || list2.isEmpty()) return;
if (keys1 != null) {
// Restrict the string side: keep only values whose extracted feature
// name (via Features.getStringFeatureName) appears in keys1.
Set<String> joint = new HashSet<>();
for (String s1 : list1) {
String p = Features.getStringFeatureName(s1);
if (keys1.contains(p)) {
joint.add(s1);
}
}
if (joint.isEmpty()) return;
list1 = joint;
}
if (keys2 != null) {
// Restrict the float side: keep only entries whose key is in keys2.
Map<String, Double> joint = new HashMap<>();
for (Map.Entry<String, Double> s2 : list2.entrySet()) {
if (keys2.contains(s2.getKey())) {
joint.put(s2.getKey(), s2.getValue());
}
}
if (joint.isEmpty()) return;
list2 = joint;
}
Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
// Full cross product: one output entry per (string value, float entry).
for (String s1 : list1) {
for (Map.Entry<String, Double> s2 : list2.entrySet()) {
output.put(s1 + "^" + s2.getKey(), s2.getValue());
}
}
}
}
| 7,274 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/MoveFloatToStringAndFloatTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.TransformUtil;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
/**
* Takes the floats in the keys of fieldName1 (or if keys are not specified, all floats) and
* quantizes them into buckets. If the quantized float is less than or equal to a maximum specified
* bucket value or greater than or equal to a minimum specified bucket value, then the quantized
* float is stored as a string in a new string feature output specified by stringOutputName.
* Otherwise, the original, unchanged float is stored in a new float feature output specified by
* floatOutputName. The input float feature remains unchanged.
*/
public class MoveFloatToStringAndFloatTransform implements Transform {
private String fieldName1;
// If null, all keys of the input family are processed.
private Collection<String> keys;
// Quantization bucket width.
private double bucket;
// Inclusive upper/lower bounds for emitting the quantized string form.
private double maxBucket;
private double minBucket;
private String stringOutputName;
private String floatOutputName;
@Override
public void configure(Config config, String key) {
fieldName1 = config.getString(key + ".field1");
if (config.hasPath(key + ".keys")) {
keys = config.getStringList(key + ".keys");
}
bucket = config.getDouble(key + ".bucket");
maxBucket = config.getDouble(key + ".max_bucket");
minBucket = config.getDouble(key + ".min_bucket");
stringOutputName = config.getString(key + ".string_output");
floatOutputName = config.getString(key + ".float_output");
}
@Override
public void doTransform(FeatureVector featureVector) {
Map<String, Map<String, Double>> floatFeatures = featureVector.floatFeatures;
if (floatFeatures == null || floatFeatures.isEmpty()) {
return;
}
Map<String, Double> input = floatFeatures.get(fieldName1);
if (input == null || input.isEmpty()) {
return;
}
Util.optionallyCreateStringFeatures(featureVector);
Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
Set<String> stringOutput = Util.getOrCreateStringFeature(stringOutputName, stringFeatures);
Map<String, Double> floatOutput = Util.getOrCreateFloatFeature(floatOutputName, floatFeatures);
// Default to all keys of the input family when no explicit list is given.
Collection<String> localKeys = (keys == null)? input.keySet() : keys;
for (String key : localKeys) {
moveFloatToStringAndFloat(
input, key, bucket, minBucket, maxBucket, stringOutput, floatOutput);
}
}
// Routes one input value: if its quantized form falls inside
// [minBucket, maxBucket] it is emitted as the string "key=quantized";
// otherwise the ORIGINAL (unquantized) value goes to the float output.
// Keys absent from the input are ignored. The input family is unchanged.
private static void moveFloatToStringAndFloat(
Map<String, Double> input,
String key,
double bucket,
double minBucket,
double maxBucket,
Set<String> stringOutput,
Map<String, Double> floatOutput) {
if (input.containsKey(key)) {
Double inputFloatValue = input.get(key);
Double inputFloatQuantized = TransformUtil.quantize(inputFloatValue, bucket);
if (inputFloatQuantized >= minBucket && inputFloatQuantized <= maxBucket) {
String movedFloat = key + "=" + inputFloatQuantized;
stringOutput.add(movedFloat);
} else {
floatOutput.put(key, inputFloatValue);
}
}
}
}
| 7,275 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/ApproximatePercentileTransform.java | package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.Map;
/**
 * Approximates the percentile of a value via linear interpolation between two
 * reference points. Reads the low / upper percentile values from family
 * field1 (keys "low" and "upper") and the target value from field2[key2],
 * then writes a value in [0, 1] to output[outputKey]: 0 at or below low,
 * 1 at or above upper, linearly interpolated in between. Abstains entirely
 * when any input is missing or when (upper - low) < minDiff.
 */
public class ApproximatePercentileTransform implements Transform {
  private String fieldName1;
  private String fieldName2;
  private String lowKey;
  private String upperKey;
  private String key2;
  private String outputName;
  private String outputKey;
  private double minDiff;

  @Override
  public void configure(Config config, String key) {
    fieldName1 = config.getString(key + ".field1");
    fieldName2 = config.getString(key + ".field2");
    lowKey = config.getString(key + ".low");
    upperKey = config.getString(key + ".upper");
    minDiff = config.getDouble(key + ".minDiff");
    key2 = config.getString(key + ".key2");
    outputName = config.getString(key + ".output");
    outputKey = config.getString(key + ".outputKey");
  }

  @Override
  public void doTransform(FeatureVector featureVector) {
    Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
    if (floatFeatures == null) {
      return;
    }
    Map<String, Double> percentiles = floatFeatures.get(fieldName1);
    Map<String, Double> values = floatFeatures.get(fieldName2);
    if (percentiles == null || values == null) {
      return;
    }
    Double target = values.get(key2);
    if (target == null) {
      return;
    }
    Double low = percentiles.get(lowKey);
    Double upper = percentiles.get(upperKey);
    if (low == null || upper == null) {
      return;
    }
    double spread = upper - low;
    // Abstain if the percentiles are too close for a meaningful ratio.
    if (spread < minDiff) {
      return;
    }
    // Clamp to [0, 1] at the endpoints, interpolate linearly inside.
    double percentile;
    if (target <= low) {
      percentile = 0.0;
    } else if (target >= upper) {
      percentile = 1.0;
    } else {
      percentile = (target - low) / spread;
    }
    Util.getOrCreateFloatFeature(outputName, floatFeatures).put(outputKey, percentile);
  }
}
| 7,276 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/types/FloatTransform.java | package com.airbnb.aerosolve.core.transforms.types;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.transforms.Transform;
import com.airbnb.aerosolve.core.util.Util;
import com.typesafe.config.Config;
import java.util.Map;
// Template base class for transforms that map one float feature family to
// another. Subclasses implement init() (extra config) and output() (the
// actual per-family computation).
public abstract class FloatTransform implements Transform {
protected String fieldName1;
protected String outputName; // output family name, if not specified, output to fieldName1
@Override
public void configure(Config config, String key) {
fieldName1 = config.getString(key + ".field1");
if (config.hasPath(key + ".output")) {
outputName = config.getString(key + ".output");
} else {
// No explicit output: write back into the input family.
outputName = fieldName1;
}
init(config, key);
}
// Subclass hook for reading additional configuration under `key`.
protected abstract void init(Config config, String key);
@Override
public void doTransform(FeatureVector featureVector) {
Map<String, Map<String, Double>> floatFeatures = featureVector.getFloatFeatures();
if (floatFeatures == null) {
return;
}
Map<String, Double> input = floatFeatures.get(fieldName1);
if (input == null) {
return;
}
Map<String, Double> output = Util.getOrCreateFloatFeature(outputName, floatFeatures);
// Note: when outputName == fieldName1, input and output are the SAME map.
output(input, output);
}
// Computes the transform: reads from input, writes results into output.
protected abstract void output(Map<String, Double> input, Map<String, Double> output);
}
| 7,277 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/transforms/types/StringTransform.java | package com.airbnb.aerosolve.core.transforms.types;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.transforms.Transform;
import com.airbnb.aerosolve.core.util.Util;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.typesafe.config.Config;
/**
* Abstract representation of a transform that processes all strings in a string feature and
* outputs a new string feature or overwrites /replaces the input string feature.
* "field1" specifies the key of the feature
* "output" optionally specifies the key of the output feature, if it is not given the transform
* overwrites / replaces the input feature
*/
public abstract class StringTransform implements Transform {
protected String fieldName1;
protected String outputName;
@Override
public void configure(Config config, String key) {
fieldName1 = config.getString(key + ".field1");
if (config.hasPath(key + ".output")) {
outputName = config.getString(key + ".output");
} else {
// No explicit output: overwrite the input feature in place.
outputName = fieldName1;
}
init(config, key);
}
// Subclass hook for reading additional configuration under `key`.
protected abstract void init(Config config, String key);
@Override
public void doTransform(FeatureVector featureVector) {
Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
if (stringFeatures == null) {
return;
}
Set<String> feature1 = stringFeatures.get(fieldName1);
if (feature1 == null) {
return;
}
// Process into a temporary set first so that in the overwrite case we can
// clear the input without losing unprocessed elements.
HashSet<String> processedStrings = new HashSet<>();
for (String rawString : feature1) {
if (rawString != null) {
String processedString = processString(rawString);
processedStrings.add(processedString);
}
}
Set<String> output = Util.getOrCreateStringFeature(outputName, stringFeatures);
// Check reference equality to determine whether the output should overwrite the input
if (output == feature1) {
output.clear();
}
output.addAll(processedStrings);
}
// Maps one raw input string to its processed form; never called with null.
public abstract String processString(String rawString);
}
| 7,278 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/scoring/ModelConfig.java | package com.airbnb.aerosolve.core.scoring;
import lombok.Getter;
import lombok.experimental.Builder;
@Builder
public class ModelConfig {
@Getter
private final String configName;
@Getter
private final String key;
@Getter
private final String modelName;
} | 7,279 |
0 | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core | Create_ds/aerosolve/core/src/main/java/com/airbnb/aerosolve/core/scoring/ModelScorer.java | package com.airbnb.aerosolve.core.scoring;
import com.airbnb.aerosolve.core.Example;
import com.airbnb.aerosolve.core.FeatureVector;
import com.airbnb.aerosolve.core.models.AbstractModel;
import com.airbnb.aerosolve.core.models.ModelFactory;
import com.airbnb.aerosolve.core.transforms.Transformer;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import lombok.extern.slf4j.Slf4j;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
@Slf4j
// Convenience wrapper that loads an aerosolve model plus its transform
// config and scores Examples end-to-end (transform, then model score).
public class ModelScorer {
private final AbstractModel model;
private final Transformer transformer;
// Builds a scorer from an already-open model reader and a ModelConfig that
// names the transform config resource and its root key.
public ModelScorer(BufferedReader reader, ModelConfig model) throws IOException {
Optional<AbstractModel> modelOpt = ModelFactory.createFromReader(reader);
// NOTE(review): .get() without isPresent() throws IllegalStateException on
// an unreadable model — presumably intentional fail-fast; confirm.
this.model = modelOpt.get();
Config modelConfig = ConfigFactory.load(model.getConfigName());
this.transformer = new Transformer(modelConfig, model.getKey());
}
/*
this assumes model file in resource folder, i.e. test/resources/ in unit test
*/
public ModelScorer(ModelConfig model) throws IOException {
this(new BufferedReader(new InputStreamReader(
ModelScorer.class.getResourceAsStream("/" + model.getModelName()))),
model);
}
// Score mapped through the model's logistic link: 1/(1+exp(-(offset+slope*s))).
public double rawProbability(Example example) {
return model.scoreProbability(score(example));
}
// Applies context/item transforms to the example IN PLACE, then scores the
// first item vector.
public float score(Example example) {
FeatureVector featureVector = example.getExample().get(0);
transformer.combineContextAndItems(example);
return model.scoreItem(featureVector);
}
}
| 7,280 |
0 | Create_ds/aerosolve/airlearner/airlearner-utils/src/main/java/com/airbnb/common | Create_ds/aerosolve/airlearner/airlearner-utils/src/main/java/com/airbnb/common/config/AirCon.java | package com.airbnb.common.config;
import lombok.extern.slf4j.Slf4j;
import java.io.File;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigParseOptions;
import com.typesafe.config.ConfigResolveOptions;
/**
* Airbnb extension of HOCON language.
* https://github.com/typesafehub/config/blob/master/HOCON.md
*/
@Slf4j
// Loads a HOCON config and repeatedly resolves Airbnb-specific "aircon_get_*"
// macros by publishing their results as JVM system properties, then
// re-loading the config so ${key.output} substitutions pick them up.
@Slf4j
public class AirCon {
protected MacroExecutor executor = null;
// Since we modify Config via system properties, we have to clean it up. If not, it will affect
// the next AirCon "load" call.
private Set<String> keysToCleanUp = new HashSet<>();
// List of Exceptions.
static class InvalidMacro extends RuntimeException {
InvalidMacro(String message) {
super(message);
}
}
static class InvalidRegexPattern extends RuntimeException {
InvalidRegexPattern(String message) {
super(message);
}
}
// A drop-in replacement for ConfigFactory.load(...).
static public Config load(String resourceBasename) {
return new AirCon(new AirConMacroExecutor()).loadAndProcess(resourceBasename);
}
// Returns config value at `key` cast to T, or defaultResult when absent.
// NOTE(review): the cast is unchecked — a type mismatch surfaces as a
// ClassCastException at the CALL site, not here.
public static<T> T getOrElse(Config config, String key, T defaultResult) {
if (config.hasPath(key)) {
return (T)config.getAnyRef(key);
} else {
return defaultResult;
}
}
protected AirCon(MacroExecutor executor) {
this.executor = executor;
}
// Loads from the filesystem when `resourceBasename` is an existing file
// path; otherwise falls back to the classpath (missing resource = error).
private Config tryLoadFromFS(String resourceBasename) {
File file = new File(resourceBasename);
if(file.exists()) {
log.info("Loading config from file: " + file);
return ConfigFactory.load(ConfigFactory.parseFile(file));
} else {
log.info("Loading config using classpath loader: " + resourceBasename);
return ConfigFactory.load(
resourceBasename,
ConfigParseOptions.defaults().setAllowMissing(false),
ConfigResolveOptions.defaults()
);
}
}
// Core load loop: run the macro pass up to `num_reloads_required` times
// (default 1), re-loading the config after each pass that produced output
// so later macros can depend on earlier macros' results.
protected Config loadAndProcess(String resourceBasename) {
// Need this because some other processes might load something else that interfere with.
ConfigFactory.invalidateCaches();
Config config = tryLoadFromFS(resourceBasename);
int numReloads = 1;
if (config.hasPath("num_reloads_required")) {
numReloads = config.getInt("num_reloads_required");
System.out.println("Gonna execute macros " + numReloads + " times.");
}
for (int i = 0; i < numReloads; i++) {
if (maybeApplyAirbnbSpecificMacros(config)) {
ConfigFactory.invalidateCaches();
config = tryLoadFromFS(resourceBasename);
}
}
cleanUp();
return config;
}
// Runs every top-level "aircon_get_*" macro and publishes non-null results
// as system properties under "<key>.output". Returns true if anything was
// published (signalling the caller to re-load the config).
private boolean maybeApplyAirbnbSpecificMacros(Config config) {
boolean macroUsed = false;
for (String key : config.root().keySet()) {
if (key.startsWith("aircon_get_")) {
System.out.println("Macro " + key);
String output = applyAirbnbMacro(config, key);
System.out.println("... output = " + output);
if (output != null) {
System.setProperty(key + ".output", output);
keysToCleanUp.add(key + ".output");
macroUsed = true;
}
}
}
return macroUsed;
}
// Dispatches one macro block (by its "macro" field) to the executor.
private String applyAirbnbMacro(Config config, String key) {
Config macroConfig = config.getConfig(key);
switch (macroConfig.getString("macro")) {
case "max_string":
return executor.getMaxString(macroConfig);
case "min_string":
return executor.getMinString(macroConfig);
case "sum":
return executor.getSum(macroConfig);
case "matched_latest_directory":
return executor.getMatchedLatestDirectory(macroConfig);
case "date_minus_days":
return executor.getDateMinusDays(macroConfig);
case "hdfs_partitions_by_dates":
return executor.getHdfsPartitionsByDates(macroConfig);
default:
throw new InvalidMacro("Unknown macro " + macroConfig.getString("macro"));
}
}
// Removes every system property this load published, so a subsequent
// AirCon.load() call starts from a clean slate.
private void cleanUp() {
Properties props = System.getProperties();
for (String key: keysToCleanUp) {
props.remove(key);
}
System.setProperties(props);
keysToCleanUp.clear();
}
}
| 7,281 |
0 | Create_ds/aerosolve/airlearner/airlearner-utils/src/main/java/com/airbnb/common | Create_ds/aerosolve/airlearner/airlearner-utils/src/main/java/com/airbnb/common/config/AirConMacroExecutor.java | package com.airbnb.common.config;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.base.Joiner;
import com.typesafe.config.Config;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
/**
 * Default {@link MacroExecutor} backing AirCon's "aircon_get_*" macros:
 * string min/max, integer sums, and HDFS directory-listing helpers.
 */
public class AirConMacroExecutor implements MacroExecutor {
  // Thread-safe (java.time-style usage): joda formatters are immutable.
  static private final DateTimeFormatter DATE_FORMATTER = DateTimeFormat.forPattern("yyyy-MM-dd");

  AirConMacroExecutor() {}

  /** Lexicographic max of the "input" string list. */
  @Override
  public String getMaxString(Config config) {
    return Collections.max(config.getStringList("input"));
  }

  /** Lexicographic min of the "input" string list. */
  @Override
  public String getMinString(Config config) {
    return Collections.min(config.getStringList("input"));
  }

  /** Sum of the "input" int list, rendered as a decimal string. */
  @Override
  public String getSum(Config config) {
    // Primitive accumulator avoids per-iteration autoboxing of the old Long.
    long sum = 0L;
    for (Integer el : config.getIntList("input")) {
      sum += el;
    }
    return Long.toString(sum);
  }

  /**
   * Applies "java_regex_pattern" to the lexicographically-latest entry of
   * "dir_name" and returns the pattern's first capture group, or null when
   * the directory is empty/unlistable or nothing matches.
   */
  @Override
  public String getMatchedLatestDirectory(Config config) {
    final String javaRegexPattern = config.getString("java_regex_pattern");
    final String dirName = config.getString("dir_name");
    return getMatchedLatestDirectory(dirName, javaRegexPattern);
  }

  private String getMatchedLatestDirectory(String dirName, String javaRegexPattern) {
    Pattern pattern = Pattern.compile(javaRegexPattern);
    final String latestFile = getLatestInDirectory(dirName);
    if (latestFile == null) {
      return null;
    }
    Matcher matcher = pattern.matcher(latestFile);
    if (matcher.find()) {
      try {
        return matcher.group(1);
      } catch (IndexOutOfBoundsException e) {
        // The configured pattern must define at least one capture group.
        throw new AirCon.InvalidRegexPattern(
            "There's no group in the regex " + javaRegexPattern);
      }
    } else {
      return null;
    }
  }

  // Lexicographically greatest file/dir name in hdfsDir, or null if the
  // listing failed or the directory is empty.
  private String getLatestInDirectory(String hdfsDir) {
    List<String> allFileNames = getAllDirOrFiles(hdfsDir);
    if (allFileNames == null || allFileNames.isEmpty()) {
      return null;
    }
    Collections.sort(allFileNames);
    return allFileNames.get(allFileNames.size() - 1);
  }

  // Lists the child names of hdfsDir; returns null (not empty) on IO error
  // so callers can distinguish "failed" from "empty".
  protected List<String> getAllDirOrFiles(String hdfsDir) {
    List<String> allFileNames = new ArrayList<>();
    try {
      FileSystem fs = FileSystem.get(new Path(hdfsDir).toUri(), new Configuration());
      FileStatus[] allFiles = fs.listStatus(new Path(hdfsDir));
      for (FileStatus fileStatus : allFiles) {
        String filename = fileStatus.getPath().getName();
        allFileNames.add(filename);
      }
    } catch (IOException e) {
      System.err.println(e.getMessage());
      return null;
    }
    return allFileNames;
  }

  private void assertMacroParam(Config config, String key) {
    if (!config.hasPath(key)) {
      throw new AirCon.InvalidMacro(key + " param is missing");
    }
  }

  /** "date" minus "minus_days" days, formatted yyyy-MM-dd. */
  @Override
  public String getDateMinusDays(Config config) {
    assertMacroParam(config, "date");
    assertMacroParam(config, "minus_days");
    DateTime date = DateTime.parse(config.getString("date"), DATE_FORMATTER);
    return date.minusDays(config.getInt("minus_days")).toString(DATE_FORMATTER);
  }

  /**
   * Lists children of "dir_name" whose names contain a date (captured by
   * "date_regex_pattern") within [start_date, end_date], and returns a
   * comma-joined glob list of "dir/child/part-*.gz" paths.
   */
  @Override
  public String getHdfsPartitionsByDates(Config config) {
    assertMacroParam(config, "dir_name");
    assertMacroParam(config, "date_regex_pattern");
    assertMacroParam(config, "start_date");
    assertMacroParam(config, "end_date");
    Pattern pattern = Pattern.compile(config.getString("date_regex_pattern"));
    DateTime startDate = DateTime.parse(config.getString("start_date"), DATE_FORMATTER);
    DateTime endDate = DateTime.parse(config.getString("end_date"), DATE_FORMATTER);
    ArrayList<String> matchedNames = new ArrayList<>();
    String dirName = config.getString("dir_name");
    List<String> allDirOrFiles = getAllDirOrFiles(dirName);
    // BUG FIX: getAllDirOrFiles returns null on IO error; the original code
    // iterated it directly and threw NullPointerException. Treat a failed
    // listing as "no partitions".
    if (allDirOrFiles == null) {
      return "";
    }
    for (String dirOrFileName : allDirOrFiles) {
      Matcher matcher = pattern.matcher(dirOrFileName);
      if (matcher.find()) {
        try {
          DateTime date = DateTime.parse(matcher.group(1), DATE_FORMATTER);
          if (date.compareTo(startDate) >= 0 && date.compareTo(endDate) <= 0) {
            matchedNames.add(dirName + "/" + dirOrFileName + "/part-*.gz");
          }
        } catch (Exception e) {
          // Skip entries whose captured text is not a parseable date.
          e.printStackTrace();
        }
      }
    }
    return Joiner.on(",").join(matchedNames);
  }
}
| 7,282 |
0 | Create_ds/aerosolve/airlearner/airlearner-utils/src/main/java/com/airbnb/common | Create_ds/aerosolve/airlearner/airlearner-utils/src/main/java/com/airbnb/common/config/MacroExecutor.java | package com.airbnb.common.config;
import com.typesafe.config.Config;
// Strategy interface for AirCon macros. Each method receives the macro's own
// config block and returns the macro result as a string (to be published as
// a system property), or null when no result is available.
public interface MacroExecutor {
String getMaxString(Config config);
String getMinString(Config config);
String getSum(Config config);
String getMatchedLatestDirectory(Config config);
String getDateMinusDays(Config config);
String getHdfsPartitionsByDates(Config config);
}
| 7,283 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java | Create_ds/amazon-neptune-jdbc-driver/src/test/java/tdvt/TDVTDataUpload.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package tdvt;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.tinkerpop.gremlin.driver.Client;
import org.apache.tinkerpop.gremlin.driver.Cluster;
import org.apache.tinkerpop.gremlin.driver.Result;
import org.apache.tinkerpop.gremlin.driver.ResultSet;
import org.apache.tinkerpop.gremlin.driver.remote.DriverRemoteConnection;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.joda.time.DateTime;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import software.aws.neptune.gremlin.GremlinConnectionProperties;
import software.aws.neptune.gremlin.sql.SqlGremlinConnection;
import software.aws.neptune.gremlin.sql.SqlGremlinQueryExecutor;
import software.aws.neptune.jdbc.utilities.AuthScheme;
import software.aws.neptune.jdbc.utilities.ConnectionProperties;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.sql.SQLException;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import static org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource.traversal;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.CONTACT_POINT_KEY;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.ENABLE_SSL_KEY;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.PORT_KEY;
import static software.aws.neptune.jdbc.utilities.ConnectionProperties.SSH_HOSTNAME;
import static software.aws.neptune.jdbc.utilities.ConnectionProperties.SSH_PRIVATE_KEY_FILE;
import static software.aws.neptune.jdbc.utilities.ConnectionProperties.SSH_STRICT_HOST_KEY_CHECKING;
import static software.aws.neptune.jdbc.utilities.ConnectionProperties.SSH_USER;
@Disabled
public class TDVTDataUpload {
private static final String ENDPOINT = "database-1.cluster-cdffsmv2nzf7.us-east-2.neptune.amazonaws.com";
private static final String SAMPLE_QUERY = "g.V().count()";
private static final int PORT = 8182;
private static final int COUNT_PER = 20;
private static final Dataset DATASET = Dataset.Calcs;
private static int count = 0;
private static java.sql.Connection connection;
private static java.sql.DatabaseMetaData databaseMetaData;
private static Client client;
// One-time test setup: configures an IAM-authenticated Gremlin connection to
// the Neptune cluster (tunnelled over SSH per the SSH_* properties), builds a
// JDBC connection to verify the properties, then opens the raw Gremlin client
// used by the upload tests. Submits a trivial "inject(0)" query as a
// connectivity smoke check.
@BeforeAll
static void initialize() throws Exception {
final Properties properties = new Properties();
properties.put(ConnectionProperties.AUTH_SCHEME_KEY, AuthScheme.IAMSigV4); // set default to IAMSigV4
properties.put(CONTACT_POINT_KEY, ENDPOINT);
properties.put(PORT_KEY, PORT);
properties.put(ENABLE_SSL_KEY, true);
properties.put(SSH_USER, "ec2-user");
properties.put(SSH_HOSTNAME, "52.14.185.245");
properties.put(SSH_PRIVATE_KEY_FILE, "~/Downloads/EC2/neptune-test.pem");
properties.put(SSH_STRICT_HOST_KEY_CHECKING, "false");
final GremlinConnectionProperties gremlinConnectionProperties = new GremlinConnectionProperties(properties);
final java.sql.Connection tempConnection = new SqlGremlinConnection(gremlinConnectionProperties);
final Cluster cluster = SqlGremlinQueryExecutor.createClusterBuilder(gremlinConnectionProperties).create();
client = cluster.connect().init();
final ResultSet results = client.submit("inject(0)");
System.out.println(results.toString());
}
@AfterEach
void deinitialize() throws SQLException {
if (client != null) {
client.close();
}
}
Object attemptDateConversion(final Object data) {
try {
final Date date = DateTime.parse(data.toString()).toDate();
return date;
} catch (final Exception ignored) {
return data;
}
}
GraphTraversal<?, ?> appendTraversal(final BufferedReader br, final GraphTraversalSource g)
throws Exception {
count = 0;
GraphTraversal<?, ?> graphTraversal = null;
for (String line; ((line = br.readLine()) != null && (count < COUNT_PER)); ) {
if (line.trim().length() == 0) {
continue;
}
graphTraversal = (graphTraversal == null) ? (DATASET == Dataset.Calcs ? g.addV("Calcs") : g.addV("Staples"))
: (DATASET == Dataset.Calcs ? graphTraversal.addV("Calcs") : graphTraversal.addV("Staples"));
count++;
final JSONParser parser = new JSONParser();
final JSONObject json = (JSONObject) parser.parse(line);
for (final Object key : json.keySet()) {
Object value = json.get(key);
if (value == null) {
continue;
}
if (value instanceof String) {
value = StringEscapeUtils.unescapeJava(value.toString());
value = attemptDateConversion(value);
} else if (value instanceof Boolean) {
System.out.println("Boolean! " + value);
}
graphTraversal.property(key, value);
}
}
return graphTraversal;
}
@Test
void loadData() throws SQLException, ExecutionException, InterruptedException {
GraphTraversal<?, ?> graphTraversal = null;
try {
final String fileName = "/Users/lyndonb/Desktop/calcs_gremlin.json";
if (DATASET.equals(Dataset.Calcs)) {
if (!fileName.toLowerCase().contains("calcs")) {
throw new Exception("Possible error in data upload.");
}
deleteTable("Calcs");
} else if (DATASET.equals(Dataset.Staples)) {
if (!fileName.toLowerCase().contains("staples")) {
throw new Exception("Possible error in data upload.");
}
deleteTable("Staples");
}
final File file = new File(fileName);
int total = 0;
final GraphTraversalSource g = traversal().withRemote(DriverRemoteConnection.using(client));
final BufferedReader br = new BufferedReader(new FileReader(file));
do {
graphTraversal = appendTraversal(br, g);
graphTraversal.iterate();
total += count;
System.out.println("Executing " + count + " queries up to " + total);
} while (count == COUNT_PER);
System.out.println("Total queries " + total);
} catch (final Exception e) {
e.printStackTrace();
Assertions.fail(e.getMessage());
}
}
void deleteTable(final String table) {
long vertexCount = client.submit("g.V().hasLabel('" + table + "').count()").one().getLong();
while (vertexCount > 0) {
final Result result = client.submit("g.V().hasLabel('" + table + "').limit(5000).drop().iterate()").one();
vertexCount -= 5000;
System.out.println("Dropped 5000, " + vertexCount + " left.");
}
}
@Test
void getVertexCount() {
long vertexCount = client.submit("g.V().hasLabel('Staples').count()").one().getLong();
System.out.println("Vertex Staples: " + vertexCount);
vertexCount = client.submit("g.V().hasLabel('staples').count()").one().getLong();
System.out.println("Vertex staples: " + vertexCount);
vertexCount = client.submit("g.V().hasLabel('Calcs').count()").one().getLong();
System.out.println("Vertex Calcs: " + vertexCount);
vertexCount = client.submit("g.V().hasLabel('calcs').count()").one().getLong();
System.out.println("Vertex calcs: " + vertexCount);
}
enum Dataset {
Calcs,
Staples
}
}
| 7,284 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/NeptuneStatementTestHelper.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune;
import lombok.AllArgsConstructor;
import org.junit.jupiter.api.Assertions;
import software.aws.neptune.jdbc.helpers.HelperFunctions;
import software.aws.neptune.jdbc.utilities.SqlError;
@AllArgsConstructor
public class NeptuneStatementTestHelper extends NeptuneStatementTestHelperBase {
    // Statement under test, plus one long-running and one fast query so each
    // cancel scenario can race against a known execution duration.
    private final java.sql.Statement statement;
    private final String longQuery;
    private final String quickQuery;

    /**
     * Function to test cancelling queries without executing first.
     */
    public void testCancelQueryWithoutExecute() {
        launchCancelThread(0, statement);
        waitCancelToComplete();
        // Cancelling before any execute must surface QUERY_NOT_STARTED_OR_COMPLETE.
        HelperFunctions.expectFunctionThrows(SqlError.QUERY_NOT_STARTED_OR_COMPLETE, this::getCancelException);
    }

    /**
     * Function to test cancelling query while execution in progress.
     */
    public void testCancelQueryWhileExecuteInProgress() {
        // Wait 100 milliseconds before attempting to cancel.
        launchCancelThread(100, statement);
        // The long query should still be running when the cancel lands.
        HelperFunctions.expectFunctionThrows(SqlError.QUERY_CANCELED, () -> statement.execute(longQuery));
        waitCancelToComplete();
    }

    /**
     * Function to test cancelling query twice.
     */
    public void testCancelQueryTwice() {
        // Wait 100 milliseconds before attempting to cancel.
        launchCancelThread(100, statement);
        HelperFunctions.expectFunctionThrows(SqlError.QUERY_CANCELED, () -> statement.execute(longQuery));
        waitCancelToComplete();
        // A second cancel has nothing left to cancel and must report it.
        launchCancelThread(1, statement);
        waitCancelToComplete();
        HelperFunctions.expectFunctionThrows(SqlError.QUERY_NOT_STARTED_OR_COMPLETE, this::getCancelException);
    }

    /**
     * Function to test cancelling query after execution is complete.
     */
    public void testCancelQueryAfterExecuteComplete() {
        Assertions.assertDoesNotThrow(() -> statement.execute(quickQuery));
        // Cancelling a finished query must report QUERY_NOT_STARTED_OR_COMPLETE.
        launchCancelThread(0, statement);
        waitCancelToComplete();
        HelperFunctions.expectFunctionThrows(SqlError.QUERY_NOT_STARTED_OR_COMPLETE, this::getCancelException);
    }
}
| 7,285 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/NeptuneDriverTestNoEncryption.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.sql.SQLException;
/**
 * Test for NeptuneDriver with encryption disabled; delegates the actual checks
 * to {@link NeptuneDriverTestBase} with the encryption flag turned off.
 */
public class NeptuneDriverTestNoEncryption extends NeptuneDriverTestBase {
    private static final boolean NO_ENCRYPTION = false;

    /**
     * Function to get a random available port and initialize database before testing,
     * with encryption disabled.
     */
    @BeforeAll
    public static void initializeDatabase() {
        initializeDatabase(NO_ENCRYPTION);
    }

    /**
     * Function to get a shutdown database after testing.
     */
    @AfterAll
    public static void shutdownDatabase() {
        shutdownTheDatabase();
    }

    // Recreate the driver instance before each test.
    @BeforeEach
    void initialize() {
        super.initialize();
    }

    @Test
    void testAcceptsUrl() throws SQLException {
        super.testAcceptsUrl(NO_ENCRYPTION);
    }

    @Test
    void testConnect() throws SQLException {
        super.testConnect(NO_ENCRYPTION);
    }

    @Test
    void testDriverManagerGetConnection() throws SQLException {
        super.testDriverManagerGetConnection(NO_ENCRYPTION);
    }

    @Test
    void testDriverManagerGetDriver() throws SQLException {
        super.testDriverManagerGetDriver(NO_ENCRYPTION);
    }
}
| 7,286 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/NeptunePreparedStatementTestHelper.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune;
import lombok.AllArgsConstructor;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.function.ThrowingSupplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.aws.neptune.jdbc.helpers.HelperFunctions;
import software.aws.neptune.jdbc.utilities.SqlError;
import java.io.InputStream;
import java.io.Reader;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.NClob;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
@AllArgsConstructor
public class NeptunePreparedStatementTestHelper extends NeptuneStatementTestHelperBase {
    private static final Logger LOGGER = LoggerFactory.getLogger(NeptunePreparedStatementTestHelper.class);
    // One statement for the unsupported-API checks, plus a long-running and a
    // fast statement for the cancellation scenarios.
    private final java.sql.PreparedStatement preparedStatement;
    private final java.sql.PreparedStatement preparedStatementLongQuery;
    private final java.sql.PreparedStatement preparedStatementQuickQuery;

    /**
     * Function to close the statements.
     *
     * @throws SQLException if closing any of the statements fails.
     */
    public void close() throws SQLException {
        preparedStatement.close();
        preparedStatementLongQuery.close();
        preparedStatementQuickQuery.close();
    }

    /**
     * Function to test cancelling queries without executing first.
     */
    public void testCancelQueryWithoutExecute() {
        launchCancelThread(0, preparedStatement);
        waitCancelToComplete();
        // Cancelling before any execute must surface QUERY_NOT_STARTED_OR_COMPLETE.
        HelperFunctions.expectFunctionThrows(SqlError.QUERY_NOT_STARTED_OR_COMPLETE, this::getCancelException);
    }

    /**
     * Function to test cancelling query while execution in progress.
     */
    public void testCancelQueryWhileExecuteInProgress() {
        // Wait 100 milliseconds before attempting to cancel.
        launchCancelThread(100, preparedStatementLongQuery);
        HelperFunctions.expectFunctionThrows(SqlError.QUERY_CANCELED, preparedStatementLongQuery::execute);
        waitCancelToComplete();
    }

    /**
     * Function to test cancelling query twice.
     */
    public void testCancelQueryTwice() {
        // Wait 100 milliseconds before attempting to cancel.
        launchCancelThread(100, preparedStatementLongQuery);
        HelperFunctions
                .expectFunctionThrows(SqlError.QUERY_CANCELED, preparedStatementLongQuery::execute);
        waitCancelToComplete();
        // A second cancel has nothing left to cancel and must report it.
        launchCancelThread(1, preparedStatementLongQuery);
        waitCancelToComplete();
        HelperFunctions.expectFunctionThrows(SqlError.QUERY_NOT_STARTED_OR_COMPLETE, this::getCancelException);
    }

    /**
     * Function to test cancelling query after execution is complete.
     */
    public void testCancelQueryAfterExecuteComplete() {
        Assertions.assertDoesNotThrow((ThrowingSupplier<Boolean>) preparedStatementQuickQuery::execute);
        launchCancelThread(0, preparedStatementQuickQuery);
        waitCancelToComplete();
        HelperFunctions.expectFunctionThrows(SqlError.QUERY_NOT_STARTED_OR_COMPLETE, this::getCancelException);
    }

    /**
     * Function to test misc functions: all of these PreparedStatement
     * operations are expected to be unsupported by this driver.
     */
    public void testMisc() {
        Assertions.assertThrows(SQLFeatureNotSupportedException.class, preparedStatement::executeUpdate);
        Assertions.assertThrows(SQLFeatureNotSupportedException.class, preparedStatement::addBatch);
        Assertions.assertThrows(SQLFeatureNotSupportedException.class, preparedStatement::clearParameters);
        Assertions.assertThrows(SQLFeatureNotSupportedException.class, preparedStatement::getParameterMetaData);
    }

    /**
     * Function to test set functionality: every parameter setter of the
     * PreparedStatement API is expected to be unsupported. Each overload is
     * covered exactly once (a duplicated setAsciiStream(int, InputStream, int)
     * assertion was removed).
     */
    public void testSet() {
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setArray(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setAsciiStream(0, null, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setAsciiStream(0, null, (long) 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setAsciiStream(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setBigDecimal(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setBinaryStream(0, null, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setBinaryStream(0, null, (long) 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setBinaryStream(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setBlob(0, (Blob) null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setBlob(0, null, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setBlob(0, (InputStream) null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setBoolean(0, false));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setByte(0, (byte) 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setBytes(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setCharacterStream(0, null, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setCharacterStream(0, null, (long) 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setCharacterStream(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setClob(0, (Clob) null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setClob(0, null, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setClob(0, (Reader) null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setDate(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setDate(0, null, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setDouble(0, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setFloat(0, (float) 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setInt(0, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setLong(0, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setNCharacterStream(0, null, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setNCharacterStream(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setNClob(0, (NClob) null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setNClob(0, null, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setNClob(0, (Reader) null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setNString(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setNull(0, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setNull(0, 0, ""));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setObject(0, null, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setObject(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setObject(0, null, 0, 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setRef(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setRowId(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setSQLXML(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setShort(0, (short) 0));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setString(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setTime(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setTime(0, null, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setTimestamp(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setTimestamp(0, null, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setURL(0, null));
        Assertions.assertThrows(SQLFeatureNotSupportedException.class,
                () -> preparedStatement.setUnicodeStream(0, null, 0));
    }
}
| 7,287 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/NeptuneStatementTestHelperBase.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import lombok.Getter;
import lombok.SneakyThrows;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
 * Shared infrastructure for statement-cancellation tests: launches a cancel of
 * a running statement on a background thread and lets the test wait for and
 * inspect the outcome.
 */
public class NeptuneStatementTestHelperBase {
    @Getter
    private final ExecutorService cancelThread = Executors.newSingleThreadExecutor(
            new ThreadFactoryBuilder().setNameFormat("cancelThread").setDaemon(true).build());
    // Most recently launched cancel task and its Future; replaced on each launch.
    private Cancel cancel = null;
    private Future<?> cancelFuture = null;

    /**
     * Schedules a cancel of the given statement after waitTime milliseconds.
     *
     * @param waitTime  delay in milliseconds before the cancel is issued.
     * @param statement statement to cancel.
     */
    protected void launchCancelThread(final int waitTime, final Statement statement) {
        cancel = new Cancel(statement, waitTime);
        // submit() (rather than execute()) so waitCancelToComplete() can wait
        // on the task itself instead of on executor termination.
        cancelFuture = getCancelThread().submit(cancel);
    }

    /**
     * Rethrows the SQLException captured by the last cancel task, if any.
     *
     * @throws SQLException the exception raised by Statement.cancel().
     */
    protected void getCancelException() throws SQLException {
        cancel.getException();
    }

    /**
     * Blocks until the most recently launched cancel task finishes (up to 1s).
     * The previous implementation called awaitTermination() on an executor that
     * is never shut down, which always blocked for the full timeout without
     * tracking the task; waiting on the task's Future returns as soon as the
     * cancel has actually run.
     */
    @SneakyThrows
    protected void waitCancelToComplete() {
        if (cancelFuture == null) {
            return;
        }
        try {
            cancelFuture.get(1000, TimeUnit.MILLISECONDS);
        } catch (final TimeoutException | ExecutionException ignored) {
            // Match the old behavior: a slow or failed cancel task is surfaced
            // later via getCancelException(), never from this wait.
        }
    }

    /**
     * Class to cancel query in a separate thread.
     */
    public static class Cancel implements Runnable {
        private final Statement statement;
        private final int waitTime;
        private SQLException exception;

        Cancel(final Statement statement, final int waitTime) {
            this.statement = statement;
            this.waitTime = waitTime;
        }

        @SneakyThrows
        @Override
        public void run() {
            try {
                // Give the statement time to start executing before cancelling.
                Thread.sleep(waitTime);
                statement.cancel();
            } catch (final SQLException e) {
                // Captured for later inspection via getException().
                exception = e;
            }
        }

        /**
         * Function to get exception if the run call generated one.
         *
         * @throws SQLException Exception caught by run.
         */
        public void getException() throws SQLException {
            if (exception != null) {
                throw exception;
            }
        }
    }
}
| 7,288 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/NeptuneDriverTestWithEncryption.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.sql.SQLException;
/**
* Test for NeptuneDriver Object.
*/
public class NeptuneDriverTestWithEncryption extends NeptuneDriverTestBase {
    private static final boolean WITH_ENCRYPTION = true;

    /**
     * Function to get a random available port and initialize database before testing,
     * with encryption enabled.
     */
    @BeforeAll
    public static void initializeDatabase() {
        initializeDatabase(WITH_ENCRYPTION);
    }

    /**
     * Function to get a shutdown database after testing.
     */
    @AfterAll
    public static void shutdownDatabase() {
        shutdownTheDatabase();
    }

    // Recreate the driver instance before each test.
    @BeforeEach
    void initialize() {
        super.initialize();
    }

    @Test
    void testAcceptsUrl() throws SQLException {
        super.testAcceptsUrl(WITH_ENCRYPTION);
    }

    // NOTE(review): disabled — presumably connecting with encryption needs setup
    // the mock database does not provide here; confirm before re-enabling.
    @Test
    @Disabled
    void testConnect() throws SQLException {
        super.testConnect(WITH_ENCRYPTION);
    }

    // NOTE(review): disabled for the same reason as testConnect — confirm.
    @Test
    @Disabled
    void testDriverManagerGetConnection() throws SQLException {
        super.testDriverManagerGetConnection(WITH_ENCRYPTION);
    }

    @Test
    void testDriverManagerGetDriver() throws SQLException {
        super.testDriverManagerGetDriver(WITH_ENCRYPTION);
    }
}
| 7,289 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/NeptuneDriverTestBase.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune;
import com.google.common.collect.ImmutableList;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import software.aws.neptune.jdbc.Driver;
import software.aws.neptune.jdbc.helpers.HelperFunctions;
import software.aws.neptune.jdbc.utilities.AuthScheme;
import software.aws.neptune.jdbc.utilities.SqlError;
import software.aws.neptune.opencypher.OpenCypherConnection;
import software.aws.neptune.opencypher.mock.MockOpenCypherDatabase;
import java.io.IOException;
import java.io.InputStream;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.List;
import java.util.Properties;
/**
 * Shared test logic for NeptuneDriver, parameterized on whether encryption is
 * enabled. Subclasses start/stop the mock database and delegate to the
 * test methods here.
 */
public abstract class NeptuneDriverTestBase {
    private static MockOpenCypherDatabase database;
    private static String validEndpoint;
    private static final String PROPERTIES_PATH = "/project.properties";
    private static final String MAJOR_VERSION_KEY = "driver.major.version";
    private static final String MINOR_VERSION_KEY = "driver.minor.version";
    private static final String FULL_VERSION_KEY = "driver.full.version";
    // URLs that must be rejected (misspelled scheme, language, or separators).
    private final List<String> invalidUrls = ImmutableList.of(
            "jbdc:neptune:opencyher://;", "jdbc:netune:opencyher://;", "jdbc:neptune:openyher://;",
            "jdbc:neptune:opencyher//;", "jdbc:neptune:opencypher:/");
    private final List<String> languages = ImmutableList.of("opencypher");
    // Valid URLs are tested both with and without a trailing semicolon.
    private final List<Boolean> semicolons = ImmutableList.of(true, false);
    private java.sql.Driver driver;

    /**
     * Builds a JDBC URL for the mock database endpoint.
     *
     * @param useEncryption     whether encryption is on (also selects IAMSigV4 auth).
     * @param language          query language segment of the URL (e.g. "opencypher").
     * @param trailingSemicolon whether to append a trailing ';'.
     * @return the composed JDBC URL.
     */
    protected static String createValidUrl(final boolean useEncryption,
                                           final String language,
                                           final boolean trailingSemicolon) {
        final AuthScheme authScheme = useEncryption ? AuthScheme.IAMSigV4 : AuthScheme.None;
        String url = String.format("jdbc:neptune:%s://%s;useEncryption=%s;authScheme=%s",
                language, validEndpoint, useEncryption, authScheme.toString());
        if (trailingSemicolon) {
            url += ";";
        }
        return url;
    }

    /**
     * Appends a property fragment (and optionally a trailing ';') to a URL.
     *
     * @param url               base URL.
     * @param property          property fragment to append; ignored when empty.
     * @param trailingSemicolon whether to append a trailing ';'.
     * @return the extended URL.
     */
    protected static String appendProperty(final String url, final String property, final boolean trailingSemicolon) {
        String returnUrl = url;
        if (!property.isEmpty()) {
            returnUrl += String.format("%s", property);
        }
        if (trailingSemicolon) {
            returnUrl += ";";
        }
        return returnUrl;
    }

    /**
     * Function to get a random available port and initialize database before testing.
     */
    protected static void initializeDatabase(final boolean useEncryption) {
        database = MockOpenCypherDatabase.builder(
                "localhost", NeptuneDriverTestWithEncryption.class.getName(), useEncryption)
                .build();
        validEndpoint = String.format("bolt://%s:%d", "localhost", database.getPort());
    }

    // Stops the mock database started by initializeDatabase().
    protected static void shutdownTheDatabase() {
        database.shutdown();
    }

    // Creates a fresh driver instance; called from subclasses' @BeforeEach.
    void initialize() {
        driver = new NeptuneDriver();
    }

    /**
     * Verifies acceptsURL() accepts every valid URL variant and rejects malformed ones.
     */
    void testAcceptsUrl(final boolean useEncryption) throws SQLException {
        for (final String language : languages) {
            for (final Boolean semicolon : semicolons) {
                final String url = createValidUrl(useEncryption, language, semicolon);
                Assertions.assertTrue(driver.acceptsURL(url));
            }
        }
        for (final String url : invalidUrls) {
            Assertions.assertFalse(driver.acceptsURL(url));
        }
    }

    /**
     * Verifies connect() yields an OpenCypherConnection for valid URLs and null
     * for invalid or null URLs (per the JDBC Driver contract).
     */
    void testConnect(final boolean useEncryption) throws SQLException {
        for (final String language : languages) {
            for (final Boolean semicolon : semicolons) {
                final String validUrl = createValidUrl(useEncryption, language, semicolon);
                Assertions.assertTrue(driver.connect(validUrl, new Properties()) instanceof OpenCypherConnection);
            }
        }
        final String validUrl = createValidUrl(useEncryption, "opencypher", false);
        Assertions.assertNotNull(driver.connect(validUrl, null));
        for (final String invalidUrl : invalidUrls) {
            Assertions.assertNull(driver.connect(invalidUrl, new Properties()));
        }
        Assertions.assertNull(driver.connect(null, new Properties()));
    }

    /**
     * Verifies DriverManager.getConnection() routes valid URLs to this driver
     * and throws for invalid ones.
     */
    void testDriverManagerGetConnection(final boolean useEncryption) throws SQLException {
        for (final String language : languages) {
            for (final Boolean semicolon : semicolons) {
                final String url = createValidUrl(useEncryption, language, semicolon);
                Assertions.assertTrue(DriverManager.getConnection(url) instanceof OpenCypherConnection);
            }
        }
        for (final String url : invalidUrls) {
            Assertions.assertThrows(java.sql.SQLException.class, () -> DriverManager.getConnection(url));
        }
    }

    /**
     * Verifies DriverManager.getDriver() resolves valid URLs to NeptuneDriver
     * and throws for invalid ones.
     */
    void testDriverManagerGetDriver(final boolean useEncryption) throws SQLException {
        for (final String language : languages) {
            for (final Boolean semicolon : semicolons) {
                final String url = createValidUrl(useEncryption, language, semicolon);
                Assertions.assertTrue(DriverManager.getDriver(url) instanceof NeptuneDriver);
            }
        }
        for (final String url : invalidUrls) {
            Assertions.assertThrows(java.sql.SQLException.class, () -> DriverManager.getDriver(url));
        }
    }

    @Test
    void testDriverProperties() {
        // getParentLogger is not supported by this driver.
        HelperFunctions.expectFunctionThrows(SqlError.FEATURE_NOT_SUPPORTED, () -> driver.getParentLogger());
    }

    /**
     * Verifies the driver reports the version numbers recorded in project.properties.
     */
    @Test
    void testDriverVersion() {
        try (InputStream input = Driver.class.getResourceAsStream(PROPERTIES_PATH)) {
            // Fail with a clear message instead of an NPE when the resource is missing
            // (getResourceAsStream returns null in that case).
            Assertions.assertNotNull(input, "Missing resource: " + PROPERTIES_PATH);
            final Properties properties = new Properties();
            properties.load(input);
            Assertions.assertEquals(driver.getMajorVersion(), Integer.parseInt(properties.getProperty(MAJOR_VERSION_KEY)));
            Assertions.assertEquals(driver.getMinorVersion(), Integer.parseInt(properties.getProperty(MINOR_VERSION_KEY)));
            // Ensure the version did not default
            Assertions.assertNotEquals(driver.getMajorVersion(), 0);
            Assertions.assertTrue(properties.containsKey(FULL_VERSION_KEY));
        } catch (final IOException e) {
            Assertions.fail(e.getMessage());
        }
    }
}
| 7,290 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/ConnectionPropertiesTestBase.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune;
import com.google.common.collect.ImmutableList;
import org.junit.jupiter.api.Assertions;
import software.aws.neptune.jdbc.utilities.AuthScheme;
import software.aws.neptune.jdbc.utilities.ConnectionProperties;
import software.aws.neptune.opencypher.OpenCypherConnectionProperties;
import java.util.List;
import java.util.Properties;
/**
 * Base class for connection-properties tests. Subclasses plug in the concrete
 * ConnectionProperties implementation via the abstract assertion hooks below.
 */
public abstract class ConnectionPropertiesTestBase {
    protected static final boolean DEFAULT_FALSE = false;
    protected static final String DEFAULT_EMPTY_STRING = "";
    // Sentinels passed by the test helpers for properties without a default value.
    protected static final int NO_DEFAULT_INT = 0;
    protected static final boolean NO_DEFAULT_BOOL = false;
    protected static final String NO_DEFAULT_STRING = null;

    // Hooks implemented per driver flavor: construct connection properties from
    // the given Properties and assert success, failure, or a resulting value.
    protected abstract void assertDoesNotThrowOnNewConnectionProperties(final Properties properties);

    protected abstract void assertThrowsOnNewConnectionProperties(final Properties properties);

    protected abstract <T> void assertPropertyValueEqualsToExpected(final String key, final T expectedValue);
protected void testAuthSchemeViaConstructor() {
final List<String> emptyAuthSchemes = ImmutableList.of(
"", " ");
final List<String> validAuthSchemes = ImmutableList.of(
"NONE", "none", "IAMSigV4", "iamSIGV4");
final List<String> invalidAuthSchemes = ImmutableList.of(
"-1;", "100;", "46hj7;", "foo;");
// Verify empty string is converted to a default value.
for (final String emptyValue : emptyAuthSchemes) {
final Properties properties = new Properties();
properties.put(ConnectionProperties.AUTH_SCHEME_KEY, emptyValue);
assertDoesNotThrowOnNewConnectionProperties(properties);
assertPropertyValueEqualsToExpected(
ConnectionProperties.AUTH_SCHEME_KEY, ConnectionProperties.DEFAULT_AUTH_SCHEME);
}
// Verify valid property value is set.
for (final String validValue : validAuthSchemes) {
// Convert string to enum.
Assertions.assertNotNull(
AuthScheme.fromString(validValue)
);
final Properties properties = new Properties();
properties.put(ConnectionProperties.AUTH_SCHEME_KEY, validValue);
assertDoesNotThrowOnNewConnectionProperties(properties);
assertPropertyValueEqualsToExpected(
ConnectionProperties.AUTH_SCHEME_KEY, AuthScheme.fromString(validValue));
}
// Verify invalid property value throws error.
for (final String invalidValue : invalidAuthSchemes) {
// Test failure to convert invalid string to enum.
Assertions.assertNull(
AuthScheme.fromString(invalidValue)
);
final Properties properties = new Properties();
properties.setProperty(OpenCypherConnectionProperties.AUTH_SCHEME_KEY, invalidValue);
assertThrowsOnNewConnectionProperties(properties);
}
}
protected void testLogLevelSettingViaConstructor() {
final List<String> validLogLevels = ImmutableList.of(
"", "Off", "FATAL", "error", "InFo", "dEbug", "TRACE", "All");
final List<String> invalidLogLevels = ImmutableList.of(
"something", "5");
// Verify valid property value doesn't throw error.
for (final String validValue : validLogLevels) {
// Set property through constructor.
final Properties properties = new Properties();
properties.put(ConnectionProperties.LOG_LEVEL_KEY, validValue);
assertDoesNotThrowOnNewConnectionProperties(properties);
}
// Verify invalid property value throws error.
for (final String invalidValue : invalidLogLevels) {
final Properties properties = new Properties();
properties.setProperty(ConnectionProperties.LOG_LEVEL_KEY, invalidValue);
assertThrowsOnNewConnectionProperties(properties);
}
}
/**
 * Tests a string-valued connection property that has no default value.
 *
 * @param key The connection property key under test.
 */
protected void testStringPropertyViaConstructor(
        final String key) {
    testStringPropertyViaConstructor(
            new Properties(), key, NO_DEFAULT_STRING, false);
}
/**
 * Tests a string-valued connection property with the given default value.
 *
 * @param key          The connection property key under test.
 * @param defaultValue Expected value when the property is not supplied.
 */
protected void testStringPropertyViaConstructor(
        final String key,
        final String defaultValue) {
    testStringPropertyViaConstructor(
            new Properties(), key, defaultValue);
}
/**
 * Tests a string-valued connection property with baseline properties applied
 * before the key under test.
 *
 * @param initProperties Baseline properties set before the key under test.
 * @param key            The connection property key under test.
 * @param defaultValue   Expected value when the property is not supplied.
 */
protected void testStringPropertyViaConstructor(
        final Properties initProperties,
        final String key,
        final String defaultValue) {
    testStringPropertyViaConstructor(
            initProperties, key, defaultValue, true);
}
/**
 * Core routine for testing a string-valued connection property.
 *
 * <p>When {@code hasDefault} is {@code true}, first verifies that omitting the
 * property yields {@code defaultValue}; then verifies each test value is
 * accepted and stored unchanged.
 *
 * @param initProperties Baseline properties applied before the key under test.
 * @param key            The connection property key under test.
 * @param defaultValue   Expected value when the property is not supplied.
 * @param hasDefault     Whether the property has a default value to verify.
 */
private void testStringPropertyViaConstructor(
        final Properties initProperties,
        final String key,
        final String defaultValue,
        final boolean hasDefault) {
    final List<String> testValues = ImmutableList.of("foo", "bar");
    if (hasDefault) {
        final Properties properties = new Properties();
        properties.putAll(initProperties);
        assertDoesNotThrowOnNewConnectionProperties(properties);
        assertPropertyValueEqualsToExpected(key, defaultValue);
    }
    // Verify each valid value is accepted and round-trips unchanged.
    for (final String value : testValues) {
        final Properties properties = new Properties();
        properties.putAll(initProperties);
        // setProperty (not put) keeps the Properties table String-only.
        properties.setProperty(key, value);
        assertDoesNotThrowOnNewConnectionProperties(properties);
        assertPropertyValueEqualsToExpected(key, value);
    }
}
/**
 * Tests an integer-valued connection property that has no default value.
 *
 * @param key The connection property key under test.
 */
protected void testIntegerPropertyViaConstructor(
        final String key) {
    testIntegerPropertyViaConstructor(
            new Properties(), key, NO_DEFAULT_INT, false);
}
/**
 * Tests an integer-valued connection property with the given default value.
 *
 * @param key          The connection property key under test.
 * @param defaultValue Expected value when the property is not supplied.
 */
protected void testIntegerPropertyViaConstructor(
        final String key,
        final int defaultValue) {
    testIntegerPropertyViaConstructor(
            new Properties(), key, defaultValue);
}
/**
 * Tests an integer-valued connection property with baseline properties applied
 * before the key under test.
 *
 * @param initProperties Baseline properties set before the key under test.
 * @param key            The connection property key under test.
 * @param defaultValue   Expected value when the property is not supplied.
 */
protected void testIntegerPropertyViaConstructor(
        final Properties initProperties,
        final String key,
        final int defaultValue) {
    testIntegerPropertyViaConstructor(
            initProperties, key, defaultValue, true);
}
/**
 * Core routine for testing an integer-valued connection property.
 *
 * <p>When {@code hasDefault} is {@code true}, first verifies that omitting the
 * property yields {@code defaultValue}; then verifies non-negative integers
 * are parsed and stored, while negatives, non-numeric text, and values beyond
 * {@code Integer.MAX_VALUE} are rejected.
 *
 * @param initProperties Baseline properties applied before the key under test.
 * @param key            The connection property key under test.
 * @param defaultValue   Expected value when the property is not supplied.
 * @param hasDefault     Whether the property has a default value to verify.
 */
private void testIntegerPropertyViaConstructor(
        final Properties initProperties,
        final String key,
        final int defaultValue,
        final boolean hasDefault) {
    final List<String> validValues = ImmutableList.of(
            "0", "5", "10000");
    // Negatives, non-numeric text, and integer-overflow values must all fail.
    final List<String> invalidValues = ImmutableList.of(
            "-1", "blah", String.valueOf((long) Integer.MAX_VALUE + 1000));
    if (hasDefault) {
        final Properties properties = new Properties();
        properties.putAll(initProperties);
        assertDoesNotThrowOnNewConnectionProperties(properties);
        assertPropertyValueEqualsToExpected(key, defaultValue);
    }
    // Verify valid property values parse and are stored as ints.
    for (final String validValue : validValues) {
        final Properties properties = new Properties();
        properties.putAll(initProperties);
        // setProperty (not put) keeps the Properties table String-only.
        properties.setProperty(key, validValue);
        assertDoesNotThrowOnNewConnectionProperties(properties);
        assertPropertyValueEqualsToExpected(key, Integer.parseInt(validValue));
    }
    // Verify invalid property values throw.
    for (final String invalidValue : invalidValues) {
        final Properties properties = new Properties();
        properties.putAll(initProperties);
        properties.setProperty(key, invalidValue);
        assertThrowsOnNewConnectionProperties(properties);
    }
}
/**
 * Tests a boolean-valued connection property that has no default value.
 *
 * @param key The connection property key under test.
 */
protected void testBooleanPropertyViaConstructor(
        final String key) {
    testBooleanPropertyViaConstructor(
            new Properties(), key, NO_DEFAULT_BOOL, false);
}
/**
 * Tests a boolean-valued connection property with the given default value.
 *
 * @param key          The connection property key under test.
 * @param defaultValue Expected value when the property is not supplied.
 */
protected void testBooleanPropertyViaConstructor(
        final String key,
        final boolean defaultValue) {
    testBooleanPropertyViaConstructor(
            new Properties(), key, defaultValue);
}
/**
 * Tests a boolean-valued connection property with baseline properties applied
 * before the key under test.
 *
 * @param initProperties Baseline properties set before the key under test.
 * @param key            The connection property key under test.
 * @param defaultValue   Expected value when the property is not supplied.
 */
protected void testBooleanPropertyViaConstructor(
        final Properties initProperties,
        final String key,
        final boolean defaultValue) {
    testBooleanPropertyViaConstructor(
            initProperties, key, defaultValue, true);
}
/**
 * Core routine for testing a boolean-valued connection property.
 *
 * <p>"1" and any-cased "true" must parse as {@code true}; "0" and any-cased
 * "false" as {@code false}; anything else must cause construction to fail.
 *
 * @param initProperties Baseline properties applied before the key under test.
 * @param key            The connection property key under test.
 * @param defaultValue   Expected value when the property is not supplied.
 * @param hasDefault     Whether the property has a default value to verify.
 */
private void testBooleanPropertyViaConstructor(
        final Properties initProperties,
        final String key,
        final boolean defaultValue,
        final boolean hasDefault) {
    final List<String> validTrueValues = ImmutableList.of(
            "1", "true", "TRUE", "tRue");
    final List<String> validFalseValues = ImmutableList.of(
            "0", "false", "FALSE", "FaLSe");
    final List<String> invalidValues = ImmutableList.of(
            "-1;", "100;", "46hj7;", "foo;");
    if (hasDefault) {
        final Properties properties = new Properties();
        properties.putAll(initProperties);
        assertDoesNotThrowOnNewConnectionProperties(properties);
        assertPropertyValueEqualsToExpected(key, defaultValue);
    }
    // Verify valid TRUE property values are parsed as true.
    for (final String validTrueValue : validTrueValues) {
        final Properties properties = new Properties();
        properties.putAll(initProperties);
        // setProperty (not put) keeps the Properties table String-only.
        properties.setProperty(key, validTrueValue);
        assertDoesNotThrowOnNewConnectionProperties(properties);
        assertPropertyValueEqualsToExpected(key, true);
    }
    // Verify valid FALSE property values are parsed as false.
    for (final String validFalseValue : validFalseValues) {
        final Properties properties = new Properties();
        properties.putAll(initProperties);
        properties.setProperty(key, validFalseValue);
        assertDoesNotThrowOnNewConnectionProperties(properties);
        assertPropertyValueEqualsToExpected(key, false);
    }
    // Verify invalid property values throw.
    for (final String invalidValue : invalidValues) {
        final Properties properties = new Properties();
        properties.putAll(initProperties);
        properties.setProperty(key, invalidValue);
        assertThrowsOnNewConnectionProperties(properties);
    }
}
}
| 7,291 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/gremlin/GremlinPreparedStatementTest.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune.gremlin;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import software.aws.neptune.NeptunePreparedStatementTestHelper;
import software.aws.neptune.gremlin.mock.MockGremlinDatabase;
import java.io.IOException;
import java.sql.SQLException;
import static software.aws.neptune.gremlin.GremlinHelper.getProperties;
// TODO AN-887: Fix query cancellation issue and enable tests.
/**
 * Query-cancellation tests for Gremlin prepared statements, run against a
 * mock Gremlin server on localhost. Disabled until AN-887 is resolved.
 */
@Disabled
public class GremlinPreparedStatementTest extends GremlinStatementTestBase {
    private static final String HOSTNAME = "localhost";
    private static final int PORT = 8181; // Mock server uses 8181.
    private static final int MAX_CONTENT_LENGTH = 500000; // Took from PropertyGraphSerializationModule.
    // Drives the shared cancellation scenarios against three prepared statements.
    private NeptunePreparedStatementTestHelper neptunePreparedStatementTestHelper;

    // Starts the mock server and prepares three statements: an empty one, a
    // long-running one (cancellation target), and a quick one.
    @BeforeEach
    void initialize() throws SQLException, IOException, InterruptedException {
        MockGremlinDatabase.startGraph();
        final java.sql.Connection connection = new GremlinConnection(
                new GremlinConnectionProperties(getProperties(HOSTNAME, PORT, MAX_CONTENT_LENGTH)));
        neptunePreparedStatementTestHelper = new NeptunePreparedStatementTestHelper(connection.prepareStatement(""),
                connection.prepareStatement(getLongQuery()), connection.prepareStatement(QUICK_QUERY));
    }

    // Closes the helper's statements and stops the mock server.
    @AfterEach
    void close() throws SQLException, IOException, InterruptedException {
        neptunePreparedStatementTestHelper.close();
        MockGremlinDatabase.stopGraph();
    }

    @Test
    void testCancelQueryWithoutExecute() {
        neptunePreparedStatementTestHelper.testCancelQueryWithoutExecute();
    }

    @Test
    void testCancelQueryWhileExecuteInProgress() {
        neptunePreparedStatementTestHelper.testCancelQueryWhileExecuteInProgress();
    }

    @Test
    void testCancelQueryTwice() {
        neptunePreparedStatementTestHelper.testCancelQueryTwice();
    }

    @Test
    void testCancelQueryAfterExecuteComplete() {
        neptunePreparedStatementTestHelper.testCancelQueryAfterExecuteComplete();
    }

    @Test
    void testMisc() {
        neptunePreparedStatementTestHelper.testMisc();
    }

    @Test
    void testSet() {
        neptunePreparedStatementTestHelper.testSet();
    }
}
| 7,292 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/gremlin/GremlinIntegrationTest.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune.gremlin;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import software.aws.neptune.jdbc.helpers.HelperFunctions;
import software.aws.neptune.jdbc.utilities.ConnectionProperties;
import software.aws.neptune.jdbc.utilities.SqlError;
import java.io.IOException;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.CONTACT_POINT_KEY;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.ENABLE_SSL_KEY;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.PORT_KEY;
import static software.aws.neptune.gremlin.GremlinHelper.createVertex;
import static software.aws.neptune.gremlin.GremlinHelper.dropAll;
import static software.aws.neptune.gremlin.GremlinHelper.dropVertex;
import static software.aws.neptune.gremlin.GremlinHelper.getAll;
import static software.aws.neptune.gremlin.GremlinHelper.getVertex;
/**
 * Integration tests that exercise Gremlin queries against a live Neptune
 * cluster using IAM SigV4 authentication over SSL. Disabled by default since
 * they require real cluster credentials and network access.
 */
@Disabled
public class GremlinIntegrationTest extends GremlinStatementTestBase {
    private static final int PORT = 8182;
    private static final String ENDPOINT = "iam-auth-test-lyndon.cluster-cdubgfjknn5r.us-east-1.neptune.amazonaws.com";
    private static final String AUTH = "IamSigV4";
    private static final String ENCRYPTION = "TRUE";
    private static final String CONNECTION_STRING =
            String.format("jdbc:neptune:gremlin://%s;enableSsl=%s;authScheme=%s;",
                    ENDPOINT, ENCRYPTION, AUTH);
    private static final String VERTEX_1 = "vertex1";
    // Diamond operator instead of a raw HashMap removes the need for
    // @SuppressWarnings({"unchecked", "rawtypes"}).
    private static final Map<String, Object> VERTEX_1_MAP = new HashMap<>();
    private static final String VERTEX_2 = "vertex2";
    private static final Map<String, Object> VERTEX_2_MAP = new HashMap<>();
    private static final Map<String, Object> ALL_MAP = new HashMap<>();
    private static java.sql.Connection connection;

    // Single initializer keeps the fixture data and its aggregation together;
    // execution order matches the original separate blocks.
    static {
        VERTEX_1_MAP.put("A", 1);
        VERTEX_1_MAP.put("B", 2);
        VERTEX_1_MAP.put("C", 3);
        VERTEX_1_MAP.put("D", 4);
        VERTEX_2_MAP.put("D", 1);
        VERTEX_2_MAP.put("E", 2);
        ALL_MAP.putAll(VERTEX_1_MAP);
        ALL_MAP.putAll(VERTEX_2_MAP);
    }

    /** Opens a SigV4-authenticated connection and clears the database. */
    @BeforeAll
    static void initialize() throws SQLException, IOException, InterruptedException {
        final Properties properties = new Properties();
        properties.put(ConnectionProperties.AUTH_SCHEME_KEY, AUTH);
        properties.put(CONTACT_POINT_KEY, ENDPOINT);
        properties.put(PORT_KEY, PORT);
        properties.put(ENABLE_SSL_KEY, ENCRYPTION);
        connection = new GremlinConnection(new GremlinConnectionProperties(properties));
        dropAll(connection);
    }

    /** Clears the database and closes the shared connection. */
    @AfterAll
    static void shutdown() throws SQLException, IOException {
        dropAll(connection);
        connection.close();
    }

    /** Collects the column names reported by the result set's metadata. */
    private static Set<String> getActualColumns(final java.sql.ResultSet resultSet) throws SQLException {
        Assertions.assertNotNull(resultSet);
        final Set<String> actualColumns = new HashSet<>();
        for (int i = 1; i <= resultSet.getMetaData().getColumnCount(); i++) {
            actualColumns.add(resultSet.getMetaData().getColumnName(i));
        }
        return actualColumns;
    }

    /** Asserts the result set exposes exactly the expected column names. */
    private static void validateResultSetColumns(final java.sql.ResultSet resultSet,
                                                 final Set<String> expectedColumns) throws SQLException {
        Assertions.assertEquals((long) expectedColumns.size(), resultSet.getMetaData().getColumnCount());
        Assertions.assertEquals(expectedColumns, getActualColumns(resultSet));
    }

    /** Asserts each column of the current row matches the given property map. */
    private static void validateResultSetRows(final java.sql.ResultSet resultSet,
                                              final Map<String, Object> properties) throws SQLException {
        for (final String col : getActualColumns(resultSet)) {
            // assertEquals takes (expected, actual): the expected value comes
            // from the property map, the actual from the result set. The
            // original call had the arguments reversed, producing misleading
            // failure messages.
            Assertions.assertEquals(properties.get(col), resultSet.getInt(col));
        }
    }

    /** Verifies vertex creation, per-label retrieval, and the combined schema. */
    @Test
    void testVertexStructure() throws SQLException {
        createVertex(connection, VERTEX_1, VERTEX_1_MAP);
        createVertex(connection, VERTEX_2, VERTEX_2_MAP);
        final java.sql.ResultSet resultSet1 = getVertex(connection, VERTEX_1);
        Assertions.assertNotNull(resultSet1);
        Assertions.assertTrue(resultSet1.next());
        validateResultSetColumns(resultSet1, VERTEX_1_MAP.keySet());
        validateResultSetRows(resultSet1, VERTEX_1_MAP);
        final java.sql.ResultSet resultSet2 = getVertex(connection, VERTEX_2);
        Assertions.assertNotNull(resultSet2);
        Assertions.assertTrue(resultSet2.next());
        validateResultSetColumns(resultSet2, VERTEX_2_MAP.keySet());
        validateResultSetRows(resultSet2, VERTEX_2_MAP);
        final java.sql.ResultSet resultSetAll = getAll(connection);
        Assertions.assertNotNull(resultSetAll);
        Assertions.assertTrue(resultSetAll.next());
        validateResultSetColumns(resultSetAll, ALL_MAP.keySet());
        dropVertex(connection, VERTEX_1);
        dropVertex(connection, VERTEX_2);
    }

    /** Verifies DriverManager can open a connection from the JDBC URL. */
    @Test
    void driverManagerTest() throws SQLException {
        final java.sql.Connection conn = DriverManager.getConnection(CONNECTION_STRING);
        Assertions.assertTrue(conn.isValid(1));
    }

    /** Verifies an in-flight query can be canceled from another thread. */
    @Test
    void cancelQueryTest() throws SQLException {
        final java.sql.Statement statement = connection.createStatement();
        launchCancelThread(150, statement);
        HelperFunctions.expectFunctionThrows(SqlError.QUERY_CANCELED, () -> statement.execute(getLongQuery()));
        waitCancelToComplete();
    }
}
| 7,293 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/gremlin/GremlinStatementTest.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune.gremlin;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import software.aws.neptune.NeptuneStatementTestHelper;
import software.aws.neptune.gremlin.mock.MockGremlinDatabase;
import java.io.IOException;
import java.sql.SQLException;
import static software.aws.neptune.gremlin.GremlinHelper.getProperties;
// TODO AN-887: Fix query cancellation issue and enable tests.
/**
 * Query-cancellation tests for Gremlin statements, run against a mock Gremlin
 * server on localhost. Disabled until AN-887 is resolved.
 */
@Disabled
public class GremlinStatementTest extends GremlinStatementTestBase {
    private static final String HOSTNAME = "localhost";
    private static final int PORT = 8181; // Mock server uses 8181.
    private static final int MAX_CONTENT_LENGTH = 500000; // Took from PropertyGraphSerializationModule.
    private static final int MAX_CONNECT_ATTEMPTS = 10;
    private static NeptuneStatementTestHelper neptuneStatementTestHelper;

    /**
     * Starts the mock server, opens a connection, builds the statement helper,
     * and polls connection validity up to MAX_CONNECT_ATTEMPTS times.
     */
    @BeforeEach
    void initialize() throws SQLException, IOException, InterruptedException {
        MockGremlinDatabase.startGraph();
        final GremlinConnectionProperties connectionProperties =
                new GremlinConnectionProperties(getProperties(HOSTNAME, PORT, MAX_CONTENT_LENGTH));
        final java.sql.Connection connection = new GremlinConnection(connectionProperties);
        neptuneStatementTestHelper =
                new NeptuneStatementTestHelper(connection.createStatement(), getLongQuery(), QUICK_QUERY);
        boolean connected = false;
        int attemptsRemaining = MAX_CONNECT_ATTEMPTS;
        while (!connected && attemptsRemaining-- > 0) {
            connected = connection.isValid(1);
        }
        if (!connected) {
            throw new SQLException("Failed to establish a connection to the database.");
        }
    }

    /**
     * Function to get a shutdown database after testing.
     */
    @AfterEach
    void shutdownDatabase() throws IOException, InterruptedException {
        MockGremlinDatabase.stopGraph();
    }

    @Test
    void testCancelQueryWithoutExecute() {
        neptuneStatementTestHelper.testCancelQueryWithoutExecute();
    }

    @Test
    void testCancelQueryWhileExecuteInProgress() {
        neptuneStatementTestHelper.testCancelQueryWhileExecuteInProgress();
    }

    @Test
    void testCancelQueryTwice() {
        neptuneStatementTestHelper.testCancelQueryTwice();
    }

    @Test
    void testCancelQueryAfterExecuteComplete() {
        neptuneStatementTestHelper.testCancelQueryAfterExecuteComplete();
    }
}
| 7,294 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/gremlin/GremlinStatementTestBase.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune.gremlin;
import software.aws.neptune.NeptuneStatementTestHelperBase;
/**
 * Shared fixtures for Gremlin statement tests: a query that returns quickly
 * and a generator for an expensive multi-vertex insertion query used to
 * exercise query cancellation.
 */
public class GremlinStatementTestBase extends NeptuneStatementTestHelperBase {
    // A trivially fast query. A plain constant initializer replaces the
    // original static-initializer block, which added no value for a
    // compile-time String.
    protected static final String QUICK_QUERY = "g.V().valueMap()";
    protected static final int LONG_QUERY_NODE_COUNT = 500;
    // Monotonically increasing counter so repeated long queries create
    // distinct vertex labels. NOTE: not thread-safe; tests run sequentially.
    private static int currentIndex = 0;

    /**
     * Builds a long-running traversal that adds {@link #LONG_QUERY_NODE_COUNT}
     * vertices, each with a unique numeric label.
     *
     * @return The Gremlin query string.
     */
    protected static String getLongQuery() {
        final StringBuilder stringBuilder = new StringBuilder("g");
        for (int i = currentIndex; i < (currentIndex + LONG_QUERY_NODE_COUNT); i++) {
            stringBuilder.append(String.format(".addV('%d')", i));
        }
        currentIndex += LONG_QUERY_NODE_COUNT;
        return stringBuilder.toString();
    }
}
| 7,295 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/gremlin/GremlinManualNeptuneVerificationTest.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune.gremlin;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import software.aws.neptune.jdbc.utilities.AuthScheme;
import software.aws.neptune.jdbc.utilities.ConnectionProperties;
import software.aws.neptune.opencypher.utilities.OpenCypherGetColumnUtilities;
import java.sql.SQLException;
import java.util.Properties;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.CONTACT_POINT_KEY;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.ENABLE_SSL_KEY;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.PORT_KEY;
import static software.aws.neptune.jdbc.utilities.ConnectionProperties.SSH_HOSTNAME;
import static software.aws.neptune.jdbc.utilities.ConnectionProperties.SSH_PRIVATE_KEY_FILE;
import static software.aws.neptune.jdbc.utilities.ConnectionProperties.SSH_STRICT_HOST_KEY_CHECKING;
import static software.aws.neptune.jdbc.utilities.ConnectionProperties.SSH_USER;
/**
 * Manual verification harness against a specific Neptune cluster, reached via
 * an SSH tunnel with IAM SigV4 auth. Disabled: it depends on hard-coded
 * endpoints, an EC2 bastion IP, and a local private-key file, so it is only
 * runnable by hand after editing those values.
 */
@Disabled
public class GremlinManualNeptuneVerificationTest {
    private static final String ENDPOINT = "database-1.cluster-cdffsmv2nzf7.us-east-2.neptune.amazonaws.com";
    private static final String SAMPLE_QUERY = "g.V().count()";
    private static final int PORT = 8182;
    // Seed query; currently unused because the executeQuery call in
    // initialize() is commented out.
    private static final String CREATE_NODES;
    private static java.sql.Connection connection;
    private static java.sql.DatabaseMetaData databaseMetaData;
    static {
        CREATE_NODES =
                "g.addV('book').property('name', 'The French Chef Cookbook').property('year' , 1968).property('ISBN', '0-394-40135-2')";
    }

    // Opens the tunneled, SigV4-authenticated connection and caches its
    // DatabaseMetaData for the metadata tests below.
    @BeforeAll
    static void initialize() throws SQLException {
        final Properties properties = new Properties();
        properties.put(ConnectionProperties.AUTH_SCHEME_KEY, AuthScheme.IAMSigV4); // set default to IAMSigV4
        properties.put(CONTACT_POINT_KEY, ENDPOINT);
        properties.put(PORT_KEY, PORT);
        properties.put(ENABLE_SSL_KEY, true);
        properties.put(SSH_USER, "ec2-user");
        properties.put(SSH_HOSTNAME, "52.14.185.245");
        properties.put(SSH_PRIVATE_KEY_FILE, "~/Downloads/EC2/neptune-test.pem");
        properties.put(SSH_STRICT_HOST_KEY_CHECKING, "false");
        connection = new GremlinConnection(new GremlinConnectionProperties(properties));
        databaseMetaData = connection.getMetaData();
        // connection.createStatement().executeQuery(CREATE_NODES);
    }

    // Prints every getColumns() row for manual inspection.
    @Disabled
    @Test
    void testGetColumns() throws SQLException {
        final java.sql.ResultSet resultSet = databaseMetaData.getColumns(null, null, null, null);
        Assertions.assertTrue(resultSet.next());
        do {
            for (final String columnName : OpenCypherGetColumnUtilities.COLUMN_NAMES) {
                System.out.println(columnName + " - " + resultSet.getString(columnName));
            }
        } while (resultSet.next());
    }

    // Prints every getTables() row for manual inspection.
    @Disabled
    @Test
    void testGetTables() throws SQLException {
        final java.sql.ResultSet resultSet = databaseMetaData.getTables(null, null, null, null);
        Assertions.assertTrue(resultSet.next());
        do {
            for (int i = 1; i <= resultSet.getMetaData().getColumnCount(); i++) {
                System.out.println(resultSet.getMetaData().getColumnName(i) + " - " + resultSet.getString(i));
            }
        } while (resultSet.next());
    }

    // Prints getColumns() rows filtered to the "book" table for manual inspection.
    @Disabled
    @Test
    void testGetColumnsBook() throws SQLException {
        final java.sql.ResultSet resultSet = databaseMetaData.getColumns(null, null, "book", null);
        Assertions.assertTrue(resultSet.next());
        do {
            for (final String columnName : OpenCypherGetColumnUtilities.COLUMN_NAMES) {
                System.out.println(columnName + " - " + resultSet.getString(columnName));
            }
        } while (resultSet.next());
    }

    // Smoke-test: executes a valueMap query over the live connection.
    @Disabled
    @Test
    void testGremlinDB() throws SQLException {
        connection.createStatement().executeQuery("g.V().hasLabel(\"book\").valueMap()");
    }
}
| 7,296 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/gremlin/GremlinConnectionTest.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune.gremlin;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import software.aws.neptune.gremlin.mock.MockGremlinDatabase;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Properties;
import static software.aws.neptune.gremlin.GremlinHelper.getProperties;
/**
 * Connection-level tests for GremlinConnection, backed by a mock Gremlin
 * server on localhost.
 */
public class GremlinConnectionTest {
    private static final String HOSTNAME = "localhost";
    private static final int PORT = 8181; // Mock server uses 8181.
    private static final String QUERY = "1+1";
    private static final Properties PROPERTIES = getProperties(HOSTNAME, PORT);
    private java.sql.Connection connection;

    /**
     * Function to get a random available port and initialize database before testing.
     */
    @BeforeAll
    public static void initializeDatabase() throws IOException, InterruptedException {
        MockGremlinDatabase.startGraph();
    }

    /**
     * Function to get a shutdown database after testing.
     */
    @AfterAll
    public static void shutdownDatabase() throws IOException, InterruptedException {
        MockGremlinDatabase.stopGraph();
    }

    /** Opens a fresh connection to the mock server before each test. */
    @BeforeEach
    void initialize() throws SQLException {
        connection = new GremlinConnection(new GremlinConnectionProperties(PROPERTIES));
    }

    /** Releases the per-test connection. */
    @AfterEach
    void shutdown() throws SQLException {
        connection.close();
    }

    @Test
    void testGremlinDatabase() throws SQLException {
        connection.createStatement().executeQuery(QUERY);
    }

    @Test
    void testIsValid() throws SQLException {
        // A healthy connection reports itself valid within the timeout.
        Assertions.assertTrue(connection.isValid(1));
        // Negative timeouts are rejected with a descriptive SQLException.
        final Throwable thrown = Assertions.assertThrows(SQLException.class,
                () -> connection.isValid(-1));
        Assertions.assertEquals("Timeout value must be greater than or equal to 0",
                thrown.getMessage());
        // A connection pointed at a closed port can never become valid.
        final GremlinConnectionProperties badProperties =
                new GremlinConnectionProperties(getProperties(HOSTNAME, 1234));
        final java.sql.Connection unreachableConnection = new GremlinConnection(badProperties);
        Assertions.assertFalse(unreachableConnection.isValid(1));
    }
}
| 7,297 |
0 | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune | Create_ds/amazon-neptune-jdbc-driver/src/test/java/software/aws/neptune/gremlin/GremlinHelper.java | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune.gremlin;
import software.aws.neptune.gremlin.resultset.GremlinResultSet;
import software.aws.neptune.jdbc.utilities.AuthScheme;
import software.aws.neptune.jdbc.utilities.ConnectionProperties;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Map;
import java.util.Properties;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.CONTACT_POINT_KEY;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.ENABLE_SSL_KEY;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.MAX_CONTENT_LENGTH_KEY;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.PORT_KEY;
import static software.aws.neptune.gremlin.GremlinConnectionProperties.SSL_SKIP_VALIDATION_KEY;
public class GremlinHelper {
/**
* Function to get properties for Gremlin connection.
*
* @param hostname hostname for properties.
* @param port port number for properties.
* @return Properties for Gremlin connection.
*/
public static Properties getProperties(final String hostname, final int port) {
final Properties properties = new Properties();
properties.put(ConnectionProperties.AUTH_SCHEME_KEY, AuthScheme.None); // set default to None
properties.put(CONTACT_POINT_KEY, hostname);
properties.put(PORT_KEY, port);
properties.put(ENABLE_SSL_KEY, false);
properties.put(SSL_SKIP_VALIDATION_KEY, true);
return properties;
}
/**
* Function to get properties for Gremlin connection.
*
* @param hostname hostname for properties.
* @param port port number for properties.
* @param maxContentLength max content length for properties.
* @return Properties for Gremlin connection.
*/
public static Properties getProperties(final String hostname, final int port, final int maxContentLength) {
final Properties properties = getProperties(hostname, port);
properties.put(MAX_CONTENT_LENGTH_KEY, maxContentLength);
return properties;
}
/**
* Function to construct Gremlin query that creates vertex.
*
* @param label Vertex label.
* @param properties Map containing Vertex properties.
* @return Gremlin query that creates vertex.
*/
public static String createVertexQuery(final String label, final Map<String, ?> properties) {
final String q = "\"";
final StringBuilder sb = new StringBuilder();
sb.append("g.addV(").append(q).append(label).append(q).append(")");
for (final Map.Entry<String, ?> entry : properties.entrySet()) {
sb.append(".property(")
.append(q).append(entry.getKey()).append(q)
.append(", ");
if (entry.getValue() instanceof String) {
sb.append(q).append(entry.getValue()).append(q);
} else {
if (entry.getValue() instanceof Float) {
sb.append(((Float)entry.getValue()).floatValue());
} else if (entry.getValue() instanceof Double) {
sb.append(((Double)entry.getValue()).doubleValue());
} else {
sb.append(entry.getValue());
}
}
sb.append(")");
}
System.out.println("Query: " + sb.toString());
return sb.toString();
}
/**
* Function to construct Gremlin query that gets vertex.
*
* @param label Vertex label.
* @return Gremlin query that gets vertex.
*/
public static String getVertexQuery(final String label) {
return String.format("g.V().hasLabel(\"%s\").valueMap().by(unfold())", label);
}
/**
* Function to construct Gremlin query that drops vertex.
*
* @param label Vertex label.
* @return Gremlin query that drops vertex.
*/
public static String dropVertexQuery(final String label) {
return String.format("g.V().hasLabel(\"%s\").drop().iterate()", label);
}
/**
* Function that creates vertex.
*
* @param connection Gremlin database connection.
* @param label Vertex label.
* @param properties Map containing Vertex properties.
* @throws SQLException if fails to create vertex.
*/
public static void createVertex(final Connection connection,
final String label,
final Map<String, Object> properties) throws SQLException {
connection.createStatement()
.executeQuery(createVertexQuery(label, properties));
}
/**
* Function that gets vertex.
*
* @param connection Gremlin database connection.
* @param label Vertex label.
* @return Gremlin result set containing vertex.
* @throws SQLException if fails to get vertex.
*/
public static GremlinResultSet getVertex(final Connection connection,
final String label) throws SQLException {
return (GremlinResultSet) connection.createStatement()
.executeQuery(getVertexQuery(label));
}
/**
* Function that drops vertex.
*
* @param connection Gremlin database connection.
* @param label Vertex label.
* @throws SQLException if fails to drop vertex.
*/
public static void dropVertex(final Connection connection,
final String label) throws SQLException {
connection.createStatement().executeQuery(dropVertexQuery(label));
}
/**
* Function that gets all data from database.
*
* @param connection Gremlin database connection.
* @return Gremlin result set containing vertex.
* @throws SQLException if fails to get data.
*/
public static GremlinResultSet getAll(final Connection connection) throws SQLException {
return (GremlinResultSet) connection.createStatement()
.executeQuery("g.V().valueMap().by(unfold())");
}
/**
* Function that drops all data from database.
*
* @param connection Gremlin database connection.
* @throws SQLException if fails to drop data.
*/
public static void dropAll(final Connection connection) throws SQLException {
        // Fix: close the Statement (the original leaked it); the drop's result
        // set is never read.
        try (java.sql.Statement statement = connection.createStatement()) {
            statement.executeQuery("g.V().drop().iterate()");
        }
    }
}
| 7,298 |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.aws.neptune.gremlin;
import com.google.common.collect.ImmutableList;
import io.netty.handler.ssl.SslContext;
import org.apache.tinkerpop.gremlin.driver.LoadBalancingStrategy;
import org.apache.tinkerpop.gremlin.driver.MessageSerializer;
import org.apache.tinkerpop.gremlin.driver.ser.Serializers;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import software.aws.neptune.ConnectionPropertiesTestBase;
import software.aws.neptune.jdbc.helpers.HelperFunctions;
import software.aws.neptune.jdbc.utilities.AuthScheme;
import software.aws.neptune.jdbc.utilities.ConnectionProperties;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.util.List;
import java.util.Properties;
import static org.mockito.Mockito.mock;
/**
 * Unit tests for {@code GremlinConnectionProperties}: defaults, individual
 * property getters/setters, and the mutual-exclusion rules between
 * {@code AuthScheme.IAMSigV4} and disabled SSL.
 */
class GremlinConnectionPropertiesTest extends ConnectionPropertiesTestBase {
    private static final int MAX_CONTENT_LENGTH = 1073741824;
    private GremlinConnectionProperties connectionProperties;
    private int randomIntValue;

    protected void assertDoesNotThrowOnNewConnectionProperties(final Properties properties) {
        Assertions.assertDoesNotThrow(() -> {
            // Since we have added the check for service region and IAMSigV4 is set by default, we need to add a mock
            // region property here in case the system running these tests does not have SERVICE_REGION variable set.
            properties.put("serviceRegion", "mock-region");
            connectionProperties = new GremlinConnectionProperties(properties);
        });
    }

    protected void assertThrowsOnNewConnectionProperties(final Properties properties) {
        Assertions.assertThrows(SQLException.class,
                () -> connectionProperties = new GremlinConnectionProperties(properties));
    }

    protected <T> void assertPropertyValueEqualsToExpected(final String key, final T expectedValue) {
        Assertions.assertEquals(expectedValue, connectionProperties.get(key));
    }

    @BeforeEach
    void beforeEach() {
        randomIntValue = HelperFunctions.randomPositiveIntValue(1000);
    }

    @Test
    void testDefaultValues() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(GremlinConnectionProperties.DEFAULT_CONNECTION_TIMEOUT_MILLIS,
                connectionProperties.getConnectionTimeoutMillis());
        Assertions.assertEquals(GremlinConnectionProperties.DEFAULT_CONNECTION_RETRY_COUNT,
                connectionProperties.getConnectionRetryCount());
        Assertions.assertEquals(GremlinConnectionProperties.DEFAULT_AUTH_SCHEME, connectionProperties.getAuthScheme());
        Assertions.assertEquals("", connectionProperties.getContactPoint());
        Assertions.assertEquals(GremlinConnectionProperties.DEFAULT_PATH, connectionProperties.getPath());
        Assertions.assertEquals(GremlinConnectionProperties.DEFAULT_PORT, connectionProperties.getPort());
        Assertions.assertEquals(GremlinConnectionProperties.DEFAULT_ENABLE_SSL, connectionProperties.getEnableSsl());
    }

    @Test
    void testApplicationName() throws SQLException {
        testStringPropertyViaConstructor(
                GremlinConnectionProperties.APPLICATION_NAME_KEY);
        final String testValue = "test application name";
        connectionProperties = new GremlinConnectionProperties();
        connectionProperties.setApplicationName(testValue);
        Assertions.assertEquals(testValue, connectionProperties.getApplicationName());
    }

    @Test
    void testConnectionTimeout() throws SQLException {
        testIntegerPropertyViaConstructor(
                GremlinConnectionProperties.CONNECTION_TIMEOUT_MILLIS_KEY,
                GremlinConnectionProperties.DEFAULT_CONNECTION_TIMEOUT_MILLIS);
        connectionProperties = new GremlinConnectionProperties();
        connectionProperties.setConnectionTimeoutMillis(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getConnectionTimeoutMillis());
    }

    @Test
    void testConnectionRetryCount() throws SQLException {
        testIntegerPropertyViaConstructor(
                GremlinConnectionProperties.CONNECTION_RETRY_COUNT_KEY,
                GremlinConnectionProperties.DEFAULT_CONNECTION_RETRY_COUNT);
        connectionProperties = new GremlinConnectionProperties();
        connectionProperties.setConnectionRetryCount(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getConnectionRetryCount());
    }

    @Test
    void testAuthScheme() throws SQLException {
        testAuthSchemeViaConstructor();
        connectionProperties = new GremlinConnectionProperties();
        connectionProperties.setAuthScheme(AuthScheme.None);
        Assertions.assertEquals(AuthScheme.None, connectionProperties.getAuthScheme());
    }

    @Test
    void testContactPoint() throws SQLException {
        testStringPropertyViaConstructor(
                GremlinConnectionProperties.CONTACT_POINT_KEY,
                ConnectionPropertiesTestBase.DEFAULT_EMPTY_STRING);
        final String testValue = "test contact point";
        connectionProperties = new GremlinConnectionProperties();
        connectionProperties.setContactPoint(testValue);
        Assertions.assertEquals(testValue, connectionProperties.getContactPoint());
    }

    @Test
    void testPath() throws SQLException {
        testStringPropertyViaConstructor(
                GremlinConnectionProperties.PATH_KEY,
                GremlinConnectionProperties.DEFAULT_PATH);
        final String testValue = "test path";
        connectionProperties = new GremlinConnectionProperties();
        connectionProperties.setPath(testValue);
        Assertions.assertEquals(testValue, connectionProperties.getPath());
    }

    @Test
    void testPort() throws SQLException {
        testIntegerPropertyViaConstructor(
                GremlinConnectionProperties.PORT_KEY,
                GremlinConnectionProperties.DEFAULT_PORT);
        final int testValue = 12345;
        connectionProperties = new GremlinConnectionProperties();
        connectionProperties.setPort(testValue);
        Assertions.assertEquals(testValue, connectionProperties.getPort());
    }

    // The serializer property can hold one of three representations (object,
    // enum, or string); each setter must make its own representation active and
    // the other two inactive.
    @Test
    void testSerializerObject() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNotNull(connectionProperties.getSerializerEnum());
        final MessageSerializer serializer = mock(MessageSerializer.class);
        Assertions.assertDoesNotThrow(
                () -> connectionProperties.setSerializer(serializer)
        );
        Assertions.assertTrue(connectionProperties.isSerializerObject());
        Assertions.assertFalse(connectionProperties.isSerializerEnum());
        Assertions.assertFalse(connectionProperties.isSerializerString());
        Assertions.assertEquals(serializer, connectionProperties.getSerializerObject());
    }

    @Test
    void testSerializerEnum() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNotNull(connectionProperties.getSerializerEnum());
        final Serializers serializer = Serializers.GRAPHSON_V2D0;
        Assertions.assertDoesNotThrow(
                () -> connectionProperties.setSerializer(serializer)
        );
        Assertions.assertTrue(connectionProperties.isSerializerEnum());
        Assertions.assertFalse(connectionProperties.isSerializerObject());
        Assertions.assertFalse(connectionProperties.isSerializerString());
        Assertions.assertEquals(serializer, connectionProperties.getSerializerEnum());
    }

    @Test
    void testSerializerString() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNotNull(connectionProperties.getSerializerString());
        final String serializer = "test serializer";
        Assertions.assertDoesNotThrow(
                () -> connectionProperties.setSerializer(serializer)
        );
        Assertions.assertTrue(connectionProperties.isSerializerString());
        Assertions.assertFalse(connectionProperties.isSerializerEnum());
        Assertions.assertFalse(connectionProperties.isSerializerObject());
        Assertions.assertEquals(serializer, connectionProperties.getSerializerString());
    }

    @Test
    void testEnableSsl() throws SQLException {
        // IAMSigV4 (the default) requires SSL, so reset the scheme to None before
        // exercising the enableSsl property in isolation.
        Properties initProperties = new Properties();
        initProperties.put(ConnectionProperties.AUTH_SCHEME_KEY, AuthScheme.None); // reset to None
        testBooleanPropertyViaConstructor(
                initProperties,
                GremlinConnectionProperties.ENABLE_SSL_KEY,
                GremlinConnectionProperties.DEFAULT_ENABLE_SSL);

        // IAMSigV4 with SSL enabled is valid.
        initProperties = new Properties();
        initProperties.put(GremlinConnectionProperties.ENABLE_SSL_KEY, true);
        initProperties.put(ConnectionProperties.AUTH_SCHEME_KEY, AuthScheme.IAMSigV4);
        assertDoesNotThrowOnNewConnectionProperties(initProperties);

        // IAMSigV4 with SSL disabled must be rejected.
        initProperties = new Properties();
        initProperties.put(GremlinConnectionProperties.ENABLE_SSL_KEY, false);
        initProperties.put(ConnectionProperties.AUTH_SCHEME_KEY, AuthScheme.IAMSigV4);
        assertThrowsOnNewConnectionProperties(initProperties);

        initProperties = new Properties();
        initProperties.put(ConnectionProperties.AUTH_SCHEME_KEY, AuthScheme.None);
        assertDoesNotThrowOnNewConnectionProperties(initProperties);
        final ImmutableList<Boolean> boolValues = ImmutableList.of(true, false);
        for (final Boolean boolValue : boolValues) {
            connectionProperties.setEnableSsl(boolValue);
            Assertions.assertEquals(boolValue, connectionProperties.getEnableSsl());
        }
    }

    @Test
    void testSslContext() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getSslContext());
        final SslContext sslContext = mock(SslContext.class);
        Assertions.assertDoesNotThrow(
                () -> connectionProperties.setSslContext(sslContext)
        );
        Assertions.assertEquals(sslContext, connectionProperties.getSslContext());
    }

    @Test
    void testSslEnabledProtocols() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getSslEnabledProtocols());
        final List<String> sslEnabledProtocols = ImmutableList.of("test sslEnabledProtocols");
        Assertions.assertDoesNotThrow(
                () -> connectionProperties.setSslEnabledProtocols(sslEnabledProtocols)
        );
        Assertions.assertEquals(sslEnabledProtocols, connectionProperties.getSslEnabledProtocols());
    }

    @Test
    void testSslCipherSuites() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getSslCipherSuites());
        final List<String> sslCipherSuites = ImmutableList.of("test sslCipherSuites");
        Assertions.assertDoesNotThrow(
                () -> connectionProperties.setSslCipherSuites(sslCipherSuites)
        );
        Assertions.assertEquals(sslCipherSuites, connectionProperties.getSslCipherSuites());
    }

    @Test
    void testSslSkipCertValidation() throws SQLException {
        testBooleanPropertyViaConstructor(
                GremlinConnectionProperties.SSL_SKIP_VALIDATION_KEY,
                GremlinConnectionProperties.DEFAULT_SSL_SKIP_VALIDATION);
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(DEFAULT_FALSE, connectionProperties.getSslSkipCertValidation());
        final ImmutableList<Boolean> boolValues = ImmutableList.of(true, false);
        for (final Boolean boolValue : boolValues) {
            connectionProperties.setSslSkipCertValidation(boolValue);
            Assertions.assertEquals(boolValue, connectionProperties.getSslSkipCertValidation());
        }
    }

    @Test
    void testKeyStore() throws SQLException {
        final String testValue = "test key store";
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getKeyStore());
        connectionProperties.setKeyStore(testValue);
        Assertions.assertEquals(testValue, connectionProperties.getKeyStore());
    }

    @Test
    void testKeyStorePassword() throws SQLException {
        final String testValue = "test key store password";
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getKeyStorePassword());
        connectionProperties.setKeyStorePassword(testValue);
        Assertions.assertEquals(testValue, connectionProperties.getKeyStorePassword());
    }

    @Test
    void testKeyStoreType() throws SQLException {
        final String testValue = "test key store type";
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getKeyStoreType());
        connectionProperties.setKeyStoreType(testValue);
        Assertions.assertEquals(testValue, connectionProperties.getKeyStoreType());
    }

    @Test
    void testTrustStore() throws SQLException {
        final String testValue = "test trust store";
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getTrustStore());
        connectionProperties.setTrustStore(testValue);
        Assertions.assertEquals(testValue, connectionProperties.getTrustStore());
    }

    @Test
    void testTrustStorePassword() throws SQLException {
        final String testValue = "test trust store password";
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getTrustStorePassword());
        connectionProperties.setTrustStorePassword(testValue);
        Assertions.assertEquals(testValue, connectionProperties.getTrustStorePassword());
    }

    @Test
    void testTrustStoreType() throws SQLException {
        final String testValue = "test trust store type";
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getTrustStoreType());
        connectionProperties.setTrustStoreType(testValue);
        Assertions.assertEquals(testValue, connectionProperties.getTrustStoreType());
    }

    @Test
    void testNioPoolSize() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getNioPoolSize());
        connectionProperties.setNioPoolSize(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getNioPoolSize());
    }

    @Test
    void testWorkerPoolSize() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getWorkerPoolSize());
        connectionProperties.setWorkerPoolSize(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getWorkerPoolSize());
    }

    @Test
    void testMaxConnectionPoolSize() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getMaxConnectionPoolSize());
        connectionProperties.setMaxConnectionPoolSize(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getMaxConnectionPoolSize());
    }

    @Test
    void testMinConnectionPoolSize() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getMinConnectionPoolSize());
        connectionProperties.setMinConnectionPoolSize(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getMinConnectionPoolSize());
    }

    @Test
    void testMaxInProcessPerConnection() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getMaxInProcessPerConnection());
        connectionProperties.setMaxInProcessPerConnection(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getMaxInProcessPerConnection());
    }

    @Test
    void testMinInProcessPerConnection() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getMinInProcessPerConnection());
        connectionProperties.setMinInProcessPerConnection(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getMinInProcessPerConnection());
    }

    @Test
    void testMaxSimultaneousUsagePerConnection() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getMaxSimultaneousUsagePerConnection());
        connectionProperties.setMaxSimultaneousUsagePerConnection(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getMaxSimultaneousUsagePerConnection());
    }

    @Test
    void testMinSimultaneousUsagePerConnection() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getMinSimultaneousUsagePerConnection());
        connectionProperties.setMinSimultaneousUsagePerConnection(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getMinSimultaneousUsagePerConnection());
    }

    // Channelizer, like serializer, supports two representations (Class or
    // String); setting one must deactivate the other.
    @Test
    void testChannelizerGeneric() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getChannelizerGeneric());
        final Class<?> channelizer = Object.class;
        Assertions.assertDoesNotThrow(
                () -> connectionProperties.setChannelizer(channelizer)
        );
        Assertions.assertTrue(connectionProperties.isChannelizerGeneric());
        Assertions.assertFalse(connectionProperties.isChannelizerString());
        Assertions.assertEquals(channelizer, connectionProperties.getChannelizerGeneric());
    }

    @Test
    void testChannelizerString() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getChannelizerString());
        final String channelizer = "test channelizer";
        Assertions.assertDoesNotThrow(
                () -> connectionProperties.setChannelizer(channelizer)
        );
        Assertions.assertTrue(connectionProperties.isChannelizerString());
        Assertions.assertFalse(connectionProperties.isChannelizerGeneric());
        Assertions.assertEquals(channelizer, connectionProperties.getChannelizerString());
    }

    @Test
    void testKeepAliveInterval() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getKeepAliveInterval());
        connectionProperties.setKeepAliveInterval(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getKeepAliveInterval());
    }

    @Test
    void testResultIterationBatchSize() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getResultIterationBatchSize());
        connectionProperties.setResultIterationBatchSize(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getResultIterationBatchSize());
    }

    @Test
    void testMaxWaitForConnection() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getMaxWaitForConnection());
        connectionProperties.setMaxWaitForConnection(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getMaxWaitForConnection());
    }

    @Test
    void testMaxWaitForClose() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getMaxWaitForClose());
        connectionProperties.setMaxWaitForClose(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getMaxWaitForClose());
    }

    @Test
    void testMaxContentLength() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(MAX_CONTENT_LENGTH, connectionProperties.getMaxContentLength());
        connectionProperties.setMaxContentLength(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getMaxContentLength());
    }

    @Test
    void testValidationRequest() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getValidationRequest());
        final String validationRequest = "test validationRequest";
        connectionProperties.setValidationRequest(validationRequest);
        Assertions.assertEquals(validationRequest, connectionProperties.getValidationRequest());
    }

    @Test
    void testReconnectInterval() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertEquals(NO_DEFAULT_INT, connectionProperties.getReconnectInterval());
        connectionProperties.setReconnectInterval(randomIntValue);
        Assertions.assertEquals(randomIntValue, connectionProperties.getReconnectInterval());
    }

    @Test
    void testLoadBalancingStrategy() throws SQLException {
        connectionProperties = new GremlinConnectionProperties();
        Assertions.assertNull(connectionProperties.getLoadBalancingStrategy());
        final LoadBalancingStrategy strategy = mock(LoadBalancingStrategy.class);
        Assertions.assertDoesNotThrow(
                () -> connectionProperties.setLoadBalancingStrategy(strategy)
        );
        Assertions.assertEquals(strategy, connectionProperties.getLoadBalancingStrategy());
    }

    @Test
    void testDisableEncryptionWithIAMSigV4() throws SQLException {
        final Properties properties = new Properties();
        properties.put("authScheme", "IAMSigV4");
        properties.put("enableSsl", true);
        properties.put("serviceRegion", "mock-region");
        connectionProperties = new GremlinConnectionProperties(properties);
        Assertions.assertTrue(connectionProperties.getEnableSsl());
        // Fixed argument order: JUnit's assertEquals takes (expected, actual).
        Assertions.assertEquals(AuthScheme.IAMSigV4, connectionProperties.getAuthScheme());
        // Disabling SSL while IAMSigV4 is active must fail ...
        Assertions.assertThrows(SQLClientInfoException.class, () -> connectionProperties.setEnableSsl(false));
        // ... but succeeds after switching the auth scheme away from IAMSigV4.
        Assertions.assertDoesNotThrow(() -> connectionProperties.setAuthScheme(AuthScheme.None));
        Assertions.assertDoesNotThrow(() -> connectionProperties.setEnableSsl(false));
    }

    @Test
    void testEnableIAMSigV4WithoutEncryption() throws SQLException {
        final Properties properties = new Properties();
        properties.put("authScheme", "None");
        properties.put("enableSsl", false);
        connectionProperties = new GremlinConnectionProperties(properties);
        Assertions.assertFalse(connectionProperties.getEnableSsl());
        // Fixed argument order: JUnit's assertEquals takes (expected, actual).
        Assertions.assertEquals(AuthScheme.None, connectionProperties.getAuthScheme());
        // Enabling IAMSigV4 while SSL is disabled must fail ...
        Assertions.assertThrows(SQLClientInfoException.class, () -> connectionProperties.setAuthScheme(AuthScheme.IAMSigV4));
        // ... but succeeds once SSL has been enabled.
        Assertions.assertDoesNotThrow(() -> connectionProperties.setEnableSsl(true));
        Assertions.assertDoesNotThrow(() -> connectionProperties.setAuthScheme(AuthScheme.IAMSigV4));
    }
}
| 7,299 |
// NOTE(review): trailing "Subsets and Splits ..." text was extraction residue
// (dataset-viewer chrome), not part of the source file; it has been neutralized
// into this comment so the file remains parseable. Safe to delete entirely.