index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/InvalidTierNumberException.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues;
/**
 * Thrown when a tier number refers to a tier outside the range of configured tiers.
 */
public class InvalidTierNumberException extends TaskQueueException {

    public InvalidTierNumberException(int number, int total) {
        super(describe(number, total));
    }

    public InvalidTierNumberException(int number, int total, Throwable t) {
        super(describe(number, total), t);
    }

    // Builds the exact message shared by both constructors.
    private static String describe(int number, int total) {
        return "Invalid tier number " + number + ", must be <" + total;
    }
}
| 9,200 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/QueuableTask.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues;
import com.netflix.fenzo.TaskRequest;
/**
* A queuable task extends {@link TaskRequest} with a method to get attributes for the queue.
*/
public interface QueuableTask extends TaskRequest {
    /**
     * Get the attributes for the queue that the task belongs to.
     * @return The queue attributes for this task.
     */
    QAttributes getQAttributes();

    /**
     * Get the time at which this task is ready for consideration for assignment. This can be compared to the
     * system's current time, for example via {@link System#currentTimeMillis()}, to determine if the task is
     * ready to be considered for assignment. If the returned time is greater than the current time, then the
     * task is not yet ready. A return time of <code>0</code> implies that the task is ready now. Tasks that
     * are not ready in a scheduling iteration may be skipped and considered in the next scheduling iteration.
     * @return Time in milliseconds when this task is ready, or <code>0</code> to indicate it is ready now.
     */
    default long getReadyAt() {
        return 0L;
    }

    /**
     * Safely set the ready-at time of this task. Generally, task objects in Fenzo are immutable once added
     * into Fenzo's queue. That is, the scheduling iteration will access methods from the task and expect to
     * obtain valid results without being impacted by concurrent modifications. This method provides a safe
     * mechanism to set the ready-at time via one or both of the following:
     * <UL>
     *     <LI>Fenzo calls this method when it is safe to do so, for all tasks for which setting a new ready
     *     time has been requested via
     *     {@link com.netflix.fenzo.TaskSchedulingService#setTaskReadyTime(String, QAttributes, long)},</LI>
     *     <LI>The implementation of this interface supports safe concurrent access to
     *     {@link #getReadyAt()}.</LI>
     * </UL>
     * Fenzo guarantees the first of the above two options, thereby keeping the system safe in the absence of
     * the second option. That is, implementations don't need to keep this method concurrency-safe with the
     * corresponding access method if they always set the ready time using
     * {@link com.netflix.fenzo.TaskSchedulingService#setTaskReadyTime(String, QAttributes, long)}.
     * @param when The time at which this task is ready for consideration for resource assignment.
     */
    default void safeSetReadyAt(long when) {
        // no-op by default; implementations that track a ready time should store `when` here
    }
}
| 9,201 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/InternalTaskQueue.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues;
import java.util.Collection;
import java.util.Map;
/**
* This interface defines the behavior of task queues required for Fenzo internally. Implementations of TaskQueue must
* implement this interface to be usable by {@link com.netflix.fenzo.TaskScheduler}.
* <P>
* Methods in this interface are expected to be called concurrently. For example, tasks may be added to or removed from
* the queue while a scheduling iteration using this queue is in progress. Implementations must handle this. Note that,
* it may not be sufficient for the implementations to use concurrent versions of collection classes for queue of tasks.
* The queue must be consistent throughout the scheduling iteration. One recommended way to achieve such consistency is
* to place the {@link #queueTask(QueuableTask)} operations as requests in a holding area within the implementation and
* return immediately. Later, actually carry them out during the {@link #reset()} method.
*/
public interface InternalTaskQueue extends TaskQueue {
    /**
     * Reset the queue and make it ready for the next scheduling iteration. Any operations requested that were
     * not safe to carry out during a scheduling iteration can be carried out during this method, before the
     * next scheduling iteration begins.
     * @return {@code true} if the queue was changed as part of this operation, {@code false} otherwise. The
     * queue is deemed changed if any queue modifications that were held for safety are carried out during this
     * method, such as adding to or removing from the queue.
     * @throws TaskQueueMultiException If any exceptions occurred while resetting the pointer to the head
     * of the queue. This may also include exceptions that arose when applying any deferred operations from
     * the {@link #queueTask(QueuableTask)} method.
     */
    boolean reset() throws TaskQueueMultiException;

    /**
     * Get the usage tracker, if any. Queue implementations may request updates for usage tracking purposes. If
     * provided, then {@link com.netflix.fenzo.TaskScheduler} will call the appropriate methods of the tracker
     * as scheduling assignments are taking place. This can help the queue implementations, for example, in
     * maintaining any fairness for resource usage across multiple entities within the queue. If this method
     * returns {@code null}, then the scheduler will ignore usage tracking for this queue.
     * <P>
     * Note that the implementations of {@link UsageTrackedQueue} must be efficient since they are called from
     * within the scheduling iteration.
     * @return The object to use for calling usage tracking triggers, or {@code null} to disable tracking.
     */
    UsageTrackedQueue getUsageTracker();

    /**
     * Get all of the tasks in the queue. A consistent state of the queue is returned when this is called
     * outside of scheduling iteration runs. Calling this concurrently with a scheduling iteration results in
     * an exception.
     * @return All tasks in the queue as a {@link Map} with {@link TaskState} as key and a {@link Collection}
     * of {@link QueuableTask} as values.
     * @throws TaskQueueException when called concurrently with a scheduling iteration in progress.
     */
    Map<TaskState, Collection<QueuableTask>> getAllTasks() throws TaskQueueException;
}
| 9,202 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/QAttributes.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues;
/**
* Attributes for a queue. A queue is comprised of attributes: a bucket name and a tier number. Generally, queues are
* placed into one or more tiers, each of which have one or more buckets. The tiers and buckets can be associated with
* some aspects of scheduling, such as for capacity guarantees. However, queue implementations are free to interpret
* tier and bucket to suit their needs.
*/
public interface QAttributes {

    /**
     * Get the queue's bucket name.
     * @return Name of the queue's bucket.
     */
    String getBucketName();

    /**
     * Get the tier number for the queue. Queues belong to a tier represented by a number; lower numbers are
     * generally ahead in the order of tiers.
     * @return The tier number for the corresponding queue.
     */
    int getTierNumber();

    /**
     * A convenience implementation of {@link QAttributes} that simply holds the two attribute values.
     */
    class QAttributesAdaptor implements QAttributes {
        private final int tierNumber;
        private final String bucketName;

        public QAttributesAdaptor(int tierNumber, String bucketName) {
            this.tierNumber = tierNumber;
            this.bucketName = bucketName;
        }

        @Override
        public String getBucketName() {
            return bucketName;
        }

        @Override
        public int getTierNumber() {
            return tierNumber;
        }
    }

    /**
     * A convenience class representing the pair of a task id and its {@link QAttributes}.
     */
    class TaskIdAttributesTuple {
        private final String id;
        private final QAttributes qAttributes;

        public TaskIdAttributesTuple(String id, QAttributes qAttributes) {
            this.id = id;
            this.qAttributes = qAttributes;
        }

        public String getId() {
            return id;
        }

        public QAttributes getqAttributes() {
            return qAttributes;
        }
    }
}
| 9,203 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/TaskQueueMultiException.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* An exception that wraps multiple exceptions caught during a scheduling iteration of the queue, to be returned
* at the end of scheduling iteration.
*/
/**
 * An exception that wraps multiple exceptions caught during a scheduling iteration of the queue, to be
 * returned at the end of the scheduling iteration.
 */
public class TaskQueueMultiException extends Exception {

    private final List<Exception> exceptions;

    /**
     * Create a multi-exception wrapping the given list of exceptions.
     * @param exceptions The exceptions to wrap; may be {@code null} or empty.
     */
    public TaskQueueMultiException(List<Exception> exceptions) {
        super("Multiple task queue exceptions");
        // Defensive copy: do not hold on to (or later expose) the caller's mutable list. A null argument is
        // treated as "no exceptions" so getExceptions() never returns null.
        this.exceptions = exceptions == null
                ? Collections.<Exception>emptyList()
                : Collections.unmodifiableList(new ArrayList<>(exceptions));
    }

    /**
     * Get the wrapped exceptions.
     * @return An unmodifiable, never-{@code null} list of the wrapped exceptions.
     */
    public List<Exception> getExceptions() {
        return exceptions;
    }
}
| 9,204 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/UsageTrackedQueue.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues;
import com.netflix.fenzo.VMResource;
import com.netflix.fenzo.sla.ResAllocs;
import java.util.Collection;
import java.util.Map;
/**
* This interface represents a queue entity whose usage is tracked. Some of the methods in this class are called
* during a scheduling iteration, and, therefore, implementations must be efficient so as to not slow down the
* scheduler.
* <P>
* There are effectively two sets of methods. One set is called during a scheduling iteration. These are:
* <UL>
* <LI>{@link #nextTaskToLaunch()}</LI>
* <LI>{@link #assignTask(QueuableTask)}</LI>
* </UL>
* The other set is called between consecutive scheduling iterations. Scheduling iteration is marked as completed
* by calling {@link #reset()}. The methods in this set are:
* <UL>
* <LI>{@link #queueTask(QueuableTask)}</LI>
* <LI>{@link #launchTask(QueuableTask)}</LI>
* <LI>{@link #removeTask(String, QAttributes)}</LI>
* </UL>
*/
public interface UsageTrackedQueue {

    /**
     * Mutable accumulator of resource usage (CPU, memory, network, disk) for a tracked queue entity. Also
     * exposes a live {@link ResAllocs} view of the running totals via {@link #getResAllocsWrapper()}.
     * <P>
     * This class is not synchronized; callers must synchronize externally if instances are shared across
     * threads.
     */
    class ResUsage {
        // Live view over the mutable totals below; its getters always reflect the current field values.
        private final ResAllocs resAllocsWrapper;
        private double cpus=0.0;
        private double memory=0.0;
        private double networkMbps=0.0;
        private double disk=0.0;

        public ResUsage() {
            resAllocsWrapper = new ResAllocs() {
                @Override
                public String getTaskGroupName() {
                    return "usage";
                }
                @Override
                public double getCores() {
                    return cpus;
                }
                @Override
                public double getMemory() {
                    return memory;
                }
                @Override
                public double getNetworkMbps() {
                    return networkMbps;
                }
                @Override
                public double getDisk() {
                    return disk;
                }
            };
        }

        /** Add the given task's resource amounts to the running totals. */
        public void addUsage(QueuableTask task) {
            cpus += task.getCPUs();
            memory += task.getMemory();
            networkMbps += task.getNetworkMbps();
            disk += task.getDisk();
        }

        /** Subtract the given task's resource amounts from the running totals. */
        public void remUsage(QueuableTask task) {
            cpus -= task.getCPUs();
            memory -= task.getMemory();
            networkMbps -= task.getNetworkMbps();
            disk -= task.getDisk();
        }

        /** @return a live {@link ResAllocs} view of the current totals (not a snapshot). */
        public ResAllocs getResAllocsWrapper() {
            return resAllocsWrapper;
        }

        public double getCpus() {
            return cpus;
        }

        public double getMemory() {
            return memory;
        }

        public double getNetworkMbps() {
            return networkMbps;
        }

        public double getDisk() {
            return disk;
        }

        /**
         * Compute the dominant resource usage share: the maximum, across resource types, of this usage
         * divided by the corresponding total. When the total for a resource is zero, the raw usage value
         * itself is used as that resource's share (avoids division by zero).
         * @param totalResources The totals to compute per-resource shares against.
         * @return The largest per-resource usage share.
         */
        public double getDominantResUsageFrom(ResAllocs totalResources) {
            double tCPU = totalResources.getCores();
            double max = tCPU > 0.0 ? cpus / tCPU : cpus;
            double tMemory = totalResources.getMemory();
            double tmp = tMemory > 0.0? memory / tMemory : memory;
            max = Math.max(max, tmp);
            double tNetwork = totalResources.getNetworkMbps();
            tmp = tNetwork > 0.0? networkMbps / tNetwork : networkMbps;
            max = Math.max(max, tmp);
            double tDisk = totalResources.getDisk();
            tmp = tDisk > 0.0? disk / tDisk : disk;
            max = Math.max(max, tmp);
            return max;
        }
    }

    /**
     * Add the given task to the queue. Tasks can be added to the queue only while the queue isn't being
     * iterated upon for a scheduling loop. If it is, then this method throws an exception.
     * @param t The task to add to the queue.
     * @throws TaskQueueException if either the task already exists in the queue or if the queue is being
     * iterated on.
     */
    void queueTask(QueuableTask t) throws TaskQueueException;

    /**
     * Get the next task to assign resources to. This method is called from within a scheduling iteration to
     * assign resources to it. The scheduling iteration calls this method repeatedly, until a {@code null} is
     * returned. The first call to this method marks the queue as being iterated upon for scheduling. The
     * {@link #reset()} method must be called to mark the end of the scheduling iteration, after which other
     * queue modification methods such as {@link #queueTask(QueuableTask)}, {@link #launchTask(QueuableTask)},
     * and {@link #removeTask(String, QAttributes)} can be called.
     * @return The next task, or a task with an assignment failure if the task cannot be scheduled due to some
     * internal constraints (for example, it exceeds the allowed resource usage for a queue).
     * Returns {@code null} if there are no tasks left to assign resources to.
     * @throws TaskQueueException if there was an error getting the next task from the queue.
     */
    Assignable<QueuableTask> nextTaskToLaunch() throws TaskQueueException;

    /**
     * Mark the given task to be assigned resources. Assignment is a step within a scheduling iteration. The
     * resources assigned to the task are committed from the perspective of total resource usage. This method
     * can be called only while a queue is being iterated upon, from within a scheduling iteration. Calling it
     * outside of an iteration results in an exception being thrown. A call to {@link #nextTaskToLaunch()}
     * marks the queue as being iterated on.
     * @param t The task to be marked as assigned.
     * @throws TaskQueueException if this method was called outside of a scheduling loop.
     */
    void assignTask(QueuableTask t) throws TaskQueueException;

    /**
     * Mark the given task as launched. That is, the task is now sent to the agent for running, so the
     * resources are fully committed for usage until the task is removed from the queue. Tasks can be launched
     * in two scenarios: a) after assigning resources to all tasks, the scheduling loop ends and tasks are
     * launched, or b) the service is initialized and tasks previously known to be running are marked as
     * launched. Resource usage is tracked in the queues, which is correctly updated only once for a task even
     * if both {@link #assignTask(QueuableTask)} and this method are called for the same task. This method
     * indicates if the resource usage totals were updated during this call.
     * @param t The task to launch.
     * @return True if resources for this task were actually added to total usage, false if not.
     * @throws TaskQueueException if the queue is being iterated on for a scheduling iteration.
     */
    boolean launchTask(QueuableTask t) throws TaskQueueException;

    /**
     * Remove the given task from the queue, irrespective of whether it is queued or launched. This cannot be
     * called while the queue is being iterated on, for example, in a scheduling loop. {@link #reset()} must
     * be called before calling this method.
     * @param id The task id to remove.
     * @param qAttributes The queue attributes for the task to remove.
     * @return {@link QueuableTask} that was removed, or {@code null} if the task wasn't found.
     * @throws TaskQueueException if the queue is being iterated on for a scheduling iteration.
     */
    QueuableTask removeTask(String id, QAttributes qAttributes) throws TaskQueueException;

    /**
     * Set the ready time for the given task.
     * @see QueuableTask#getReadyAt()
     * @param taskId The id of the task whose ready time is to be set.
     * @param qAttributes The queue attributes for the task.
     * @param when The time, in milliseconds, at which the task becomes ready.
     * @throws TaskQueueException if the ready time cannot be set, e.g., when called concurrently with a
     * scheduling iteration.
     */
    void setTaskReadyTime(String taskId, QAttributes qAttributes, long when) throws TaskQueueException;

    /**
     * Get the usage of the dominant resource, expressed as a share of the total known available resources.
     * @return The dominant resource usage.
     */
    double getDominantUsageShare();

    /**
     * Reset the queue to mark the end of a scheduling iteration.
     */
    void reset();

    /**
     * Get the list of all tasks grouped by their state. The list is expected to be consistent, without any
     * transitionary effects from an ongoing scheduling iteration. This must be called only from outside of a
     * scheduling iteration.
     * @return All tasks in the queue as a {@link Map} with {@link TaskQueue.TaskState} as key and a
     * {@link Collection} of {@link QueuableTask} as values.
     * @throws TaskQueueException if called concurrently with a scheduling iteration in progress.
     */
    Map<TaskQueue.TaskState, Collection<QueuableTask>> getAllTasks() throws TaskQueueException;

    /**
     * Set the map of total resources available. This is required to evaluate the dominant resource share that
     * may be used by some queue implementations for fair share purposes.
     * @param totalResourcesMap Map of total resources to set.
     */
    void setTotalResources(Map<VMResource, Double> totalResourcesMap);
}
| 9,205 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/TaskQueue.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues;
import com.netflix.fenzo.TaskIterator;
import com.netflix.fenzo.TaskScheduler;
import com.netflix.fenzo.functions.Action1;
import java.util.Collection;
import java.util.Map;
/**
* This interface defines a task queue that contains all tasks that are either pending resource allocation or assigned
* resources while continuing to run. When using task queues, tasks are input into Fenzo once. The tasks are then
* maintained in the queue until the task is explicitly removed due to no longer requiring resource assignment, or if
* the task completed for any reason.
* <P>
* Methods in this interface are expected to be called concurrently. For example, tasks may be added to or removed from
* the queue while a scheduling iteration using this queue is in progress. Implementations must handle this.
*/
public interface TaskQueue extends TaskIterator {
    /**
     * Tasks in a queue are said to be in one of two states. The {@link #QUEUED} state represents tasks
     * pending resource assignment. Whereas, the {@link #LAUNCHED} state represents tasks that have been
     * assigned resources; such tasks may be either already executing or pending launch. This is used
     * primarily for conveying the state of the tasks via the callback passed to
     * {@link com.netflix.fenzo.TaskSchedulingService#requestAllTasks(Action1)}.
     */
    enum TaskState { QUEUED, LAUNCHED }

    /**
     * Add a task to the queue. Duplicates are not allowed; that is, a task request that has the same id as
     * another existing element will be rejected. The added task will be assigned resources by a scheduler. To
     * add a task into Fenzo that is already running from before, use
     * {@link com.netflix.fenzo.TaskSchedulingService#initializeRunningTask(QueuableTask, String)}.
     * <P>
     * This operation is designed to be performed asynchronously, when it is safe to modify the queue. The
     * queue implementations generally do not modify the queue while a scheduling iteration is in progress.
     * @param task A task to add to the queue.
     */
    void queueTask(QueuableTask task);

    /**
     * Set the SLA for the queue. The queue implementation determines the implementation of
     * {@link TaskQueueSla} that is accepted.
     * @param sla The SLA to set for the queue.
     * @throws IllegalArgumentException if the implementation of the {@link TaskQueueSla} is incompatible with
     * the queue implementation.
     */
    void setSla(TaskQueueSla sla) throws IllegalArgumentException;
}
| 9,206 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/tiered/SortedBuckets.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues.tiered;
import com.netflix.fenzo.queues.UsageTrackedQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* The buckets are sorted using a comparison method that makes it inconsistent with equals. This class maintains
* the ordering while being able to also perform methods such as contains, add, and remove without depending on
* comparisons being strictly consistent with equals. Comparisons use the bucket's resource usage values, where as
* the equals is defined by the bucket name being equal. Duplicate entries with the same bucket name are not allowed.
* There may be multiple buckets with the same resource usage values, and therefore, comparator returns 0 while the
* equals check returns {@code true} only when the bucket name matches. The Collections classes such as SortedMap or
* SortedSet cannot be used due to this inconsistency.
* <P>
* This implementation provides {@code O(logN)} performance for adding an item, constant-time performance for get,
* and {@code O(logN+M)} performance for remove, where, {@code N} is the number of items in the collection and
* {@code M} is the average number of items with same resource usage values.
* <P>
* This implementation is not synchronized. Invocations of methods of this class must be synchronized externally if
* there is a chance of calling them concurrently.
*/
class SortedBuckets {
    // TODO performance can be improved by changing List<> here to a two level map - outer map will have keys
    // of bucket's resource usage and values will be a Map<String, QueueBucket>.
    private static final Logger logger = LoggerFactory.getLogger(SortedBuckets.class);

    // Buckets kept sorted ascending by dominant usage share.
    private final List<QueueBucket> buckets;
    // Name -> bucket, for constant-time lookup by name.
    private final Map<String, QueueBucket> bucketMap;
    // Orders by usage share only; intentionally inconsistent with equals (see class javadoc).
    private final Comparator<QueueBucket> comparator;
    private final UsageTrackedQueue.ResUsage parentUsage;

    SortedBuckets(final UsageTrackedQueue.ResUsage parentUsage) {
        buckets = new ArrayList<>();
        bucketMap = new HashMap<>();
        comparator = Comparator.comparingDouble(QueueBucket::getDominantUsageShare);
        this.parentUsage = parentUsage;
    }

    /**
     * Add a bucket, keeping the list sorted by dominant usage share.
     * @param bucket The bucket to add.
     * @return {@code true} if added, {@code false} if a bucket with the same name already exists.
     */
    boolean add(QueueBucket bucket) {
        if (bucketMap.containsKey(bucket.getName()))
            return false;
        if (buckets.isEmpty())
            buckets.add(bucket);
        else
            buckets.add(findInsertionPoint(bucket, buckets), bucket);
        bucketMap.put(bucket.getName(), bucket);
        return true;
    }

    /**
     * Remove the bucket with the given name.
     * @param bucketName Name of the bucket to remove.
     * @return The removed bucket, or {@code null} if no bucket with that name exists.
     */
    QueueBucket remove(String bucketName) {
        final QueueBucket bucket = bucketMap.get(bucketName);
        if (bucket == null)
            return null;
        final int index = findInsertionPoint(bucket, buckets);
        // Bug fix: findInsertionPoint() can return buckets.size() (insertion point past the last element)
        // when the bucket's usage share grew after it was inserted; indexing without this bound check threw
        // IndexOutOfBoundsException. Note findInsertionPoint() never returns a negative value, so the old
        // "index < 0" throw was dead code and has been removed.
        // We have now found a position that has the same usage value. The actual bucket we are interested in
        // (with the same name) may be at that position, or a few positions to the left or right of it.
        int remPos = index < buckets.size() && buckets.get(index).getName().equals(bucketName) ? index : -1;
        if (remPos < 0)
            remPos = findWalkingLeft(buckets, index, bucketName, bucket.getDominantUsageShare());
        if (remPos < 0)
            remPos = findWalkingRight(buckets, index, bucketName, bucket.getDominantUsageShare());
        if (remPos < 0) {
            // The bucket's stored position no longer matches its usage share; fall back to a linear scan.
            logger.error("Unexpected: bucket with name={} not found to remove, traversing {} buckets to remove it",
                    bucketName, buckets.size());
            logger.warn("Invalid sorted buckets list: {}", getBucketsListString());
            removeBucketAndResort(bucketName);
        }
        else
            buckets.remove(remPos);
        bucketMap.remove(bucketName);
        return bucket;
    }

    // Workaround for an inconsistent list: linearly remove the named bucket, drop duplicate names, re-sort.
    private void removeBucketAndResort(String bucketName) {
        final HashSet<String> names = new HashSet<>();
        if (!buckets.isEmpty()) {
            final Iterator<QueueBucket> iterator = buckets.iterator();
            QueueBucket prev = null;
            boolean isSorted = true;
            while (iterator.hasNext()) {
                QueueBucket b = iterator.next();
                if (!names.add(b.getName())) {
                    logger.error("Bucket {} already existed in the list, removing", b.getName());
                    isSorted = false;
                    iterator.remove();
                }
                else if (b.getName().equals(bucketName)) {
                    iterator.remove();
                } else {
                    if (prev != null) {
                        final int compare = comparator.compare(prev, b);
                        isSorted = isSorted && compare <= 0;
                    }
                    prev = b;
                }
            }
            logger.warn("Re-sorting buckets list");
            resort();
        }
    }

    // Human-readable "[name:share, ...]" dump of the list for diagnostics.
    private String getBucketsListString() {
        StringBuilder builder = new StringBuilder("[");
        for (QueueBucket b: buckets) {
            builder.append(b.getName()).append(":").append(b.getDominantUsageShare()).append(", ");
        }
        builder.append("]");
        return builder.toString();
    }

    /** Constant-time lookup by bucket name; {@code null} if absent. */
    QueueBucket get(String bucketName) {
        return bucketMap.get(bucketName);
    }

    // Scan right from index among entries with the same usage share; -1 if the named bucket isn't found.
    private int findWalkingRight(List<QueueBucket> buckets, int index, String bucketName, double dominantUsageShare) {
        int pos = index;
        while (++pos < buckets.size() && buckets.get(pos).getDominantUsageShare() == dominantUsageShare) {
            if (buckets.get(pos).getName().equals(bucketName))
                return pos;
        }
        return -1;
    }

    // Scan left from index among entries with the same usage share; -1 if the named bucket isn't found.
    private int findWalkingLeft(List<QueueBucket> buckets, int index, String bucketName, double dominantUsageShare) {
        int pos = index;
        while (--pos >= 0 && buckets.get(pos).getDominantUsageShare() == dominantUsageShare) {
            if (buckets.get(pos).getName().equals(bucketName))
                return pos;
        }
        return -1;
    }

    // Binary-search position for the bucket's usage share; always returns a value in [0, buckets.size()].
    private int findInsertionPoint(QueueBucket bucket, List<QueueBucket> buckets) {
        final int i = Collections.binarySearch(buckets, bucket, comparator);
        if (i >= 0)
            return i;
        return -i - 1;
    }

    /** @return an unmodifiable view of the buckets, sorted ascending by dominant usage share. */
    List<QueueBucket> getSortedList() {
        return Collections.unmodifiableList(buckets);
    }

    /** Rebuild both the sorted list and the name map from the current buckets. */
    void resort() {
        List<QueueBucket> old = new ArrayList<>(buckets);
        bucketMap.clear();
        buckets.clear();
        for(QueueBucket b: old)
            add(b);
    }
}
| 9,207 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/tiered/TieredQueueSlas.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues.tiered;
import com.netflix.fenzo.queues.TaskQueueSla;
import com.netflix.fenzo.sla.ResAllocs;
import com.netflix.fenzo.sla.ResAllocsUtil;
import java.util.HashMap;
import java.util.Map;
/**
 * SLA definition for a {@link TieredQueue}: per-tier capacities plus per-bucket allocations within each tier.
 */
public class TieredQueueSlas implements TaskQueueSla {

    private final Map<Integer, TierSla> slas;

    /**
     * Build the per-tier SLA map from explicit tier capacities and per-bucket allocations.
     * @param tierCapacities Explicit capacity per tier number; a tier missing here but present in
     *                       {@code slas} gets a capacity computed as the sum of its bucket allocations.
     * @param slas Per-tier map of bucket name to its {@link ResAllocs} allocation.
     */
    public TieredQueueSlas(Map<Integer, ResAllocs> tierCapacities, Map<Integer, Map<String, ResAllocs>> slas) {
        Map<Integer, TierSla> tmpResAllocsMap = new HashMap<>();
        for (Map.Entry<Integer, Map<String, ResAllocs>> entry : slas.entrySet()) {
            int tierNumber = entry.getKey();
            final Map<String, ResAllocs> tierAllocs = entry.getValue();
            TierSla tierSla = new TierSla();
            tierSla.setTierCapacity(getOrComputeTierCapacity(tierNumber, tierCapacities.get(tierNumber), tierAllocs));
            for (Map.Entry<String, ResAllocs> e : tierAllocs.entrySet()) {
                tierSla.setAlloc(e.getKey(), e.getValue());
            }
            tmpResAllocsMap.put(tierNumber, tierSla);
        }
        // Tiers that have a capacity but no bucket allocations still get a TierSla entry.
        tierCapacities.forEach((tierIndex, capacity) -> {
            if (!tmpResAllocsMap.containsKey(tierIndex)) {
                TierSla tierSla = new TierSla();
                // Use the iteration value directly instead of a redundant second map lookup.
                tierSla.setTierCapacity(capacity);
                tmpResAllocsMap.put(tierIndex, tierSla);
            }
        });
        this.slas = tmpResAllocsMap;
    }

    /**
     * Return the explicit tier capacity when given, otherwise derive it by summing the tier's bucket
     * allocations (or an empty allocation when there are none).
     */
    private ResAllocs getOrComputeTierCapacity(int tierNumber, ResAllocs tierCapacity, Map<String, ResAllocs> queueAllocs) {
        if (tierCapacity != null) {
            return tierCapacity;
        }
        String tierName = "tier#" + tierNumber;
        if (queueAllocs.isEmpty()) {
            return ResAllocsUtil.emptyOf(tierName);
        }
        return queueAllocs.values().stream().reduce(ResAllocsUtil.emptyOf(tierName), ResAllocsUtil::add);
    }

    /* package */ Map<Integer, TierSla> getSlas() {
        return slas;
    }
}
| 9,208 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/tiered/Tier.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues.tiered;
import java.util.*;
import java.util.function.BiFunction;
import com.netflix.fenzo.AssignmentFailure;
import com.netflix.fenzo.VMResource;
import com.netflix.fenzo.queues.*;
import com.netflix.fenzo.sla.ResAllocs;
import com.netflix.fenzo.sla.ResAllocsUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class represents a tier of the multi-tiered queue that {@link TieredQueue} represents. The tier holds one or
* more buckets for queues, {@link QueueBucket} and maintains them in an order defined by the dynamic value for
* dominant resource usage via {@link SortedBuckets}. The values are dynamically updated via the implementation of
* {@link UsageTrackedQueue} this class provides.
*/
class Tier implements UsageTrackedQueue {
    private static final Logger logger = LoggerFactory.getLogger(Tier.class);

    // Tier index (0 is highest priority) and a display name derived from it.
    private final int tierNumber;
    private final String tierName;
    // Current SLA for this tier; may be null if none has been set.
    private TierSla tierSla;
    // Aggregate resource usage across all buckets in this tier.
    private final ResUsage totals;
    // Total capacity of this tier per its SLA; null until an SLA supplies it.
    private ResAllocs tierResources = null;
    // Sum of every bucket's effective usage (see QueueBucket#getEffectiveUsage()).
    private ResAllocs effectiveUsedResources;
    // tierResources minus effectiveUsedResources; null while tierResources is unknown.
    private ResAllocs remainingResources = null;
    // Last effective usage recorded per bucket name, so the stale value can be
    // subtracted before the bucket's fresh value is added back in.
    private final Map<String, ResAllocs> lastEffectiveUsedResources = new HashMap<>();
    private final SortedBuckets sortedBuckets;
    // Snapshot of the last totals map passed to setTotalResources(), used to detect changes.
    private Map<VMResource, Double> currTotalResourcesMap = new HashMap<>();
    // Supplies the allocation share for a (tierNumber, bucketName) pair.
    private final BiFunction<Integer, String, Double> allocsShareGetter;

    Tier(int tierNumber, BiFunction<Integer, String, Double> allocsShareGetter) {
        this.tierNumber = tierNumber;
        this.tierName = "tier#" + tierNumber;
        this.totals = new ResUsage();
        this.effectiveUsedResources = ResAllocsUtil.emptyOf(tierName);
        // TODO: need to consider the impact of this comparator to any others we may want, like simple round robin.
        // Use DRF sorting. Except, note that it is undefined when two entities compare to 0 (equal values) which
        // one gets ahead of the other.
        sortedBuckets = new SortedBuckets(totals);
        this.allocsShareGetter = allocsShareGetter;
    }

    /**
     * Applies a new SLA (or clears it when null), propagates per-bucket guarantees, rebuilds the
     * effective-usage bookkeeping from scratch, and re-sorts the buckets.
     */
    void setTierSla(TierSla tierSla) {
        this.tierSla = tierSla;
        if (tierSla == null) {
            sortedBuckets.getSortedList().forEach(bucket -> bucket.setBucketGuarantees(null));
            tierResources = ResAllocsUtil.emptyOf(tierName);
        } else {
            sortedBuckets.getSortedList().forEach(bucket -> bucket.setBucketGuarantees(tierSla.getBucketAllocs(bucket.getName())));
            // Always create a bucket, if there is SLA defined for it for proper accounting
            tierSla.getAllocsMap().keySet().forEach(this::getOrCreateBucket);
            this.tierResources = tierSla.getTierCapacity();
        }
        // Recompute effective usage totals from the buckets' current state.
        this.effectiveUsedResources = ResAllocsUtil.emptyOf(tierName);
        this.lastEffectiveUsedResources.clear();
        for (QueueBucket bucket : sortedBuckets.getSortedList()) {
            effectiveUsedResources = ResAllocsUtil.add(effectiveUsedResources, bucket.getEffectiveUsage());
            lastEffectiveUsedResources.put(bucket.getName(), bucket.getEffectiveUsage());
        }
        this.remainingResources = ResAllocsUtil.subtract(tierResources, effectiveUsedResources);
        sortedBuckets.resort();
    }

    private QueueBucket getOrCreateBucket(QueuableTask t) {
        if (t == null)
            throw new NullPointerException();
        return getOrCreateBucket(t.getQAttributes().getBucketName());
    }

    // Looks up the named bucket, creating and registering it (with any SLA guarantees) when absent.
    private QueueBucket getOrCreateBucket(String bucketName) {
        QueueBucket bucket = sortedBuckets.get(bucketName);
        if (bucket == null) {
            bucket = new QueueBucket(tierNumber, bucketName, totals, allocsShareGetter);
            sortedBuckets.add(bucket);
            bucket.setBucketGuarantees(tierSla == null ? null : tierSla.getBucketAllocs(bucketName));
        }
        return bucket;
    }

    public int getTierNumber() {
        return tierNumber;
    }

    @Override
    public void queueTask(QueuableTask t) throws TaskQueueException {
        getOrCreateBucket(t).queueTask(t);
    }

    /**
     * Returns the next task from the least-dominant-usage bucket that has one, an error assignment
     * when the task would exceed the tier's remaining guaranteed capacity, or null when no bucket
     * has a task left to offer.
     */
    @Override
    public Assignable<QueuableTask> nextTaskToLaunch() throws TaskQueueException {
        for (QueueBucket bucket : sortedBuckets.getSortedList()) {
            final Assignable<QueuableTask> taskOrFailure = bucket.nextTaskToLaunch();
            if (taskOrFailure != null) {
                if (taskOrFailure.hasFailure()) {
                    return taskOrFailure;
                }
                QueuableTask task = taskOrFailure.getTask();
                // A task within its bucket's guaranteed capacity is always eligible.
                if (bucket.hasGuaranteedCapacityFor(task)) {
                    return taskOrFailure;
                }
                // Otherwise it must fit within the tier's remaining (unreserved) capacity.
                if (remainingResources == null || ResAllocsUtil.isBounded(task, remainingResources)) {
                    return taskOrFailure;
                }
                return Assignable.error(task, new AssignmentFailure(VMResource.ResAllocs, 0, 0, 0,
                        "No guaranteed capacity left for queue."
                                + "\n" + bucket.getBucketCapacityAsString()
                                + "\n" + getTierCapacityAsString()
                ));
            }
        }
        return null;
    }

    @Override
    public void assignTask(QueuableTask t) throws TaskQueueException {
        // assigning the task changes resource usage and therefore, sorting order must be updated.
        // We do this by removing the bucket from sortedBuckets, assigning the task in the bucket,
        // then adding the bucket back into the sortedBuckets. It will then fall into its right new place.
        // This operation therefore takes time complexity of O(log N).
        final QueueBucket bucket = sortedBuckets.remove(t.getQAttributes().getBucketName());
        if (bucket == null)
            throw new TaskQueueException("Invalid to not find bucket to assign task id=" + t.getId());
        try {
            bucket.assignTask(t);
            addUsage(bucket, t);
        } finally {
            sortedBuckets.add(bucket);
        }
    }

    @Override
    public boolean launchTask(QueuableTask t) throws TaskQueueException {
        // launching the task changes the resource usage and therefore sorting order must be updated.
        // We do this by removing the bucket from the sortedBuckets, launching the task in the bucket,
        // then adding the bucket back into the sortedBuckets. It will then fall into its right new place.
        // This operation therefore takes time complexity of O(log N).
        if (logger.isDebugEnabled())
            logger.debug("Adding " + t.getId() + ": to ordered buckets: " + getSortedListString());
        final String bucketName = t.getQAttributes().getBucketName();
        QueueBucket bucket = sortedBuckets.remove(bucketName);
        if (bucket == null) {
            bucket = new QueueBucket(tierNumber, bucketName, totals, allocsShareGetter);
        }
        try {
            // launchTask() returns true only when the task's usage was not already counted
            // (i.e., it was not previously assigned in this iteration).
            if (bucket.launchTask(t)) {
                addUsage(bucket, t);
                return true;
            }
        } finally {
            sortedBuckets.add(bucket);
        }
        return false;
    }

    // Debug-only sanity check that buckets remain in non-decreasing dominant-usage order.
    private void verifySortedBuckets() throws TaskQueueException {
        if (sortedBuckets.getSortedList().isEmpty())
            return;
        List<QueueBucket> list = new ArrayList<>(sortedBuckets.getSortedList());
        if (list.size() > 1) {
            QueueBucket prev = list.get(0);
            for (int i = 1; i < list.size(); i++) {
                if (list.get(i).getDominantUsageShare() < prev.getDominantUsageShare()) {
                    final String msg = "Incorrect sorting order : " + getSortedListString();
                    throw new TaskQueueException(msg);
                }
                prev = list.get(i);
            }
        }
    }

    @Override
    public QueuableTask removeTask(String id, QAttributes qAttributes) throws TaskQueueException {
        // removing a task can change the resource usage and therefore the sorting order of queues. So, we take the
        // same approach as in launchTask() above - remove the bucket and readd to keep sorting order updated.
        final QueueBucket bucket = sortedBuckets.remove(qAttributes.getBucketName());
        if (bucket == null)
            return null;
        final QueuableTask removed;
        try {
            removed = bucket.removeTask(id, qAttributes);
            if (removed != null) {
                removeUsage(bucket, removed);
            }
        } finally {
            // Keep the bucket registered if it still holds tasks or has an SLA allocation defined.
            if (bucket.size() > 0 || (tierSla != null && tierSla.getBucketAllocs(bucket.getName()) != null))
                sortedBuckets.add(bucket);
        }
        return removed;
    }

    private void addUsage(QueueBucket bucket, QueuableTask t) {
        totals.addUsage(t);
        updateEffectiveBucketTotals(bucket);
    }

    private void removeUsage(QueueBucket bucket, QueuableTask removed) {
        totals.remUsage(removed);
        updateEffectiveBucketTotals(bucket);
    }

    // Replaces the bucket's previously-recorded effective usage with its fresh value and
    // refreshes the tier's remaining-capacity figure.
    private void updateEffectiveBucketTotals(QueueBucket bucket) {
        ResAllocs lastEffective = lastEffectiveUsedResources.get(bucket.getName());
        if (lastEffective != null) {
            effectiveUsedResources = ResAllocsUtil.subtract(effectiveUsedResources, lastEffective);
        }
        lastEffectiveUsedResources.put(bucket.getName(), bucket.getEffectiveUsage());
        effectiveUsedResources = ResAllocsUtil.add(effectiveUsedResources, bucket.getEffectiveUsage());

        if (tierResources == null) {
            remainingResources = null;
        } else {
            remainingResources = ResAllocsUtil.subtract(tierResources, effectiveUsedResources);
        }
    }

    @Override
    public double getDominantUsageShare() {
        return 0.0; // undefined for a tier
    }

    @Override
    public void setTaskReadyTime(String taskId, QAttributes qAttributes, long when) throws TaskQueueException {
        final QueueBucket bucket = sortedBuckets.get(qAttributes.getBucketName());
        if (bucket != null)
            bucket.setTaskReadyTime(taskId, qAttributes, when);
    }

    @Override
    public void reset() {
        // The sort-order verification is expensive, so it runs only at debug level.
        if (logger.isDebugEnabled()) {
            try {
                verifySortedBuckets();
            } catch (TaskQueueException e) {
                logger.error(e.getMessage());
            }
        }
        for (QueueBucket bucket : sortedBuckets.getSortedList()) {
            bucket.reset();
        }
    }

    private String getSortedListString() {
        StringBuilder b = new StringBuilder("Tier " + tierNumber + " sortedBs: [");
        for (QueueBucket bucket : sortedBuckets.getSortedList()) {
            b.append(bucket.getName()).append(" (").append(bucket.getDominantUsageShare()).append("), ");
        }
        b.append("]");
        return b.toString();
    }

    private String getTierCapacityAsString() {
        StringBuilder sb = new StringBuilder();
        if (tierResources != null) {
            sb.append("Tier ").append(tierNumber).append(" Total Capacity: ").append(tierResources.getAsString());
        }
        if (effectiveUsedResources != null) {
            sb.append("\nTier ").append(tierNumber).append(" Used Capacity: ").append(effectiveUsedResources.getAsString());
        }
        if (remainingResources != null) {
            sb.append("\nTier ").append(tierNumber).append(" Remaining Capacity: ").append(remainingResources.getAsString());
        }
        return sb.toString();
    }

    @Override
    public void setTotalResources(Map<VMResource, Double> totalResourcesMap) {
        // Only re-sort when the totals actually changed; resorting is not free.
        // NOTE(review): buckets are handed tierResources (the SLA capacity), not the new
        // totalResourcesMap — presumably intentional so shares are computed against SLA
        // capacity when available; confirm against SortedBuckets/QueueBucket callers.
        if (totalResMapChanged(currTotalResourcesMap, totalResourcesMap)) {
            currTotalResourcesMap.clear();
            currTotalResourcesMap.putAll(totalResourcesMap);
            for (QueueBucket b : sortedBuckets.getSortedList()) {
                b.setTotalResources(tierResources);
            }
            logger.debug("Re-sorting buckets in tier " + tierNumber + " after totals changed");
            sortedBuckets.resort();
        }
    }

    // Returns true when the two maps differ in keys or in any value.
    private boolean totalResMapChanged(Map<VMResource, Double> currTotalResourcesMap, Map<VMResource, Double> totalResourcesMap) {
        if (currTotalResourcesMap.size() != totalResourcesMap.size())
            return true;
        Set<VMResource> curr = new HashSet<>(currTotalResourcesMap.keySet());
        for (VMResource r : totalResourcesMap.keySet()) {
            final Double c = currTotalResourcesMap.get(r);
            final Double n = totalResourcesMap.get(r);
            if ((c == null && n != null) || (c != null && n == null) || (n != null && !n.equals(c)))
                return true;
            curr.remove(r);
        }
        return !curr.isEmpty();
    }

    @Override
    public Map<TaskQueue.TaskState, Collection<QueuableTask>> getAllTasks() throws TaskQueueException {
        // Merge each bucket's per-state task collections into one map for the whole tier.
        Map<TaskQueue.TaskState, Collection<QueuableTask>> result = new HashMap<>();
        for (QueueBucket bucket : sortedBuckets.getSortedList()) {
            final Map<TaskQueue.TaskState, Collection<QueuableTask>> allTasks = bucket.getAllTasks();
            if (!allTasks.isEmpty()) {
                for (TaskQueue.TaskState s : TaskQueue.TaskState.values()) {
                    final Collection<QueuableTask> q = allTasks.get(s);
                    if (q != null && !q.isEmpty()) {
                        Collection<QueuableTask> resQ = result.get(s);
                        if (resQ == null) {
                            resQ = new LinkedList<>();
                            result.put(s, resQ);
                        }
                        resQ.addAll(q);
                    }
                }
            }
        }
        return result;
    }
}
| 9,209 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/tiered/TieredQueue.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues.tiered;
import com.netflix.fenzo.VMResource;
import com.netflix.fenzo.queues.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.function.BiFunction;
/**
* A tiered queuing system where queues are arranged in multiple tiers and then among multiple buckets within each tier.
* Tiers represent coarse grain priority, in which higher tier's queues are considered for resource assignment
* before any of the lower tiers' queues are considered. Within a tier, multiple queues are considered for resource
* assignment such that their dominant resource usage shares are similar. For example, a queue bucket using 60% of the
* total memory in use is said to be similar in usage to another bucket using 60% of the total CPUs in use, even if the
* latter's memory usage is, say, only 10%. The tiers are numbered {@code 0} to {@code N-1} for {@code N} tiers, with
* {@code 0} being the highest priority level.
*/
public class TieredQueue implements InternalTaskQueue {
    private static final Logger logger = LoggerFactory.getLogger(TieredQueue.class);

    // Tiers in priority order; index 0 is the highest priority tier.
    private final List<Tier> tiers;
    // Iteration state for the current scheduling pass; null means no pass in progress.
    private Iterator<Tier> iterator = null;
    private Tier currTier = null;
    // Tasks and SLA updates handed in (possibly from other threads); drained in reset().
    private final BlockingQueue<QueuableTask> tasksToQueue;
    private final BlockingQueue<TieredQueueSlas> slasQueue;
    private final TierSlas tierSlas = new TierSlas();
    private final BiFunction<Integer, String, Double> allocsShareGetter = tierSlas::getBucketAllocation;

    /**
     * Construct a tiered queue system with the given number of tiers.
     * @param numTiers The number of tiers.
     */
    public TieredQueue(int numTiers) {
        tiers = new ArrayList<>(numTiers);
        for ( int i=0; i<numTiers; i++ )
            tiers.add(new Tier(i, allocsShareGetter));
        tasksToQueue = new LinkedBlockingQueue<>();
        slasQueue = new LinkedBlockingQueue<>();
    }

    public int getNumTiers() {
        return tiers.size();
    }

    /**
     * Buffers the task for queuing; it is actually placed into its tier on the next reset().
     */
    @Override
    public void queueTask(QueuableTask task) {
        tasksToQueue.offer(task);
    }

    /**
     * Buffers a new SLA; a null SLA is replaced with an empty one. Applied on the next reset().
     * @throws IllegalArgumentException if the SLA is not a {@link TieredQueueSlas}.
     */
    @Override
    public void setSla(TaskQueueSla sla) throws IllegalArgumentException {
        if (sla != null && !(sla instanceof TieredQueueSlas)) {
            throw new IllegalArgumentException("Queue SLA must be an instance of " + TieredQueueSlas.class.getName() +
                    ", can't accept " + sla.getClass().getName());
        }
        slasQueue.offer(sla == null? new TieredQueueSlas(Collections.emptyMap(), Collections.emptyMap()) : (TieredQueueSlas)sla);
    }

    // Applies the most recently submitted SLA, if any, discarding older pending ones.
    private void setSlaInternal() {
        if (slasQueue.peek() != null) {
            List<TieredQueueSlas> slas = new ArrayList<>();
            slasQueue.drainTo(slas);
            tierSlas.setAllocations(slas.get(slas.size()-1)); // set the last one
            tiers.forEach(tier -> tier.setTierSla(tierSlas.getTierSla(tier.getTierNumber())));
        }
    }

    // Routes a buffered task into its tier, validating the tier number.
    private void addInternal(QueuableTask task) throws TaskQueueException {
        final int tierNumber = task.getQAttributes().getTierNumber();
        if ( tierNumber >= tiers.size() )
            throw new InvalidTierNumberException(tierNumber, tiers.size());
        tiers.get(tierNumber).queueTask(task);
    }

    /**
     * This implementation dynamically picks the next task to consider for resource assignment based on tiers and then
     * based on current dominant resource usage. The usage is updated with each resource assignment during the
     * scheduling iteration, in addition to updating with all running jobs from before.
     * @return The next task to assign resources to, a task with assignment failure if the task cannot be scheduled due to some
     * internal constraints (for example exceeds allowed resource usage for a queue). Returns {@code null} if none
     * remain for consideration.
     * @throws TaskQueueException if there is an unknown error getting the next task to launch from any of the tiers or
     * queue buckets.
     */
    @Override
    public Assignable<QueuableTask> next() throws TaskQueueException {
        if (iterator == null) {
            iterator = tiers.iterator();
            currTier = null;
        }
        // Exhaust the tier we were on before moving to the next one.
        if (currTier != null) {
            final Assignable<QueuableTask> taskOrFailure = currTier.nextTaskToLaunch();
            if (taskOrFailure != null)
                return taskOrFailure;
            currTier = null; // currTier all done
        }
        while (iterator.hasNext()) {
            currTier = iterator.next();
            final Assignable<QueuableTask> taskOrFailure = currTier.nextTaskToLaunch();
            if (taskOrFailure != null)
                return taskOrFailure;
            currTier = null; // this tier is done, move on to the next one
        }
        return null;
    }

    /**
     * Applies any pending SLA update, resets the tier iteration state, and drains buffered tasks
     * into their tiers.
     * @return true if any task was added to the queue.
     * @throws TaskQueueMultiException collecting the failures of any tasks that could not be queued.
     */
    @Override
    public boolean reset() throws TaskQueueMultiException {
        setSlaInternal();
        iterator = null;
        boolean queueChanged = false;
        List<Exception> exceptions = new LinkedList<>();
        if (tasksToQueue.peek() != null) {
            final List<QueuableTask> toQueue = new LinkedList<>();
            tasksToQueue.drainTo(toQueue);
            if (!toQueue.isEmpty()) {
                for (QueuableTask t : toQueue)
                    try {
                        addInternal(t);
                        queueChanged = true;
                    } catch (TaskQueueException e) {
                        exceptions.add(e);
                    }
            }
        }
        if (!exceptions.isEmpty())
            throw new TaskQueueMultiException(exceptions);
        return queueChanged;
    }

    /**
     * This method provides a bridge to the usage tracked queues contained within the tiered queues implementation.
     * @return Implementation for {@link UsageTrackedQueue} to account for all the queues within this tiered queue
     * implementation. This implementation focuses on usage tracking only and therefore does not allow invoking
     * {@link UsageTrackedQueue#nextTaskToLaunch()} and {@link UsageTrackedQueue#getAllTasks()}.
     */
    @Override
    public UsageTrackedQueue getUsageTracker() {
        return new UsageTrackedQueue() {
            @Override
            public void queueTask(QueuableTask t) throws TaskQueueException {
                tiers.get(t.getQAttributes().getTierNumber()).queueTask(t);
            }

            @Override
            public Assignable<QueuableTask> nextTaskToLaunch() {
                return null;
            }

            @Override
            public void assignTask(QueuableTask t) throws TaskQueueException {
                tiers.get(t.getQAttributes().getTierNumber()).assignTask(t);
            }

            @Override
            public boolean launchTask(QueuableTask t) throws TaskQueueException {
                return tiers.get(t.getQAttributes().getTierNumber()).launchTask(t);
            }

            @Override
            public QueuableTask removeTask(String id, QAttributes qAttributes) throws TaskQueueException {
                return tiers.get(qAttributes.getTierNumber()).removeTask(id, qAttributes);
            }

            @Override
            public double getDominantUsageShare() {
                return 0.0;
            }

            @Override
            public void setTaskReadyTime(String taskId, QAttributes qAttributes, long when) throws TaskQueueException {
                tiers.get(qAttributes.getTierNumber()).setTaskReadyTime(taskId, qAttributes, when);
            }

            @Override
            public void reset() {
                for(Tier tb: tiers)
                    tb.reset();
            }

            @Override
            public Map<TaskState, Collection<QueuableTask>> getAllTasks() {
                throw new UnsupportedOperationException();
            }

            @Override
            public void setTotalResources(Map<VMResource, Double> totalResourcesMap) {
                for (Tier t: tiers)
                    t.setTotalResources(totalResourcesMap);
            }
        };
    }

    /**
     * Collects all tasks from all tiers, merged per task state.
     */
    @Override
    public Map<TaskState, Collection<QueuableTask>> getAllTasks() {
        Map<TaskState, Collection<QueuableTask>> result = new HashMap<>();
        for (Tier tb: tiers) {
            try {
                final Map<TaskState, Collection<QueuableTask>> allTasks = tb.getAllTasks();
                if (!allTasks.isEmpty()) {
                    for (TaskState s: TaskState.values()) {
                        final Collection<QueuableTask> t = allTasks.get(s);
                        if (t != null && !t.isEmpty()) {
                            result.computeIfAbsent(s, k -> new LinkedList<>()).addAll(t);
                        }
                    }
                }
            } catch (TaskQueueException e) {
                logger.error("Unexpected: " + e.getMessage(), e);
            }
        }
        return result;
    }
}
| 9,210 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/tiered/TierSla.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues.tiered;
import com.netflix.fenzo.sla.ResAllocs;
import com.netflix.fenzo.sla.ResAllocsUtil;
import java.util.HashMap;
import java.util.Map;
/* package */ class TierSla {

    // small allocation for a bucket with no defined allocation
    static final double eps = 0.001;

    // Total capacity of the tier; may be null if never set.
    private ResAllocs tierCapacity;
    // Guaranteed allocation per bucket name.
    private final Map<String, ResAllocs> allocsMap = new HashMap<>();
    // Running totals across all bucket allocations, maintained incrementally by setAlloc().
    private double totalCpu = 0.0;
    private double totalMem = 0.0;
    private double totalNetwork = 0.0;
    private double totalDisk = 0.0;

    void setTierCapacity(ResAllocs tierCapacity) {
        this.tierCapacity = tierCapacity;
    }

    // Registers (or replaces) a bucket's allocation, keeping the resource totals consistent.
    void setAlloc(String bucket, ResAllocs value) {
        final ResAllocs prev = allocsMap.put(bucket, value);
        if (prev != null)
            subtract(prev);
        add(value);
    }

    private void add(ResAllocs value) {
        totalCpu += value.getCores();
        totalMem += value.getMemory();
        totalNetwork += value.getNetworkMbps();
        totalDisk += value.getDisk();
    }

    private void subtract(ResAllocs value) {
        totalCpu -= value.getCores();
        totalMem -= value.getMemory();
        totalNetwork -= value.getNetworkMbps();
        totalDisk -= value.getDisk();
    }

    public ResAllocs getTierCapacity() {
        return tierCapacity;
    }

    // NOTE(review): this getter mutates allocsMap — an unknown bucket gets an empty allocation
    // inserted, which makes allocsMap non-empty and changes what evalAllocationShare() returns
    // for it afterwards. The totals are NOT updated for the inserted empty entry (it is all
    // zeros, so the totals stay correct). Confirm callers rely on this insert-on-read behavior
    // before changing it.
    ResAllocs getBucketAllocs(String bucketName) {
        return allocsMap.computeIfAbsent(bucketName, name -> ResAllocsUtil.emptyOf(bucketName));
    }

    Map<String, ResAllocs> getAllocsMap() {
        return allocsMap;
    }

    /**
     * Evaluate the allocation share of a bucket among all the buckets for which allocations are defined. If there are
     * no allocations setup, return 1.0, implying 100%. If no allocation is setup for the given <code>bucket</code>,
     * return a small value. Otherwise, calculate the share percentage of each resource cpu, memory, network, and disk
     * from the total and return the maximum of these shares.
     *
     * @param bucket Name of the bucket.
     * @return Allocation share for the bucket.
     */
    double evalAllocationShare(String bucket) {
        if (allocsMap.isEmpty()) {
            return 1.0; // special case if there are no allocations setup
        }
        final ResAllocs resAllocs = allocsMap.get(bucket);
        if (resAllocs == null)
            return totalCpu < (1.0 / eps) ? eps : 1.0 / totalCpu; // arbitrarily base it on cpus
        // Guard against tiny totals to avoid dividing by (near) zero.
        double val = totalCpu < 1.0 ? eps : resAllocs.getCores() / totalCpu;
        val = Math.max(val, totalMem < 1.0 ? eps : resAllocs.getMemory() / totalMem);
        val = Math.max(val, totalNetwork < 1.0 ? eps : resAllocs.getNetworkMbps() / totalNetwork);
        return Math.max(val, totalDisk < 1.0 ? eps : resAllocs.getDisk() / totalDisk);
    }
}
| 9,211 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/tiered/QueueBucket.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues.tiered;
import com.netflix.fenzo.VMResource;
import com.netflix.fenzo.queues.*;
import com.netflix.fenzo.queues.TaskQueue;
import com.netflix.fenzo.sla.ResAllocs;
import com.netflix.fenzo.sla.ResAllocsUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.function.BiFunction;
/**
* A queue bucket is a collection of tasks in one bucket. Generally, all tasks in the bucket are associated
* with a single entity for scheduling purposes such as capacity guarantees.
*/
class QueueBucket implements UsageTrackedQueue {
private static final Logger logger = LoggerFactory.getLogger(QueueBucket.class);
private final int tierNumber;
private final String name;
private final ResUsage totals;
private final ResAllocs emptyBucketGuarantees;
private ResAllocs bucketGuarantees;
private ResAllocs effectiveUsage;
private final LinkedHashMap<String, QueuableTask> queuedTasks;
private final LinkedHashMap<String, QueuableTask> launchedTasks;
// Assigned tasks is a temporary holder for tasks being assigned resources during scheduling
// iteration. These tasks are duplicate entries of tasks in queuedTasks, which cannot be removed from queuedTasks
// collection in order to keep the iterator on queuedTasks consistent throughout the scheduling iteration. Remember
// that scheduler's taskTracker will trigger call into assignTask() during the scheduling iteration.
private final LinkedHashMap<String, QueuableTask> assignedTasks;
private Iterator<Map.Entry<String, QueuableTask>> iterator = null;
private ResAllocs tierResources;
private final BiFunction<Integer, String, Double> allocsShareGetter;
private final ResUsage tierUsage;
QueueBucket(int tierNumber, String name, ResUsage tierUsage, BiFunction<Integer, String, Double> allocsShareGetter) {
this.tierNumber = tierNumber;
this.name = name;
this.tierUsage = tierUsage;
totals = new ResUsage();
this.emptyBucketGuarantees = ResAllocsUtil.emptyOf(name);
bucketGuarantees = emptyBucketGuarantees;
queuedTasks = new LinkedHashMap<>();
launchedTasks = new LinkedHashMap<>();
assignedTasks = new LinkedHashMap<>();
this.allocsShareGetter = allocsShareGetter == null ?
(integer, s) -> 1.0 :
allocsShareGetter;
}
void setBucketGuarantees(ResAllocs bucketGuarantees) {
this.bucketGuarantees = bucketGuarantees == null ? emptyBucketGuarantees : bucketGuarantees;
updateEffectiveUsage();
}
ResAllocs getBucketGuarantees() {
return bucketGuarantees;
}
@Override
public void queueTask(QueuableTask t) throws TaskQueueException {
if (iterator != null)
throw new ConcurrentModificationException("Must reset before queuing tasks");
if (queuedTasks.get(t.getId()) != null)
throw new TaskQueueException("Duplicate task not allowed, task with id " + t.getId());
if (launchedTasks.get(t.getId()) != null)
throw new TaskQueueException("Task already launched, can't queue, id=" + t.getId());
queuedTasks.put(t.getId(), t);
}
@Override
public Assignable<QueuableTask> nextTaskToLaunch() throws TaskQueueException {
if (iterator == null) {
iterator = queuedTasks.entrySet().iterator();
if (!assignedTasks.isEmpty())
throw new TaskQueueException(assignedTasks.size() + " tasks still assigned but not launched");
}
while (iterator.hasNext()) {
final Map.Entry<String, QueuableTask> nextTask = iterator.next();
if (nextTask.getValue().getReadyAt() <= System.currentTimeMillis())
return Assignable.success(nextTask.getValue());
}
return null;
}
@Override
public void assignTask(QueuableTask t) throws TaskQueueException {
if (iterator == null)
throw new TaskQueueException(new IllegalStateException("assign called on task " + t.getId() + " while not iterating over tasks"));
if (queuedTasks.get(t.getId()) == null)
throw new TaskQueueException("Task not in queue for assigning, id=" + t.getId());
if (assignedTasks.get(t.getId()) != null)
throw new TaskQueueException("Task already assigned, id=" + t.getId());
if (launchedTasks.get(t.getId()) != null)
throw new TaskQueueException("Task already launched, id=" + t.getId());
assignedTasks.put(t.getId(), t);
addUsage(t);
}
@Override
public boolean launchTask(QueuableTask t) throws TaskQueueException {
if (iterator != null)
throw new ConcurrentModificationException("Must reset before launching tasks");
if (launchedTasks.get(t.getId()) != null)
throw new TaskQueueException("Task already launched, id=" + t.getId());
queuedTasks.remove(t.getId());
final QueuableTask removed = assignedTasks.remove(t.getId());
launchedTasks.put(t.getId(), t);
if (removed == null) { // queueTask usage only if it was not assigned, happens when initializing tasks that were running previously
addUsage(t);
return true;
}
return false;
}
@Override
public QueuableTask removeTask(String id, QAttributes qAttributes) throws TaskQueueException {
if (iterator != null)
throw new TaskQueueException("Must reset before removing tasks");
QueuableTask removed = queuedTasks.remove(id);
if (removed == null) {
removed = assignedTasks.remove(id);
if (removed == null)
removed = launchedTasks.remove(id);
if (removed != null)
removeUsage(removed);
}
return removed;
}
private void addUsage(QueuableTask t) {
totals.addUsage(t);
updateEffectiveUsage();
}
private void removeUsage(QueuableTask removed) {
totals.remUsage(removed);
updateEffectiveUsage();
}
private void updateEffectiveUsage() {
effectiveUsage = ResAllocsUtil.ceilingOf(totals.getResAllocsWrapper(), bucketGuarantees);
}
@Override
public double getDominantUsageShare() {
// If total tier capacity is not available, use current tier allocation as a base for share computation.
ResAllocs total = tierResources == null ? tierUsage.getResAllocsWrapper() : tierResources;
return totals.getDominantResUsageFrom(total) /
Math.max(TierSla.eps / 10.0, allocsShareGetter.apply(tierNumber, name));
}
@Override
public void setTaskReadyTime(String taskId, QAttributes qAttributes, long when) throws TaskQueueException {
if (iterator != null)
throw new TaskQueueException("Must reset before setting task ready time");
final QueuableTask task = queuedTasks.get(taskId);
if (task != null)
task.safeSetReadyAt(when);
}
public boolean hasGuaranteedCapacityFor(QueuableTask task) {
// Check first if we are already above the limit
if (!ResAllocsUtil.isBounded(totals.getResAllocsWrapper(), bucketGuarantees)) {
return false;
}
// We have some remaining guaranteed resources. Now check if they are enough for our task.
ResAllocs summed = ResAllocsUtil.add(totals.getResAllocsWrapper(), task);
return ResAllocsUtil.isBounded(summed, bucketGuarantees);
}
public ResAllocs getEffectiveUsage() {
return effectiveUsage;
}
public String getBucketCapacityAsString() {
StringBuilder sb = new StringBuilder();
if (bucketGuarantees != null) {
sb.append("Bucket ").append(name).append(" Total Capacity: ").append(bucketGuarantees.getAsString());
}
if (effectiveUsage != null) {
sb.append("\nBucket ").append(name).append(" Used Capacity: ").append(effectiveUsage.getAsString());
}
return sb.toString();
}
@Override
public void reset() {
iterator = null;
}
@Override
public Map<TaskQueue.TaskState, Collection<QueuableTask>> getAllTasks() throws TaskQueueException {
if (iterator != null)
throw new TaskQueueException("Must reset before getting list of tasks");
Map<TaskQueue.TaskState, Collection<QueuableTask>> result = new HashMap<>();
result.put(TaskQueue.TaskState.QUEUED, Collections.unmodifiableCollection(queuedTasks.values()));
result.put(TaskQueue.TaskState.LAUNCHED, Collections.unmodifiableCollection(launchedTasks.values()));
return result;
}
    /**
     * Sets the tier's total resources from a per-resource map, converting it into a
     * {@code ResAllocs} named "tier".
     */
    @Override
    public void setTotalResources(Map<VMResource, Double> totalResourcesMap) {
        this.tierResources = ResAllocsUtil.toResAllocs("tier", totalResourcesMap);
    }
    /**
     * Sets the tier's total resources directly from an existing {@code ResAllocs}.
     */
    public void setTotalResources(ResAllocs tierResources) {
        this.tierResources = tierResources;
    }
    /**
     * Returns the number of tasks tracked by this bucket: queued plus launched.
     */
    int size() {
        return queuedTasks.size() + launchedTasks.size(); // assignedTasks not counted: they duplicate entries in queuedTasks
    }
    /** Returns the number of the tier this bucket belongs to. */
    int getTierNumber() {
        return tierNumber;
    }
    /** Returns this bucket's name. */
    String getName() {
        return name;
    }
}
| 9,212 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/queues/tiered/TierSlas.java | /*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.queues.tiered;
import java.util.HashMap;
import java.util.Map;
/* package */ class TierSlas {

    // Tier number -> SLA definition. Replaced wholesale on update, hence volatile.
    private volatile Map<Integer, TierSla> slasByTier = new HashMap<>();

    TierSla getTierSla(int tierIndex) {
        return slasByTier.get(tierIndex);
    }

    double getBucketAllocation(int tier, String bucketName) {
        final TierSla sla = slasByTier.get(tier);
        if (sla == null) {
            return 1.0; // no SLA defined for this tier: the bucket gets the full share
        }
        return sla.evalAllocationShare(bucketName);
    }

    void setAllocations(TieredQueueSlas slas) {
        slasByTier = slas.getSlas();
    }
}
| 9,213 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/functions/Action1.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.functions;
/**
 * A one-argument action: a callback that consumes a single value and returns nothing.
 *
 * @param <T> the type of the argument accepted by the action
 */
@FunctionalInterface
public interface Action1<T> {
    /**
     * Invoke the action.
     *
     * @param t the single argument to the action
     */
    void call(T t);
}
| 9,214 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/functions/Action0.java | /*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.functions;
/**
* A no-argument action.
*/
public interface Action0 {
/**
* Invoke the action.
*/
void call();
}
| 9,215 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/functions/Func1.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.functions;
/**
 * Represents a function with a single argument.
 *
 * @param <T1> the type of the function's argument
 * @param <R>  the type of the function's result
 */
@FunctionalInterface
public interface Func1<T1, R> {
    /**
     * Invoke the function.
     *
     * @param t1 the single argument to the function
     * @return the return value from the function
     */
    R call(T1 t1);
}
| 9,216 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/functions/Action2.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.functions;
/**
* A two argument action.
*/
public interface Action2<T1, T2> {
/**
* Invoke the action.
*
* @param t1 the first argument to the action
* @param t2 the second argument to the action
*/
void call(T1 t1, T2 t2);
}
| 9,217 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/sla/ResAllocsUtil.java | package com.netflix.fenzo.sla;
import com.netflix.fenzo.VMResource;
import com.netflix.fenzo.queues.QueuableTask;
import java.util.Map;
/**
 * Collection of static helper functions for the {@link com.netflix.fenzo.sla.ResAllocs} data
 * type: component-wise arithmetic, bound checks, and conversions. All methods are side-effect
 * free and return new {@link ResAllocs} instances rather than mutating their inputs.
 */
public final class ResAllocsUtil {

    private ResAllocsUtil() {
        // static utility class: not instantiable
    }

    /** Returns the component-wise sum of the two allocations, named after {@code first}. */
    public static ResAllocs add(ResAllocs first, ResAllocs second) {
        return new ResAllocsBuilder(first.getTaskGroupName())
                .withCores(first.getCores() + second.getCores())
                .withMemory(first.getMemory() + second.getMemory())
                .withNetworkMbps(first.getNetworkMbps() + second.getNetworkMbps())
                .withDisk(first.getDisk() + second.getDisk())
                .build();
    }

    /** Returns {@code first} plus the resources demanded by the given task. */
    public static ResAllocs add(ResAllocs first, QueuableTask second) {
        return new ResAllocsBuilder(first.getTaskGroupName())
                .withCores(first.getCores() + second.getCPUs())
                .withMemory(first.getMemory() + second.getMemory())
                .withNetworkMbps(first.getNetworkMbps() + second.getNetworkMbps())
                .withDisk(first.getDisk() + second.getDisk())
                .build();
    }

    /** Returns the component-wise difference {@code first - second}, named after {@code first}. */
    public static ResAllocs subtract(ResAllocs first, ResAllocs second) {
        return new ResAllocsBuilder(first.getTaskGroupName())
                .withCores(first.getCores() - second.getCores())
                .withMemory(first.getMemory() - second.getMemory())
                .withNetworkMbps(first.getNetworkMbps() - second.getNetworkMbps())
                .withDisk(first.getDisk() - second.getDisk())
                .build();
    }

    /** Returns the component-wise maximum of the two allocations, named after {@code first}. */
    public static ResAllocs ceilingOf(ResAllocs first, ResAllocs second) {
        return new ResAllocsBuilder(first.getTaskGroupName())
                .withCores(Math.max(first.getCores(), second.getCores()))
                .withMemory(Math.max(first.getMemory(), second.getMemory()))
                .withNetworkMbps(Math.max(first.getNetworkMbps(), second.getNetworkMbps()))
                .withDisk(Math.max(first.getDisk(), second.getDisk()))
                .build();
    }

    /**
     * Returns true iff the task's demand does not exceed {@code second} on any resource
     * dimension. (Negated-comparison form deliberately preserves NaN semantics.)
     */
    public static boolean isBounded(QueuableTask first, ResAllocs second) {
        return !(first.getCPUs() > second.getCores()
                || first.getMemory() > second.getMemory()
                || first.getNetworkMbps() > second.getNetworkMbps()
                || first.getDisk() > second.getDisk());
    }

    /**
     * Returns true iff {@code first} does not exceed the task's resources on any
     * dimension. (Negated-comparison form deliberately preserves NaN semantics.)
     */
    public static boolean isBounded(ResAllocs first, QueuableTask second) {
        return !(first.getCores() > second.getCPUs()
                || first.getMemory() > second.getMemory()
                || first.getNetworkMbps() > second.getNetworkMbps()
                || first.getDisk() > second.getDisk());
    }

    /**
     * Returns true iff {@code first} does not exceed {@code second} on any resource
     * dimension. (Negated-comparison form deliberately preserves NaN semantics.)
     */
    public static boolean isBounded(ResAllocs first, ResAllocs second) {
        return !(first.getCores() > second.getCores()
                || first.getMemory() > second.getMemory()
                || first.getNetworkMbps() > second.getNetworkMbps()
                || first.getDisk() > second.getDisk());
    }

    /** Returns true iff the two allocations are equal on every resource dimension (names ignored). */
    public static boolean hasEqualResources(ResAllocs first, ResAllocs second) {
        return first.getCores() == second.getCores()
                && first.getMemory() == second.getMemory()
                && first.getNetworkMbps() == second.getNetworkMbps()
                && first.getDisk() == second.getDisk();
    }

    /** Returns an all-zero allocation named "anonymous". */
    public static ResAllocs empty() {
        return emptyOf("anonymous");
    }

    /** Returns an all-zero allocation with the given task group name. */
    public static ResAllocs emptyOf(String name) {
        return new ResAllocsBuilder(name)
                .withCores(0)
                .withMemory(0)
                .withNetworkMbps(0)
                .withDisk(0)
                .build();
    }

    /** Returns a copy of {@code allocs} carrying {@code newName} as its task group name. */
    public static ResAllocs rename(String newName, ResAllocs allocs) {
        return new ResAllocsBuilder(newName)
                .withCores(allocs.getCores())
                .withMemory(allocs.getMemory())
                .withNetworkMbps(allocs.getNetworkMbps())
                .withDisk(allocs.getDisk())
                .build();
    }

    /** Converts a per-resource map (missing entries treated as 0) into a named {@link ResAllocs}. */
    public static ResAllocs toResAllocs(String name, Map<VMResource, Double> resourceMap) {
        return new ResAllocsBuilder(name)
                .withCores(resourceMap.getOrDefault(VMResource.CPU, 0.0))
                .withMemory(resourceMap.getOrDefault(VMResource.Memory, 0.0))
                .withNetworkMbps(resourceMap.getOrDefault(VMResource.Network, 0.0))
                .withDisk(resourceMap.getOrDefault(VMResource.Disk, 0.0))
                .build();
    }
}
| 9,218 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/sla/ResAllocs.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.sla;
/**
 * Interface that represents resource allocation limits for a task group. This limits the sum of resources used
 * by all tasks that have that task group name to the amounts provided.
 * <p>
 * You obtain an object that implements this interface by means of the {@link ResAllocsBuilder}.
 *
 * @see <a href="https://github.com/Netflix/Fenzo/wiki/Resource-Allocation-Limits">Resource Allocation
 * Limits</a>
 */
public interface ResAllocs {
    /**
     * Returns the name of the task group these limits apply to.
     *
     * @return the task group name
     */
    String getTaskGroupName();
    /**
     * Limits the number of cores the task group can use to the number returned from this method.
     *
     * @return the maximum number of cores
     */
    double getCores();
    /**
     * Limits the amount of memory the task group can use to the number of MB returned from this method.
     *
     * @return the maximum amount of memory, in MB
     */
    double getMemory();
    /**
     * Limits the amount of bandwidth the task group can use to the number of megabits per second returned from
     * this method.
     *
     * @return the maximum network bandwidth, in Mbps
     */
    double getNetworkMbps();
    /**
     * Limits the amount of disk space the task group can use to the number of MB returned from this method.
     *
     * @return the maximum disk space, in MB
     */
    double getDisk();
    /**
     * Returns the resource allocations in a string representation.
     *
     * @return the resources as a string
     */
    default String getAsString() {
        return "{ cpu: " + getCores() +
                ", memory: " + getMemory() +
                ", disk: " + getDisk() +
                ", networkMbps: " + getNetworkMbps() + " }";
    }
}
| 9,219 |
0 | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-core/src/main/java/com/netflix/fenzo/sla/ResAllocsBuilder.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.sla;
/**
 * Fluent builder for {@link ResAllocs} instances. Any limit left unset defaults to
 * {@link Double#MAX_VALUE}, i.e. effectively unlimited.
 */
public class ResAllocsBuilder {

    private final String taskGroupName;
    private double cores = Double.MAX_VALUE;
    private double memory = Double.MAX_VALUE;
    private double networkMbps = Double.MAX_VALUE;
    private double disk = Double.MAX_VALUE;

    public ResAllocsBuilder(String taskGroupName) {
        this.taskGroupName = taskGroupName;
    }

    /**
     * Limits the number of cores the task group can use.
     *
     * @param cores the maximum number of CPUs
     * @return this builder, for chaining
     */
    public ResAllocsBuilder withCores(double cores) {
        this.cores = cores;
        return this;
    }

    /**
     * Limits the amount of memory the task group can use.
     *
     * @param memory the maximum amount of memory, in MB
     * @return this builder, for chaining
     */
    public ResAllocsBuilder withMemory(double memory) {
        this.memory = memory;
        return this;
    }

    /**
     * Limits the amount of network bandwidth the task group can use.
     *
     * @param networkMbps the maximum amount of bandwidth, in Mbps
     * @return this builder, for chaining
     */
    public ResAllocsBuilder withNetworkMbps(double networkMbps) {
        this.networkMbps = networkMbps;
        return this;
    }

    /**
     * Limits the amount of disk space the task group can use.
     *
     * @param disk the maximum amount of disk space, in MB
     * @return this builder, for chaining
     */
    public ResAllocsBuilder withDisk(double disk) {
        this.disk = disk;
        return this;
    }

    /**
     * Builds an immutable {@link ResAllocs} from the current builder state.
     *
     * @return a {@link ResAllocs} object, built to your specifications
     */
    public ResAllocs build() {
        return new ResAllocsImpl(this);
    }

    /** Immutable value object snapshot of a builder's state. */
    private static class ResAllocsImpl implements ResAllocs {

        private final String taskGroupName;
        private final double cores;
        private final double memory;
        private final double networkMbps;
        private final double disk;

        private ResAllocsImpl(ResAllocsBuilder builder) {
            this.taskGroupName = builder.taskGroupName;
            this.cores = builder.cores;
            this.memory = builder.memory;
            this.networkMbps = builder.networkMbps;
            this.disk = builder.disk;
        }

        @Override
        public String getTaskGroupName() {
            return taskGroupName;
        }

        @Override
        public double getCores() {
            return cores;
        }

        @Override
        public double getMemory() {
            return memory;
        }

        @Override
        public double getNetworkMbps() {
            return networkMbps;
        }

        @Override
        public double getDisk() {
            return disk;
        }

        @Override
        public String toString() {
            return "ResAllocsImpl{" +
                    "taskGroupName='" + taskGroupName + '\'' +
                    ", cores=" + cores +
                    ", memory=" + memory +
                    ", networkMbps=" + networkMbps +
                    ", disk=" + disk +
                    '}';
        }
    }
}
| 9,220 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/CronTrigger.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.quartz.ScheduleBuilder;
import rx.functions.Action1;
import java.util.Date;
import java.util.TimeZone;
import static org.quartz.CronScheduleBuilder.cronSchedule;
/**
 * A {@link ScheduledTrigger} that fires according to a quartz cron expression evaluated in a
 * configurable time zone (default: America/Los_Angeles). The cron expression is validated at
 * construction time.
 */
public class CronTrigger<T> extends ScheduledTrigger<T> {

    /** Fallback time zone id used whenever the caller supplies none (or an empty one). */
    private static final String DEFAULT_TIME_ZONE_ID = "America/Los_Angeles";

    private String cronExpression;
    // No field initializer: both constructors always assign this field.
    private String timeZoneId;

    @JsonCreator
    public CronTrigger(@JsonProperty("cronExpression") String cronExpression,
                       @JsonProperty("timeZoneId") String timeZoneId,
                       @JsonProperty("startAt") Date startAt,
                       @JsonProperty("name") String name,
                       @JsonProperty("data") T data,
                       @JsonProperty("dataType") Class<T> dataType,
                       @JsonProperty("action") Class<? extends Action1<T>> action) {
        super(name, data, dataType, action, startAt, null);
        this.cronExpression = cronExpression;
        // Fall back to the default zone when the caller supplies none.
        this.timeZoneId = timeZoneId == null || "".equals(timeZoneId) ? DEFAULT_TIME_ZONE_ID : timeZoneId;
        TriggerUtils.validateCronExpression(cronExpression);
    }

    public CronTrigger(String cronExpression,
                       String name,
                       T data,
                       Class<T> dataType,
                       Class<? extends Action1<T>> action) {
        this(cronExpression, DEFAULT_TIME_ZONE_ID, new Date(), name, data, dataType, action);
    }

    /**
     * @return the cron expression this trigger fires on
     */
    public String getCronExpression() {
        return cronExpression;
    }

    /**
     * @param cronExpression the new cron expression
     */
    public void setCronExpression(String cronExpression) {
        // NOTE(review): unlike the constructor, this setter performs no validation of the
        // expression -- confirm whether TriggerUtils.validateCronExpression should apply here too.
        this.cronExpression = cronExpression;
    }

    /**
     * Builds the quartz schedule: the cron expression in the configured time zone, with
     * misfires skipped ("do nothing") rather than fired late.
     *
     * @return the quartz {@link ScheduleBuilder} for this trigger
     */
    @Override
    @JsonIgnore
    public ScheduleBuilder getScheduleBuilder() {
        return cronSchedule(cronExpression).withMisfireHandlingInstructionDoNothing().inTimeZone(TimeZone.getTimeZone(timeZoneId));
    }

    /**
     * @return a short diagnostic representation: id, name and cron expression
     */
    @Override
    public String toString() {
        return "CronTrigger (" + getId() + ":" + getName() + ":" + cronExpression + ")";
    }
}
| 9,221 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/Scheduler.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers;
import org.quartz.*;
import org.quartz.Trigger;
import org.quartz.impl.StdSchedulerFactory;
import java.util.Properties;
import static org.quartz.JobBuilder.newJob;
/**
 * Thin wrapper over the quartz scheduler. Not intended to be used by classes outside
 * this package.
 */
class Scheduler {

    static final String DEFAULT_GROUP = "DEFAULT_GROUP";

    private org.quartz.Scheduler quartzScheduler;

    /** Lazy-holder idiom: the singleton is created thread-safely on first access. */
    private static class SchedulerHolder {
        private static final Scheduler INSTANCE = new Scheduler();
    }

    /**
     * Returns the process-wide {@code Scheduler} instance.
     */
    static Scheduler getInstance() {
        return SchedulerHolder.INSTANCE;
    }

    private Scheduler() {
    }

    /**
     * Creates and starts the underlying quartz scheduler, if not already started.
     * Subsequent calls are no-ops.
     *
     * @param threadPoolSize number of worker threads for the quartz thread pool
     * @throws SchedulerException if quartz fails to create or start its scheduler
     */
    synchronized void startScheduler(int threadPoolSize) throws SchedulerException {
        if (quartzScheduler != null) {
            return; // already started
        }
        Properties props = new Properties();
        props.setProperty("org.quartz.threadPool.threadCount", String.format("%d", threadPoolSize));
        quartzScheduler = new StdSchedulerFactory(props).getScheduler();
        quartzScheduler.start();
    }

    /**
     * Shuts down the underlying quartz scheduler without waiting for running jobs.
     *
     * @throws SchedulerException if quartz fails to shut down
     */
    void stopScheduler() throws SchedulerException {
        quartzScheduler.shutdown();
    }

    /**
     * Shuts down the underlying quartz scheduler.
     *
     * @param waitForJobsToComplete whether to block until running jobs have finished
     * @throws SchedulerException if quartz fails to shut down
     */
    void stopScheduler(boolean waitForJobsToComplete) throws SchedulerException {
        quartzScheduler.shutdown(waitForJobsToComplete);
    }

    /**
     * Creates a quartz job with the given identity and schedules it with the given trigger.
     *
     * @param jobId    unique id of the job within its group
     * @param jobGroup group the job belongs to
     * @param jobClass the {@link Job} implementation quartz should instantiate and run
     * @param trigger  the quartz trigger controlling when the job fires
     * @throws SchedulerException if quartz rejects the job or trigger
     */
    void scheduleQuartzJob(String jobId, String jobGroup, Class<? extends Job> jobClass, Trigger trigger) throws SchedulerException {
        JobDetail job = newJob(jobClass).withIdentity(jobId, jobGroup).build();
        quartzScheduler.scheduleJob(job, trigger);
    }

    /**
     * Removes the trigger with the given identity, unscheduling the associated job.
     *
     * @param jobId    unique id of the job within its group
     * @param jobGroup group the job belongs to
     * @throws SchedulerException if quartz fails to unschedule the job
     */
    void unscheduleQuartzJob(String jobId, String jobGroup) throws SchedulerException {
        quartzScheduler.unscheduleJob(TriggerKey.triggerKey(jobId, jobGroup));
    }

    /**
     * Tells whether a job with the given identity currently exists in quartz.
     *
     * @param jobId    unique id of the job within its group
     * @param jobGroup group the job belongs to
     * @return true if the job exists, false otherwise
     * @throws SchedulerException if quartz fails to answer the query
     */
    boolean isScheduled(String jobId, String jobGroup) throws SchedulerException {
        return quartzScheduler.checkExists(new JobKey(jobId, jobGroup));
    }
}
| 9,222 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/ScheduledTrigger.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.quartz.ScheduleBuilder;
import rx.functions.Action1;
import java.util.Date;
/**
 * Base class for all triggers that can be scheduled to fire at particular times.
 */
public abstract class ScheduledTrigger<T> extends Trigger<T> {

    private final Date startAt;
    private final Date endAt;

    // The live quartz trigger backing this instance; set once it has been scheduled.
    @JsonIgnore
    private org.quartz.Trigger quartzTrigger;

    @JsonCreator
    public ScheduledTrigger(
            @JsonProperty("name") String name,
            @JsonProperty("data") T data,
            @JsonProperty("dataType") Class<T> dataType,
            @JsonProperty("action") Class<? extends Action1<T>> action,
            @JsonProperty("startAt") Date startAt,
            @JsonProperty("endAt") Date endAt) {
        super(name, data, dataType, action);
        this.startAt = startAt;
        this.endAt = endAt;
    }

    /**
     * Supplies the quartz schedule describing when this trigger fires.
     *
     * @return the quartz {@link ScheduleBuilder} for this trigger
     */
    public abstract ScheduleBuilder getScheduleBuilder();

    /**
     * Records the quartz trigger created when this instance was scheduled.
     *
     * @param quartzTrigger the backing quartz trigger
     */
    void setQuartzTrigger(org.quartz.Trigger quartzTrigger) {
        this.quartzTrigger = quartzTrigger;
    }

    org.quartz.Trigger getQuartzTrigger() {
        return quartzTrigger;
    }

    public Date getStartAt() {
        return startAt;
    }

    public Date getEndAt() {
        return endAt;
    }

    /** @return the next fire time, or {@code null} when this trigger has not been scheduled */
    public Date getNextFireTime() {
        if (quartzTrigger == null) {
            return null;
        }
        return quartzTrigger.getNextFireTime();
    }

    /** @return the previous fire time, or {@code null} when this trigger has not been scheduled */
    public Date getPreviousFireTime() {
        if (quartzTrigger == null) {
            return null;
        }
        return quartzTrigger.getPreviousFireTime();
    }
}
| 9,223 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/TriggerOperator.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers;
import com.netflix.fenzo.triggers.exceptions.SchedulerException;
import com.netflix.fenzo.triggers.exceptions.TriggerNotFoundException;
import com.netflix.fenzo.triggers.persistence.InMemoryTriggerDao;
import com.netflix.fenzo.triggers.persistence.TriggerDao;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rx.functions.Action1;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.quartz.TriggerBuilder.newTrigger;
import static org.quartz.TriggerKey.triggerKey;
/**
* @warn class description missing
*/
public class TriggerOperator {
    // Keys under which the trigger and this operator are stashed in the quartz JobDataMap
    // (see scheduleTrigger(), which builds that map for ScheduledTriggerJob).
    public static final String TRIGGER_KEY = "trigger";
    public static final String TRIGGER_OPERATOR_KEY = "triggerOperator";

    private static final Logger logger = LoggerFactory.getLogger(TriggerOperator.class);

    // Process-wide wrapper around the quartz scheduler.
    private final Scheduler scheduler;
    // Persistence for registered triggers.
    private final TriggerDao triggerDao;
    private final int threadPoolSize;
    // Flipped by initialize(); guards one-time startup.
    private final AtomicBoolean initialized = new AtomicBoolean(false);
    /**
     * Creates a {@code TriggerOperator} backed by an in-memory trigger store.
     *
     * @param threadPoolSize the thread pool size for the scheduler
     */
    public TriggerOperator(int threadPoolSize) {
        this(new InMemoryTriggerDao(), threadPoolSize);
    }
    /**
     * Creates a {@code TriggerOperator} backed by the given trigger store.
     *
     * @param triggerDao dao implementation for {@code Trigger} persistence
     * @param threadPoolSize the thread pool size for the scheduler
     */
    public TriggerOperator(TriggerDao triggerDao, int threadPoolSize) {
        this.triggerDao = triggerDao;
        this.scheduler = Scheduler.getInstance();
        this.threadPoolSize = threadPoolSize;
    }
/**
* Users of this class must call {@code initialize()} before they use this class.
* @warn exception SchedulerException description missing
*
* @throws SchedulerException
*/
@PostConstruct
public void initialize() throws SchedulerException {
if (initialized.compareAndSet(false, true)) {
try {
this.scheduler.startScheduler(threadPoolSize);
} catch (org.quartz.SchedulerException se) {
throw new SchedulerException("Exception occurred while initializing TriggerOperator", se);
}
for (Iterator<Trigger> iterator = getTriggers().iterator(); iterator.hasNext(); ) {
Trigger trigger = iterator.next();
if (!trigger.isDisabled() && trigger instanceof ScheduledTrigger) {
scheduleTrigger((ScheduledTrigger) trigger);
}
}
}
}
    /**
     * Stops the underlying scheduler, waiting for running jobs to complete. A no-op
     * when {@link #initialize()} was never invoked.
     *
     * @throws SchedulerException if the underlying quartz scheduler fails to shut down
     */
    @PreDestroy
    public void destroy() throws SchedulerException {
        if (initialized.get() && this.scheduler != null) {
            try {
                this.scheduler.stopScheduler(true);
            } catch (org.quartz.SchedulerException se) {
                throw new SchedulerException("Exception occurred while destroying TriggerOperator", se);
            }
        }
    }
    /**
     * Returns a new {@code TriggerOperator} with sensible defaults: an in-memory
     * trigger store and a scheduler thread pool of 20. Note that a fresh instance is
     * created on every call.
     *
     * @return a new {@code TriggerOperator}
     */
    public static TriggerOperator getInstance() {
        return new TriggerOperator(new InMemoryTriggerDao(), 20);
    }
    /**
     * Returns a new {@code TriggerOperator} with an in-memory trigger store and the
     * given scheduler thread pool size.
     *
     * @param threadPoolSize the thread pool size for the scheduler
     * @return a new {@code TriggerOperator}
     */
    public static TriggerOperator getInstance(int threadPoolSize) {
        return new TriggerOperator(new InMemoryTriggerDao(), threadPoolSize);
    }
    /**
     * Returns a new {@code TriggerOperator} with the given trigger store and scheduler
     * thread pool size.
     *
     * @param triggerDao dao implementation for {@code Trigger} persistence
     * @param threadPoolSize the thread pool size for the scheduler
     * @return a new {@code TriggerOperator}
     */
    public static TriggerOperator getInstance(TriggerDao triggerDao, int threadPoolSize) {
        return new TriggerOperator(triggerDao, threadPoolSize);
    }
    /**
     * Returns the {@code Trigger} with the given unique id, looked up in the trigger store.
     * May return {@code null} when no trigger matches (callers in this class treat
     * {@code null} as not-found).
     *
     * @param triggerId the string that uniquely identifies the {@code Trigger}
     * @return the {@code Trigger} that matches {@code triggerId}
     */
    public Trigger getTrigger(String triggerId) {
        return triggerDao.getTrigger(triggerId);
    }
    /**
     * Registers a {@code Trigger}: persists it in the trigger store and, when it is a
     * {@code ScheduledTrigger}, schedules it immediately.
     *
     * @param triggerGroup group/namespace under which the trigger is stored
     * @param trigger the trigger to register
     * @return the id assigned to the newly stored trigger
     * @throws SchedulerException if scheduling a {@code ScheduledTrigger} fails
     */
    public String registerTrigger(String triggerGroup, Trigger trigger) throws SchedulerException {
        String triggerId = triggerDao.createTrigger(triggerGroup, trigger);
        if (trigger instanceof ScheduledTrigger) {
            scheduleTrigger((ScheduledTrigger) trigger);
        }
        return triggerId;
    }
/**
* Disables the {@code Trigger}. If the {@code Trigger} is disabled it will <em>not</em> execute.
* @warn exception SchedulerException description missing
*
* @param triggerId the string that uniquely identifies the {@code Trigger} to be disabled
* @throws TriggerNotFoundException if there is no {@code Trigger} that matches {@code triggerId}
* @throws SchedulerException
*/
public void disableTrigger(String triggerId) throws TriggerNotFoundException, SchedulerException {
Trigger trigger = getTrigger(triggerId);
if (trigger != null) {
disableTrigger(trigger);
} else {
throw new TriggerNotFoundException("No trigger found with trigger id: " + triggerId);
}
}
    /**
     * Disables the {@code Trigger} and persists the change. A disabled trigger does
     * <em>not</em> execute; a {@code ScheduledTrigger} is also unscheduled from quartz.
     *
     * @param trigger the {@code Trigger} to be disabled
     * @throws SchedulerException if unscheduling a {@code ScheduledTrigger} fails
     */
    public void disableTrigger(Trigger trigger) throws SchedulerException {
        trigger.setDisabled(true);
        triggerDao.updateTrigger(trigger);
        if (trigger instanceof ScheduledTrigger) {
            unscheduleTrigger((ScheduledTrigger) trigger);
        }
    }
/**
* Enables the {@code Trigger}.
* @warn exception SchedulerException description missing
*
* @param triggerId the string that uniquely identifies the {@code Trigger} to be enabled
* @throws TriggerNotFoundException if there is no {@code Trigger} that matches {@code triggerId}
* @throws SchedulerException
*/
public void enableTrigger(String triggerId) throws TriggerNotFoundException, SchedulerException {
Trigger trigger = getTrigger(triggerId);
if (trigger != null) {
enableTrigger(trigger);
} else {
throw new TriggerNotFoundException("No trigger found with trigger id: " + triggerId);
}
}
    /**
     * Enables the {@code Trigger} and persists the change; a {@code ScheduledTrigger}
     * is also (re)scheduled with quartz.
     *
     * @param trigger the {@code Trigger} to be enabled
     * @throws SchedulerException if scheduling a {@code ScheduledTrigger} fails
     */
    public void enableTrigger(Trigger trigger) throws SchedulerException {
        trigger.setDisabled(false);
        triggerDao.updateTrigger(trigger);
        if (trigger instanceof ScheduledTrigger) {
            scheduleTrigger((ScheduledTrigger) trigger);
        }
    }
/**
* Deletes/Removes the {@code Trigger}. If it is a {@code ScheduledTrigger} then it is also un-scheduled from
* scheduler.
*
* @param triggerId the string that uniquely identifies the {@code Trigger} to be removed
* @throws TriggerNotFoundException if there is no {@code Trigger} that matches {@code triggerId}
* @throws SchedulerException
*/
public void deleteTrigger(String triggerGroup, String triggerId) throws TriggerNotFoundException, SchedulerException {
Trigger trigger = getTrigger(triggerId);
if (trigger != null) {
deleteTrigger(triggerGroup, trigger);
} else {
throw new TriggerNotFoundException("No trigger found for trigger id: " + triggerId);
}
}
/**
* Deletes/Removes the {@code Trigger}. If it is a {@code ScheduledTrigger} then it is also un-scheduled from
* scheduler.
* @warn exception SchedulerException description missing
*
* @param trigger the {@code Trigger} to be removed
*/
public void deleteTrigger(String triggerGroup, Trigger trigger) throws SchedulerException {
triggerDao.deleteTrigger(triggerGroup, trigger);
if (trigger instanceof ScheduledTrigger) {
unscheduleTrigger((ScheduledTrigger) trigger);
}
}
/**
* Schedules the {@code Trigger} using the scheduler.
* @warn exception SchedulerException description missing
* @warn parameter scheduledTrigger description missing
*
* @param scheduledTrigger
* @throws SchedulerException
*/
public void scheduleTrigger(ScheduledTrigger scheduledTrigger) throws SchedulerException {
if (!initialized.get())
throw new SchedulerException("Trigger service is not initialized. initialize() must be called before calling scheduleTrigger() method");
Map jobDataMap = new HashMap();
jobDataMap.put(TRIGGER_OPERATOR_KEY, this);
jobDataMap.put(TRIGGER_KEY, scheduledTrigger);
try {
org.quartz.Trigger quartzTrigger = newTrigger()
.withIdentity(triggerKey(scheduledTrigger.getId(), Scheduler.DEFAULT_GROUP))
.withSchedule(scheduledTrigger.getScheduleBuilder())
.usingJobData(new JobDataMap(jobDataMap))
.startAt(scheduledTrigger.getStartAt())
.endAt(scheduledTrigger.getEndAt())
.build();
scheduler.scheduleQuartzJob(scheduledTrigger.getId(), Scheduler.DEFAULT_GROUP, ScheduledTriggerJob.class, quartzTrigger);
scheduledTrigger.setQuartzTrigger(quartzTrigger);
logger.info("Successfully scheduled {}", scheduledTrigger);
} catch (org.quartz.SchedulerException e) {
throw new SchedulerException("Exception occurred while scheduling trigger: " + scheduledTrigger, e);
}
}
/**
* A quartz job that is executed every time a {@code Trigger} is invoked.
*/
public static class ScheduledTriggerJob implements org.quartz.Job {
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
TriggerOperator triggerOperator = (TriggerOperator) context.getMergedJobDataMap().get(TRIGGER_OPERATOR_KEY);
ScheduledTrigger scheduledTrigger = (ScheduledTrigger) context.getMergedJobDataMap().get(TRIGGER_KEY);
try {
logger.info("Executing scheduledTrigger: {}, Previous fire time: {}, Next fire time: {}",
scheduledTrigger, scheduledTrigger.getPreviousFireTime(), scheduledTrigger.getNextFireTime());
triggerOperator.execute(scheduledTrigger);
} catch (Exception e) {
throw new JobExecutionException(e);
}
}
}
    /**
     * Checks whether the given {@code ScheduledTrigger} is currently registered with the
     * underlying quartz scheduler.
     *
     * @param scheduledTrigger the trigger to check
     * @return {@code true} if the trigger is scheduled, {@code false} otherwise
     * @throws SchedulerException if the underlying quartz scheduler reports an error
     */
    public boolean isScheduled(ScheduledTrigger scheduledTrigger) throws SchedulerException {
        try {
            return scheduler.isScheduled(scheduledTrigger.getId(), Scheduler.DEFAULT_GROUP);
        } catch (org.quartz.SchedulerException e) {
            throw new SchedulerException("Exception occurred while checking isScheduled() for: " + scheduledTrigger, e);
        }
    }
    /**
     * Un-schedules the {@code Trigger}: removes it from the underlying quartz scheduler and
     * clears its cached quartz trigger reference.
     *
     * @param scheduledTrigger the trigger to un-schedule
     * @throws SchedulerException if this service is not initialized or quartz un-scheduling fails
     */
    public void unscheduleTrigger(ScheduledTrigger scheduledTrigger) throws SchedulerException {
        if (!initialized.get())
            throw new SchedulerException("Trigger service is not initialized. initialize() must be called before calling unscheduleTrigger() method");
        try {
            scheduler.unscheduleQuartzJob(scheduledTrigger.getId(), Scheduler.DEFAULT_GROUP);
            scheduledTrigger.setQuartzTrigger(null);
            logger.info("Successfully unscheduled {}", scheduledTrigger);
        } catch (org.quartz.SchedulerException e) {
            throw new SchedulerException("Exception occurred while unscheduling trigger: " + scheduledTrigger, e);
        }
    }
    /**
     * Returns the {@code Trigger}s registered with the trigger service for the given trigger group.
     *
     * @param triggerGroup the group whose triggers should be returned
     * @return the triggers in the given group
     */
    public List<Trigger> getTriggers(String triggerGroup) {
        return triggerDao.getTriggers(triggerGroup);
    }
    /**
     * Returns a list of all the {@code Trigger}s registered with the trigger service, across
     * all trigger groups.
     *
     * @return a list of the {@code Trigger}s that are registered with the trigger service
     */
    public List<Trigger> getTriggers() {
        return triggerDao.getTriggers();
    }
/**
* Executes the {@code Trigger}.
*
* @param triggerId the string that uniquely identifies the {@code Trigger} to be executed
* @throws TriggerNotFoundException if there is no {@code Trigger} that matches {@code triggerId}
* @throws Exception if an exception occurred during the execution of the {@code Trigger}
*/
public void execute(String triggerId) throws Exception {
Trigger trigger = getTrigger(triggerId);
if (trigger != null) {
execute(trigger);
} else {
throw new TriggerNotFoundException(String.format("No trigger found with id: %s", triggerId));
}
}
    /**
     * Executes the {@code Trigger} by reflectively instantiating its action class and invoking
     * it with the trigger's data. Disabled triggers are silently skipped.
     *
     * @param trigger the {@code Trigger} to be executed
     * @throws Exception if instantiating or invoking the trigger's action fails; the original
     *         failure is preserved as the cause
     */
    public void execute(Trigger trigger) throws Exception {
        if (trigger.isDisabled()) return;
        try {
            // Raw Action1 cast: the action's type parameter is erased at runtime, so the data
            // object is passed through unchecked. Class.newInstance requires the action class to
            // have an accessible no-arg constructor.
            ((Action1) trigger.getAction().newInstance()).call(trigger.getData());
        } catch (Exception e) {
            throw new Exception(String.format("Exception occurred while executing trigger '%s'", trigger), e);
        }
    }
}
| 9,224 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/IntervalTrigger.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.joda.time.Interval;
import org.quartz.ScheduleBuilder;
import org.quartz.SimpleScheduleBuilder;
import org.quartz.SimpleTrigger;
import org.quartz.spi.MutableTrigger;
import rx.functions.Action1;
public class IntervalTrigger<T> extends ScheduledTrigger<T> {
    // Interval between successive firings, in milliseconds, derived from the ISO 8601 interval.
    private final long repeatInterval;
    // Number of repeats after the first firing; -1 means repeat indefinitely.
    private final int repeatCount;
    /**
     * Creates an interval based trigger.
     *
     * @param iso8601Interval ISO 8601 interval (see https://en.wikipedia.org/wiki/ISO_8601 for how
     *        to specify intervals with start time/end time/interval); its start is the trigger's
     *        start time and its duration is the repeat interval
     * @param repeatCount repeat count after the first trigger; specify -1 to repeat indefinitely
     * @param name human readable name of the trigger
     * @param data payload passed to the action on every firing
     * @param dataType class of the payload
     * @param action action invoked with {@code data} on every firing
     */
    @JsonCreator
    public IntervalTrigger(@JsonProperty("iso8601Interval") String iso8601Interval,
                           @JsonProperty("repeatCount") int repeatCount,
                           @JsonProperty("name") String name,
                           @JsonProperty("data") T data,
                           @JsonProperty("dataType") Class<T> dataType,
                           @JsonProperty("action") Class<? extends Action1<T>> action) {
        super(name, data, dataType, action, Interval.parse(iso8601Interval).getStart().toDate(), null);
        // The parse in the super() call cannot be shared because no statement may precede it;
        // parse once more here and derive the duration from it (previously parsed a third time).
        final Interval jodaInterval = Interval.parse(iso8601Interval);
        this.repeatCount = repeatCount; // -1 is repeat indefinitely
        this.repeatInterval = jodaInterval.toDurationMillis();
    }
    /**
     * Builds the quartz schedule matching this trigger's repeat interval and count. The misfire
     * instruction re-fires at the next scheduled time with the remaining repeat count.
     */
    @Override
    @JsonIgnore
    public ScheduleBuilder getScheduleBuilder() {
        return new ScheduleBuilder<SimpleTrigger>() {
            @Override
            protected MutableTrigger build() {
                return SimpleScheduleBuilder.simpleSchedule()
                        .withRepeatCount(repeatCount)
                        .withIntervalInMilliseconds(repeatInterval)
                        .withMisfireHandlingInstructionNextWithRemainingCount()
                        .build();
            }
        };
    }
    /** @return the repeat interval in milliseconds */
    public long getRepeatInterval() {
        return repeatInterval;
    }
    /** @return the repeat count after the first firing; -1 means repeat indefinitely */
    public int getRepeatCount() {
        return repeatCount;
    }
}
| 9,225 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/Trigger.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import rx.functions.Action1;
import java.util.Date;
/**
 * Base class for all types of triggers. A trigger couples a typed data payload with an
 * {@link Action1} class that is instantiated and invoked with that payload whenever the trigger
 * is executed. Subtypes are serialized polymorphically via the {@code @class} property.
 */
@JsonTypeInfo(use=JsonTypeInfo.Id.CLASS, include=JsonTypeInfo.As.PROPERTY, property="@class")
public class Trigger<T> {
    // Unique id; assigned by the persistence layer after creation.
    private String id;
    private final Date createdDate;
    // Payload handed to the action on every execution.
    private T data;
    private final Class<T> dataType;
    // NOTE(review): declared as Action1<? extends T> although the constructor accepts
    // Class<? extends Action1<T>> -- confirm the wildcard asymmetry is intentional.
    private final Class<? extends Action1<? extends T>> action;
    private final String name;
    // When true, the trigger operator skips execution of this trigger.
    private boolean disabled;
    /**
     * Creates a trigger; the creation date is captured at construction time.
     *
     * @param name human readable name of the trigger
     * @param data payload passed to the action on every execution
     * @param dataType class of the payload
     * @param action action class instantiated and invoked when the trigger fires
     */
    @JsonCreator
    public Trigger(@JsonProperty("name") String name,
                   @JsonProperty("data") T data,
                   @JsonProperty("dataType") Class<T> dataType,
                   @JsonProperty("action") Class<? extends Action1<T>> action) {
        this.createdDate = new Date();
        this.name = name;
        this.data = data;
        this.dataType = dataType;
        this.action = action;
    }
    /**
     * @return the unique id of this trigger, or {@code null} if it has not been assigned yet
     */
    public String getId() {
        return id;
    }
    /**
     * Sets the unique id of this trigger; typically called by the persistence layer.
     *
     * @param id the id to assign
     */
    public void setId(String id) {
        this.id = id;
    }
    /**
     * @return the instant this trigger object was constructed
     */
    public Date getCreatedDate() {
        return createdDate;
    }
    /**
     * @return the payload passed to the action on execution
     */
    public T getData() {
        return data;
    }
    /**
     * Replaces the payload passed to the action on execution.
     *
     * @param data the new payload
     */
    public void setData(T data) {
        this.data = data;
    }
    /**
     * @return the class of the payload
     */
    public Class<T> getDataType() {
        return dataType;
    }
    /**
     * @return the action class instantiated and invoked when this trigger fires
     */
    public Class<? extends Action1<? extends T>> getAction() {
        return action;
    }
    /**
     * @return the human readable name of this trigger
     */
    public String getName() {
        return name;
    }
    /**
     * @return {@code true} if this trigger is disabled and must not be executed
     */
    public boolean isDisabled() {
        return disabled;
    }
    /**
     * Enables or disables this trigger.
     *
     * @param disabled {@code true} to prevent execution
     */
    public void setDisabled(boolean disabled) {
        this.disabled = disabled;
    }
    /**
     * @return a short human readable representation containing the id and name
     */
    public String toString() {
        return "Trigger (" + id + ":" + name + ")";
    }
}
| 9,226 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/TriggerUtils.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers;
import org.joda.time.Interval;
import org.quartz.CronExpression;
import java.text.ParseException;
import java.util.StringTokenizer;
/**
 * Static helpers for validating quartz cron expressions and ISO 8601 interval strings.
 *
 * @author sthadeshwar
 */
public class TriggerUtils {
    /**
     * Checks whether {@code cronExpression} is a valid quartz cron expression.
     *
     * @param cronExpression the expression to check
     * @return {@code true} if valid, {@code false} otherwise
     */
    public static boolean isValidCronExpression(String cronExpression) {
        try {
            validateCronExpression(cronExpression);
            return true;
        } catch (Exception e) {
            // An invalid expression is an expected outcome for this predicate;
            // do not dump stack traces to stderr.
            return false;
        }
    }
    /**
     * Validates a quartz cron expression, rejecting null/blank input, a '*' in the seconds
     * (first) field, and expressions with more than 7 fields.
     *
     * @param cronExpression the expression to validate
     * @throws IllegalArgumentException if the expression is invalid; any underlying parse
     *         failure is preserved as the cause
     */
    public static void validateCronExpression(String cronExpression) {
        try {
            // trim() also rejects whitespace-only input, which previously fell through to a
            // NoSuchElementException from the tokenizer below.
            if (cronExpression == null || cronExpression.trim().isEmpty()) {
                throw new IllegalArgumentException(String.format("Cron expression cannot be null or empty : %s", cronExpression));
            }
            StringTokenizer tokenizer = new StringTokenizer(cronExpression, " \t", false);
            int tokens = tokenizer.countTokens();
            String beginningToken = tokenizer.nextToken().trim();
            if ("*".equals(beginningToken)) {
                // For all practical purposes and for ALL clients of this library, this is true!
                throw new IllegalArgumentException(
                        String.format("Cron expression cannot have '*' in the SECONDS (first) position : %s", cronExpression)
                );
            }
            if (tokens > 7) {
                throw new IllegalArgumentException(
                        String.format("Cron expression cannot have more than 7 fields : %s", cronExpression)
                );
            }
            CronExpression.validateExpression(cronExpression);
        } catch (ParseException e) {
            // Preserve the parse failure as the cause instead of discarding it.
            throw new IllegalArgumentException(e.getMessage(), e);
        }
    }
    /**
     * Checks whether {@code iso8601Interval} is a valid ISO 8601 interval string.
     *
     * @param iso8601Interval the interval string to check
     * @return {@code true} if valid, {@code false} otherwise
     */
    public static boolean isValidISO8601Interval(String iso8601Interval) {
        try {
            Interval.parse(iso8601Interval);
            return true;
        } catch (Exception e) {
            return false;
        }
    }
    /**
     * Validates an ISO 8601 interval string.
     *
     * @param iso8601Interval the interval string to validate
     * @throws IllegalArgumentException if the string is not a valid ISO 8601 interval; the
     *         underlying failure is preserved as the cause
     */
    public static void validateISO8601Interval(String iso8601Interval) {
        try {
            Interval.parse(iso8601Interval);
        } catch (Exception e) {
            throw new IllegalArgumentException(e.getMessage(), e);
        }
    }
}
| 9,227 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/exceptions/TriggerNotFoundException.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers.exceptions;
/**
 * Thrown when no {@code Trigger} can be found for a given trigger id.
 */
public class TriggerNotFoundException extends Exception {
    /** @param message detail message describing which trigger id was not found */
    public TriggerNotFoundException(String message) {
        super(message);
    }
    /**
     * @param message detail message describing which trigger id was not found
     * @param throwable underlying cause
     */
    public TriggerNotFoundException(String message, Throwable throwable) {
        super(message, throwable);
    }
}
| 9,228 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/exceptions/SchedulerException.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers.exceptions;
/**
 * Thrown when a scheduling operation fails, typically wrapping an underlying
 * {@code org.quartz.SchedulerException}.
 */
public class SchedulerException extends Exception {
    /** @param message detail message describing the failed scheduling operation */
    public SchedulerException(String message) {
        super(message);
    }
    /**
     * @param message detail message describing the failed scheduling operation
     * @param throwable underlying cause
     */
    public SchedulerException(String message, Throwable throwable) {
        super(message, throwable);
    }
}
| 9,229 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/persistence/InMemoryTriggerDao.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers.persistence;
import com.netflix.fenzo.triggers.Trigger;
import java.util.List;
import java.util.UUID;
/**
 * In-memory implementation of {@code TriggerDao}, backed by the group-partitioned map provided
 * by {@link AbstractInMemoryDao}. Data does not survive a process restart.
 */
public class InMemoryTriggerDao extends AbstractInMemoryDao<Trigger> implements TriggerDao {
    /**
     * Stores a new trigger under the given group, assigning it a generated id that embeds the
     * group name.
     *
     * @param triggerGroup group the trigger belongs to
     * @param trigger trigger to store
     * @return the generated trigger id
     */
    @Override
    public String createTrigger(String triggerGroup, Trigger trigger) {
        trigger.setId(createId(triggerGroup, UUID.randomUUID().toString()));
        create(triggerGroup, trigger.getId(), trigger);
        return trigger.getId();
    }
    /**
     * Looks up a trigger by its id; the group is recovered from the id itself.
     *
     * @param triggerId id previously returned by {@link #createTrigger(String, Trigger)}
     * @return the trigger, or {@code null} if not found
     */
    @Override
    public Trigger getTrigger(String triggerId) {
        String triggerGroup = extractGroupFromId(triggerId);
        return read(triggerGroup, triggerId);
    }
    /**
     * Replaces the stored trigger that has the same id as the given trigger.
     *
     * @param trigger trigger to update; its id must have been assigned by this DAO
     */
    @Override
    public void updateTrigger(Trigger trigger) {
        String triggerGroup = extractGroupFromId(trigger.getId());
        update(triggerGroup, trigger.getId(), trigger);
    }
    /**
     * Removes the given trigger from the given group.
     *
     * @param triggerGroup group the trigger belongs to
     * @param trigger trigger to remove
     */
    @Override
    public void deleteTrigger(String triggerGroup, Trigger trigger) {
        delete(triggerGroup, trigger.getId());
    }
    /**
     * Lists all triggers stored under the given group.
     *
     * @param triggerGroup group to list
     * @return the group's triggers; empty if the group is unknown
     */
    @Override
    public List<Trigger> getTriggers(String triggerGroup) {
        return list(triggerGroup);
    }
    /**
     * @return all triggers across all groups
     */
    @Override
    public List<Trigger> getTriggers() {
        return list();
    }
}
| 9,230 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/persistence/AbstractInMemoryDao.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers.persistence;
import java.lang.reflect.ParameterizedType;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * Base class providing a simple in-memory, group-partitioned store for DAO implementations.
 * Entries live in a two-level map: group -> (id -> entity). Ids produced by
 * {@link #createId(String, String)} embed the group name, so the group can later be recovered
 * with {@link #extractGroupFromId(String)}.
 *
 * @author sthadeshwar
 */
public abstract class AbstractInMemoryDao<T> {
    public static final String SEPARATOR = ":";
    private final ConcurrentMap<String, ConcurrentMap<String,T>> map = new ConcurrentHashMap<String, ConcurrentMap<String,T>>();
    // Separator of the form ":<entity class name>:" used to join group and id into one key.
    private final String idSeparator;
    protected AbstractInMemoryDao() {
        // Recover the concrete entity type T from the subclass's generic superclass declaration.
        ParameterizedType parameterizedType = (ParameterizedType) this.getClass().getGenericSuperclass();
        @SuppressWarnings("unchecked")
        Class<T> parameterClass = (Class<T>) parameterizedType.getActualTypeArguments()[0];
        this.idSeparator = String.format("%s%s%s", SEPARATOR, parameterClass.getName(), SEPARATOR);
    }
    /**
     * Stores {@code type} under the given group and id, creating the group bucket on first use.
     *
     * @param group group (partition) to store the entity under
     * @param id unique id of the entity within the group
     * @param type entity to store
     */
    protected void create(String group, String id, T type) {
        ConcurrentMap<String, T> subMap = new ConcurrentHashMap<String, T>();
        subMap.put(id, type);
        // putIfAbsent returns the pre-existing bucket when this group already has entries;
        // in that case add to the existing bucket instead. (Was a raw Map before.)
        ConcurrentMap<String, T> existingMap = map.putIfAbsent(group, subMap);
        if (existingMap != null) {
            synchronized (map) {
                map.get(group).put(id, type);
            }
        }
    }
    /**
     * Replaces the entity stored under the given group and id.
     * Assumes the group bucket already exists, i.e. create() was called for this group before.
     *
     * @param group group the entity belongs to
     * @param id id of the entity
     * @param type new entity value
     */
    protected void update(String group, String id, T type) {
        synchronized (map) {
            map.get(group).put(id, type);
        }
    }
    /**
     * Looks up the entity stored under the given group and id.
     *
     * @param group group the entity belongs to
     * @param id id of the entity
     * @return the entity, or {@code null} if the group or id is unknown
     */
    protected T read(String group, String id) {
        // Direct map lookup instead of scanning the key set; also null-safe for unknown groups
        // (the previous implementation threw NullPointerException).
        ConcurrentMap<String, T> subMap = map.get(group);
        return subMap != null ? subMap.get(id) : null;
    }
    /**
     * Removes the entity stored under the given group and id, if present.
     *
     * @param group group the entity belongs to
     * @param id id of the entity
     */
    protected void delete(String group, String id) {
        synchronized (map) {
            // Null-safe for unknown groups (previously threw NullPointerException).
            ConcurrentMap<String, T> subMap = map.get(group);
            if (subMap != null) {
                subMap.remove(id);
            }
        }
    }
    /**
     * Lists at most {@code count} entities of the given group.
     *
     * @param group group to list
     * @param count maximum number of entities to return
     * @return up to {@code count} entities; empty if the group is unknown
     */
    protected List<T> list(String group, int count) {
        List<T> items = list(group);
        return items.size() > count ? items.subList(0, count) : items;
    }
    /**
     * Lists all entities of the given group.
     *
     * @param group group to list
     * @return the group's entities; empty if the group is unknown
     */
    protected List<T> list(String group) {
        return map.get(group) != null ? new ArrayList<T>(map.get(group).values()) : new ArrayList<T>();
    }
    /**
     * Lists all entities across all groups.
     *
     * @return every stored entity
     */
    protected List<T> list() {
        List<T> items = new ArrayList<>();
        for (ConcurrentMap<String, T> subMap : map.values()) {
            items.addAll(subMap.values());
        }
        return items;
    }
    /**
     * Builds a combined id of the form {@code <group><separator><id>}.
     *
     * @param group group name; must not contain the separator
     * @param id raw id; must not contain the separator
     * @return the combined id
     * @throws IllegalArgumentException if either argument is null or contains the separator
     */
    protected String createId(String group, String id) {
        if (group == null || id == null || group.contains(idSeparator) || id.contains(idSeparator)) {
            throw new IllegalArgumentException(String.format("Illegal arguments specified for column name creation (group = %s, id = %s)", group, id));
        }
        return String.format("%s%s%s", group, idSeparator, id);
    }
    /**
     * Extracts the group name from a combined id built by {@link #createId(String, String)}.
     * Inputs that do not contain the separator are returned unchanged (including {@code null}).
     *
     * @param columnName combined id, or any plain string
     * @return the group portion, or {@code columnName} itself if it contains no separator
     * @throws IllegalArgumentException if the separator appears but the id is malformed
     */
    protected String extractGroupFromId(String columnName) {
        if (columnName == null || !columnName.contains(idSeparator)) return columnName;
        // Quote the separator before splitting: entity class names may contain regex
        // metacharacters (e.g. '$' in nested class names), which would corrupt a raw regex split.
        String[] tokens = columnName.split(java.util.regex.Pattern.quote(idSeparator));
        if (tokens.length == 2) {
            return tokens[0];
        } else {
            throw new IllegalArgumentException(String.format("Cannot extract row key from column name string: %s", columnName));
        }
    }
}
| 9,231 |
0 | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers | Create_ds/Fenzo/fenzo-triggers/src/main/java/com/netflix/fenzo/triggers/persistence/TriggerDao.java | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.fenzo.triggers.persistence;
import com.netflix.fenzo.triggers.Trigger;
import java.util.List;
/**
 * Persistence contract for storing and retrieving {@code Trigger} instances, partitioned by
 * trigger group.
 */
public interface TriggerDao {
    /**
     * Stores a new trigger under the given group and assigns it a unique id.
     *
     * @param triggerGroup group the trigger belongs to
     * @param trigger trigger to store
     * @return the id assigned to the stored trigger
     */
    public String createTrigger(String triggerGroup, Trigger trigger);
    /**
     * Removes the given trigger from the given group.
     *
     * @param triggerGroup group the trigger belongs to
     * @param trigger trigger to remove
     */
    public void deleteTrigger(String triggerGroup, Trigger trigger);
    /**
     * Replaces the stored trigger that has the same id as the given trigger.
     *
     * @param trigger trigger to update
     */
    public void updateTrigger(Trigger trigger);
    /**
     * Looks up a trigger by id.
     *
     * @param triggerId id of the trigger
     * @return the trigger, or {@code null} if not found
     */
    public Trigger getTrigger(String triggerId);
    /**
     * Lists the triggers stored under the given group.
     *
     * @param triggerGroup group to list
     * @return the group's triggers
     */
    public List<Trigger> getTriggers(String triggerGroup);
    /**
     * @return all triggers across all groups
     */
    public List<Trigger> getTriggers();
}
| 9,232 |
0 | Create_ds/dgs-examples-webflux/src/test/java/com/example | Create_ds/dgs-examples-webflux/src/test/java/com/example/demo/ReviewsDataFetcherTest.java | package com.example.demo;
import com.example.demo.datafetchers.ReviewsDataFetcher;
import com.example.demo.datafetchers.ShowsDatafetcher;
import com.example.demo.dataloaders.ReviewsDataLoader;
import com.example.demo.dataloaders.ReviewsDataLoaderWithContext;
import com.example.demo.generated.client.ReviewsGraphQLQuery;
import com.example.demo.generated.client.ReviewsProjectionRoot;
import com.example.demo.scalars.DateRange;
import com.example.demo.scalars.DateRangeScalar;
import com.example.demo.scalars.DateTimeScalar;
import com.example.demo.services.DefaultReviewsService;
import com.example.demo.services.ShowsServiceImpl;
import com.netflix.graphql.dgs.DgsQueryExecutor;
import com.netflix.graphql.dgs.autoconfig.DgsAutoConfiguration;
import com.netflix.graphql.dgs.client.codegen.GraphQLQueryRequest;
import graphql.ExecutionResult;
import graphql.schema.Coercing;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import java.time.LocalDate;
import java.util.HashMap;
import java.util.Map;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
/**
 * Integration test for the reviews query. It exercises the custom {@code DateRange} scalar by
 * registering its {@code Coercing} with the generated query serializer.
 */
@SpringBootTest(classes = {DgsAutoConfiguration.class, ReviewsDataLoaderWithContext.class, ShowsDatafetcher.class, ReviewsDataFetcher.class, ReviewsDataLoader.class, DateTimeScalar.class, DateRangeScalar.class, DefaultReviewsService.class, ShowsServiceImpl.class})
public class ReviewsDataFetcherTest {
    // Executes GraphQL queries directly against the DGS framework without starting a web server.
    @Autowired
    DgsQueryExecutor dgsQueryExecutor;
    @Test
    public void testReviewsWithDateRange() {
        // Tell the query serializer how to render DateRange values inline in the query string.
        Map<Class<?>, Coercing<?, ?>> scalars = new HashMap<>();
        scalars.put(DateRange.class, new DateRangeScalar());
        GraphQLQueryRequest request = new GraphQLQueryRequest(
                ReviewsGraphQLQuery.newRequest().dateRange(new DateRange(LocalDate.of(2020, 1, 1), LocalDate.now())).build(),
                new ReviewsProjectionRoot().submittedDate().starScore(), scalars);
        ExecutionResult execute = dgsQueryExecutor.execute(request.serialize());
        assertThat(execute.isDataPresent()).isTrue();
    }
}
| 9,233 |
0 | Create_ds/dgs-examples-webflux/src/test/java/com/example | Create_ds/dgs-examples-webflux/src/test/java/com/example/demo/ReviewSubscriptionTest.java | package com.example.demo;
import com.example.demo.datafetchers.ReviewsDataFetcher;
import com.example.demo.generated.client.AddReviewGraphQLQuery;
import com.example.demo.generated.client.AddReviewProjectionRoot;
import com.example.demo.generated.types.Review;
import com.example.demo.generated.types.SubmittedReview;
import com.example.demo.scalars.DateRangeScalar;
import com.example.demo.scalars.DateTimeScalar;
import com.example.demo.services.DefaultReviewsService;
import com.example.demo.services.ShowsService;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.graphql.dgs.DgsQueryExecutor;
import com.netflix.graphql.dgs.autoconfig.DgsAutoConfiguration;
import com.netflix.graphql.dgs.client.codegen.GraphQLQueryRequest;
import graphql.ExecutionResult;
import org.junit.jupiter.api.Test;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import static org.assertj.core.api.AssertionsForClassTypes.assertThat;
/**
 * Test for the review-added subscription.
 * The subscription query returns a Publisher<ExecutionResult>.
 * Each time a review is added, a new ExecutionResult is given to the subscriber.
 * Normally this publisher is consumed by the Websocket/SSE subscription handler and you don't
 * deal with this code directly, but for testing purposes it's useful to use the stream directly.
 */
@SpringBootTest(classes = {DefaultReviewsService.class, ReviewsDataFetcher.class, DgsAutoConfiguration.class, DateTimeScalar.class, DateRangeScalar.class})
public class ReviewSubscriptionTest {
    @Autowired
    DgsQueryExecutor dgsQueryExecutor;
    // Mocked because this test exercises only the review subscription, not show data.
    @MockBean
    ShowsService showsService;
    @Test
    void reviewSubscription() {
        // For subscriptions, the "data" of the initial execution result is a Publisher that
        // emits one ExecutionResult per published review.
        ExecutionResult executionResult = dgsQueryExecutor.execute("subscription { reviewAdded(showId: 1) {starScore} }");
        Publisher<ExecutionResult> reviewPublisher = executionResult.getData();
        List<Review> reviews = new CopyOnWriteArrayList<>();
        reviewPublisher.subscribe(new Subscriber<ExecutionResult>() {
            @Override
            public void onSubscribe(Subscription s) {
                // Request exactly two elements; the two addReview() calls below satisfy this demand.
                s.request(2);
            }
            @Override
            public void onNext(ExecutionResult executionResult) {
                if (executionResult.getErrors().size() > 0) {
                    System.out.println(executionResult.getErrors());
                }
                // Each emitted result wraps a map; convert its "reviewAdded" entry to a Review.
                Map<String, Object> review = executionResult.getData();
                reviews.add(new ObjectMapper().convertValue(review.get("reviewAdded"), Review.class));
            }
            @Override
            public void onError(Throwable t) {
                // Not expected in this test; a failure surfaces via the size assertion below.
            }
            @Override
            public void onComplete() {
                // No-op: the subscription stream is not expected to complete within this test.
            }
        });
        addReview();
        addReview();
        assertThat(reviews.size()).isEqualTo(2);
    }
    // Publishes a review via the addReview mutation, which feeds the subscription stream.
    private void addReview() {
        GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(
                AddReviewGraphQLQuery.newRequest()
                        .review(
                                SubmittedReview.newBuilder()
                                        .showId(1)
                                        .username("testuser")
                                        .starScore(5).build())
                        .build(),
                new AddReviewProjectionRoot()
                        .username()
                        .starScore());
        dgsQueryExecutor.execute(graphQLQueryRequest.serialize());
    }
}
| 9,234 |
0 | Create_ds/dgs-examples-webflux/src/test/java/com/example | Create_ds/dgs-examples-webflux/src/test/java/com/example/demo/ShowsDatafetcherTest.java | package com.example.demo;
import com.example.demo.datafetchers.ReviewsDataFetcher;
import com.example.demo.datafetchers.ShowsDatafetcher;
import com.example.demo.dataloaders.ReviewsDataLoader;
import com.example.demo.dataloaders.ReviewsDataLoaderWithContext;
import com.example.demo.generated.client.*;
import com.example.demo.generated.types.Review;
import com.example.demo.generated.types.Show;
import com.example.demo.generated.types.SubmittedReview;
import com.example.demo.scalars.DateRangeScalar;
import com.example.demo.scalars.DateTimeScalar;
import com.example.demo.services.DefaultReviewsService;
import com.example.demo.services.ShowsService;
import com.jayway.jsonpath.TypeRef;
import com.netflix.graphql.dgs.DgsQueryExecutor;
import com.netflix.graphql.dgs.autoconfig.DgsAutoConfiguration;
import com.netflix.graphql.dgs.client.codegen.GraphQLQueryRequest;
import graphql.ExecutionResult;
import org.assertj.core.util.Maps;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import java.time.OffsetDateTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
@SpringBootTest(classes = {DgsAutoConfiguration.class, ReviewsDataLoaderWithContext.class, ShowsDatafetcher.class, ReviewsDataFetcher.class, ReviewsDataLoader.class, DateTimeScalar.class, DateRangeScalar.class})
class ShowsDatafetcherTest {
@Autowired
DgsQueryExecutor dgsQueryExecutor;
@MockBean
ShowsService showsService;
@MockBean
DefaultReviewsService reviewsService;
@BeforeEach
public void before() {
Mockito.when(showsService.shows())
.thenAnswer(invocation -> Collections.singletonList(Show.newBuilder().id(1).title("mock title").releaseYear(2020).build()));
Mockito.when(reviewsService.reviewsForShows(Collections.singletonList(1)))
.thenAnswer(invocation ->
Maps.newHashMap(1, Arrays.asList(
Review.newBuilder().username("DGS User").starScore(5).submittedDate(OffsetDateTime.now()).build(),
Review.newBuilder().username("DGS User 2").starScore(3).submittedDate(OffsetDateTime.now()).build())
));
}
@Test
void shows() {
List<String> titles = dgsQueryExecutor.executeAndExtractJsonPath(
" { shows { title releaseYear }}",
"data.shows[*].title");
assertThat(titles).contains("mock title");
}
@Test
void showsWithException() {
Mockito.when(showsService.shows()).thenThrow(new RuntimeException("nothing to see here"));
ExecutionResult result = dgsQueryExecutor.execute(
" { shows { title releaseYear }}");
assertThat(result.getErrors()).isNotEmpty();
assertThat(result.getErrors().get(0).getMessage()).isEqualTo("java.lang.RuntimeException: nothing to see here");
}
@Test
void showsWithQueryApi() {
GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(ShowsGraphQLQuery.newRequest().titleFilter("").build(), new ShowsProjectionRoot().title());
List<String> titles = dgsQueryExecutor.executeAndExtractJsonPath(graphQLQueryRequest.serialize(), "data.shows[*].title");
assertThat(titles).contains("mock title");
}
@Test
void showWithReviews() {
GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(ShowsGraphQLQuery.newRequest().titleFilter("").build(),
new ShowsProjectionRoot()
.title()
.reviews()
.username()
.starScore());
List<Show> shows = dgsQueryExecutor.executeAndExtractJsonPathAsObject(
graphQLQueryRequest.serialize(),
"data.shows[*]",
new TypeRef<List<Show>>() {
});
assertThat(shows.size()).isEqualTo(1);
assertThat(shows.get(0).getReviews().size()).isEqualTo(2);
}
@Test
void addReviewMutation() {
GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(
AddReviewGraphQLQuery.newRequest()
.review(SubmittedReview.newBuilder()
.showId(1)
.username("testuser")
.starScore(5).build())
.build(),
new AddReviewProjectionRoot().username().starScore());
ExecutionResult executionResult = dgsQueryExecutor.execute(graphQLQueryRequest.serialize());
assertThat(executionResult.getErrors()).isEmpty();
verify(reviewsService).reviewsForShow(1);
}
@Test
void addReviewsMutation() {
List<SubmittedReview> reviews = Collections.singletonList(
SubmittedReview.newBuilder().showId(1).username("testuser1").starScore(5).build());
GraphQLQueryRequest graphQLQueryRequest = new GraphQLQueryRequest(
AddReviewsGraphQLQuery.newRequest()
.reviews(reviews)
.build(),
new AddReviewsProjectionRoot().username().starScore());
ExecutionResult executionResult = dgsQueryExecutor.execute(graphQLQueryRequest.serialize());
assertThat(executionResult.getErrors()).isEmpty();
verify(reviewsService).reviewsForShows(Collections.singletonList(1));
}
} | 9,235 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/DemoApplication.java | package com.example.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class DemoApplication {
    /**
     * Application entry point: boots the Spring context, which wires up the
     * DGS GraphQL runtime through component scanning of this package.
     */
    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}
| 9,236 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/scalars/DateRange.java | package com.example.demo.scalars;
import java.time.LocalDate;
/**
 * Immutable value object representing a range of calendar dates, used by the
 * custom {@code DateRange} GraphQL scalar and by review-filtering queries.
 */
public class DateRange {
    private final LocalDate from;
    private final LocalDate to;

    /**
     * @param from start of the range
     * @param to   end of the range
     */
    public DateRange(LocalDate from, LocalDate to) {
        this.from = from;
        this.to = to;
    }

    public LocalDate getFrom() {
        return from;
    }

    public LocalDate getTo() {
        return to;
    }

    /**
     * Human-readable form for logging/debugging; without this override the
     * default {@link Object#toString()} prints only a class name and identity
     * hash (this instance is logged by the reviews datafetcher).
     */
    @Override
    public String toString() {
        return "DateRange{from=" + from + ", to=" + to + "}";
    }
}
| 9,237 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/scalars/DateRangeScalar.java | package com.example.demo.scalars;
import com.netflix.graphql.dgs.DgsScalar;
import graphql.language.StringValue;
import graphql.schema.Coercing;
import graphql.schema.CoercingParseLiteralException;
import graphql.schema.CoercingParseValueException;
import graphql.schema.CoercingSerializeException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
@DgsScalar(name = "DateRange")
public class DateRangeScalar implements Coercing<DateRange, String> {
    // Wire format is "MM/dd/yyyy-MM/dd/yyyy", e.g. "01/01/2020-12/31/2020".
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("MM/dd/yyyy");

    /** Serializes a {@link DateRange} result to its wire format. */
    @Override
    public String serialize(Object dataFetcherResult) throws CoercingSerializeException {
        // Per the Coercing contract, signal type problems with a coercing
        // exception instead of leaking a ClassCastException to the engine.
        if (!(dataFetcherResult instanceof DateRange)) {
            throw new CoercingSerializeException("Expected a DateRange but got: " + dataFetcherResult);
        }
        DateRange range = (DateRange) dataFetcherResult;
        return range.getFrom().format(formatter) + "-" + range.getTo().format(formatter);
    }

    /** Parses a variable value (a String in wire format) into a {@link DateRange}. */
    @Override
    public DateRange parseValue(Object input) throws CoercingParseValueException {
        if (!(input instanceof String)) {
            throw new CoercingParseValueException("Expected a String but got: " + input);
        }
        try {
            return parseRange((String) input);
        } catch (RuntimeException e) {
            throw new CoercingParseValueException("Invalid DateRange value: " + input, e);
        }
    }

    /** Parses an inline query literal (a StringValue AST node) into a {@link DateRange}. */
    @Override
    public DateRange parseLiteral(Object input) throws CoercingParseLiteralException {
        if (!(input instanceof StringValue)) {
            throw new CoercingParseLiteralException("Expected a StringValue but got: " + input);
        }
        try {
            return parseRange(((StringValue) input).getValue());
        } catch (RuntimeException e) {
            throw new CoercingParseLiteralException("Invalid DateRange literal: " + input, e);
        }
    }

    // Shared parsing, previously duplicated verbatim in parseValue and parseLiteral.
    private DateRange parseRange(String value) {
        String[] split = value.split("-");
        if (split.length != 2) {
            throw new IllegalArgumentException("Expected MM/dd/yyyy-MM/dd/yyyy but got: " + value);
        }
        LocalDate from = LocalDate.parse(split[0], formatter);
        LocalDate to = LocalDate.parse(split[1], formatter);
        return new DateRange(from, to);
    }
}
| 9,238 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/scalars/DateTimeScalar.java | package com.example.demo.scalars;
import com.netflix.graphql.dgs.DgsComponent;
import com.netflix.graphql.dgs.DgsRuntimeWiring;
import graphql.scalars.ExtendedScalars;
import graphql.schema.idl.RuntimeWiring;
/**
* graphql-java provides optional scalars in the graphql-java-extended-scalars library.
* We can wire a scalar from this library by adding the scalar to the RuntimeWiring.
*/
@DgsComponent
public class DateTimeScalar {
    /**
     * Registers the DateTime scalar from graphql-java-extended-scalars on the
     * runtime wiring, making the schema's DateTime type resolvable.
     *
     * @param builder the wiring builder supplied by the DGS framework
     * @return the same builder with the DateTime scalar registered
     */
    @DgsRuntimeWiring
    public RuntimeWiring.Builder addScalar(RuntimeWiring.Builder builder) {
        return builder.scalar(ExtendedScalars.DateTime);
    }
}
| 9,239 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/dataloaders/ReviewsDataLoader.java | package com.example.demo.dataloaders;
import com.example.demo.generated.types.Review;
import com.example.demo.services.DefaultReviewsService;
import com.netflix.graphql.dgs.DgsDataLoader;
import org.dataloader.MappedBatchLoader;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
@DgsDataLoader(name = "reviews")
public class ReviewsDataLoader implements MappedBatchLoader<Integer, List<Review>> {
    private final DefaultReviewsService reviewsService;

    public ReviewsDataLoader(DefaultReviewsService reviewsService) {
        this.reviewsService = reviewsService;
    }

    /**
     * Invoked once per batch: even when multiple datafetchers call load() on
     * the DataLoader, all requested show ids arrive here together, so reviews
     * for every Show are fetched in a single call instead of one per Show.
     */
    @Override
    public CompletionStage<Map<Integer, List<Review>>> load(Set<Integer> keys) {
        List<Integer> showIds = new ArrayList<>(keys);
        return CompletableFuture.supplyAsync(() -> reviewsService.reviewsForShows(showIds));
    }
}
| 9,240 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/dataloaders/ReviewsDataLoaderWithContext.java | package com.example.demo.dataloaders;
import com.example.demo.generated.types.Review;
import com.example.demo.services.DefaultReviewsService;
import com.netflix.graphql.dgs.DgsDataLoader;
import org.dataloader.BatchLoaderEnvironment;
import org.dataloader.MappedBatchLoaderWithContext;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
@DgsDataLoader(name = "reviewsWithContext")
public class ReviewsDataLoaderWithContext implements MappedBatchLoaderWithContext<Integer, List<Review>> {
    private final DefaultReviewsService reviewsService;

    @Autowired
    public ReviewsDataLoaderWithContext(DefaultReviewsService reviewsService) {
        this.reviewsService = reviewsService;
    }

    /**
     * Batches review loading for all requested show ids into one service call.
     * The BatchLoaderEnvironment provides per-request context; it is unused here.
     */
    @Override
    public CompletionStage<Map<Integer, List<Review>>> load(Set<Integer> keys, BatchLoaderEnvironment environment) {
        List<Integer> showIds = new ArrayList<>(keys);
        return CompletableFuture.supplyAsync(() -> reviewsService.reviewsForShows(showIds));
    }
}
| 9,241 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/instrumentation/ExampleTracingInstrumentation.java | package com.example.demo.instrumentation;
import graphql.ExecutionResult;
import graphql.execution.instrumentation.InstrumentationContext;
import graphql.execution.instrumentation.InstrumentationState;
import graphql.execution.instrumentation.SimpleInstrumentation;
import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters;
import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters;
import graphql.schema.DataFetcher;
import graphql.schema.GraphQLNonNull;
import graphql.schema.GraphQLObjectType;
import graphql.schema.GraphQLOutputType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import java.util.concurrent.CompletableFuture;
@Component
public class ExampleTracingInstrumentation extends SimpleInstrumentation {
    private final static Logger LOGGER = LoggerFactory.getLogger(ExampleTracingInstrumentation.class);
    /**
     * One TracingState instance is created per query execution; graphql-java
     * hands it back via parameters.getInstrumentationState() in later hooks.
     */
    @Override
    public InstrumentationState createState() {
        return new TracingState();
    }
    /** Records the wall-clock start of the whole query execution. */
    @Override
    public InstrumentationContext<ExecutionResult> beginExecution(InstrumentationExecutionParameters parameters) {
        TracingState tracingState = parameters.getInstrumentationState();
        tracingState.startTime = System.currentTimeMillis();
        return super.beginExecution(parameters);
    }
    /**
     * Wraps each non-trivial datafetcher so its wall-clock duration is logged.
     * Synchronous fetchers are timed inline; fetchers returning a
     * CompletableFuture are timed when the future completes.
     */
    @Override
    public DataFetcher<?> instrumentDataFetcher(DataFetcher<?> dataFetcher, InstrumentationFieldFetchParameters parameters) {
        // We only care about user code
        if (parameters.isTrivialDataFetcher()) {
            return dataFetcher;
        }
        return environment -> {
            long startTime = System.currentTimeMillis();
            Object result = dataFetcher.get(environment);
            if (result instanceof CompletableFuture) {
                // Async case: log once the future completes (success or failure).
                ((CompletableFuture<?>) result).whenComplete((r, ex) -> {
                    long totalTime = System.currentTimeMillis() - startTime;
                    LOGGER.info("Async datafetcher {} took {}ms", findDatafetcherTag(parameters), totalTime);
                });
            } else {
                long totalTime = System.currentTimeMillis() - startTime;
                LOGGER.info("Datafetcher {} took {}ms", findDatafetcherTag(parameters), totalTime);
            }
            return result;
        };
    }
    /** Logs the total execution time measured from beginExecution. */
    @Override
    public CompletableFuture<ExecutionResult> instrumentExecutionResult(ExecutionResult executionResult, InstrumentationExecutionParameters parameters) {
        TracingState tracingState = parameters.getInstrumentationState();
        long totalTime = System.currentTimeMillis() - tracingState.startTime;
        LOGGER.info("Total execution time: {}ms", totalTime);
        return super.instrumentExecutionResult(executionResult, parameters);
    }
    /** Builds a "ParentType.fieldName" tag for log lines, unwrapping non-null parents. */
    private String findDatafetcherTag(InstrumentationFieldFetchParameters parameters) {
        GraphQLOutputType type = parameters.getExecutionStepInfo().getParent().getType();
        GraphQLObjectType parent;
        if (type instanceof GraphQLNonNull) {
            parent = (GraphQLObjectType) ((GraphQLNonNull) type).getWrappedType();
        } else {
            parent = (GraphQLObjectType) type;
        }
        return parent.getName() + "." + parameters.getExecutionStepInfo().getPath().getSegmentName();
    }
    /** Per-execution state: start timestamp in epoch millis. */
    static class TracingState implements InstrumentationState {
        long startTime;
    }
}
| 9,242 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/datafetchers/ReviewsDataFetcher.java | package com.example.demo.datafetchers;
import com.example.demo.dataloaders.ReviewsDataLoaderWithContext;
import com.example.demo.generated.DgsConstants;
import com.example.demo.generated.types.Review;
import com.example.demo.generated.types.Show;
import com.example.demo.generated.types.SubmittedReview;
import com.example.demo.scalars.DateRange;
import com.example.demo.services.DefaultReviewsService;
import com.example.demo.services.ReviewsService;
import com.netflix.graphql.dgs.*;
import org.dataloader.DataLoader;
import org.reactivestreams.Publisher;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
@DgsComponent
public class ReviewsDataFetcher {
    private final ReviewsService reviewsService;

    public ReviewsDataFetcher(DefaultReviewsService reviewsService) {
        this.reviewsService = reviewsService;
    }

    /**
     * Resolves the "reviews" field on a Show.
     * Invoked once per Show, so loading 10 shows calls this 10 times. To avoid
     * the N+1 problem it goes through a DataLoader: each call only registers a
     * show id, and the DataLoader batches the actual loading into a single call
     * to the "load" method of the registered loader. For batching to work the
     * datafetcher must return a CompletableFuture.
     *
     * NOTE(review): dateRange is accepted but not applied to the DataLoader
     * lookup — confirm whether date filtering was intended here. (A leftover
     * debug System.out.println of the argument was removed.)
     */
    @DgsData(parentType = DgsConstants.SHOW.TYPE_NAME, field = DgsConstants.SHOW.Reviews)
    public CompletableFuture<List<Review>> reviews(DgsDataFetchingEnvironment dfe, @InputArgument DateRange dateRange) {
        // Instead of loading a DataLoader by name, use the DgsDataFetchingEnvironment with the DataLoader class.
        DataLoader<Integer, List<Review>> reviewsDataLoader = dfe.getDataLoader(ReviewsDataLoaderWithContext.class);
        // Because the reviews field is on Show, getSource() returns the Show instance.
        Show show = dfe.getSource();
        // Async load; the actual fetching is batched by the DataLoader mechanism.
        return reviewsDataLoader.load(show.getId());
    }

    /** Top-level query: lists reviews submitted within the given date range. */
    @DgsQuery
    public List<Review> reviews(@InputArgument DateRange dateRange) {
        return reviewsService.listReviews(dateRange);
    }

    /**
     * Mutation: saves one review and returns the (never-null) list of all
     * reviews for the affected show.
     */
    @DgsMutation
    public List<Review> addReview(@InputArgument SubmittedReview review) {
        reviewsService.saveReview(review);
        List<Review> reviews = reviewsService.reviewsForShow(review.getShowId());
        return Optional.ofNullable(reviews).orElse(Collections.emptyList());
    }

    /**
     * Mutation: saves a batch of reviews and returns the flattened reviews of
     * every affected show.
     */
    @DgsMutation
    public List<Review> addReviews(@InputArgument(value = "reviews", collectionType = SubmittedReview.class) List<SubmittedReview> reviewsInput) {
        reviewsService.saveReviews(reviewsInput);
        List<Integer> showIds = reviewsInput.stream().map(SubmittedReview::getShowId).collect(Collectors.toList());
        Map<Integer, List<Review>> reviews = reviewsService.reviewsForShows(showIds);
        return reviews.values().stream().flatMap(List::stream).collect(Collectors.toList());
    }

    /**
     * Subscription: emits each review as it is saved.
     * NOTE(review): showId is accepted but the stream is not filtered by it —
     * subscribers currently receive reviews for all shows; confirm intended.
     */
    @DgsSubscription
    public Publisher<Review> reviewAdded(@InputArgument Integer showId) {
        return reviewsService.getReviewsPublisher();
    }
}
| 9,243 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/datafetchers/ShowsDatafetcher.java | package com.example.demo.datafetchers;
import com.example.demo.generated.types.Show;
import com.example.demo.services.ShowsService;
import com.netflix.graphql.dgs.DgsComponent;
import com.netflix.graphql.dgs.DgsQuery;
import com.netflix.graphql.dgs.InputArgument;
import java.util.List;
import java.util.stream.Collectors;
@DgsComponent
public class ShowsDatafetcher {
    private final ShowsService showsService;

    public ShowsDatafetcher(ShowsService showsService) {
        this.showsService = showsService;
    }

    /**
     * Resolves the shows field on Query.
     * When a titleFilter argument is supplied, only shows whose title contains
     * the filter string are returned; otherwise all shows are returned.
     */
    @DgsQuery
    public List<Show> shows(@InputArgument("titleFilter") String titleFilter) {
        List<Show> allShows = showsService.shows();
        if (titleFilter == null) {
            return allShows;
        }
        return allShows.stream()
                .filter(show -> show.getTitle().contains(titleFilter))
                .collect(Collectors.toList());
    }
}
| 9,244 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/services/ShowsServiceImpl.java | package com.example.demo.services;
import com.example.demo.generated.types.Show;
import org.springframework.stereotype.Service;
import java.util.Arrays;
import java.util.List;
@Service
public class ShowsServiceImpl implements ShowsService {
    /** Returns the fixed, in-memory catalog of shows used by this demo. */
    @Override
    public List<Show> shows() {
        Show strangerThings = Show.newBuilder().id(1).title("Stranger Things").releaseYear(2016).build();
        Show ozark = Show.newBuilder().id(2).title("Ozark").releaseYear(2017).build();
        Show theCrown = Show.newBuilder().id(3).title("The Crown").releaseYear(2016).build();
        Show deadToMe = Show.newBuilder().id(4).title("Dead to Me").releaseYear(2019).build();
        Show orangeIsTheNewBlack = Show.newBuilder().id(5).title("Orange is the New Black").releaseYear(2013).build();
        return Arrays.asList(strangerThings, ozark, theCrown, deadToMe, orangeIsTheNewBlack);
    }
}
| 9,245 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/services/ReviewsService.java | package com.example.demo.services;
import com.example.demo.generated.types.Review;
import com.example.demo.generated.types.SubmittedReview;
import com.example.demo.scalars.DateRange;
import org.reactivestreams.Publisher;
import java.util.List;
import java.util.Map;
public interface ReviewsService {
    /** Returns the reviews for a single show. Avoid calling per show within one query (N+1). */
    List<Review> reviewsForShow(Integer showId);
    /** Returns reviews for many shows at once, keyed by show id; the preferred batch entry point. */
    Map<Integer, List<Review>> reviewsForShows(List<Integer> showIds);
    /** Persists a single submitted review. */
    void saveReview(SubmittedReview reviewInput);
    /** Persists a batch of submitted reviews. */
    void saveReviews(List<SubmittedReview> reviewsInput);
    /** Stream of reviews as they are added; backs the reviewAdded GraphQL subscription. */
    Publisher<Review> getReviewsPublisher();
    /** Returns reviews whose submission date falls within the given range. */
    List<Review> listReviews(DateRange dateRange);
}
| 9,246 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/services/ShowsService.java | package com.example.demo.services;
import com.example.demo.generated.types.Show;
import java.util.List;
public interface ShowsService {
    /** Returns all shows known to the service. */
    List<Show> shows();
}
| 9,247 |
0 | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo | Create_ds/dgs-examples-webflux/src/main/java/com/example/demo/services/DefaultReviewsService.java | package com.example.demo.services;
import com.example.demo.generated.types.Review;
import com.example.demo.generated.types.SubmittedReview;
import com.example.demo.scalars.DateRange;
import com.github.javafaker.Faker;
import org.reactivestreams.Publisher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import reactor.core.publisher.ConnectableFlux;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;
import javax.annotation.PostConstruct;
import java.time.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
* This service emulates a data store.
* For convenience in the demo we just generate Reviews in memory, but imagine this would be backed by for example a database.
* If this was indeed backed by a database, it would be very important to avoid the N+1 problem, which means we need to use a DataLoader to call this class.
*/
@Service
public class DefaultReviewsService implements ReviewsService {
    private final static Logger logger = LoggerFactory.getLogger(DefaultReviewsService.class);

    private final ShowsService showsService;
    // In-memory "table" of reviews, keyed by show id.
    private final Map<Integer, List<Review>> reviews = new ConcurrentHashMap<>();
    // Sink/publisher pair backing the reviewAdded subscription; the sink is
    // assigned when the Flux is subscribed (triggered by connect() below).
    private FluxSink<Review> reviewsStream;
    private ConnectableFlux<Review> reviewsPublisher;

    public DefaultReviewsService(ShowsService showsService) {
        this.showsService = showsService;
    }

    /** Seeds random reviews for every show and wires up the subscription stream. */
    @PostConstruct
    private void createReviews() {
        Faker faker = new Faker();
        // For each show we generate a random set of reviews.
        showsService.shows().forEach(show -> {
            List<Review> generatedReviews = IntStream.range(0, faker.number().numberBetween(1, 20)).mapToObj(number -> {
                LocalDateTime date = faker.date().past(300, TimeUnit.DAYS).toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime();
                return Review.newBuilder().submittedDate(OffsetDateTime.of(date, ZoneOffset.UTC)).username(faker.name().username()).starScore(faker.number().numberBetween(0, 6)).build();
            }).collect(Collectors.toList());
            reviews.put(show.getId(), generatedReviews);
        });
        Flux<Review> publisher = Flux.create(emitter -> {
            reviewsStream = emitter;
        });
        // connect() subscribes eagerly so reviewsStream is initialized before
        // the first review can be saved.
        reviewsPublisher = publisher.publish();
        reviewsPublisher.connect();
    }

    /**
     * Hopefully nobody calls this for multiple shows within a single query,
     * that would indicate the N+1 problem!
     */
    @Override
    public List<Review> reviewsForShow(Integer showId) {
        return reviews.get(showId);
    }

    /**
     * This is the method we want to call when loading reviews for multiple shows.
     * If this code was backed by a relational database, it would select reviews
     * for all requested shows in a single SQL query.
     */
    @Override
    public Map<Integer, List<Review>> reviewsForShows(List<Integer> showIds) {
        logger.info("Loading reviews for shows {}", showIds.stream().map(String::valueOf).collect(Collectors.joining(", ")));
        return reviews
                .entrySet()
                .stream()
                .filter(entry -> showIds.contains(entry.getKey())).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    /** Stores the review, emits it on the subscription stream, and logs it. */
    @Override
    public void saveReview(SubmittedReview reviewInput) {
        List<Review> reviewsForShow = reviews.computeIfAbsent(reviewInput.getShowId(), (key) -> new ArrayList<>());
        Review review = Review.newBuilder()
                .username(reviewInput.getUsername())
                .starScore(reviewInput.getStarScore())
                .submittedDate(OffsetDateTime.now()).build();
        reviewsForShow.add(review);
        reviewsStream.next(review);
        logger.info("Review added {}", review);
    }

    /**
     * Batch save. Delegates to {@link #saveReview(SubmittedReview)} per review
     * so the single and batch paths share one implementation (the logic was
     * previously duplicated here verbatim).
     */
    @Override
    public void saveReviews(List<SubmittedReview> reviewsInput) {
        reviewsInput.forEach(this::saveReview);
    }

    /** Hot stream of reviews as they are added, backing the reviewAdded subscription. */
    @Override
    public Publisher<Review> getReviewsPublisher() {
        return reviewsPublisher;
    }

    /**
     * Returns all reviews submitted strictly inside the given date range.
     * NOTE(review): the boundaries are anchored at noon UTC of the from/to
     * dates — confirm start/end-of-day was not intended instead.
     */
    @Override
    public List<Review> listReviews(DateRange dateRange) {
        return reviews.values().stream().flatMap(Collection::stream)
                .filter(r -> r.getSubmittedDate().isAfter(OffsetDateTime.of(dateRange.getFrom(), LocalTime.NOON, ZoneOffset.UTC)))
                .filter(r -> r.getSubmittedDate().isBefore(OffsetDateTime.of(dateRange.getTo(), LocalTime.NOON, ZoneOffset.UTC)))
                .collect(Collectors.toList());
    }
}
| 9,248 |
0 | Create_ds/q/src/test/java/com/netflix/search | Create_ds/q/src/test/java/com/netflix/search/query/QueryTestsTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.netflix.search.query.utils.StringUtils;
import com.netflix.search.query.utils.TitleIdUtils;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.netflix.search.query.engine.solr.SolrIndexer;
import com.netflix.search.query.engine.solr.SolrSearcher;
import com.netflix.search.query.report.ReportItem;
import com.netflix.search.query.report.ResultType;
import com.netflix.search.query.report.detail.DetailReport;
import com.netflix.search.query.report.detail.DetailReportHeader;
import com.netflix.search.query.report.detail.DetailReportItem;
import com.netflix.search.query.report.google.GoogleDataExtractor;
import com.netflix.search.query.report.summary.SummaryReport;
import com.netflix.search.query.report.summary.SummaryReportHeader;
import com.netflix.search.query.report.summary.SummaryReportItem;
import static org.mockito.ArgumentMatchers.anyString;
public class QueryTestsTest {
private static final String LANG1_EN = "en";
private static final String TEST1 = "test1";
private static final String DOC1 = "1";
private static final String TITLE1 = "title1";
private static final String DOC2 = "2";
private static final String TITLE2 = "title2";
private static final String Q1 = "a";
private static final String TEST1_EN = "test1-english";
SolrIndexer solrIndexerMock;
SolrSearcher solrSearchMock;;
GoogleDataExtractor googleDataExtractor;
QueryTests queryTests;
Map<String, Map<String, Set<String>>> queries;
Map<String, Set<String>> queryToTitles;
Map<String, String> titleIdToName;
Set<String> titles;
DetailReport detailReport;
DetailReport detailReportSpy;
SummaryReport summaryReport;
SummaryReport summaryReportSpy;
List<String> languages;
Set<String> hits;
Map<String, ReportItem> expectedSummaryItems;
Map<String, ReportItem> expectedDetailItems;
Map<ResultType, Integer> countersForExpectedReport;
TitleIdUtils titleIdUtils;
@BeforeMethod
public void setup() throws Throwable{
solrIndexerMock = Mockito.mock(SolrIndexer.class);
solrSearchMock = Mockito.mock(SolrSearcher.class);
googleDataExtractor = Mockito.mock(GoogleDataExtractor.class);
titleIdUtils = Mockito.mock(TitleIdUtils.class);
queryTests = new QueryTests(titleIdUtils);
Mockito.doNothing().when(googleDataExtractor).initExtractor();
queryTests.setIndexer(solrIndexerMock);
queryTests.setSearcher(solrSearchMock);
queryTests.setGoogleDataExtractor(googleDataExtractor);
titleIdToName = Maps.newHashMap();
queries = Maps.newHashMap();
queryToTitles = Maps.newHashMap();
titles = Sets.newHashSet();
titles.add(DOC1);
queryToTitles.put(Q1, titles );
queries.put(TEST1_EN, queryToTitles);
queryTests.setQueries(queries );
titleIdToName.put(DOC1, TITLE1);
titleIdToName.put(DOC2, TITLE2);
detailReport = new DetailReport();
detailReportSpy = Mockito.spy(detailReport);
Mockito.doNothing().when(detailReportSpy).saveToLocalDisk();
queryTests.setDetailReport(detailReportSpy);
summaryReport = new SummaryReport();
summaryReportSpy = Mockito.spy(summaryReport);
Mockito.doNothing().when(summaryReportSpy).saveToLocalDisk();
queryTests.setSummaryReport(summaryReportSpy);
languages = Lists.newArrayList();
languages.add(LANG1_EN);
hits = Sets.newLinkedHashSet();
expectedSummaryItems = Maps.newHashMap();
expectedDetailItems = Maps.newHashMap();
countersForExpectedReport = Maps.newHashMap();
Mockito.when(titleIdUtils.getTitleToIds(anyString(), anyString())).thenReturn(titleIdToName);
Mockito.when(solrSearchMock.getResults(Q1, languages, TEST1)).thenReturn(hits);
}
@Test
public void perfectScore() throws Throwable{
hits.add(DOC1);//expected and fetched
queryTests.runTest(TEST1_EN, languages);
List<ReportItem> summaryReportItems = summaryReportSpy.getItems();
countersForExpectedReport.put(ResultType.successQ, 1);
expectedSummaryItems.put(TEST1_EN, new SummaryReportItem(TEST1_EN, 1, 1, 1.0, 1.0, 1.0, countersForExpectedReport));
for (ReportItem item: summaryReportItems){
Assert.assertEquals(item.getNamedValues(), expectedSummaryItems.get(item.getNamedValues().get(SummaryReportHeader.name.toString())).getNamedValues());
}
List<ReportItem> detailReportItems = detailReportSpy.getItems();
Assert.assertEquals(detailReportItems.size(), 0);
}
@Test
public void runPrecision() throws Throwable{
hits.add(DOC1);//expected and fetched
hits.add(DOC2);//unexpected but fetched
queryTests.runTest(TEST1_EN, languages);
List<ReportItem> summaryReportItems = summaryReportSpy.getItems();
countersForExpectedReport.put(ResultType.supersetResultsFailed, 1);
expectedSummaryItems.put(TEST1_EN, new SummaryReportItem(TEST1_EN, 1, 1, 0.5, 1.0, 0.6667, countersForExpectedReport));
for (ReportItem item: summaryReportItems){
Assert.assertEquals(item.getNamedValues(), expectedSummaryItems.get(item.getNamedValues().get(SummaryReportHeader.name.toString())).getNamedValues());
}
List<ReportItem> detailReportItems = detailReportSpy.getItems();
expectedDetailItems.put(TEST1_EN, new DetailReportItem(TEST1_EN, ResultType.supersetResultsFailed, Q1, "", TITLE2));
for (ReportItem item: detailReportItems){
Assert.assertEquals(item.getNamedValues(), expectedDetailItems.get(item.getNamedValues().get(DetailReportHeader.name.toString())).getNamedValues());
}
}
@Test
public void runRecall() throws Throwable{
hits.add(DOC1);//expected and fetched
titles.add(DOC2);//expected but not fetched
queryTests.runTest(TEST1_EN, languages);
List<ReportItem> summaryReportItems = summaryReportSpy.getItems();
countersForExpectedReport.put(ResultType.differentResultsFailed, 1);
expectedSummaryItems.put(TEST1_EN, new SummaryReportItem(TEST1_EN, 2, 1, 1.0, 0.5, 0.6667, countersForExpectedReport));
for (ReportItem item: summaryReportItems){
Assert.assertEquals(item.getNamedValues(), expectedSummaryItems.get(item.getNamedValues().get(SummaryReportHeader.name.toString())).getNamedValues());
}
List<ReportItem> detailReportItems = detailReportSpy.getItems();
expectedDetailItems.put(TEST1_EN, new DetailReportItem(TEST1_EN, ResultType.differentResultsFailed, Q1, TITLE2, ""));
for (ReportItem item: detailReportItems){
Assert.assertEquals(item.getNamedValues(), expectedDetailItems.get(item.getNamedValues().get(DetailReportHeader.name.toString())).getNamedValues());
}
}
@Test
public void runRecallNoResults() throws Throwable{
hits.clear();//expected but not fetched
titles.add(DOC1);//expected but not fetched
queryTests.runTest(TEST1_EN, languages);
List<ReportItem> summaryReportItems = summaryReportSpy.getItems();
countersForExpectedReport.put(ResultType.noResultsFailed, 1);
expectedSummaryItems.put(TEST1_EN, new SummaryReportItem(TEST1_EN, 1, 1, 0.0, 0.0, 0.0, countersForExpectedReport));
for (ReportItem item: summaryReportItems){
Assert.assertEquals(item.getNamedValues(), expectedSummaryItems.get(item.getNamedValues().get(SummaryReportHeader.name.toString())).getNamedValues());
}
List<ReportItem> detailReportItems = detailReportSpy.getItems();
expectedDetailItems.put(TEST1_EN, new DetailReportItem(TEST1_EN, ResultType.noResultsFailed, Q1, TITLE1, DetailReport.NONE));
for (ReportItem item: detailReportItems){
Assert.assertEquals(item.getNamedValues(), expectedDetailItems.get(item.getNamedValues().get(DetailReportHeader.name.toString())).getNamedValues());
}
}
@Test
public void getLanguage(){
    // "swedish-video" must be mapped to the single locale code "sv".
    List<String> expected = Lists.newArrayList("sv");
    Assert.assertEquals(StringUtils.getLanguageForTest("swedish-video"), expected);
}
}
| 9,249 |
0 | Create_ds/q/src/test/java/com/netflix/search/query | Create_ds/q/src/test/java/com/netflix/search/query/input/QueriesTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.input;
import java.util.Map;
import java.util.Set;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.testng.collections.Maps;
import com.google.common.collect.Sets;
import com.netflix.search.query.report.google.GoogleDataExtractor;
public class QueriesTest {
    private static final String DATASET_ID = "english-video";
    private static final String TEST1 = "regular";
    private static final String ID1 = "123";
    private static final String ID2 = "1234";
    private static final String SPANISH_TITLE = "title es";
    private static final String ENGLISH_TITLE = "title en";
    private static final String Q1 = "abc";

    /**
     * Two titles sharing the same query must both end up in the query-to-id map,
     * with ids suffixed by the dataset id.
     */
    @Test
    void createDocTest()
    {
        GoogleDataExtractor titleExtractor = Mockito.mock(GoogleDataExtractor.class);
        Map<Integer, TitleWithQueries> titlesWithQueries = Maps.newHashMap();
        titlesWithQueries.put(1, newTitleWithQueries(ID1));
        titlesWithQueries.put(2, newTitleWithQueries(ID2));
        Map<String, Map<Integer, TitleWithQueries>> mapOfQueriesToTitles = Maps.newHashMap();
        mapOfQueriesToTitles.put(DATASET_ID, titlesWithQueries);
        Mockito.when(titleExtractor.getTitlesWithQueriesPerDataset()).thenReturn(mapOfQueriesToTitles);

        Queries queries = new Queries(DATASET_ID, TEST1, titleExtractor.getTitlesWithQueriesPerDataset());
        queries.populateFromGoogleSpreadsheets();

        Set<String> expectedIds = Sets.newHashSet(ID1 + "_" + DATASET_ID, ID2 + "_" + DATASET_ID);
        Map<String, Set<String>> expectedQueryToIdMap = Maps.newHashMap();
        expectedQueryToIdMap.put(Q1, expectedIds);
        Assert.assertEquals(queries.getQueryToIdMap(), expectedQueryToIdMap);
    }

    /** Builds a fully-populated spreadsheet row for the given title id. */
    private static TitleWithQueries newTitleWithQueries(String id)
    {
        TitleWithQueries titleWithQueries = new TitleWithQueries(DATASET_ID);
        titleWithQueries.setValue(TitleWithQueries.ID, id);
        titleWithQueries.setValue(TitleWithQueries.TITLE_EN, ENGLISH_TITLE);
        titleWithQueries.setValue(TitleWithQueries.TITLE_LOCALE, SPANISH_TITLE);
        titleWithQueries.setValue(TitleWithQueries.Q_ + "regular", Q1);
        return titleWithQueries;
    }
}
| 9,250 |
0 | Create_ds/q/src/test/java/com/netflix/search/query | Create_ds/q/src/test/java/com/netflix/search/query/utils/DateUtilsTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.utils;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.testng.Assert;
import org.testng.annotations.Test;
public class DateUtilsTest {
    /** DateUtil must parse the framework's "yyyyMMMdd_HH:mm:ss"-style timestamp. */
    @Test
    public void dateTest() throws ParseException
    {
        Date parsed = new DateUtil().getDateFromString("2016Jun23_07:56:47");
        Date expected = new SimpleDateFormat("dd/MM/yyyy hh:mm:ss").parse("23/06/2016 07:56:47");
        Assert.assertEquals(parsed, expected);
    }
}
| 9,251 |
0 | Create_ds/q/src/test/java/com/netflix/search/query | Create_ds/q/src/test/java/com/netflix/search/query/utils/HeaderUtilsTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.utils;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.netflix.search.query.report.ReportType;
public class HeaderUtilsTest {
    /** The summary report must expose its columns in this fixed order. */
    @Test
    public void testSummary(){
        String[] expected = {"name", "titles", "queries", "supersetResultsFailed", "differentResultsFailed", "noResultsFailed", "successQ", "precision", "recall", "fmeasure", "comments"};
        Assert.assertEquals(HeaderUtils.getHeader(ReportType.summary), expected);
    }

    /** The detail report must expose its columns in this fixed order. */
    @Test
    public void testDetail(){
        String[] expected = {"name", "failure", "query", "expected", "actual", "comments"};
        Assert.assertEquals(HeaderUtils.getHeader(ReportType.details), expected);
    }
}
| 9,252 |
0 | Create_ds/q/src/test/java/com/netflix/search/query | Create_ds/q/src/test/java/com/netflix/search/query/utils/StringUtilsTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.utils;
import org.testng.Assert;
import org.testng.annotations.Test;
public class StringUtilsTest {
    /** Spaces in the human-readable name are replaced with underscores when building an id. */
    @Test
    public void testSummary(){
        Assert.assertEquals(
                StringUtils.createIdUsingTestName("test", "Some Name Containing Spaces"),
                "test_Some_Name_Containing_Spaces");
    }
}
| 9,253 |
0 | Create_ds/q/src/test/java/com/netflix/search/query/report | Create_ds/q/src/test/java/com/netflix/search/query/report/detail/DetailReportTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.detail;
import java.util.List;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.netflix.search.query.report.Report;
import com.netflix.search.query.report.ReportItem;
import com.netflix.search.query.report.ResultType;
public class DetailReportTest {
    private static final String TITLE1 = "title1";
    private static final String TITLE2 = "title2";
    private static final String Q1 = "a";
    private static final String Q2 = "b";
    private static final String TEST1_EN = "test1-english";

    /** Diffing two empty detail reports must yield an empty diff. */
    @Test
    void emptyReportsTest()
    {
        Report previousReport = new DetailReport();
        Report report = new DetailReport();
        Report diffReport = report.createReportDiffs(previousReport);
        List<ReportItem> items = diffReport.getItems();
        List<ReportItem> expectedItems = Lists.newArrayList();
        Assert.assertEquals(items, expectedItems);
    }

    /**
     * Diffing two reports that contain the same single failure: the item is
     * carried over unchanged (neither NEW nor FIXED).
     */
    @Test
    void noDiffReportsTest()
    {
        Report previousReport = new DetailReport();
        List<ReportItem> itemsForPreviousReport = Lists.newArrayList();
        itemsForPreviousReport.add(new DetailReportItem(TEST1_EN, ResultType.noResultsFailed, Q1, TITLE1, DetailReport.NONE));
        previousReport.setItems(itemsForPreviousReport);
        Report report = new DetailReport();
        List<ReportItem> itemsForCurrentReport = Lists.newArrayList();
        itemsForCurrentReport.add(new DetailReportItem(TEST1_EN, ResultType.noResultsFailed, Q1, TITLE1, DetailReport.NONE));
        report.setItems(itemsForCurrentReport);
        Report diffReport = report.createReportDiffs(previousReport);
        List<ReportItem> itemsForDiffsReport = diffReport.getItems();
        Map<String, ReportItem> expectedItems = Maps.newHashMap();
        // BUG FIX: this was keyed under the literal "test1", but the lookup below keys
        // by the item's name ("test1-english"); any non-empty diff would have hit an
        // NPE on expectedItems.get(...).getNamedValues() instead of a real assertion.
        expectedItems.put(TEST1_EN, new DetailReportItem(TEST1_EN, ResultType.noResultsFailed, Q1, TITLE1, DetailReport.NONE));
        for (ReportItem item : itemsForDiffsReport)
        {
            Assert.assertEquals(item.getNamedValues(), expectedItems.get(item.getNamedValues().get(DetailReportHeader.name.toString())).getNamedValues());
        }
    }

    /**
     * Diffing a report with a new failure against one with an old failure:
     * the old item is flagged FIXED and the new item is flagged NEW.
     */
    @Test
    void diffReportsTest()
    {
        Report previousReport = new DetailReport();
        List<ReportItem> itemsForPreviousReport = Lists.newArrayList();
        itemsForPreviousReport.add(new DetailReportItem(TEST1_EN, ResultType.noResultsFailed, Q1, TITLE1, DetailReport.NONE));
        previousReport.setItems(itemsForPreviousReport);
        Report report = new DetailReport();
        List<ReportItem> itemsForCurrentReport = Lists.newArrayList();
        itemsForCurrentReport.add(new DetailReportItem(TEST1_EN, ResultType.supersetResultsFailed, Q2, TITLE1, TITLE2));
        report.setItems(itemsForCurrentReport);
        Report diffReport = report.createReportDiffs(previousReport);
        List<ReportItem> itemsForDiffsReport = diffReport.getItems();
        // Expected items are keyed by test name + failure type so both diff rows can be checked.
        Map<String, ReportItem> expectedItems = Maps.newHashMap();
        DetailReportItem fixedItem = new DetailReportItem(TEST1_EN, ResultType.noResultsFailed, Q1, TITLE1, DetailReport.NONE);
        fixedItem.setValue(DetailReportHeader.comments.toString(), DetailReport.FIXED);
        expectedItems.put(TEST1_EN + ResultType.noResultsFailed.toString(), fixedItem);
        DetailReportItem newItem = new DetailReportItem(TEST1_EN, ResultType.supersetResultsFailed, Q2, TITLE1, TITLE2);
        newItem.setValue(DetailReportHeader.comments.toString(), DetailReport.NEW);
        expectedItems.put(TEST1_EN + ResultType.supersetResultsFailed.toString(), newItem);
        for (ReportItem item : itemsForDiffsReport)
        {
            Map<String, String> actualNamedValues = item.getNamedValues();
            String testName = actualNamedValues.get(DetailReportHeader.name.toString());
            String failure = actualNamedValues.get(DetailReportHeader.failure.toString());
            Map<String, String> expectedNamedValues = expectedItems.get(testName + failure).getNamedValues();
            Assert.assertEquals(actualNamedValues, expectedNamedValues);
        }
    }
}
| 9,254 |
0 | Create_ds/q/src/test/java/com/netflix/search/query/report | Create_ds/q/src/test/java/com/netflix/search/query/report/summary/SummaryReportTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.summary;
import java.util.List;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.netflix.search.query.report.Report;
import com.netflix.search.query.report.ReportItem;
import com.netflix.search.query.report.ResultType;
public class SummaryReportTest {
    private static final String TEST1 = "test1";

    /** Wraps a single summary item into a fresh SummaryReport. */
    private static Report reportWith(SummaryReportItem item)
    {
        Report report = new SummaryReport();
        List<ReportItem> items = Lists.newArrayList();
        items.add(item);
        report.setItems(items);
        return report;
    }

    /** Asserts each diff item equals the expected item registered under its test name. */
    private static void assertItemsMatch(List<ReportItem> actualItems, Map<String, ReportItem> expectedItems)
    {
        for (ReportItem item : actualItems) {
            String name = item.getNamedValues().get(SummaryReportHeader.name.toString());
            Assert.assertEquals(item.getNamedValues(), expectedItems.get(name).getNamedValues());
        }
    }

    /** Diffing two empty summary reports must yield an empty diff. */
    @Test
    void emptyReportsTest()
    {
        Report diffReport = new SummaryReport().createReportDiffs(new SummaryReport());
        Assert.assertEquals(diffReport.getItems(), Lists.newArrayList());
    }

    /** Identical metrics in both reports must diff to all-zero deltas. */
    @Test
    void noDiffReportsTest()
    {
        Report previousReport = reportWith(new SummaryReportItem(TEST1, 1, 1, 0.1, 0.1, 0.1, Maps.<ResultType, Integer>newHashMap()));
        Report currentReport = reportWith(new SummaryReportItem(TEST1, 1, 1, 0.1, 0.1, 0.1, Maps.<ResultType, Integer>newHashMap()));
        Report diffReport = currentReport.createReportDiffs(previousReport);
        Map<String, ReportItem> expectedItems = Maps.newHashMap();
        expectedItems.put(TEST1, new SummaryReportItem(TEST1, 0, 0, 0.0, 0.0, 0.0, Maps.<ResultType, Integer>newHashMap()));
        assertItemsMatch(diffReport.getItems(), expectedItems);
    }

    /** A changed f-measure (0.1 -> 0.2) must show up as a 0.1 delta in the diff. */
    @Test
    void diffReportsTest()
    {
        Report previousReport = reportWith(new SummaryReportItem(TEST1, 1, 1, 0.1, 0.1, 0.1, Maps.<ResultType, Integer>newHashMap()));
        Report currentReport = reportWith(new SummaryReportItem(TEST1, 1, 1, 0.1, 0.1, 0.2, Maps.<ResultType, Integer>newHashMap()));
        Report diffReport = currentReport.createReportDiffs(previousReport);
        Map<String, ReportItem> expectedItems = Maps.newHashMap();
        expectedItems.put(TEST1, new SummaryReportItem(TEST1, 0, 0, 0.0, 0.0, 0.1, Maps.<ResultType, Integer>newHashMap()));
        assertItemsMatch(diffReport.getItems(), expectedItems);
    }

    /** Diffing against a missing (null) previous report must yield all-zero deltas. */
    @Test
    void previousNullReportTest()
    {
        Report currentReport = reportWith(new SummaryReportItem(TEST1, 1, 1, 0.1, 0.1, 0.1, Maps.<ResultType, Integer>newHashMap()));
        Report diffReport = currentReport.createReportDiffs(null);
        Map<String, ReportItem> expectedItems = Maps.newHashMap();
        expectedItems.put(TEST1, new SummaryReportItem(TEST1, 0, 0, 0.0, 0.0, 0.0, Maps.<ResultType, Integer>newHashMap()));
        assertItemsMatch(diffReport.getItems(), expectedItems);
    }
}
| 9,255 |
0 | Create_ds/q/src/test/java/com/netflix/search/query/engine | Create_ds/q/src/test/java/com/netflix/search/query/engine/solr/SolrSearcherTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.engine.solr;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
public class SolrSearcherTest {
private static final String LOCALE = "es";
private static final String TEST1 = "test1";
@Test
void getUrlTest()
{
List<String> languages = Lists.newArrayList();
languages.add(LOCALE);
SolrSearcher searcher = new SolrSearcher();
String urlForGettingDoc = searcher.getUrlForGettingDoc("abc", languages, TEST1);
Assert.assertEquals(urlForGettingDoc, "http://localhost:8983/solr/qtest/select?q=%22abc%22&defType=edismax&lowercaseOperators=false&rows=100000&qs=10&fl=id%2C+title_en&sort=id+DESC&qf=title_es+title_aka_es&fq=query_testing_type%3Atest1&wt=json");
}
}
| 9,256 |
0 | Create_ds/q/src/test/java/com/netflix/search/query/engine | Create_ds/q/src/test/java/com/netflix/search/query/engine/solr/SolrIndexerTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.engine.solr;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.testng.collections.Maps;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.netflix.search.query.Properties;
import com.netflix.search.query.engine.BaseIndexer;
public class SolrIndexerTest {
private static final String LOCALE = "es";
private static final String ID = "123";
private static final String ALT_TITLE = "title es 2";
private static final String SPANISH_TITLE = "title es";
private static final String ENGLISH_TITLE = "title en";
private static final String TEST1 = "test1";
@Test
void createDocTest()
{
BaseIndexer indexer = new SolrIndexer("", TEST1);
List<String> languages = Lists.newArrayList();
languages.add(LOCALE);
Map<String, Object> createdDoc = indexer.createDoc(ID, ENGLISH_TITLE, SPANISH_TITLE, ALT_TITLE, languages);
Map<String, Object> expectedDoc = Maps.newHashMap();
expectedDoc.put(Properties.idField.get(), ID + "_" + TEST1);
expectedDoc.put(Properties.titleFields.get().get(0) + "_en", ENGLISH_TITLE);
Set<Object> localizedTitles = Sets.newHashSet();
localizedTitles.add(SPANISH_TITLE);
expectedDoc.put(Properties.titleFields.get().get(0) + "_es", localizedTitles);
expectedDoc.put(Properties.docTypeFieldName.get(), TEST1);
Assert.assertEquals(createdDoc, expectedDoc);
StringBuilder jsonStringOfDoc = indexer.getJsonStringOfDoc(new ObjectMapper().valueToTree(createdDoc));
Assert.assertEquals(jsonStringOfDoc.toString(), "[{\"query_testing_type\":\"test1\",\"title_en\":\"title en\",\"id\":\"123_test1\",\"title_es\":[\"title es\"]}]");
String urlForAddingDoc = indexer.getUrlForAddingDoc(createdDoc);
Assert.assertEquals(urlForAddingDoc, "http://localhost:8983/solr/qtest/update");
String urlForCommitting = indexer.getUrlForCommitting();
Assert.assertEquals(urlForCommitting, "http://localhost:8983/solr/qtest/update?commit=true");
}
}
| 9,257 |
0 | Create_ds/q/src/test/java/com/netflix/search/query/engine | Create_ds/q/src/test/java/com/netflix/search/query/engine/es/ElasticsearchIndexerTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.engine.es;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.testng.collections.Maps;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.netflix.search.query.Properties;
import com.netflix.search.query.engine.BaseIndexer;
public class ElasticsearchIndexerTest {
private static final String LOCALE = "es";
private static final String ID = "123";
private static final String ALT_TITLE = "title es 2";
private static final String SPANISH_TITLE = "title es";
private static final String ENGLISH_TITLE = "title en";
private static final String TEST1 = "test1";
@Test
void createDocTest()
{
BaseIndexer indexer = new ElasticsearchIndexer("", TEST1);
List<String> languages = Lists.newArrayList();
languages.add(LOCALE);
Map<String, Object> createdDoc = indexer.createDoc(ID, ENGLISH_TITLE, SPANISH_TITLE, ALT_TITLE, languages);
Map<String, Object> expectedDoc = Maps.newHashMap();
expectedDoc.put(Properties.idField.get(), ID + "_" + TEST1);
expectedDoc.put(Properties.titleFields.get().get(0) + "_en", ENGLISH_TITLE);
Set<Object> localizedTitles = Sets.newHashSet();
localizedTitles.add(SPANISH_TITLE);
expectedDoc.put(Properties.titleFields.get().get(0) + "_es", localizedTitles);
expectedDoc.put(Properties.docTypeFieldName.get(), TEST1);
Assert.assertEquals(createdDoc, expectedDoc);
StringBuilder jsonStringOfDoc = indexer.getJsonStringOfDoc(new ObjectMapper().valueToTree(createdDoc));
Assert.assertEquals(jsonStringOfDoc.toString(), "{\"query_testing_type\":\"test1\",\"title_en\":\"title en\",\"id\":\"123_test1\",\"title_es\":[\"title es\"]}");
String urlForAddingDoc = indexer.getUrlForAddingDoc(createdDoc);
Assert.assertEquals(urlForAddingDoc, "http://localhost:8983/solr/qtest/test_doc/123_test1");
String urlForCommitting = indexer.getUrlForCommitting();
Assert.assertEquals(urlForCommitting, "http://localhost:8983/solr/qtest/_flush");
}
}
| 9,258 |
0 | Create_ds/q/src/test/java/com/netflix/search/query/engine | Create_ds/q/src/test/java/com/netflix/search/query/engine/es/ElasticsearchSearcherTest.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.engine.es;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.collect.Lists;
public class ElasticsearchSearcherTest {
    private static final String LOCALE = "es";
    private static final String TEST1 = "test1";

    // NOTE(review): the method name looks copy-pasted from the report tests; it actually
    // verifies the generated ES query JSON. Name kept as-is to preserve the test id.
    @Test
    void emptyReportsTest() throws JsonProcessingException
    {
        ElasticsearchSearcher searcher = new ElasticsearchSearcher();
        List<String> languages = Lists.newArrayList(LOCALE);
        String actualJson = searcher.getJsonForQuery("abc", languages, TEST1);
        String expectedJson = "{\"query\":{\"filtered\":{\"filter\":{\"term\":{\"query_testing_type\":\"test1\"}},\"query\":{\"multi_match\":{\"query\":\"abc\",\"type\":\"best_fields\",\"fields\":[\"title_es\",\"title_aka_es\"],\"operator\":\"and\"}}}},\"sort\":{\"id\":{\"order\":\"desc\"}}}";
        Assert.assertEquals(actualJson, expectedJson);
    }
}
| 9,259 |
0 | Create_ds/q/src/main/java/com/netflix/search | Create_ds/q/src/main/java/com/netflix/search/query/Properties.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query;
import com.netflix.config.DynamicBooleanProperty;
import com.netflix.config.DynamicIntProperty;
import com.netflix.config.DynamicStringListProperty;
import com.netflix.config.DynamicStringMapProperty;
import com.netflix.config.DynamicStringProperty;
import com.netflix.config.DynamicStringSetProperty;
public class Properties {
// Dynamic (Archaius) configuration for the query-testing framework. Values can be
// overridden at runtime via the "search.query.testing.*" property namespace.
// --- Search engine connection and type ---
public static final DynamicStringProperty engineHost = new DynamicStringProperty("search.query.testing.engineHost", "localhost");
public static final DynamicStringProperty enginePort = new DynamicStringProperty("search.query.testing.enginePort", "8983");
public static final DynamicStringProperty engineServlet = new DynamicStringProperty("search.query.testing.engineServlet", "solr");
public static final DynamicStringProperty engineIndexName = new DynamicStringProperty("search.query.testing.engineIndexName", "qtest");
public static final DynamicStringProperty engineType = new DynamicStringProperty("search.query.testing.engineType", "solr");
// Document type used when the engine is Elasticsearch.
public static final DynamicStringProperty esDocType = new DynamicStringProperty("search.query.testing.esDocType", "test_doc");
// --- Test data sets, query categories and language handling ---
public static final DynamicStringListProperty validDataSetsId = new DynamicStringListProperty("search.query.testing.validDataSetsId", "swedish-video");
public static final DynamicStringListProperty queryCategories = new DynamicStringListProperty("search.query.testing.queryCategories", "regular,misspelled");
public static final DynamicStringSetProperty languagesRequiringAdditionalField = new DynamicStringSetProperty("search.query.testing.languagesRequiringAdditionalField", "");
public static final DynamicStringSetProperty languagesRequiringTransliterationFromEnglish = new DynamicStringSetProperty("search.query.testing.languagesRequiringTransliterationFromEnglish", "");
public static final DynamicStringProperty transliterationFieldName = new DynamicStringProperty("search.query.testing.transliterationFieldName", "");
// Maps a test-name prefix (e.g. "swedish") to the locale codes it expands to.
public static final DynamicStringMapProperty languageExpansionBasedOnTestNames = new DynamicStringMapProperty("search.query.testing.languageExpansionBasedOnTestNames", "swedish=sv");
// --- Run modes and local I/O ---
public static final DynamicBooleanProperty isLocalTest = new DynamicBooleanProperty("search.query.testing.isLocalTest",false);
public static final DynamicBooleanProperty isDevOnlyTest = new DynamicBooleanProperty("search.query.testing.isDevOnlyTest",false);
public static final DynamicStringProperty dataDir = new DynamicStringProperty("search.query.testing.dataDir", "data/q_tests/");
public static final DynamicBooleanProperty isPrintUrl = new DynamicBooleanProperty("search.query.testing.isPrintUrl",false);
// --- Index document field names and input parsing ---
public static final DynamicStringProperty idField = new DynamicStringProperty("search.query.testing.idField", "id");
public static final DynamicStringListProperty titleFields = new DynamicStringListProperty("search.query.testing.titleFields", "title");
public static final DynamicStringListProperty titleAkaFields = new DynamicStringListProperty("search.query.testing.titleAkaFields", "title_aka");
public static final DynamicStringListProperty requiredNumericFields = new DynamicStringListProperty("search.query.testing.requiredNumericFields", "");
public static final DynamicStringListProperty requiredStringFields = new DynamicStringListProperty("search.query.testing.requiredStringFields", "");
// Field used to tag documents with the test they belong to (used as a query filter).
public static final DynamicStringProperty docTypeFieldName = new DynamicStringProperty("search.query.testing.docTypeFieldName", "query_testing_type");
public static final DynamicStringProperty inputDelimiter = new DynamicStringProperty("search.query.testing.inputDelimiter", "\t");
public static final DynamicStringProperty dateFormat = new DynamicStringProperty("search.query.testing.dateFormat", "yyyyMMMdd_HH:mm:ss");
// --- Google Sheets integration (service-account credentials and sheet names) ---
public static final DynamicStringProperty serviceAccountEmail = new DynamicStringProperty("search.query.testing.serviceAccountEmail", "CHANGE-ME@appspot.gserviceaccount.com");
public static final DynamicStringProperty googleAppName = new DynamicStringProperty("search.query.testing.googleAppName", "CHANGE-ME");
public static final DynamicStringProperty p12KeyFileName = new DynamicStringProperty("search.query.testing.p12KeyFileName", "CHANGE-ME.p12");
public static final DynamicStringProperty googleSheetsKeyDir = new DynamicStringProperty("search.query.testing.googleSheetsKeyDir", "data/g_sheets");
// Pause (ms) between Google API calls to stay under rate limits.
public static final DynamicIntProperty googleApiThrottlePause = new DynamicIntProperty("search.query.testing.googleApiThrottlePause", 1500);
public static final DynamicStringProperty inputQueriesSheet = new DynamicStringProperty("search.query.testing.inputQueriesSheet", "query-testing-framework-input");
public static final DynamicStringProperty sumReportSheet = new DynamicStringProperty("search.query.testing.sumReportSheet", "query-testing-framework-results-sum");
public static final DynamicStringProperty detailReportSheet = new DynamicStringProperty("search.query.testing.detailReportSheet", "query-testing-framework-results-details");
public static final DynamicStringProperty sumReportSheetDev = new DynamicStringProperty("search.query.testing.sumReportSheetDev", "query-testing-framework-results-sum-dev");
public static final DynamicStringProperty detailReportSheetDev = new DynamicStringProperty("search.query.testing.detailReportSheetDev", "query-testing-framework-results-details-dev");
}
| 9,260 |
0 | Create_ds/q/src/main/java/com/netflix/search | Create_ds/q/src/main/java/com/netflix/search/query/QueryTests.java | /**
* Copyright 2016 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query;
import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.netflix.search.query.utils.StringUtils;
import com.netflix.search.query.utils.TitleIdUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.netflix.search.query.engine.BaseIndexer;
import com.netflix.search.query.engine.BaseSearcher;
import com.netflix.search.query.engine.es.ElasticsearchIndexer;
import com.netflix.search.query.engine.es.ElasticsearchSearcher;
import com.netflix.search.query.engine.solr.SolrIndexer;
import com.netflix.search.query.engine.solr.SolrSearcher;
import com.netflix.search.query.input.Queries;
import com.netflix.search.query.report.Report;
import com.netflix.search.query.report.ResultType;
import com.netflix.search.query.report.detail.DetailReport;
import com.netflix.search.query.report.google.GoogleDataExtractor;
import com.netflix.search.query.report.summary.SummaryReport;
/**
 * End-to-end driver for the query-testing framework.
 *
 * One run ({@link #getDataRunTestsUpdateReports()}) downloads queries and the
 * previous run's reports from Google Spreadsheets, indexes each dataset into
 * the configured engine (Solr or Elasticsearch), executes every query,
 * produces summary/detail reports plus diffs against the previous run, and
 * (outside local-test mode) publishes everything back to Google Spreadsheets.
 */
public class QueryTests {
    public static final Logger logger = LoggerFactory.getLogger(QueryTests.class);

    // Engine adapters; re-created for each test in runAllTests() based on Properties.engineType.
    private BaseIndexer indexer = null;
    private BaseSearcher searcher = null;
    // testName -> (query -> expected title ids); LinkedHashMap keeps test execution order stable.
    private Map<String, Map<String, Set<String>>> queries = Maps.newLinkedHashMap();
    private GoogleDataExtractor googleDataExtractor;
    private DetailReport detailReport;
    private SummaryReport summaryReport;
    private TitleIdUtils titleIdUtils;

    public BaseIndexer getIndexer() {
        return indexer;
    }

    public void setIndexer(BaseIndexer indexer) {
        this.indexer = indexer;
    }

    public Map<String, Map<String, Set<String>>> getQueries() {
        return queries;
    }

    public void setQueries(Map<String, Map<String, Set<String>>> queries) {
        this.queries = queries;
    }

    /**
     * Lazily creates the Google data extractor and eagerly triggers the
     * download of queries and previous reports. Initialization failures are
     * logged and swallowed, so the returned extractor may hold no data.
     */
    public GoogleDataExtractor getGoogleDataExtractor() {
        if (googleDataExtractor == null) {
            googleDataExtractor = new GoogleDataExtractor();
            try {
                googleDataExtractor.initExtractor();
                googleDataExtractor.setReportNamesAndDownloadData();
            } catch (Throwable e) {
                logger.error("Error trying to init the GoogleDataExtractor", e);
            }
        }
        return googleDataExtractor;
    }

    public QueryTests(TitleIdUtils titleIdUtils) {
        this.titleIdUtils = titleIdUtils;
    }

    public void setGoogleDataExtractor(GoogleDataExtractor googleDataExtractor) {
        this.googleDataExtractor = googleDataExtractor;
    }

    // Lazy accessor: creates an empty detail report on first use.
    public DetailReport getDetailReport() {
        if (detailReport == null)
            detailReport = new DetailReport();
        return detailReport;
    }

    public void setDetailReport(DetailReport detailReport) {
        this.detailReport = detailReport;
    }

    // Lazy accessor: creates an empty summary report on first use.
    public SummaryReport getSummaryReport() {
        if (summaryReport == null)
            summaryReport = new SummaryReport();
        return summaryReport;
    }

    public void setSummaryReport(SummaryReport summaryReport) {
        this.summaryReport = summaryReport;
    }

    public void setSearcher(BaseSearcher searcher) {
        this.searcher = searcher;
    }

    /** CLI entry point: performs one complete test run. */
    public static void main(String[] args) {
        new QueryTests(new TitleIdUtils()).getDataRunTestsUpdateReports();
    }

    /**
     * Performs one full run: download inputs, run all tests, save reports and
     * diffs locally, then publish to Google Sheets unless running locally.
     * Any failure is logged; nothing is rethrown.
     */
    public void getDataRunTestsUpdateReports() {
        try {
            long start = System.currentTimeMillis();
            googleDataExtractor = getGoogleDataExtractor();
            detailReport = getDetailReport();
            summaryReport = getSummaryReport();
            // Previous reports must be captured before the current run overwrites state.
            Report previousSummaryReport = googleDataExtractor.getPreviousSummaryReport();
            Report previousDetailReport = googleDataExtractor.getPreviousDetailReport();
            populateAllQueriesFromGoogleSpreadsheets();
            runAllTests();
            detailReport.saveToLocalDisk();
            logger.info("Generated: " + detailReport);
            summaryReport.saveToLocalDisk();
            logger.info("Generated: " + summaryReport);
            Report detailDiffs = detailReport.createReportDiffs(previousDetailReport);
            logger.info("Generated: " + detailDiffs);
            Report summaryDiff = summaryReport.createReportDiffs(previousSummaryReport);
            logger.info("Generated: " + summaryDiff);
            detailDiffs.saveToLocalDisk();
            summaryDiff.saveToLocalDisk();
            if (!Properties.isLocalTest.get()) {
                googleDataExtractor.publishReportToGoogleSpreadsheet(summaryReport);
                googleDataExtractor.publishReportToGoogleSpreadsheet(summaryDiff);
                googleDataExtractor.publishReportToGoogleSpreadsheet(detailReport);
                googleDataExtractor.publishReportToGoogleSpreadsheet(detailDiffs);
            }
            logger.info("All tests took: " + (System.currentTimeMillis() - start) + " ms");
        } catch (Throwable e) {
            logger.error("Couldn't proceed running query tests", e);
        }
    }

    /**
     * Runs every test that has queries and an existing dataset file: builds the
     * engine-specific indexer/searcher, indexes the dataset's languages, then
     * executes the queries. Tests without data are skipped, not failed.
     */
    private void runAllTests() throws Throwable {
        nextTest:
        for (String testName : queries.keySet()) {
            long start = System.currentTimeMillis();
            if (queries.get(testName) == null || queries.get(testName).size() == 0)
                continue nextTest;
            List<String> languages = StringUtils.getLanguageForTest(testName);
            indexer = getIndexer(testName);
            if (indexer == null)
                continue nextTest; // data file missing or unknown engine type (already logged)
            searcher = getSearcher();
            logger.info("Processing: " + testName);
            indexer.indexData(languages);
            languages.add("en");//for search add en fields
            runTest(testName, languages);
            logger.info(testName + " took: " + (System.currentTimeMillis() - start) + " ms");
        }
    }

    // Picks the searcher implementation from Properties.engineType ("solr" or "es"); null otherwise.
    private BaseSearcher getSearcher() {
        if (Properties.engineType.get().equalsIgnoreCase("solr"))
            return new SolrSearcher();
        else if (Properties.engineType.get().equalsIgnoreCase("es"))
            return new ElasticsearchSearcher();
        else {
            logger.error("No support for the engine type: " + Properties.engineType.get());
            return null;
        }
    }

    /**
     * Builds the queries map for every (dataset, category) pair from the data
     * previously downloaded by the Google extractor. Key format: "dataset-category".
     */
    protected void populateAllQueriesFromGoogleSpreadsheets() {
        for (String dataset : Properties.validDataSetsId.get()) {
            for (String queryCategory : Properties.queryCategories.get()) {
                String testKey = dataset + StringUtils.SHEET_TAB_NAME_DELIMITER + queryCategory;
                Queries queriesHolder = null;
                queriesHolder = new Queries(dataset, queryCategory, googleDataExtractor.getTitlesWithQueriesPerDataset());
                queriesHolder.populateFromGoogleSpreadsheets();
                queries.put(testKey, queriesHolder.getQueryToIdMap());
            }
        }
    }

    /**
     * Executes every query of one test against the searcher, accumulating
     * precision/recall/F-measure into the summary report and per-query
     * outcomes into the detail report.
     *
     * @param testName  key into {@link #queries} ("dataset-category")
     * @param languages languages to search (already includes "en")
     */
    public void runTest(String testName, List<String> languages) throws Throwable {
        Map<String, Set<String>> queryToIds = queries.get(testName);
        Set<String> titlesTested = Sets.newHashSet();
        String datasetId = StringUtils.getDatasetId(testName);
        String inputFileName = Properties.dataDir.get() + datasetId + ".tsv";
        Map<String, String> titleIdToName = titleIdUtils.getTitleToIds(inputFileName, datasetId);
        List<Double> precisionList = Lists.newArrayList();
        List<Double> recallList = Lists.newArrayList();
        List<Double> fMeasureList = Lists.newArrayList();
        Map<ResultType, Integer> counters = Maps.newLinkedHashMap();
        for (String q : queryToIds.keySet()) {
            Set<String> relevantDocuments = queryToIds.get(q);
            if (relevantDocuments != null)
                titlesTested.addAll(relevantDocuments);
            Set<String> results = searcher.getResults(q, languages, StringUtils.getDatasetId(testName));
            summaryReport.updateStatistic(relevantDocuments, results, precisionList, recallList, fMeasureList);
            detailReport.updateReport(queryToIds, q, results, testName, titleIdToName, counters);
        }
        summaryReport.updateSummaryReport(testName, titlesTested.size(), queryToIds.size(), precisionList, recallList, fMeasureList, counters);
    }

    /**
     * Builds the engine-specific indexer for a test, or null (with a logged
     * error) when the dataset TSV is missing or the engine type is unsupported.
     */
    private static BaseIndexer getIndexer(String testId) {
        String datasetId = StringUtils.getDatasetId(testId);
        String inputFileName = Properties.dataDir.get() + datasetId + ".tsv";
        if (new File(inputFileName).exists())
            if (Properties.engineType.get().equalsIgnoreCase("solr"))
                return new SolrIndexer(inputFileName, datasetId);
            else if (Properties.engineType.get().equalsIgnoreCase("es"))
                return new ElasticsearchIndexer(inputFileName, datasetId);
            else {
                logger.error("No support for the engine type: " + Properties.engineType.get());
                return null;
            }
        else {
            logger.error("Data doesn't exist: " + inputFileName + " skipping the test " + testId);
            return null;
        }
    }
}
| 9,261 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/input/TitleWithQueries.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.input;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.netflix.search.query.Properties;
import com.netflix.search.query.utils.StringUtils;
/**
 * One spreadsheet row: a title (id plus English/locale/alternate names)
 * together with its test queries, grouped by query category — categories are
 * the spreadsheet columns whose header starts with "q_".
 */
public class TitleWithQueries {
    public static final Logger logger = LoggerFactory.getLogger(TitleWithQueries.class);

    public static final String Q_ = "q_";
    public static final String TITLE_ALT = "title_alt";
    public static final String TITLE_LOCALE = "title_locale";
    public static final String TITLE_EN = "title_en";
    public static final String ID = "id";
    private static final String SHEET_NAME_DELIMITER = "-";
    private static final Joiner JOINER_QUERIES = Joiner.on("~~~");
    private static final Joiner JOINER_CATEGORIES = Joiner.on("=");

    private String id;
    private String titleEn;
    private String titleLocale;
    private String titleAlt;
    private final String language;
    private final String entityType;
    private String sheetId;
    // category -> queries; insertion-ordered on both levels.
    private Map<String, Set<String>> queriesByCategory = Maps.newLinkedHashMap();

    /** @param sheetId tab name of the form "&lt;language&gt;-&lt;entityType&gt;". */
    public TitleWithQueries(String sheetId) {
        String[] parts = sheetId.split(SHEET_NAME_DELIMITER);
        this.language = parts[0];
        this.entityType = parts[1];
        this.sheetId = sheetId;
    }

    /** @return the raw id made unique by suffixing the sheet id. */
    public String getId() {
        return StringUtils.createIdUsingTestName(id, sheetId);
    }

    public String getTitleEn() {
        return titleEn;
    }

    public String getTitleLocale() {
        return titleLocale;
    }

    public String getTitleAlt() {
        return titleAlt;
    }

    public String getLanguage() {
        return language;
    }

    public String getEntityType() {
        return entityType;
    }

    public Map<String, Set<String>> getQueriesByCategory() {
        return queriesByCategory;
    }

    /**
     * Routes one spreadsheet cell into the matching field. Known headers are
     * the id/title columns; any header starting with "q_" adds the value as a
     * query under the category named by the rest of the header. Null/empty
     * values are ignored; a null header is logged and ignored.
     */
    public void setValue(String headerValue, String value) {
        if (headerValue == null) {
            logger.error("Header is missing for this value: " + value);
            return;
        }
        if (value == null || value.isEmpty())
            return;
        if (headerValue.equalsIgnoreCase(ID)) {
            this.id = value;
        } else if (headerValue.equalsIgnoreCase(TITLE_EN)) {
            this.titleEn = value;
        } else if (headerValue.equalsIgnoreCase(TITLE_LOCALE)) {
            this.titleLocale = value;
        } else if (headerValue.equalsIgnoreCase(TITLE_ALT)) {
            this.titleAlt = value;
        } else if (headerValue.startsWith(Q_)) {
            String category = headerValue.substring(Q_.length());
            Set<String> queriesForCategory = queriesByCategory.get(category);
            if (queriesForCategory == null) {
                queriesForCategory = Sets.newLinkedHashSet();
                queriesByCategory.put(category, queriesForCategory);
            }
            queriesForCategory.add(value);
        }
    }

    @Override
    public String toString() {
        String delimiter = Properties.inputDelimiter.get();
        StringBuilder out = new StringBuilder();
        out.append(getId()).append(delimiter);
        out.append(titleEn).append(delimiter);
        out.append(titleLocale).append(delimiter);
        out.append(titleAlt == null ? "" : titleAlt).append(delimiter);
        out.append("q=").append(mapToList(queriesByCategory));
        return out.toString();
    }

    /** Renders each category entry as "category=query1~~~query2~~~...". */
    private List<String> mapToList(final Map<String, Set<String>> input) {
        return Lists.newArrayList(Iterables.transform(input.entrySet(), new Function<Map.Entry<String, Set<String>>, String>() {
            public String apply(final Map.Entry<String, Set<String>> entry) {
                return JOINER_CATEGORIES.join(entry.getKey(), JOINER_QUERIES.join(entry.getValue()));
            }
        }));
    }
}
| 9,262 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/input/Queries.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.input;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.netflix.search.query.report.google.GoogleDataExtractor;
public class Queries {
private Map<String, Set<String>> queryToIdMap = Maps.newHashMap();
private String datasetId;
private String queryCategory;
private Map<String, Map<Integer, TitleWithQueries>> titlesWithQueriesPerDataset;
public Queries(String datasetId, String queryCategory, Map<String, Map<Integer, TitleWithQueries>> titlesWithQueriesPerDataset) {
this.queryToIdMap = Maps.newHashMap();
this.datasetId = datasetId;
this.queryCategory = queryCategory;
this.titlesWithQueriesPerDataset = titlesWithQueriesPerDataset;
}
public void populateFromGoogleSpreadsheets()
{
Map<Integer, TitleWithQueries> titlesWithQueries = titlesWithQueriesPerDataset.get(datasetId);
if (titlesWithQueries != null)
for (Integer row : titlesWithQueries.keySet()) {
TitleWithQueries titleWithQueries = titlesWithQueries.get(row);
Set<String> queriesForThisCategory = titleWithQueries.getQueriesByCategory().get(queryCategory);
if (queriesForThisCategory != null)
for (String q : queriesForThisCategory)
put(q, titleWithQueries.getId());
}
}
public Map<String, Set<String>> getQueryToIdMap()
{
return queryToIdMap;
}
public void put(String query, String id)
{
if (query != null && !query.isEmpty()) {
Set<String> ids = queryToIdMap.get(query);
if (ids == null)
ids = Sets.newHashSet();
ids.add(id);
queryToIdMap.put(query, ids);
}
}
} | 9,263 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/utils/StringUtils.java | /**
* Copyright 2016 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.utils;
import com.google.common.collect.Lists;
import com.netflix.search.query.Properties;
import java.util.List;
import java.util.Map;
/** Small string helpers shared across the query-testing framework. */
public class StringUtils {
    // Regex that splits multi-valued map entries ("a|b|c").
    public static final String MAP_VALUE_DELIMITER = "\\|";
    // Separator between dataset id and query category in sheet/tab names.
    public static final String SHEET_TAB_NAME_DELIMITER = "-";

    /** Builds a test-scoped doc id by suffixing the test name; dots and spaces become underscores. */
    public static String createIdUsingTestName(String id, String testName) {
        return (id + "_" + testName).replaceAll("\\.|\\ ", "_");
    }

    /**
     * Expands the language prefix of a test name (the segment before the first
     * "-") into the configured language list. The returned list is mutable;
     * callers append "en" to it before searching.
     */
    public static List<String> getLanguageForTest(String testName) {
        Map<String, String> expansionByLanguage = Properties.languageExpansionBasedOnTestNames.getMap();
        String languageKey = testName.split(SHEET_TAB_NAME_DELIMITER)[0];
        String[] expandedLanguages = expansionByLanguage.get(languageKey).split(MAP_VALUE_DELIMITER);
        return Lists.newArrayList(expandedLanguages);
    }

    /** Strips the trailing "-&lt;category&gt;" from a test id, leaving the dataset id. */
    public static String getDatasetId(String testId) {
        return testId.substring(0, testId.lastIndexOf('-'));
    }
}
| 9,264 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/utils/TitleIdUtils.java | /**
* Copyright 2016 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.utils;
import com.google.common.collect.Maps;
import com.netflix.search.query.Properties;
import java.io.*;
import java.util.*;
public class TitleIdUtils {
public static final String ENCODING = "UTF-8";
private static final int BUFFER_SIZE = 1 << 16; // 64K
public Map<String, String> getTitleToIds(String inputFileName, String testName) throws IOException {
Map<String, String> titleIdToName = Maps.newHashMap();
InputStream is = new BufferedInputStream(new FileInputStream(inputFileName), BUFFER_SIZE);
BufferedReader reader = new BufferedReader(new InputStreamReader(is, ENCODING), BUFFER_SIZE);
String lineString = null;
while ((lineString = reader.readLine()) != null) {
String[] line = lineString.split(Properties.inputDelimiter.get());
String id = line[0];
titleIdToName.put(StringUtils.createIdUsingTestName(id, testName), line[2]);
}
reader.close();
is.close();
return titleIdToName;
}
} | 9,265 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/utils/DateUtil.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.utils;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.search.query.Properties;
/** Formats and parses report dates using the pattern configured in Properties. */
public class DateUtil {
    public static final Logger logger = LoggerFactory.getLogger(DateUtil.class);

    // NOTE(review): SimpleDateFormat is not thread-safe. Each DateUtil instance
    // owns its own formatter, so this is only safe while an instance stays on a
    // single thread — confirm callers never share one across threads.
    private final DateFormat DATE_FORMAT = new SimpleDateFormat(Properties.dateFormat.get(), Locale.ENGLISH);

    /** Formats {@code date} with the configured pattern. */
    public String getStringFromDate(Date date) {
        return DATE_FORMAT.format(date);
    }

    /** @return a Date stamped with the current system time. */
    public Date getDateFromCurrentTime() {
        return new Date(System.currentTimeMillis());
    }

    /** Parses {@code dateString}; on any failure logs the error and returns null. */
    public Date getDateFromString(String dateString) {
        Date parsed = null;
        try {
            parsed = DATE_FORMAT.parse(dateString);
        } catch (Throwable e) {
            logger.error("Error trying to create Date from String", e);
        }
        return parsed;
    }
}
| 9,266 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/utils/HeaderUtils.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.utils;
import com.netflix.search.query.report.ReportType;
import com.netflix.search.query.report.detail.DetailReportHeader;
import com.netflix.search.query.report.summary.SummaryReportHeader;
/** Maps a report type to its column-header names. */
public class HeaderUtils {
    /**
     * Returns the header labels for the given report type, or null for an
     * unrecognized type (kept for interface compatibility — callers may rely
     * on the null).
     */
    public static String[] getHeader(ReportType type) {
        if (type.equals(ReportType.summary))
            return toStringArray(SummaryReportHeader.values());
        else if (type.equals(ReportType.details))
            return toStringArray(DetailReportHeader.values());
        else
            return null;
    }

    // Converts an enum-values array to its toString() representations
    // (deduplicates the two identical loops the original carried).
    private static String[] toStringArray(Object[] values) {
        String[] returnValue = new String[values.length];
        for (int i = 0; i < values.length; i++) {
            returnValue[i] = values[i].toString();
        }
        return returnValue;
    }
}
| 9,267 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/report/ReportItem.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report;
import java.util.Map;
import com.google.api.client.util.Maps;
import com.google.common.base.Joiner;
import com.netflix.search.query.Properties;
/**
 * Base class for one row of a report: an insertion-ordered map of
 * column name -> value plus a derived key used to match rows across runs.
 */
public abstract class ReportItem {
    // Insertion-ordered so toString() emits columns in the order they were set.
    private Map<String, String> namedValues = Maps.newLinkedHashMap();
    private String key = null;

    public ReportItem(Map<String, String> namedValues) {
        this.namedValues = namedValues;
        this.key = getKeyFromNamedValues();
    }

    public ReportItem() {
        super();
    }

    public Map<String, String> getNamedValues() {
        return namedValues;
    }

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    /** Derives the row key from the fully populated named values. */
    protected abstract String getKeyFromNamedValues();

    /** Extends the row key incrementally as individual values arrive via setValue(). */
    protected abstract void appendKeyFromNamedValues(String headerValue, String value);

    /** Stores one column value (nulls are ignored) and folds it into the key. */
    public void setValue(String headerValue, String value) {
        if (value == null)
            return;
        namedValues.put(headerValue, value);
        appendKeyFromNamedValues(headerValue, value);
    }

    @Override
    public String toString() {
        return Joiner.on(Properties.inputDelimiter.get()).join(namedValues.values());
    }

    // NOTE(review): hashCode() is key-based but equals() is not overridden at
    // this level; Report.createReportDiffs uses items as map keys, so
    // subclasses presumably supply a key-based equals() — confirm they do.
    @Override
    public int hashCode() {
        return 31 + ((key == null) ? 0 : key.hashCode());
    }
}
| 9,268 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/report/ResultType.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report;
// Per-query outcome buckets tallied by the detail report.
// NOTE(review): semantics inferred from the names only — confirm against
// DetailReport.updateReport before relying on these descriptions:
//   supersetResultsFailed  - presumably results contained the expected ids plus extras
//   differentResultsFailed - presumably results differed from the expected ids
//   noResultsFailed        - presumably the query returned no results
//   successQ               - presumably results matched the expected ids
public enum ResultType {
    supersetResultsFailed, differentResultsFailed, noResultsFailed, successQ;
}
| 9,269 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/report/Report.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Date;
import java.util.List;
import java.util.Map;
import com.netflix.search.query.report.detail.DetailReport;
import com.netflix.search.query.report.detail.DetailReportItem;
import com.netflix.search.query.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.client.util.Lists;
import com.google.api.client.util.Maps;
import com.google.common.base.Joiner;
import com.netflix.search.query.Properties;
import com.netflix.search.query.utils.DateUtil;
import com.netflix.search.query.utils.HeaderUtils;
/**
 * Base class for the summary/detail reports: holds the report rows, diffs
 * itself against a previous run, and persists itself as a delimiter-separated
 * flat file under the configured data directory.
 */
public abstract class Report {
    public static final Logger logger = LoggerFactory.getLogger(Report.class);
    private static final String ENCODING = "UTF-8";
    private static final int BUFFER_SIZE = 1 << 16; // 64K

    private List<ReportItem> items = Lists.newArrayList();
    private Date date;
    private DateUtil dateUtil = new DateUtil();

    public Report() {
        // New reports are stamped with creation time; setDate() can override it.
        this.date = dateUtil.getDateFromCurrentTime();
    }

    /** Overrides the report date when parseable; null input leaves the stamp unchanged. */
    public void setDate(String dateString) {
        if (dateString != null)
            this.date = dateUtil.getDateFromString(dateString);
    }

    /** Name used when uploading to Google Sheets: just the formatted date. */
    public String reportNameForUpload() {
        return dateUtil.getStringFromDate(date);
    }

    @Override
    public String toString() {
        return getReportName() + "_" + dateUtil.getStringFromDate(date);
    }

    public List<ReportItem> getItems() {
        return items;
    }

    public void setItems(List<ReportItem> items) {
        this.items = items;
    }

    /** Local file name (without directory) for this report. */
    protected abstract String getReportName();

    public abstract ReportType getReportType();

    /** @return the diff row between the two items, or null when nothing changed. */
    protected abstract ReportItem getDiffForReportItem(ReportItem previousItem, ReportItem currentItem);

    /** Creates a new report of the concrete type wrapping {@code items}. */
    protected abstract Report newReport(List<ReportItem> items);

    /**
     * Diffs this report against {@code previous}: first every previous item is
     * compared against its current counterpart (absent -> null), then items
     * new in this run are diffed against null. Non-null diffs are collected
     * into a fresh report. A null/empty previous report yields an empty diff.
     */
    public Report createReportDiffs(Report previous) {
        List<ReportItem> returnValueItems = Lists.newArrayList();
        // Items hash/compare by their key, so these maps resolve "same row, other run".
        Map<ReportItem, ReportItem> currentMap = Maps.newLinkedHashMap();
        for (ReportItem currentItem : this.getItems()) {
            currentMap.put(currentItem, currentItem);
        }
        Map<ReportItem, ReportItem> previousMap = Maps.newLinkedHashMap();
        if (previous != null && previous.getItems() != null) {
            for (ReportItem previousItem : previous.getItems()) {
                previousMap.put(previousItem, previousItem);
            }
            for (ReportItem key : previous.getItems()) {
                ReportItem diffForReportItem = getDiffForReportItem(key, currentMap.get(key));
                if (diffForReportItem != null)
                    returnValueItems.add(diffForReportItem);
            }
            for (ReportItem key : this.getItems()) {
                if (!previous.getItems().contains(key)) {
                    // previousMap.get(key) is null here: the item is new in this run.
                    ReportItem diffForReportItem = getDiffForReportItem(previousMap.get(key), key);
                    if (diffForReportItem != null) {
                        returnValueItems.add(diffForReportItem);
                    }
                }
            }
        }
        return newReport(returnValueItems);
    }

    /** Writes this report's rows to {@code <dataDir>/<reportName>}. */
    public void saveToLocalDisk() throws Throwable {
        String header = getHeaderForFlatFilePrint(HeaderUtils.getHeader(getReportType()));
        printReportToLocalDisk(Properties.dataDir.get() + getReportName(), header, items);
    }

    /**
     * Copies the current local detail-report file to {@code previousName}.tsv
     * and parses that copy back into a DetailReport.
     * Expected columns: name, result type, query, expected, actual.
     */
    public static DetailReport copyCurrentFileToPreviousAndGetPrevious(String currentName, String previousName) throws IOException {
        File currentFile = new File(Properties.dataDir.get() + currentName);
        Path currentPath = currentFile.toPath();
        File previousFile = new File(Properties.dataDir.get() + previousName + ".tsv");
        Path previousPath = previousFile.toPath();
        Files.copy(currentPath, previousPath, StandardCopyOption.REPLACE_EXISTING);
        DetailReport previousDetailReport = new DetailReport();
        List<ReportItem> items = Lists.newArrayList();
        // try-with-resources: the original leaked both streams when a malformed
        // line threw (e.g. ResultType.valueOf or an index out of bounds).
        try (InputStream is = new BufferedInputStream(new FileInputStream(previousFile), BUFFER_SIZE);
             BufferedReader reader = new BufferedReader(new InputStreamReader(is, ENCODING), BUFFER_SIZE)) {
            String lineString;
            while ((lineString = reader.readLine()) != null) {
                String[] line = lineString.split(Properties.inputDelimiter.get());
                String name = line[0];
                ResultType failure = ResultType.valueOf(line[1]);
                String query = line[2];
                String expected = line[3];
                String actual = line[4];
                ReportItem reportItem = new DetailReportItem(name, failure, query, expected, actual);
                items.add(reportItem);
            }
        }
        previousDetailReport.setItems(items);
        return previousDetailReport;
    }

    // NOTE(review): the header parameter is computed by saveToLocalDisk() but
    // never written; copyCurrentFileToPreviousAndGetPrevious() parses these
    // files assuming no header line, so writing it would break that parser —
    // confirm intent before changing.
    private void printReportToLocalDisk(String fileName, String header, List<ReportItem> reportLines) throws Throwable {
        File file = new File(fileName);
        // try-with-resources closes the writer (and its underlying stream) even
        // when a write fails; the original leaked both on any exception.
        try (OutputStream out = new FileOutputStream(file);
             Writer writer = new OutputStreamWriter(out, ENCODING)) {
            for (ReportItem line : reportLines) {
                writer.write(line.toString());
                writer.write("\n");
            }
        }
    }

    /** Joins the header labels with the configured column delimiter. */
    private String getHeaderForFlatFilePrint(String[] reportHeader) {
        Joiner joiner = Joiner.on(Properties.inputDelimiter.get());
        return joiner.join(reportHeader);
    }
}
| 9,270 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/report/ReportType.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report;
// Discriminates the two report flavors: the per-test aggregate ("summary")
// and the per-query rows ("details"). Used by HeaderUtils to pick column headers.
public enum ReportType {
    summary, details;
}
| 9,271 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/report | Create_ds/q/src/main/java/com/netflix/search/query/report/google/GoogleDataExtractor.java | /**
* Copyright 2016 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.google;
import java.io.*;
import java.util.List;
import java.util.Map;
import com.netflix.search.query.utils.HeaderUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
import com.netflix.search.query.Properties;
import com.netflix.search.query.input.TitleWithQueries;
import com.netflix.search.query.report.Report;
import com.netflix.search.query.report.detail.DetailReport;
import com.netflix.search.query.report.summary.SummaryReport;
/**
 * Downloads the query-testing inputs (titles with queries per dataset) and the
 * previous run's reports from Google Spreadsheets, mirroring everything to
 * local TSV files under the configured data directory.
 */
public class GoogleDataExtractor {
    public static final Logger logger = LoggerFactory.getLogger(GoogleDataExtractor.class);
    private static final String ENCODING = "UTF-8";

    // dataset sheet id -> (row number -> parsed title/queries row)
    private Map<String, Map<Integer, TitleWithQueries>> titlesWithQueriesPerDataset = Maps.newLinkedHashMap();
    // Previous run's reports; start empty and are replaced by downloadReports().
    private Report previousDetailReport = new DetailReport();
    private Report previousSummaryReport = new SummaryReport();
    // Created lazily in initExtractor(); null until then.
    private GoogleSheetsService searchGoogleSheetsService = null;

    public GoogleDataExtractor() {
        super();
    }

    public Map<String, Map<Integer, TitleWithQueries>> getTitlesWithQueriesPerDataset() {
        return titlesWithQueriesPerDataset;
    }

    public Report getPreviousDetailReport() {
        return previousDetailReport;
    }

    public Report getPreviousSummaryReport() {
        return previousSummaryReport;
    }

    public static void main(String[] args) {
        // NOTE(review): constructs an extractor and discards it without calling
        // anything — appears to be a leftover debug stub.
        GoogleDataExtractor s = new GoogleDataExtractor();
    }

    /** Creates the Google Sheets service; must run before any download method. */
    public void initExtractor() {
        searchGoogleSheetsService = new GoogleSheetsService();
    }

    /** Resolves the report sheet names, then downloads queries and previous reports. */
    public void setReportNamesAndDownloadData() throws Throwable {
        searchGoogleSheetsService.setUpReportNames();
        downloadQueries();
        downloadReports();
    }

    /**
     * For each configured dataset sheet: downloads the spreadsheet, parses its
     * rows into TitleWithQueries (cached in titlesWithQueriesPerDataset), and
     * mirrors the raw rows to a local TSV. Missing/empty sheets are logged and
     * skipped. Sleeps between sheets when a throttle pause (ms) is configured.
     */
    public void downloadQueries() throws Throwable {
        for (String sheetId : Properties.validDataSetsId.get()) {
            logger.info("Initializing and Downloading: " + sheetId);
            List<List<Object>> spreadsheetData = searchGoogleSheetsService.getSpreadsheetDataForQueries(sheetId);
            if (spreadsheetData != null && spreadsheetData.size() != 0) {
                Map<Integer, TitleWithQueries> titlesWithQueries = searchGoogleSheetsService.getTitlesWithQueries(spreadsheetData, sheetId);
                titlesWithQueriesPerDataset.put(sheetId, titlesWithQueries);
                List<String> titlesWithQueriesAsTsv = searchGoogleSheetsService.extractWorksheetData(spreadsheetData, null);
                writeReportToLocalDisk(sheetId, titlesWithQueriesAsTsv);
            } else {
                logger.info("Sheet doesn't exist or it is empty: " + sheetId);
            }
            if (Properties.googleApiThrottlePause.get() > 0) Thread.sleep(Properties.googleApiThrottlePause.get());
        }
    }

    /**
     * Downloads the previous summary and detail reports, caching them in the
     * previous*Report fields and mirroring them to "summary_previous.tsv" and
     * "details_previous.tsv".
     */
    public void downloadReports() throws Throwable {
        previousSummaryReport = searchGoogleSheetsService.extractReport(false);
        logger.info("Initializing and Downloading: " + previousSummaryReport);
        List<String> previousSummaryReportAsTsv = searchGoogleSheetsService.getLatestSummaryReportAsTsv(previousSummaryReport);
        writeReportToLocalDisk("summary_previous", previousSummaryReportAsTsv);
        previousDetailReport = searchGoogleSheetsService.extractReport(true);
        logger.info("Initializing and Downloading: " + previousDetailReport);
        List<String> previousDetailReportAsTsv = searchGoogleSheetsService.getLatestDetailReportAsTsv(previousDetailReport);
        writeReportToLocalDisk("details_previous", previousDetailReportAsTsv);
    }
private void writeReportToLocalDisk(String sheetId, List<String> titlesWithQueries) throws Throwable {
if (titlesWithQueries != null) {
File file = new File(Properties.dataDir.get() + sheetId + ".tsv");
OutputStream out = new FileOutputStream(file);
Writer writer = new OutputStreamWriter(out, ENCODING);
for (String t : titlesWithQueries) {
writer.write(t);
writer.write("\n");
}
writer.close();
out.close();
}
}
public void writeTitleQueriesToLocalDisk(String sheetId, Map<Integer, TitleWithQueries> titlesWithQueries) throws Throwable {
if (titlesWithQueries != null) {
File file = new File(Properties.dataDir.get() + sheetId);
OutputStream out = new FileOutputStream(file);
Writer writer = new OutputStreamWriter(out, ENCODING);
for (TitleWithQueries t : titlesWithQueries.values()) {
writer.write(t.toString());
writer.write("\n");
}
writer.close();
out.close();
}
}
public void publishReportToGoogleSpreadsheet(Report report) throws Throwable {
searchGoogleSheetsService.updateReport(report.reportNameForUpload(), HeaderUtils.getHeader(report.getReportType()), report.getItems(), (report instanceof DetailReport ? true : false));
}
}
| 9,272 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/report | Create_ds/q/src/main/java/com/netflix/search/query/report/google/GoogleSheetsService.java | /**
* Copyright 2016 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.google;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
import com.netflix.search.query.report.detail.DetailReport;
import com.netflix.search.query.report.detail.DetailReportHeader;
import com.netflix.search.query.report.summary.SummaryReport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.util.Lists;
import com.google.common.collect.Maps;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.services.sheets.v4.Sheets;
import com.google.api.services.sheets.v4.SheetsScopes;
import com.google.api.services.sheets.v4.model.*;
import com.google.api.services.sheets.v4.model.AddSheetRequest;
import com.google.api.services.sheets.v4.model.AppendDimensionRequest;
import com.google.api.services.sheets.v4.model.BatchUpdateSpreadsheetRequest;
import com.google.api.services.sheets.v4.model.BatchUpdateSpreadsheetResponse;
import com.google.api.services.sheets.v4.model.CellData;
import com.google.api.services.sheets.v4.model.ExtendedValue;
import com.google.api.services.sheets.v4.model.GridCoordinate;
import com.google.api.services.sheets.v4.model.Request;
import com.google.api.services.sheets.v4.model.RowData;
import com.google.api.services.sheets.v4.model.Sheet;
import com.google.api.services.sheets.v4.model.SheetProperties;
import com.google.api.services.sheets.v4.model.Spreadsheet;
import com.google.api.services.sheets.v4.model.UpdateCellsRequest;
import com.google.api.services.sheets.v4.model.ValueRange;
import com.netflix.search.query.Properties;
import com.netflix.search.query.input.TitleWithQueries;
import com.netflix.search.query.report.Report;
import com.netflix.search.query.report.ReportItem;
import com.netflix.search.query.report.ReportType;
import com.netflix.search.query.report.detail.DetailReportItem;
import com.netflix.search.query.report.summary.SummaryReportItem;
import com.netflix.search.query.utils.DateUtil;
import com.netflix.search.query.utils.HeaderUtils;
/**
 * Thin wrapper around the Google Sheets v4 API: authenticates with a service
 * account key, reads the query-input and report worksheets, and writes newly
 * generated report worksheets back.
 */
public class GoogleSheetsService {
public static final Logger logger = LoggerFactory.getLogger(GoogleSheetsService.class);
// Spreadsheet ids of the summary/detail report documents; populated by setUpReportNames().
private String summaryReportName = "";
private String detailReportName = "";
private static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance();
// Shared Sheets client, created once in initSpreadsheetService().
private static Sheets spreadsheetService = null;
private static final List<String> SCOPES = Collections.singletonList(SheetsScopes.SPREADSHEETS);
private DateUtil dateUtil = new DateUtil();
/** Builds the Sheets client eagerly; initialization failures are logged, not rethrown. */
public GoogleSheetsService() {
super();
try {
initSpreadsheetService();
} catch (Throwable e) {
logger.error("Error trying to init the GoogleSheetsService", e);
}
}
/** Selects production or dev report spreadsheet ids based on the isDevOnlyTest flag. */
public void setUpReportNames() {
this.summaryReportName = Properties.sumReportSheet.get();
this.detailReportName = Properties.detailReportSheet.get();
if (Properties.isDevOnlyTest.get()) {
this.summaryReportName = Properties.sumReportSheetDev.get();
this.detailReportName = Properties.detailReportSheetDev.get();
}
}
/** Creates the static Sheets client over a trusted transport with service-account credentials. */
private void initSpreadsheetService() throws Throwable {
final NetHttpTransport HTTP_TRANSPORT = GoogleNetHttpTransport.newTrustedTransport();
spreadsheetService = new Sheets.Builder(HTTP_TRANSPORT, JSON_FACTORY, getCredentials(HTTP_TRANSPORT)).setApplicationName(Properties.googleAppName.get()).build();
}
/**
 * Loads the service-account key file from disk and builds a credential with
 * spreadsheet scope, impersonating Properties.serviceAccountEmail.
 */
private static Credential getCredentials(final NetHttpTransport HTTP_TRANSPORT) throws Exception {
File privateKeyFile = new File(Properties.googleSheetsKeyDir.get() + Properties.p12KeyFileName.get());
GoogleCredential cr = GoogleCredential
.fromStream(new FileInputStream(privateKeyFile))
.createScoped(SCOPES);
GoogleCredential.Builder builder = new GoogleCredential.Builder()
.setTransport(HTTP_TRANSPORT)
.setJsonFactory(JSON_FACTORY)
.setServiceAccountScopes(SCOPES)
.setServiceAccountId(cr.getServiceAccountId())
.setServiceAccountPrivateKey(cr.getServiceAccountPrivateKey())
.setServiceAccountPrivateKeyId(cr.getServiceAccountPrivateKeyId())
.setTokenServerEncodedUrl(cr.getTokenServerEncodedUrl())
.setServiceAccountUser(Properties.serviceAccountEmail.get());
return builder.build();
}
/**
 * Returns the raw cell values of one worksheet of the query-input
 * spreadsheet, or null when the worksheet does not exist.
 */
public List<List<Object>> getSpreadsheetDataForQueries(String worksheetId) throws Throwable {
List<List<Object>> values = null;
Spreadsheet spreadsheet = getSpreadsheet(Properties.inputQueriesSheet.get());
Sheet worksheet = getWorksheet(spreadsheet, worksheetId);
if (worksheet != null) {
ValueRange response = spreadsheetService.spreadsheets().values().get(Properties.inputQueriesSheet.get(), worksheetId).execute();
values = response.getValues();
}
return values;
}
/** Like getSpreadsheetDataForQueries, but against an arbitrary already-fetched spreadsheet. */
public List<List<Object>> getSpreadsheetData(Spreadsheet spreadsheet, String spreadsheetName, String worksheetId) throws Throwable {
List<List<Object>> values = null;
Sheet worksheet = getWorksheet(spreadsheet, worksheetId);
if (worksheet != null) {
ValueRange response = spreadsheetService.spreadsheets().values().get(spreadsheetName, worksheetId).execute();
values = response.getValues();
}
return values;
}
/** Serializes a summary report to delimiter-joined lines with the canonical summary header. */
public List<String> getLatestSummaryReportAsTsv(Report report) {
return extractWorksheetData(report, HeaderUtils.getHeader(ReportType.summary));
}
/** Serializes a detail report to delimiter-joined lines with the canonical detail header. */
public List<String> getLatestDetailReportAsTsv(Report report) {
return extractWorksheetData(report, HeaderUtils.getHeader(ReportType.details));
}
/**
 * Downloads the most recent summary or detail report (worksheet with the
 * latest date-parsed title) and materializes it as a Report whose date is
 * the worksheet title.
 */
public Report extractReport(boolean isDetailReport) throws Throwable {
Report report = null;
if (isDetailReport) report = new DetailReport();
else report = new SummaryReport();
List<ReportItem> reportItems = null;
String spreadsheetName = getReportName(isDetailReport);
Spreadsheet spreadsheet = getSpreadsheet(spreadsheetName);
String worksheetId = getLatestWorksheetId(spreadsheet);
List<List<Object>> spreadsheetData = getSpreadsheetData(spreadsheet, spreadsheetName, worksheetId);
if (spreadsheetData != null && spreadsheetData.size() > 0) {
reportItems = getReport(spreadsheetData, isDetailReport);
}
report.setItems(reportItems);
report.setDate(worksheetId);
return report;
}
/**
 * Finds the worksheet whose date-parsed title is most recent, skipping the
 * non-report sheets ("instructions", "Sheet1") and diff_/ignore_ prefixed ones.
 * Returns null when no candidate sheet exists.
 */
private String getLatestWorksheetId(Spreadsheet spreadsheet) {
String worksheetId = null;
Date reportCurrentDate = new Date(Long.MIN_VALUE);
for (Sheet sheet : spreadsheet.getSheets()) {
String title = sheet.getProperties().getTitle();
if (title.equals("instructions") || title.equals("Sheet1") || title.startsWith("diff_") || title.startsWith("ignore_"))
continue;
Date date = dateUtil.getDateFromString(title);
if (date.after(reportCurrentDate)) {
reportCurrentDate = date;
worksheetId = title;
}
}
return worksheetId;
}
/**
 * Flattens raw worksheet cells into delimiter-joined lines. When
 * headerDefault is non-null it replaces the sheet's own first row; rows
 * shorter than the header are padded with trailing delimiters so every
 * output line has the same number of columns.
 */
protected List<String> extractWorksheetData(List<List<Object>> values, String[] headerDefault) {
List<String> returnValue = Lists.newArrayList();
int startingIndex = headerDefault == null ? 0 : 1;
int headerSize = 0;
if (values != null && values.size() > 0) {
if (headerDefault != null) {
returnValue.add(Arrays.asList(headerDefault).stream().collect(Collectors.joining(Properties.inputDelimiter.get())));
headerSize = headerDefault.length;
} else {
headerSize = values.get(0).size();
}
for (List<Object> row : values.subList(startingIndex, values.size())) {
int diffInRowSize = headerSize - row.size();
StringBuilder trailingEmptyCells = new StringBuilder();
if (diffInRowSize > 0)
for (int i = 0; i < diffInRowSize; i++) trailingEmptyCells.append(Properties.inputDelimiter.get());
String rowAsString = row.stream()
.map(object -> Objects.toString(object))
.collect(Collectors.joining(Properties.inputDelimiter.get()));
returnValue.add(rowAsString.concat(trailingEmptyCells.toString()));
}
}
return returnValue;
}
/** Serializes a Report to lines: the supplied header first, then one item.toString() per row. */
protected List<String> extractWorksheetData(Report report, String[] headerDefault) {
List<String> returnValue = Lists.newArrayList();
if (report != null && report.getItems().size() > 0) {
returnValue.add(Arrays.asList(headerDefault).stream().collect(Collectors.joining(Properties.inputDelimiter.get())));
for (ReportItem reportItem : report.getItems()) {
returnValue.add(reportItem.toString());
}
}
return returnValue;
}
/** Maps the isDetailReport flag to the corresponding report spreadsheet id. */
private String getReportName(boolean isDetailReport) {
if (isDetailReport)
return detailReportName;
else
return summaryReportName;
}
/** Fetches spreadsheet metadata (sheets list etc.) by spreadsheet id. */
private Spreadsheet getSpreadsheet(String reportSpreadsheetName) throws Throwable {
Spreadsheet spreadsheet = spreadsheetService.spreadsheets().get(reportSpreadsheetName).execute();
return spreadsheet;
}
/**
 * Publishes a report: creates a new worksheet sized for header + items and
 * fills it with the report rows.
 */
public void updateReport(String worksheetId, String[] reportHeader, List<ReportItem> reportItems, boolean isDetailReport) throws Throwable {
String reportSpreadsheetName = getReportName(isDetailReport);
Spreadsheet spreadsheet = getSpreadsheet(reportSpreadsheetName);
int numberOfRows = reportItems.size() + 1;
int numberOfColumns = reportHeader.length;
Sheet sheet = addNewWorksheet(spreadsheet, worksheetId, numberOfRows, numberOfColumns);
if (sheet != null) {
importData(spreadsheet, sheet, reportItems, reportHeader, isDetailReport);
}
}
/**
 * Builds a single UpdateCells request writing the header row followed by all
 * report rows starting at A1. Detail-report cells are always strings; summary
 * cells are numeric except the listed text columns.
 */
private Request insertValues(Integer sheetId, List<ReportItem> reportItems, String[] reportHeader, boolean isDetailReport) {
GridCoordinate grid = new GridCoordinate().setSheetId(sheetId).setRowIndex(0).setColumnIndex(0);
List<RowData> rowData = new ArrayList<>();
List<CellData> cellData = new ArrayList<>();
for (String headerItem : reportHeader) {
cellData.add(new CellData().setUserEnteredValue(new ExtendedValue().setStringValue(headerItem)));
}
rowData.add(new RowData().setValues(cellData));
for (ReportItem row : reportItems) {
cellData = new ArrayList<>();
// NOTE(review): columnCount is incremented but never read — dead local.
int columnCount = 0;
for (Map.Entry<String, String> cell : row.getNamedValues().entrySet()) {
if (isDetailReport || (!isDetailReport && (cell.getKey().equals("name") || cell.getKey().equals("precision") || cell.getKey().equals("recall") || cell.getKey().equals("fmeasure") || cell.getKey().equals("comments"))))
cellData.add(new CellData().setUserEnteredValue(new ExtendedValue().setStringValue(cell.getValue())));
else
cellData.add(new CellData().setUserEnteredValue(new ExtendedValue().setNumberValue(Double.valueOf(cell.getValue()))));
columnCount++;
}
rowData.add(new RowData().setValues(cellData));
}
Request request = new Request()
.setUpdateCells(new UpdateCellsRequest()
.setStart(grid)
.setRows(rowData)
.setFields("userEnteredValue"));
return request;
}
/**
 * Writes all report rows into the given sheet in one batch; when the item
 * count exceeds the 1000 rows a new sheet starts with, extra rows are
 * appended first.
 */
public void importData(Spreadsheet spreadsheet, Sheet sheet, List<ReportItem> reportItems, String[] reportHeader, boolean isDetailReport) throws IOException {
Integer sheetId = sheet.getProperties().getSheetId();
List<Request> requests = new ArrayList<Request>();
if (reportItems.size() > 1000) {
requests.add(appendEmptyRows(sheetId, reportItems.size() - 1000));
}
requests.add(insertValues(sheetId, reportItems, reportHeader, isDetailReport));
postRequests(spreadsheet, requests);
logger.info("Events imported.");
}
/** Creates a new worksheet with the given title and grid size, returning it from the updated spreadsheet. */
private Sheet addNewWorksheet(Spreadsheet spreadsheet, String worksheetId, int numberOfRows, int numberOfColumns) throws Throwable {
Sheet sheet = null;
if (spreadsheet != null) {
Request request = new Request().setAddSheet(new AddSheetRequest().setProperties(new SheetProperties().setTitle(worksheetId).setGridProperties(new GridProperties().setRowCount(numberOfRows).setColumnCount(numberOfColumns))));
BatchUpdateSpreadsheetResponse response = postRequest(spreadsheet, request);
Spreadsheet updatedSpreadsheet = response.getUpdatedSpreadsheet();
sheet = getWorksheet(updatedSpreadsheet, worksheetId);
logger.info("Sheet {} created.", worksheetId);
return sheet;
}
return sheet;
}
/** Returns the sheet whose title equals worksheetId, or null (with an error log) when absent. */
private Sheet getWorksheet(Spreadsheet spreadsheet, String worksheetId) {
for (Sheet sheet : spreadsheet.getSheets()) {
if (sheet.getProperties().getTitle().equals(worksheetId))
return sheet;
}
logger.error("Sheet {} NOT found.", worksheetId);
return null;
}
/**
 * Parses a query-input worksheet (row 0 = header) into TitleWithQueries
 * objects keyed by row index; each cell is assigned via its header name.
 */
public Map<Integer, TitleWithQueries> getTitlesWithQueries(List<List<Object>> values, String worksheetId) {
Map<Integer, TitleWithQueries> returnValue = Maps.newLinkedHashMap();
for (int rowIndex = 1; rowIndex < values.size(); rowIndex++) {
// NOTE(review): this lookup is always null — the map starts empty and each
// rowIndex is visited once — so a fresh TitleWithQueries is created every row.
TitleWithQueries titleWithQueries = returnValue.get(rowIndex);
if (titleWithQueries == null)
titleWithQueries = new TitleWithQueries(worksheetId);
int columnIndex = 0;
for (Object cell : values.get(rowIndex)) {
if (cell != null) {
String cellValue = cell.toString().trim();
String headerValue = values.get(0).get(columnIndex).toString();
titleWithQueries.setValue(headerValue, cellValue);
}
columnIndex++;
}
returnValue.put(rowIndex, titleWithQueries);
}
return returnValue;
}
/**
 * Parses report worksheet cells (row 0 = header) into ReportItems. For detail
 * reports the "expected"/"actual" columns are backfilled with "" when a row
 * has no cell for them (Sheets drops trailing empty cells).
 */
private List<ReportItem> getReport(List<List<Object>> values, boolean isDetailReport) {
List<ReportItem> returnValue = Lists.newArrayList();
ReportItem reportRowItem = null;
for (int rowIndex = 1; rowIndex < values.size(); rowIndex++) {
int columnIndex = 0;
if (isDetailReport) reportRowItem = new DetailReportItem();
else reportRowItem = new SummaryReportItem();
for (Object cell : values.get(rowIndex)) {
if (cell != null) {
String cellValue = cell.toString().trim();
String headerValue = values.get(0).get(columnIndex).toString();
reportRowItem.setValue(headerValue, cellValue);
}
columnIndex++;
}
if (isDetailReport) {
if (!reportRowItem.getNamedValues().containsKey(DetailReportHeader.expected.toString()))
reportRowItem.setValue(DetailReportHeader.expected.toString(), "");
if (!reportRowItem.getNamedValues().containsKey(DetailReportHeader.actual.toString()))
reportRowItem.setValue(DetailReportHeader.actual.toString(), "");
}
// NOTE(review): always true — reportRowItem is assigned unconditionally above.
if (reportRowItem != null)
returnValue.add(reportRowItem);
}
return returnValue;
}
/** Sends a single request as a batch update, asking for the updated spreadsheet back. */
private BatchUpdateSpreadsheetResponse postRequest(Spreadsheet spreadsheet, Request request) throws IOException {
List<Request> requests = Arrays.asList(request);
BatchUpdateSpreadsheetRequest body = new BatchUpdateSpreadsheetRequest().setRequests(requests).setIncludeSpreadsheetInResponse(true);
BatchUpdateSpreadsheetResponse response = spreadsheetService.spreadsheets().batchUpdate(spreadsheet.getSpreadsheetId(), body).execute();
return response;
}
/** Sends several requests in one batch update, asking for the updated spreadsheet back. */
private BatchUpdateSpreadsheetResponse postRequests(Spreadsheet spreadsheet, List<Request> requests) throws IOException {
BatchUpdateSpreadsheetRequest body = new BatchUpdateSpreadsheetRequest().setRequests(requests).setIncludeSpreadsheetInResponse(true);
BatchUpdateSpreadsheetResponse response = spreadsheetService.spreadsheets().batchUpdate(spreadsheet.getSpreadsheetId(), body).execute();
return response;
}
/** Builds a request that appends {@code length} empty rows to the given sheet. */
private Request appendEmptyRows(Integer sheetId, Integer length) {
Request request = new Request().setAppendDimension(new AppendDimensionRequest().setSheetId(sheetId).setDimension("ROWS").setLength(length));
return request;
}
}
| 9,273 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/report | Create_ds/q/src/main/java/com/netflix/search/query/report/detail/DetailReport.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.detail;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.base.Joiner;
import com.google.common.collect.Sets;
import com.netflix.search.query.report.Report;
import com.netflix.search.query.report.ReportItem;
import com.netflix.search.query.report.ReportType;
import com.netflix.search.query.report.ResultType;
public class DetailReport extends Report {
public static final String NEW = "NEW";
public static final String NONE = "NONE";
public static final String FIXED = "FIXED";
private static final String SEPARATOR = "~~~";
public DetailReport(List<ReportItem> items) {
super();
this.setItems(items);
}
public DetailReport() {
super();
}
@Override
public ReportType getReportType()
{
return ReportType.details;
}
@Override
protected String getReportName()
{
return ReportType.details.toString();
}
@Override
protected Report newReport(List<ReportItem> items)
{
return new DetailReportDiff(items);
}
public void updateReport(Map<String, Set<String>> queryToIds, String q, Set<String> results, String testName, Map<String, String> titleIdToName, Map<ResultType, Integer> counters)
{
if (!results.equals(queryToIds.get(q))) {
String expectedTitles = getTitles(queryToIds.get(q), titleIdToName);
if (results.size() > 0) {
Set<String> intersection = Sets.intersection(queryToIds.get(q), results);
Set<String> uniqExpected = new HashSet<String>(queryToIds.get(q));
uniqExpected.removeAll(intersection);
expectedTitles = getTitles(uniqExpected, titleIdToName);
Set<String> uniqActual = new HashSet<String>(results);
uniqActual.removeAll(intersection);
String actualTitles = getTitles(uniqActual, titleIdToName);
if (results.containsAll(queryToIds.get(q))) {
getItems().add(new DetailReportItem(testName, ResultType.supersetResultsFailed, q, expectedTitles, actualTitles));
updateCounter(counters, ResultType.supersetResultsFailed);
} else {
getItems().add(new DetailReportItem(testName, ResultType.differentResultsFailed, q, expectedTitles, actualTitles));
updateCounter(counters, ResultType.differentResultsFailed);
}
} else {
getItems().add(new DetailReportItem(testName, ResultType.noResultsFailed, q, expectedTitles, NONE));
updateCounter(counters, ResultType.noResultsFailed);
}
} else
updateCounter(counters, ResultType.successQ);
}
private void updateCounter(Map<ResultType, Integer> counters, ResultType type)
{
Integer failureCounter = counters.get(type);
if (failureCounter == null)
failureCounter = 0;
counters.put(type, ++failureCounter);
}
private String getTitles(Set<String> ids, Map<String, String> titleIdToName) {
String returnValue = "";
Joiner joiner = Joiner.on(SEPARATOR);
if (ids != null && titleIdToName != null && titleIdToName.keySet() != null) {
Set<String> intersection = Sets.intersection(ids, titleIdToName.keySet());
Map<String, String> copy = new LinkedHashMap<String, String>(titleIdToName);
copy.keySet().retainAll(intersection);
returnValue = joiner.join(copy.values());
}
return returnValue;
}
@Override
protected ReportItem getDiffForReportItem(ReportItem previousItem, ReportItem currentItem)
{
ReportItem returnValue = null;
if (previousItem != null) {
if (currentItem != null) {
// TODO: DO NOTHING, they are essentially the same, ignoring the lists for now
} else {
returnValue = new DetailReportItem(new LinkedHashMap<String, String>(previousItem.getNamedValues()));
returnValue.setValue(DetailReportHeader.comments.toString(), FIXED);
}
} else {
returnValue = new DetailReportItem(new LinkedHashMap<String, String>(currentItem.getNamedValues()));
returnValue.setValue(DetailReportHeader.comments.toString(), NEW);
}
return returnValue;
}
}
| 9,274 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/report | Create_ds/q/src/main/java/com/netflix/search/query/report/detail/DetailReportHeader.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.detail;
/**
 * Column order for the detail report: test name, failure type, the query,
 * expected vs. actual title lists, and a free-form comments column (used by
 * the diff report to mark NEW/FIXED rows).
 */
public enum DetailReportHeader {
name, failure, query, expected, actual, comments;
}
| 9,275 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/report | Create_ds/q/src/main/java/com/netflix/search/query/report/detail/DetailReportItem.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.detail;
import java.util.Map;
import com.netflix.search.query.report.ReportItem;
import com.netflix.search.query.report.ResultType;
/**
 * One row of the detail report: a single failing query with its failure type
 * and the expected vs. actual title lists. Identity is based on the composite
 * key {@code name + "_" + failure + "_" + query}.
 */
public class DetailReportItem extends ReportItem {
    public DetailReportItem(String name, ResultType failure, String query, String expected, String actual) {
        super();
        setValue(DetailReportHeader.name.toString(), name);
        setValue(DetailReportHeader.failure.toString(), failure.toString());
        setValue(DetailReportHeader.query.toString(), query);
        setValue(DetailReportHeader.expected.toString(), expected);
        setValue(DetailReportHeader.actual.toString(), actual);
        setKey(name + "_" + failure.toString() + "_" + query);
    }

    public DetailReportItem(Map<String, String> namedValues) {
        super(namedValues);
    }

    public DetailReportItem() {
        super();
    }

    /** Rebuilds the composite key incrementally as header/value pairs arrive (name first). */
    @Override
    protected void appendKeyFromNamedValues(String headerValue, String value) {
        if (headerValue.equals(DetailReportHeader.name.toString()))
            setKey(value);
        else if (headerValue.equals(DetailReportHeader.failure.toString()) || headerValue.equals(DetailReportHeader.query.toString()))
            setKey(getKey() + "_" + value);
    }

    @Override
    protected String getKeyFromNamedValues() {
        return getNamedValues().get(DetailReportHeader.name.toString()) + "_" + getNamedValues().get(DetailReportHeader.failure.toString()) + "_" + getNamedValues().get(DetailReportHeader.query.toString());
    }

    /** Tab-separated row in detail-report column order; a null "expected" cell is emitted as empty. */
    @Override
    public String toString() {
        return getNamedValues().get(DetailReportHeader.name.toString()) + "\t" + getNamedValues().get(DetailReportHeader.failure.toString()) + "\t" + getNamedValues().get(DetailReportHeader.query.toString()) + "\t" + (getNamedValues().get(DetailReportHeader.expected.toString()) == null ? "" : getNamedValues().get(DetailReportHeader.expected.toString())) + "\t" + getNamedValues().get(DetailReportHeader.actual.toString());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        DetailReportItem other = (DetailReportItem) obj;
        if (getKey() == null) {
            if (other.getKey() != null)
                return false;
        } else if (!getKey().equals(other.getKey()))
            return false;
        return true;
    }

    /**
     * Added: the original overrode equals without hashCode, breaking the
     * equals/hashCode contract for hash-based collections. Hash is derived
     * from the same key that equals compares.
     */
    @Override
    public int hashCode() {
        return getKey() == null ? 0 : getKey().hashCode();
    }
}
| 9,276 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/report | Create_ds/q/src/main/java/com/netflix/search/query/report/detail/DetailReportDiff.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.detail;
import java.util.List;
import com.netflix.search.query.report.ReportItem;
/**
 * Detail report variant representing the diff between two detail-report runs.
 * Behaves like {@link DetailReport} except its local name and upload title are
 * prefixed/suffixed so diff sheets are excluded when scanning for the latest
 * real report (GoogleSheetsService skips "diff_"-prefixed worksheet titles).
 */
public class DetailReportDiff extends DetailReport {
public DetailReportDiff(List<ReportItem> items) {
super(items);
}
@Override
protected String getReportName()
{
// Local/file report name gets a "_diff" suffix.
return super.getReportName()+"_diff";
}
@Override
public String reportNameForUpload(){
// Uploaded worksheet title gets a "diff_" prefix.
return "diff_"+super.reportNameForUpload();
}
}
| 9,277 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/report | Create_ds/q/src/main/java/com/netflix/search/query/report/summary/SummaryReportHeader.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.summary;
/**
 * Column order for the summary report: test name, counts of titles/queries
 * tested, failure-type counts, averaged precision/recall/f-measure, and a
 * free-form comments column (used by the diff report, e.g. "dropped").
 */
public enum SummaryReportHeader {
name, titles, queries, supersetResultsFailed, differentResultsFailed, noResultsFailed, successQ, precision, recall, fmeasure, comments;
}
| 9,278 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/report | Create_ds/q/src/main/java/com/netflix/search/query/report/summary/SummaryReport.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.summary;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.Sets;
import com.netflix.search.query.report.Report;
import com.netflix.search.query.report.ReportItem;
import com.netflix.search.query.report.ReportType;
import com.netflix.search.query.report.ResultType;
/**
 * Per-test summary report: one row per test with counts plus precision,
 * recall and f-measure averaged over all queries in that test.
 */
public class SummaryReport extends Report {
    private static final String DROPPED = "dropped";
    public static final String PERCENT_SIGN = "%";

    public SummaryReport(List<ReportItem> items) {
        super();
        this.setItems(items);
    }

    public SummaryReport() {
        super();
    }

    @Override
    public ReportType getReportType() {
        return ReportType.summary;
    }

    @Override
    protected String getReportName() {
        return ReportType.summary.toString();
    }

    @Override
    protected Report newReport(List<ReportItem> items) {
        return new SummaryReportDiff(items);
    }

    /**
     * Builds a diff row: numeric columns become current-minus-previous
     * (percent columns keep the % suffix and two decimals), the name column is
     * carried over, and a test present previously but missing now is marked
     * {@value #DROPPED} in the comments column. A brand-new test's row is
     * copied verbatim.
     */
    @Override
    protected ReportItem getDiffForReportItem(ReportItem previousItem, ReportItem currentItem) {
        ReportItem returnValue = new SummaryReportItem();
        if (previousItem == null) {
            if (currentItem != null)
                returnValue = new SummaryReportItem(new LinkedHashMap<String, String>(currentItem.getNamedValues()));
        } else {
            for (String name : previousItem.getNamedValues().keySet()) {
                String previousValue = previousItem.getNamedValues().get(name);
                if (name.equals(SummaryReportHeader.comments.toString())) continue;
                if (name.equals(SummaryReportHeader.name.toString())) {
                    returnValue.getNamedValues().put(SummaryReportHeader.name.toString(), previousValue);
                } else {
                    if (currentItem == null) {
                        returnValue.getNamedValues().put(SummaryReportHeader.comments.toString(), DROPPED);
                    } else {
                        String currentValue = currentItem.getNamedValues().get(name);
                        if (previousValue.contains(PERCENT_SIGN)) {
                            // Percent columns: strip the sign, diff as doubles, re-append it.
                            previousValue = previousValue.replaceAll(PERCENT_SIGN, "");
                            currentValue = currentValue.replaceAll(PERCENT_SIGN, "");
                            double previousNumeric = Double.parseDouble(previousValue);
                            double currentNumeric = Double.parseDouble(currentValue);
                            double difference = currentNumeric - previousNumeric;
                            returnValue.setValue(name, (String.format("%.2f", (difference)) + PERCENT_SIGN));
                        } else {
                            // Count columns: integer difference.
                            int previousNumeric = Integer.parseInt(previousValue);
                            int currentNumeric = Integer.parseInt(currentValue);
                            int difference = currentNumeric - previousNumeric;
                            returnValue.setValue(name, String.valueOf(difference));
                        }
                    }
                }
            }
        }
        return returnValue;
    }

    /**
     * Computes precision, recall and f-measure for one query and appends them
     * to the running per-test lists. All three are 0 when either set is null
     * or the relevant denominator is 0; f-measure is 0 when precision and
     * recall are both 0 (avoids division by zero).
     */
    public void updateStatistic(Set<String> relevantDocuments, Set<String> results, List<Double> precisionList, List<Double> recallList, List<Double> fMeasureList) {
        // Primitive locals avoid the original's boxed-Double arithmetic.
        double precision = 0d;
        double recall = 0d;
        double fMeasure = 0d;
        if (results != null && relevantDocuments != null) {
            Set<String> relevantRetrievedResults = Sets.intersection(results, relevantDocuments);
            if (results.size() != 0)
                precision = (double) relevantRetrievedResults.size() / (double) results.size();
            if (relevantDocuments.size() != 0)
                recall = (double) relevantRetrievedResults.size() / (double) relevantDocuments.size();
            if (precision != 0 || recall != 0)
                fMeasure = 2 * ((precision * recall) / (precision + recall));
        }
        precisionList.add(precision);
        recallList.add(recall);
        fMeasureList.add(fMeasure);
    }

    /** Appends one summary row aggregating the per-query statistics for a test. */
    public void updateSummaryReport(String testName, int titlesTested, int queriesTested, List<Double> precisionList, List<Double> recallList, List<Double> fMeasureList,
            Map<ResultType, Integer> counters) {
        getItems().add(new SummaryReportItem(testName, titlesTested, queriesTested, calculateAverage(precisionList), calculateAverage(recallList), calculateAverage(fMeasureList), counters));
    }

    /**
     * Arithmetic mean of {@code scores}; 0 for an empty list. Uses a primitive
     * accumulator instead of the original boxed {@code Double}, which
     * autoboxed on every loop iteration.
     */
    private static double calculateAverage(List<Double> scores) {
        if (scores.isEmpty()) {
            return 0d;
        }
        double sum = 0d;
        for (double mark : scores) {
            sum += mark;
        }
        return sum / scores.size();
    }
}
| 9,279 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/report | Create_ds/q/src/main/java/com/netflix/search/query/report/summary/SummaryReportItem.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.summary;
import java.util.Map;
import com.netflix.search.query.report.ReportItem;
import com.netflix.search.query.report.ResultType;
public class SummaryReportItem extends ReportItem {

    /**
     * Builds a summary row for one test run. Ratio metrics arrive as fractions in [0,1]
     * and are rendered as percentages with two decimals; absent result-type counters
     * default to 0. The row key is derived from the test name.
     */
    public SummaryReportItem(String name, Integer titles, Integer queries, Double precision, Double recall, Double fmeasure, Map<ResultType, Integer> counters) {
        super();
        setValue(SummaryReportHeader.name.toString(), name);
        setValue(SummaryReportHeader.titles.toString(), String.valueOf(titles));
        setValue(SummaryReportHeader.queries.toString(), String.valueOf(queries));
        for (ResultType type : ResultType.values()) {
            Integer counter = counters.get(type);
            if (counter == null)
                counter = 0;
            setValue(type.toString(), String.valueOf(counter));
        }
        setValue(SummaryReportHeader.precision.toString(), (String.format("%.2f", (precision * 100)) + SummaryReport.PERCENT_SIGN));
        setValue(SummaryReportHeader.recall.toString(), (String.format("%.2f", (recall * 100)) + SummaryReport.PERCENT_SIGN));
        setValue(SummaryReportHeader.fmeasure.toString(), (String.format("%.2f", (fmeasure * 100)) + SummaryReport.PERCENT_SIGN));
        setKey(getKeyFromNamedValues());
    }

    public SummaryReportItem(Map<String, String> namedValues) {
        super(namedValues);
    }

    public SummaryReportItem() {
        super();
    }

    /** The key of a summary row is the value of its "name" column. */
    @Override
    protected void appendKeyFromNamedValues(String headerValue, String value)
    {
        if (headerValue.equals(SummaryReportHeader.name.toString()))
            setKey(value);
    }

    @Override
    protected String getKeyFromNamedValues()
    {
        return getNamedValues().get(SummaryReportHeader.name.toString());
    }

    /** Equality is based solely on the row key (the test name). */
    @Override
    public boolean equals(Object obj)
    {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        SummaryReportItem other = (SummaryReportItem) obj;
        if (getKey() == null)
            return other.getKey() == null;
        return getKey().equals(other.getKey());
    }

    /**
     * Keeps the equals/hashCode contract: the original overrode equals without hashCode,
     * which breaks hash-based collections. Hash is derived from the same key equals uses.
     */
    @Override
    public int hashCode()
    {
        return getKey() == null ? 0 : getKey().hashCode();
    }
}
| 9,280 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/report | Create_ds/q/src/main/java/com/netflix/search/query/report/summary/SummaryReportDiff.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.report.summary;
import java.util.List;
import com.netflix.search.query.report.ReportItem;
/**
 * A {@link SummaryReport} variant representing the difference between two summary
 * reports; it only changes how the report names itself on disk and on upload.
 */
public class SummaryReportDiff extends SummaryReport {

    public SummaryReportDiff(List<ReportItem> items) {
        super(items);
    }

    /** Local report name gets a "_diff" suffix. */
    @Override
    protected String getReportName()
    {
        return super.getReportName() + "_diff";
    }

    /** Uploaded report name gets a "diff_" prefix. */
    @Override
    public String reportNameForUpload()
    {
        return "diff_" + super.reportNameForUpload();
    }
}
| 9,281 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/engine/BaseSearcher.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.engine;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.netflix.search.query.Properties;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
public abstract class BaseSearcher {
    public static final Logger logger = LoggerFactory.getLogger(BaseSearcher.class);

    private Client client = Client.create();

    public BaseSearcher() {
    }

    /**
     * Runs the query against the engine and returns the matching document ids.
     * Uses an HTTP POST with a JSON body when {@link #getJsonForQuery} supplies one
     * (Elasticsearch), otherwise a plain GET (Solr).
     *
     * @throws RuntimeException when no response is received or the status is not 200/201
     */
    public Set<String> getResults(String q, List<String> languages, String dataSetId) throws Throwable
    {
        String urlForGettingDoc = getUrlForGettingDoc(q, languages, dataSetId);
        if (Properties.isPrintUrl.get())
            logger.info(urlForGettingDoc);
        String jsonString = getJsonForQuery(q, languages, dataSetId);
        WebResource webResource = client.resource(urlForGettingDoc);
        ClientResponse response;
        if (jsonString != null)
            response = webResource.type("application/json").post(ClientResponse.class, jsonString);
        else
            response = webResource.get(ClientResponse.class);
        // Check the null case separately: the original built the error message with
        // response.getStatus(), which itself NPE'd when response was null.
        if (response == null)
            throw new RuntimeException("Failed : no HTTP response received");
        int status = response.getStatus();
        if (status != 201 && status != 200)
            throw new RuntimeException("Failed : HTTP error code : " + status);
        String output = response.getEntity(String.class);
        Set<String> results = getResultsFromServerResponse(output);
        return results;
    }

    /** Wraps the query in double quotes (stripping embedded quotes and backslashes) to force a phrase match. */
    public static String getPhraseQueryString(String q)
    {
        if (q == null)
            return null;
        return "\"" + q.replaceAll("[\"|\\\\]", "") + "\"";
    }

    /** Builds the space-separated list of locale-suffixed title and aka-title field names to query. */
    public static String getQueryFields(List<String> localeList)
    {
        StringBuffer sb = new StringBuffer();
        if (localeList != null)
        {
            for (String fieldName : Properties.titleFields.get())
                addNonDefaultLocaleTitleFieldName(localeList, sb, fieldName);
            for (String fieldName : Properties.titleAkaFields.get())
                addNonDefaultLocaleTitleFieldName(localeList, sb, fieldName);
        }
        // substring(0, sb.length()) in the original was a no-op; trim drops the trailing space.
        return sb.toString().trim();
    }

    /** Appends "<field>_<locale> " for every locale in the list. */
    protected static void addNonDefaultLocaleTitleFieldName(List<String> localeList, StringBuffer sb, String fieldName)
    {
        for (String locale : localeList)
        {
            sb.append(fieldName + "_" + locale + " ");
        }
    }

    public abstract String getUrlForGettingDoc(String q, List<String> languages, String dataSetId);

    public abstract Set<String> getResultsFromServerResponse(String output) throws IOException, JsonProcessingException;

    /** @return the JSON request body, or null for engines queried via GET parameters. */
    public String getJsonForQuery(String q, List<String> languages, String dataSetId) throws JsonProcessingException
    {
        return null;
    }

    public String getServerUrl()
    {
        return "http://" + Properties.engineHost.get() + ":" + Properties.enginePort.get() + "/" + Properties.engineServlet.get() + "/" + Properties.engineIndexName.get();
    }
}
| 9,282 |
0 | Create_ds/q/src/main/java/com/netflix/search/query | Create_ds/q/src/main/java/com/netflix/search/query/engine/BaseIndexer.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.engine;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Sets;
import com.netflix.search.query.Properties;
import com.netflix.search.query.utils.StringUtils;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
public abstract class BaseIndexer {
public static final Logger logger = LoggerFactory.getLogger(BaseIndexer.class);
public static final String ENCODING = "UTF-8";
private static final int BUFFER_SIZE = 1 << 16; // 64K
private Client client = Client.create();
private String inputFileName=null;
private String testName=null;
public BaseIndexer(String inputFileName, String testName) {
this.inputFileName = inputFileName;
this.testName = testName;
}
public void indexData(List<String> languages) throws Throwable
{
long start = System.currentTimeMillis();
List<Map<String, Object>> docs = createDocs(languages);
update(docs);
commit();
logger.info("Indexing took: " + (System.currentTimeMillis() - start) + " ms");
}
protected List<Map<String, Object>> createDocs(List<String> languages) throws Throwable
{
List<Map<String, Object>> docs = new ArrayList<Map<String, Object>>();
InputStream is = new BufferedInputStream(new FileInputStream(inputFileName), BUFFER_SIZE);
BufferedReader reader = new BufferedReader(new InputStreamReader(is, ENCODING), BUFFER_SIZE);
String lineString = null;
while ((lineString = reader.readLine()) != null) {
String[] line = lineString.split(Properties.inputDelimiter.get());
if (lineString.startsWith(Properties.idField.get() + Properties.inputDelimiter.get()))
continue;
if (line.length < 3)
logger.error("Bad data: " + lineString);
else
docs.add(createDoc(line[0], line[1], line[2], line[3], languages));
}
reader.close();
is.close();
return docs;
}
public Map<String, Object> createDoc(String id, String english, String local, String altTitle, List<String> languages)
{
Map<String, Object> doc = new HashMap<String, Object>();
doc.put(Properties.idField.get(), StringUtils.createIdUsingTestName(id, testName));
for(String requiredField:Properties.requiredNumericFields.get())
doc.put(requiredField, 1);
for(String requiredField:Properties.requiredStringFields.get())
doc.put(requiredField, "query_testing_default");
if (local != null && local.length() > 0) {
for (String language: languages){
for(String fieldName: Properties.titleFields.get()) {
doc.put(fieldName + "_" + language, addValue(doc, language, fieldName, local));
//TODO: bug?
if (Properties.languagesRequiringTransliterationFromEnglish.get().contains(language) && fieldName.equals(Properties.transliterationFieldName.get()))
doc.put(fieldName + "_" + language, english);
}
}
}
if (english != null && english.length() > 0) {
for(String fieldName: Properties.titleFields.get())
doc.put(fieldName + "_en", english);
}
if (altTitle != null && altTitle.length() > 0) {
for (String language : languages) {
if (Properties.languagesRequiringAdditionalField.get().contains(language))
for (String fieldName : Properties.titleAkaFields.get()) {
doc.put(fieldName + "_" + language, addValue(doc, language, fieldName, altTitle));
}
}
}
doc.put(Properties.docTypeFieldName.get(), testName);
return doc;
}
private Set<String> addValue(Map<String, Object> doc, String language, String fieldName, String title)
{
@SuppressWarnings("unchecked")
Set<String> existingValues = (Set<String>)doc.get(fieldName + "_" + language);
if(existingValues==null) existingValues = Sets.newHashSet();
existingValues.add(title);
return existingValues;
}
private boolean update(List<Map<String, Object>> docs) throws IOException {
for (Map<String, Object> doc : docs) {
try {
addDoc(doc);
} catch (Throwable e) {
logger.error("bad doc" + doc);
throw new RuntimeException(e);
}
}
return true;
}
void addDoc(Map<String, Object> doc)
{
JsonNode node = new ObjectMapper().valueToTree(doc);
StringBuilder jsonString = getJsonStringOfDoc(node);
WebResource webResource = client.resource(getUrlForAddingDoc(doc));
ClientResponse response = webResource.type("application/json").post(ClientResponse.class, jsonString.toString());
if (response == null || (response.getStatus() != 201 && response.getStatus() != 200))
{
throw new RuntimeException("Failed : HTTP error code on adding a doc: " + response.getStatus());
}
response.close();
}
public StringBuilder getJsonStringOfDoc(JsonNode node)
{
StringBuilder jsonString = new StringBuilder("[");
nodeAsString(node, jsonString);
jsonString.append("]");
return jsonString;
}
public void nodeAsString(JsonNode node, StringBuilder jsonString)
{
try
{
jsonString.append(new ObjectMapper().writeValueAsString(node));
} catch (JsonProcessingException e)
{
logger.error("Error trying to generate a string from a json node", e);
}
}
void commit()
{
WebResource webResource = client.resource(getUrlForCommitting());
ClientResponse response = webResource.get(ClientResponse.class);
if (response == null || (response.getStatus() != 201 && response.getStatus() != 200))
{
throw new RuntimeException("Failed : HTTP error code on commit: " + response.getStatus());
}
response.close();
}
public String getServerUrl(){
return "http://" + Properties.engineHost.get() + ":" + Properties.enginePort.get() + "/" + Properties.engineServlet.get() + "/" + Properties.engineIndexName.get();
}
public abstract String getUrlForAddingDoc(Map<String, Object> doc);
public abstract String getUrlForCommitting();
} | 9,283 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/engine | Create_ds/q/src/main/java/com/netflix/search/query/engine/solr/SolrSearcher.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.engine.solr;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.message.BasicNameValuePair;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.api.client.util.Lists;
import com.google.common.collect.Sets;
import com.netflix.search.query.Properties;
import com.netflix.search.query.engine.BaseIndexer;
import com.netflix.search.query.engine.BaseSearcher;
/** {@link BaseSearcher} that queries Solr's select handler via GET with edismax parameters. */
public class SolrSearcher extends BaseSearcher {

    /** Builds the select URL that runs {@code q} as a phrase query against the locale title fields. */
    public String getUrlForGettingDoc(String q, List<String> languages, String dataSetId)
    {
        List<NameValuePair> params = Lists.newArrayList();
        params.add(new BasicNameValuePair("q", getPhraseQueryString(q)));
        params.add(new BasicNameValuePair("defType", "edismax"));
        params.add(new BasicNameValuePair("lowercaseOperators", "false"));
        params.add(new BasicNameValuePair("rows", "100000"));
        params.add(new BasicNameValuePair("qs", "10"));
        params.add(new BasicNameValuePair("fl", Properties.idField.get() + ", " + Properties.titleFields.get().get(0) + "_en"));
        params.add(new BasicNameValuePair("sort", Properties.idField.get() + " DESC"));
        params.add(new BasicNameValuePair("qf", getQueryFields(languages)));
        params.add(new BasicNameValuePair("fq", Properties.docTypeFieldName.get() + ":" + dataSetId));
        params.add(new BasicNameValuePair("wt", "json"));
        return getServerUrl() + "/select?" + URLEncodedUtils.format(params, BaseIndexer.ENCODING);
    }

    /** Extracts the id of every document from the response.docs array of a Solr JSON reply. */
    public Set<String> getResultsFromServerResponse(String output) throws IOException, JsonProcessingException
    {
        Set<String> ids = Sets.newHashSet();
        JsonNode root = new ObjectMapper().readTree(output);
        JsonNode docs = root.get("response").get("docs");
        if (docs.isArray())
        {
            for (JsonNode doc : docs)
            {
                ids.add(doc.get("id").textValue());
            }
        }
        return ids;
    }
}
| 9,284 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/engine | Create_ds/q/src/main/java/com/netflix/search/query/engine/solr/SolrIndexer.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.engine.solr;
import java.util.Map;
import com.netflix.search.query.engine.BaseIndexer;
public class SolrIndexer extends BaseIndexer{
public SolrIndexer(String inputFileName, String testName) {
super(inputFileName, testName);
}
@Override
public String getUrlForAddingDoc(Map<String, Object> doc)
{
return getServerUrl()+"/update";
}
@Override
public String getUrlForCommitting()
{
return getServerUrl()+"/update?commit=true";
}
} | 9,285 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/engine | Create_ds/q/src/main/java/com/netflix/search/query/engine/es/ElasticsearchSearcher.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.engine.es;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.netflix.search.query.Properties;
import com.netflix.search.query.engine.BaseSearcher;
/** {@link BaseSearcher} that POSTs a filtered multi_match query to Elasticsearch. */
public class ElasticsearchSearcher extends BaseSearcher {

    // Jackson's ObjectMapper is thread-safe and expensive to create; the original
    // built a fresh mapper (and an unnecessary StringBuilder) on every request.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    @Override
    public String getUrlForGettingDoc(String q, List<String> languages, String dataSetId)
    {
        return getServerUrl() + "/" + Properties.esDocType.get() + "/_search";
    }

    /**
     * Builds the request body: a "filtered" query combining an AND multi_match over the
     * locale-suffixed title fields with a term filter on the data set id, sorted by id desc.
     */
    @Override
    public String getJsonForQuery(String q, List<String> languages, String dataSetId) throws JsonProcessingException
    {
        Map<String, Object> multiMatchObject = Maps.newHashMap();
        multiMatchObject.put("query", q);
        multiMatchObject.put("type", "best_fields");
        multiMatchObject.put("operator", "and");
        multiMatchObject.put("fields", getQueryFields(languages).split("\\s+"));
        Map<String, Object> queryObject = Maps.newHashMap();
        queryObject.put("multi_match", multiMatchObject);
        Map<String, Object> termObject = Maps.newHashMap();
        termObject.put(Properties.docTypeFieldName.get(), dataSetId);
        Map<String, Object> filterObject = Maps.newHashMap();
        filterObject.put("term", termObject);
        Map<String, Object> filteredObject = Maps.newHashMap();
        filteredObject.put("query", queryObject);
        filteredObject.put("filter", filterObject);
        Map<String, Object> sortFieldObject = Maps.newHashMap();
        sortFieldObject.put("order", "desc");
        Map<String, Object> sortObject = Maps.newHashMap();
        sortObject.put(Properties.idField.get(), sortFieldObject);
        Map<String, Object> topLevelQueryObject = Maps.newHashMap();
        topLevelQueryObject.put("filtered", filteredObject);
        Map<String, Object> topNode = Maps.newHashMap();
        topNode.put("query", topLevelQueryObject);
        topNode.put("sort", sortObject);
        // Serializing the map directly yields the same JSON as the original's
        // valueToTree + writeValueAsString round trip.
        return MAPPER.writeValueAsString(topNode);
    }

    /** Extracts the document ids from hits.hits[]._id of an ES JSON response. */
    @Override
    public Set<String> getResultsFromServerResponse(String output) throws JsonProcessingException, IOException
    {
        Set<String> results = Sets.newHashSet();
        JsonNode actualObj = MAPPER.readTree(output);
        JsonNode arrNode = actualObj.get("hits").get("hits");
        if (arrNode.isArray())
        {
            for (final JsonNode objNode : arrNode)
            {
                results.add(objNode.get("_id").textValue());
            }
        }
        return results;
    }
}
| 9,286 |
0 | Create_ds/q/src/main/java/com/netflix/search/query/engine | Create_ds/q/src/main/java/com/netflix/search/query/engine/es/ElasticsearchIndexer.java | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.search.query.engine.es;
import java.util.Map;
import com.fasterxml.jackson.databind.JsonNode;
import com.netflix.search.query.Properties;
import com.netflix.search.query.engine.BaseIndexer;
public class ElasticsearchIndexer extends BaseIndexer {
public ElasticsearchIndexer(String inputFileName, String testName) {
super(inputFileName, testName);
}
@Override
public String getUrlForAddingDoc(Map<String, Object> doc)
{
return getServerUrl()+"/"+Properties.esDocType.get()+"/" + doc.get("id").toString();
}
@Override
public String getUrlForCommitting()
{
return getServerUrl()+"/_flush";
}
@Override
public StringBuilder getJsonStringOfDoc(JsonNode node)
{
StringBuilder jsonString = new StringBuilder();
nodeAsString(node, jsonString);
return jsonString;
}
} | 9,287 |
0 | Create_ds/AirMapView/library/src/test/java/com/airbnb/android | Create_ds/AirMapView/library/src/test/java/com/airbnb/android/airmapview/WebAirMapViewBuilderTest.java | package com.airbnb.android.airmapview;
import android.os.Bundle;
import org.junit.Ignore;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class WebAirMapViewBuilderTest {

  @Test @Ignore("Can't really test this right now since we can' mock the Bundle")
  public void shouldReturnGoogleWebAirMapViewByDefault() {
    WebAirMapViewBuilder builder = new WebAirMapViewBuilder();
    assertThat(builder.build(), instanceOf(GoogleWebViewMapFragment.class));
  }

  @Test public void shouldBuildGoogleWebAirMapViewWithOptions() {
    WebAirMapViewBuilder builder = new WebAirMapViewBuilder();
    GoogleWebMapType webMapType = mock(GoogleWebMapType.class);
    when(webMapType.toBundle()).thenReturn(new Bundle());
    assertThat(builder.withOptions(webMapType).build(), instanceOf(GoogleWebViewMapFragment.class));
  }

  @Test public void shouldBuildGoogleChinaWebAirMapViewWithOptions() {
    WebAirMapViewBuilder builder = new WebAirMapViewBuilder();
    GoogleChinaMapType chinaMapType = mock(GoogleChinaMapType.class);
    when(chinaMapType.toBundle()).thenReturn(new Bundle());
    assertThat(builder.withOptions(chinaMapType).build(),
        instanceOf(GoogleChinaWebViewMapFragment.class));
  }
}
| 9,288 |
0 | Create_ds/AirMapView/library/src/test/java/com/airbnb/android | Create_ds/AirMapView/library/src/test/java/com/airbnb/android/airmapview/NativeAirMapViewBuilderTest.java | package com.airbnb.android.airmapview;
import android.os.Bundle;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class NativeAirMapViewBuilderTest {

  /** Building with mocked Google map options should yield the native map fragment. */
  @Test public void shouldBuildNativeAirMapView() {
    NativeAirMapViewBuilder mapViewBuilder = new NativeAirMapViewBuilder();
    AirGoogleMapOptions mapOptions = mock(AirGoogleMapOptions.class);
    when(mapOptions.toBundle()).thenReturn(new Bundle());
    assertThat(mapViewBuilder.withOptions(mapOptions).build(), instanceOf(NativeGoogleMapFragment.class));
  }
}
| 9,289 |
0 | Create_ds/AirMapView/library/src/test/java/com/airbnb/android | Create_ds/AirMapView/library/src/test/java/com/airbnb/android/airmapview/DefaultAirMapViewBuilderTest.java | package com.airbnb.android.airmapview;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertThat;
public class DefaultAirMapViewBuilderTest {

  @Before public void setUp() {
    // No shared fixture needed; each test constructs its own factory.
  }

  @Test public void shouldReturnNativeAirMapViewByDefault() {
    DefaultAirMapViewBuilder builderFactory = new DefaultAirMapViewBuilder(null, true);
    assertThat(builderFactory.builder(), instanceOf(NativeAirMapViewBuilder.class));
  }

  @Test public void shouldReturnWebAirMapViewIfDefaultNotSupported() {
    DefaultAirMapViewBuilder builderFactory = new DefaultAirMapViewBuilder(null, false);
    assertThat(builderFactory.builder(), instanceOf(WebAirMapViewBuilder.class));
  }

  @Test public void shouldReturnNativeAirMapViewWhenRequestedExplicitly() {
    DefaultAirMapViewBuilder builderFactory = new DefaultAirMapViewBuilder(null, true);
    assertThat(builderFactory.builder(AirMapViewTypes.NATIVE), instanceOf(NativeAirMapViewBuilder.class));
  }

  /** Explicitly requesting NATIVE without Google Play support must fail fast. */
  @Test(expected = UnsupportedOperationException.class)
  public void shouldThrowWhenRequestedNativeWebViewAndNotSupported() {
    DefaultAirMapViewBuilder builderFactory = new DefaultAirMapViewBuilder(null, false);
    builderFactory.builder(AirMapViewTypes.NATIVE);
  }

  @Test public void shouldReturnWebAirMapViewWhenRequestedExplicitly() {
    DefaultAirMapViewBuilder builderFactory = new DefaultAirMapViewBuilder(null, false);
    assertThat(builderFactory.builder(AirMapViewTypes.WEB), instanceOf(WebAirMapViewBuilder.class));
  }
}
| 9,290 |
0 | Create_ds/AirMapView/library/src/test/java/com/airbnb/android | Create_ds/AirMapView/library/src/test/java/com/airbnb/android/airmapview/AirMapTypeTest.java | package com.airbnb.android.airmapview;
import android.os.Bundle;
import org.hamcrest.core.IsEqual;
import org.junit.Test;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class AirMapTypeTest {

  /** toBundle must write domain, url and file name under their argument keys. */
  @Test public void shouldConvertToBundle() {
    Bundle out = mock(Bundle.class);
    AirMapType mapType = new GoogleWebMapType();
    mapType.toBundle(out);
    verify(out).putString("map_domain", mapType.getDomain());
    verify(out).putString("map_url", mapType.getMapUrl());
    verify(out).putString("map_file_name", mapType.getFileName());
  }

  /** fromBundle must reconstruct a value equal to the original map type. */
  @Test public void shouldConstructFromBundle() {
    GoogleWebMapType expected = new GoogleWebMapType();
    Bundle in = mock(Bundle.class);
    when(in.getString("map_domain", "")).thenReturn(expected.getDomain());
    when(in.getString("map_url", "")).thenReturn(expected.getMapUrl());
    when(in.getString("map_file_name", "")).thenReturn(expected.getFileName());
    assertThat(AirMapType.fromBundle(in), IsEqual.<AirMapType>equalTo(expected));
  }
}
| 9,291 |
0 | Create_ds/AirMapView/library/src/main/java/com/airbnb/android | Create_ds/AirMapView/library/src/main/java/com/airbnb/android/airmapview/GoogleWebMapType.java | package com.airbnb.android.airmapview;
/**
 * {@link AirMapType} for the standard (non-China) Google Maps JavaScript web map:
 * rendered from assets/google_map.html against the maps.googleapis.com JS API.
 */
public class GoogleWebMapType extends AirMapType {
  public GoogleWebMapType() {
    super("google_map.html", "https://maps.googleapis.com/maps/api/js", "www.googleapis.com");
  }
}
| 9,292 |
0 | Create_ds/AirMapView/library/src/main/java/com/airbnb/android | Create_ds/AirMapView/library/src/main/java/com/airbnb/android/airmapview/AirMapType.java | package com.airbnb.android.airmapview;
import android.content.res.Resources;
import android.os.Bundle;

import java.util.Locale;
import java.util.Objects;
/** Defines maps to be used with {@link com.airbnb.android.airmapview.WebViewMapFragment} */
public class AirMapType {

  private static final String ARG_MAP_DOMAIN = "map_domain";
  private static final String ARG_FILE_NAME = "map_file_name";
  private static final String ARG_MAP_URL = "map_url";

  private final String fileName;
  private final String mapUrl;
  private final String domain;

  public AirMapType(String fileName, String mapUrl, String domain) {
    this.fileName = fileName;
    this.mapUrl = mapUrl;
    this.domain = domain;
  }

  /** @return the name of the HTML file in /assets */
  String getFileName() {
    return fileName;
  }

  /** @return the base URL for a maps API */
  String getMapUrl() {
    return mapUrl;
  }

  /** @return domain of the maps API to use */
  String getDomain() {
    return domain;
  }

  public Bundle toBundle() {
    return toBundle(new Bundle());
  }

  /** Writes this map type's three properties into the given bundle and returns it. */
  public Bundle toBundle(Bundle bundle) {
    bundle.putString(ARG_MAP_DOMAIN, getDomain());
    bundle.putString(ARG_MAP_URL, getMapUrl());
    bundle.putString(ARG_FILE_NAME, getFileName());
    return bundle;
  }

  /** Rebuilds a map type from a bundle produced by {@link #toBundle()}; missing keys default to "". */
  public static AirMapType fromBundle(Bundle bundle) {
    return new AirMapType(
        bundle.getString(ARG_FILE_NAME, ""),
        bundle.getString(ARG_MAP_URL, ""),
        bundle.getString(ARG_MAP_DOMAIN, ""));
  }

  /** Loads the map HTML from assets and substitutes the API URL, language and region tokens. */
  public String getMapData(Resources resources) {
    return AirMapUtils.getStringFromFile(resources, fileName)
        .replace("MAPURL", mapUrl)
        .replace("LANGTOKEN", Locale.getDefault().getLanguage())
        .replace("REGIONTOKEN", Locale.getDefault().getCountry());
  }

  /**
   * Equality is deliberately instanceof-based (not getClass) so that an {@code AirMapType}
   * rebuilt via {@link #fromBundle(Bundle)} compares equal to subclass instances such as
   * {@link GoogleWebMapType}. The original's explicit null check was redundant —
   * {@code instanceof} already yields false for null.
   */
  @Override public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof AirMapType)) {
      return false;
    }
    AirMapType that = (AirMapType) o;
    return Objects.equals(domain, that.domain)
        && Objects.equals(fileName, that.fileName)
        && Objects.equals(mapUrl, that.mapUrl);
  }

  @Override public int hashCode() {
    return Objects.hash(fileName, mapUrl, domain);
  }
}
| 9,293 |
0 | Create_ds/AirMapView/library/src/main/java/com/airbnb/android | Create_ds/AirMapView/library/src/main/java/com/airbnb/android/airmapview/AirMapGeoJsonLayer.java | package com.airbnb.android.airmapview;
import androidx.annotation.ColorInt;
import com.google.maps.android.geojson.GeoJsonPolygonStyle;
/**
 * Immutable description of a GeoJSON overlay: the raw GeoJSON string plus the
 * stroke/fill styling to render it with. Instances are created via {@link Builder}.
 */
public class AirMapGeoJsonLayer {

  protected final String geoJson;
  protected final float strokeWidth;
  protected final int strokeColor;
  protected final int fillColor;

  private AirMapGeoJsonLayer(String json, float width, int lineColor, int areaColor) {
    geoJson = json;
    strokeWidth = width;
    strokeColor = lineColor;
    fillColor = areaColor;
  }

  /** Fluent builder; styling defaults come from {@link GeoJsonPolygonStyle}. */
  public static class Builder {

    private final String json;
    // Collects styling; anything not set keeps the GeoJsonPolygonStyle default.
    private final GeoJsonPolygonStyle style = new GeoJsonPolygonStyle();

    public Builder(String json) {
      this.json = json;
    }

    public Builder fillColor(@ColorInt int color) {
      style.setFillColor(color);
      return this;
    }

    public Builder strokeColor(@ColorInt int color) {
      style.setStrokeColor(color);
      return this;
    }

    public Builder strokeWidth(float width) {
      style.setStrokeWidth(width);
      return this;
    }

    /** @return a layer snapshot of the current builder state */
    public AirMapGeoJsonLayer build() {
      float width = style.getStrokeWidth();
      int line = style.getStrokeColor();
      int fill = style.getFillColor();
      return new AirMapGeoJsonLayer(json, width, line, fill);
    }
  }
}
| 9,294 |
0 | Create_ds/AirMapView/library/src/main/java/com/airbnb/android | Create_ds/AirMapView/library/src/main/java/com/airbnb/android/airmapview/AirMapViewBuilder.java | package com.airbnb.android.airmapview;
/**
 * Builder contract for constructing {@link AirMapInterface} map fragments.
 *
 * @param <T> concrete map fragment type produced by this builder
 * @param <Q> options type accepted by {@link #withOptions(Object)}
 */
public interface AirMapViewBuilder<T extends AirMapInterface, Q> {
  /**
   * Supplies configuration options to use when building the map.
   *
   * @param arg the options object
   * @return this builder, for chaining
   */
  AirMapViewBuilder<T, Q> withOptions(Q arg);
  /** @return the configured map fragment instance */
  T build();
}
| 9,295 |
0 | Create_ds/AirMapView/library/src/main/java/com/airbnb/android | Create_ds/AirMapView/library/src/main/java/com/airbnb/android/airmapview/AirMapUtils.java | package com.airbnb.android.airmapview;
import android.content.res.Resources;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
/** Small I/O helpers for loading map HTML templates from the app's assets. */
public class AirMapUtils {
  /**
   * Reads the asset at {@code filePath} fully into a string.
   *
   * @param resources resources providing access to the app's assets
   * @param filePath path of the asset to load
   * @return the asset contents, with every line terminated by '\n'
   * @throws RuntimeException if the asset cannot be opened or read; the
   *     underlying IOException is preserved as the cause
   */
  public static String getStringFromFile(Resources resources, String filePath) {
    // try-with-resources closes the stream even when reading throws
    // (the old explicit close() leaked the stream on failure).
    try (InputStream is = resources.getAssets().open(filePath)) {
      return convertStreamToString(is);
    } catch (IOException e) {
      // Keep the original exception as the cause instead of discarding it.
      throw new RuntimeException("unable to load asset " + filePath, e);
    }
  }

  /**
   * Drains the given stream into a string, decoding as UTF-8.
   * Each line is appended with a trailing '\n'. As before, the stream is
   * closed on return (closing the reader closes the wrapped stream).
   *
   * @param is stream to read; closed by this method
   * @return the decoded contents
   * @throws IOException if reading fails
   */
  public static String convertStreamToString(InputStream is) throws IOException {
    // Charset is now explicit: the HTML/JS asset templates are assumed to be
    // UTF-8 (previously this silently used the platform default charset).
    try (BufferedReader reader =
             new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
      StringBuilder sb = new StringBuilder();
      String line;
      while ((line = reader.readLine()) != null) {
        sb.append(line).append("\n");
      }
      return sb.toString();
    }
  }
}
| 9,296 |
0 | Create_ds/AirMapView/library/src/main/java/com/airbnb/android | Create_ds/AirMapView/library/src/main/java/com/airbnb/android/airmapview/AirMapPolyline.java | package com.airbnb.android.airmapview;
import android.graphics.Color;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Polyline;
import com.google.android.gms.maps.model.PolylineOptions;
import java.util.List;
/**
 * Helper class for keeping record of data needed to display a polyline, as well as an optional
 * object T associated with the polyline.
 *
 * @param <T> type of the optional payload attached to this polyline
 */
public class AirMapPolyline<T> {
  // Defaults used by the constructors that take no explicit stroke styling.
  private static final int STROKE_WIDTH = 1;
  private static final int STROKE_COLOR = Color.BLUE;
  private T object;
  private int strokeWidth;
  private long id;
  private List<LatLng> points;
  private String title;
  private int strokeColor;
  // Set by addToGoogleMap(); null until the polyline has been added to a map.
  private Polyline googlePolyline;
  /** Creates a polyline with no associated object and default stroke styling. */
  public AirMapPolyline(List<LatLng> points, long id) {
    this(null, points, id);
  }
  /** Creates a polyline with an associated object and default stroke styling. */
  public AirMapPolyline(T object, List<LatLng> points, long id) {
    this(object, points, id, STROKE_WIDTH, STROKE_COLOR);
  }
  /**
   * @param object optional payload associated with this polyline (may be null)
   * @param points vertices of the polyline, in draw order
   * @param id caller-assigned identifier; uniqueness is not enforced here
   * @param strokeWidth stroke width passed through to PolylineOptions.width()
   * @param strokeColor stroke color passed through to PolylineOptions.color()
   */
  public AirMapPolyline(T object, List<LatLng> points, long id, int strokeWidth, int strokeColor) {
    this.object = object;
    this.points = points;
    this.id = id;
    this.strokeWidth = strokeWidth;
    this.strokeColor = strokeColor;
  }
  /** @return the caller-assigned identifier */
  public long getId() {
    return id;
  }
  public void setId(long id) {
    this.id = id;
  }
  /** @return the polyline's vertices, in draw order */
  public List<LatLng> getPoints() {
    return points;
  }
  public void setPoints(List<LatLng> points) {
    this.points = points;
  }
  // NOTE(review): title is stored but never rendered by addToGoogleMap() —
  // presumably consumed elsewhere; confirm against callers.
  public String getTitle() {
    return title;
  }
  public void setTitle(String title) {
    this.title = title;
  }
  /** @return the optional payload, or null if none was attached */
  public T getObject() {
    return object;
  }
  public void setObject(T object) {
    this.object = object;
  }
  public int getStrokeWidth() {
    return strokeWidth;
  }
  public int getStrokeColor() {
    return strokeColor;
  }
  /**
   * Add this polyline to the given {@link GoogleMap} instance
   *
   * @param googleMap the {@link GoogleMap} instance to which the polyline will be added
   */
  public void addToGoogleMap(GoogleMap googleMap) {
    // add the polyline and keep a reference so it can be removed
    googlePolyline = googleMap.addPolyline(new PolylineOptions()
        .addAll(points)
        .width(strokeWidth)
        .color(strokeColor));
  }
  /**
   * Remove this polyline from a GoogleMap (if it was added).
   *
   * @return true if the {@link Polyline} was removed
   */
  public boolean removeFromGoogleMap() {
    if (googlePolyline != null) {
      googlePolyline.remove();
      return true;
    }
    return false;
  }
}
| 9,297 |
0 | Create_ds/AirMapView/library/src/main/java/com/airbnb/android | Create_ds/AirMapView/library/src/main/java/com/airbnb/android/airmapview/NativeAirMapViewBuilder.java | package com.airbnb.android.airmapview;
import com.google.android.gms.maps.GoogleMapOptions;
/**
 * AirMapView map that uses the native Google Maps implementation. IMPORTANT: In order to use this,
 * Google Play Services needs to be installed on the device.
 */
public class NativeAirMapViewBuilder
    implements AirMapViewBuilder<NativeGoogleMapFragment, AirGoogleMapOptions> {

  private AirGoogleMapOptions options;

  @Override public AirMapViewBuilder<NativeGoogleMapFragment, AirGoogleMapOptions> withOptions(
      AirGoogleMapOptions options) {
    this.options = options;
    return this;
  }

  /**
   * Build the map fragment with the requested options, falling back to default
   * {@link GoogleMapOptions} when none were supplied.
   *
   * @return The {@link NativeGoogleMapFragment} map fragment.
   */
  @Override public NativeGoogleMapFragment build() {
    AirGoogleMapOptions resolved = options;
    if (resolved == null) {
      resolved = new AirGoogleMapOptions(new GoogleMapOptions());
      options = resolved;
    }
    return NativeGoogleMapFragment.newInstance(resolved);
  }
}
| 9,298 |
0 | Create_ds/AirMapView/library/src/main/java/com/airbnb/android | Create_ds/AirMapView/library/src/main/java/com/airbnb/android/airmapview/LeafletBaiduMapType.java | package com.airbnb.android.airmapview;
/** {@link LeafletMapType} preset selecting the Baidu tile provider. */
public class LeafletBaiduMapType extends LeafletMapType {
  // "Baidu" is passed to LeafletMapType — presumably matched against a provider
  // name inside the Leaflet HTML template; confirm in the asset file.
  public LeafletBaiduMapType() {
    super("Baidu");
  }
}
| 9,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.