gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package abi38_0_0.host.exp.exponent.modules.api.components.maps;
import android.view.View;
import abi38_0_0.com.facebook.react.bridge.Arguments;
import abi38_0_0.com.facebook.react.bridge.ReactApplicationContext;
import abi38_0_0.com.facebook.react.bridge.ReadableArray;
import abi38_0_0.com.facebook.react.bridge.ReadableMap;
import abi38_0_0.com.facebook.react.bridge.WritableMap;
import abi38_0_0.com.facebook.react.common.MapBuilder;
import abi38_0_0.com.facebook.react.modules.core.DeviceEventManagerModule;
import abi38_0_0.com.facebook.react.uimanager.LayoutShadowNode;
import abi38_0_0.com.facebook.react.uimanager.ThemedReactContext;
import abi38_0_0.com.facebook.react.uimanager.ViewGroupManager;
import abi38_0_0.com.facebook.react.uimanager.annotations.ReactProp;
import abi38_0_0.com.facebook.react.uimanager.events.RCTEventEmitter;
import com.google.android.gms.location.LocationRequest;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.GoogleMapOptions;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gms.maps.model.MapStyleOptions;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
/**
 * React Native view manager for the {@code <AIRMap>} component.
 *
 * <p>Responsibilities: creates {@link AirMapView} instances, maps JS props onto the underlying
 * {@link GoogleMap}, dispatches imperative commands received from JS (animate/fit/set-boundary),
 * and exports the direct event names the map emits back to JS.
 */
public class AirMapManager extends ViewGroupManager<AirMapView> {
  private static final String REACT_CLASS = "AIRMap";

  // Command ids dispatched from JS via receiveCommand(); must stay in sync with getCommandsMap().
  private static final int ANIMATE_TO_REGION = 1;
  private static final int ANIMATE_TO_COORDINATE = 2;
  private static final int ANIMATE_TO_VIEWING_ANGLE = 3;
  private static final int ANIMATE_TO_BEARING = 4;
  private static final int FIT_TO_ELEMENTS = 5;
  private static final int FIT_TO_SUPPLIED_MARKERS = 6;
  private static final int FIT_TO_COORDINATES = 7;
  private static final int SET_MAP_BOUNDARIES = 8;
  private static final int ANIMATE_TO_NAVIGATION = 9;
  private static final int SET_INDOOR_ACTIVE_LEVEL_INDEX = 10;
  private static final int SET_CAMERA = 11;
  private static final int ANIMATE_CAMERA = 12;

  // JS "mapType" prop value -> GoogleMap map type constant.
  private final Map<String, Integer> MAP_TYPES = MapBuilder.of(
      "standard", GoogleMap.MAP_TYPE_NORMAL,
      "satellite", GoogleMap.MAP_TYPE_SATELLITE,
      "hybrid", GoogleMap.MAP_TYPE_HYBRID,
      "terrain", GoogleMap.MAP_TYPE_TERRAIN,
      "none", GoogleMap.MAP_TYPE_NONE
  );

  // JS "userLocationPriority" prop value -> fused-location provider priority constant.
  private final Map<String, Integer> MY_LOCATION_PRIORITY = MapBuilder.of(
      "balanced", LocationRequest.PRIORITY_BALANCED_POWER_ACCURACY,
      "high", LocationRequest.PRIORITY_HIGH_ACCURACY,
      "low", LocationRequest.PRIORITY_LOW_POWER,
      "passive", LocationRequest.PRIORITY_NO_POWER
  );

  private final ReactApplicationContext appContext;
  private AirMapMarkerManager markerManager;
  protected GoogleMapOptions googleMapOptions;

  public AirMapManager(ReactApplicationContext context) {
    this.appContext = context;
    this.googleMapOptions = new GoogleMapOptions();
  }

  public AirMapMarkerManager getMarkerManager() {
    return this.markerManager;
  }

  public void setMarkerManager(AirMapMarkerManager markerManager) {
    this.markerManager = markerManager;
  }

  @Override
  public String getName() {
    return REACT_CLASS;
  }

  @Override
  protected AirMapView createViewInstance(ThemedReactContext context) {
    return new AirMapView(context, this.appContext, this, googleMapOptions);
  }

  /** Emits a global "onError" device event so JS can surface map failures. */
  private void emitMapError(ThemedReactContext context, String message, String type) {
    WritableMap error = Arguments.createMap();
    error.putString("message", message);
    error.putString("type", type);
    context
        .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
        .emit("onError", error);
  }

  @ReactProp(name = "region")
  public void setRegion(AirMapView view, ReadableMap region) {
    view.setRegion(region);
  }

  @ReactProp(name = "initialRegion")
  public void setInitialRegion(AirMapView view, ReadableMap initialRegion) {
    view.setInitialRegion(initialRegion);
  }

  @ReactProp(name = "camera")
  public void setCamera(AirMapView view, ReadableMap camera) {
    view.setCamera(camera);
  }

  @ReactProp(name = "initialCamera")
  public void setInitialCamera(AirMapView view, ReadableMap initialCamera) {
    view.setInitialCamera(initialCamera);
  }

  @ReactProp(name = "mapType")
  public void setMapType(AirMapView view, @Nullable String mapType) {
    // Map.get() returns null for an unknown or null key; the original code auto-unboxed that
    // into an int and threw NullPointerException. Fall back to the standard map type instead.
    Integer typeId = MAP_TYPES.get(mapType);
    view.map.setMapType(typeId != null ? typeId : GoogleMap.MAP_TYPE_NORMAL);
  }

  @ReactProp(name = "customMapStyleString")
  public void setMapStyle(AirMapView view, @Nullable String customMapStyleString) {
    view.map.setMapStyle(new MapStyleOptions(customMapStyleString));
  }

  @ReactProp(name = "mapPadding")
  public void setMapPadding(AirMapView view, @Nullable ReadableMap padding) {
    int left = 0;
    int top = 0;
    int right = 0;
    int bottom = 0;
    // JS sends density-independent pixels; GoogleMap.setPadding expects raw pixels.
    double density = (double) view.getResources().getDisplayMetrics().density;
    if (padding != null) {
      if (padding.hasKey("left")) {
        left = (int) (padding.getDouble("left") * density);
      }
      if (padding.hasKey("top")) {
        top = (int) (padding.getDouble("top") * density);
      }
      if (padding.hasKey("right")) {
        right = (int) (padding.getDouble("right") * density);
      }
      if (padding.hasKey("bottom")) {
        bottom = (int) (padding.getDouble("bottom") * density);
      }
    }
    view.map.setPadding(left, top, right, bottom);
  }

  @ReactProp(name = "showsUserLocation", defaultBoolean = false)
  public void setShowsUserLocation(AirMapView view, boolean showUserLocation) {
    view.setShowsUserLocation(showUserLocation);
  }

  @ReactProp(name = "userLocationPriority")
  public void setUserLocationPriority(AirMapView view, @Nullable String accuracy) {
    // Guard against unknown/null keys; the original passed the possibly-null Integer straight
    // through, which would NPE on unboxing. Unknown values are ignored.
    Integer priority = MY_LOCATION_PRIORITY.get(accuracy);
    if (priority != null) {
      view.setUserLocationPriority(priority);
    }
  }

  @ReactProp(name = "userLocationUpdateInterval", defaultInt = 5000)
  public void setUserLocationUpdateInterval(AirMapView view, int updateInterval) {
    view.setUserLocationUpdateInterval(updateInterval);
  }

  @ReactProp(name = "userLocationFastestInterval", defaultInt = 5000)
  public void setUserLocationFastestInterval(AirMapView view, int fastestInterval) {
    view.setUserLocationFastestInterval(fastestInterval);
  }

  @ReactProp(name = "showsMyLocationButton", defaultBoolean = true)
  public void setShowsMyLocationButton(AirMapView view, boolean showMyLocationButton) {
    view.setShowsMyLocationButton(showMyLocationButton);
  }

  @ReactProp(name = "toolbarEnabled", defaultBoolean = true)
  public void setToolbarEnabled(AirMapView view, boolean toolbarEnabled) {
    view.setToolbarEnabled(toolbarEnabled);
  }

  // This is a private prop to improve performance of panDrag by disabling it when the callback
  // is not set
  @ReactProp(name = "handlePanDrag", defaultBoolean = false)
  public void setHandlePanDrag(AirMapView view, boolean handlePanDrag) {
    view.setHandlePanDrag(handlePanDrag);
  }

  @ReactProp(name = "showsTraffic", defaultBoolean = false)
  public void setShowTraffic(AirMapView view, boolean showTraffic) {
    view.map.setTrafficEnabled(showTraffic);
  }

  @ReactProp(name = "showsBuildings", defaultBoolean = false)
  public void setShowBuildings(AirMapView view, boolean showBuildings) {
    view.map.setBuildingsEnabled(showBuildings);
  }

  @ReactProp(name = "showsIndoors", defaultBoolean = false)
  public void setShowIndoors(AirMapView view, boolean showIndoors) {
    view.map.setIndoorEnabled(showIndoors);
  }

  @ReactProp(name = "showsIndoorLevelPicker", defaultBoolean = false)
  public void setShowsIndoorLevelPicker(AirMapView view, boolean showsIndoorLevelPicker) {
    view.map.getUiSettings().setIndoorLevelPickerEnabled(showsIndoorLevelPicker);
  }

  @ReactProp(name = "showsCompass", defaultBoolean = false)
  public void setShowsCompass(AirMapView view, boolean showsCompass) {
    view.map.getUiSettings().setCompassEnabled(showsCompass);
  }

  @ReactProp(name = "scrollEnabled", defaultBoolean = false)
  public void setScrollEnabled(AirMapView view, boolean scrollEnabled) {
    view.map.getUiSettings().setScrollGesturesEnabled(scrollEnabled);
  }

  @ReactProp(name = "zoomEnabled", defaultBoolean = false)
  public void setZoomEnabled(AirMapView view, boolean zoomEnabled) {
    view.map.getUiSettings().setZoomGesturesEnabled(zoomEnabled);
  }

  @ReactProp(name = "zoomControlEnabled", defaultBoolean = true)
  public void setZoomControlEnabled(AirMapView view, boolean zoomControlEnabled) {
    view.map.getUiSettings().setZoomControlsEnabled(zoomControlEnabled);
  }

  @ReactProp(name = "rotateEnabled", defaultBoolean = false)
  public void setRotateEnabled(AirMapView view, boolean rotateEnabled) {
    view.map.getUiSettings().setRotateGesturesEnabled(rotateEnabled);
  }

  @ReactProp(name = "cacheEnabled", defaultBoolean = false)
  public void setCacheEnabled(AirMapView view, boolean cacheEnabled) {
    view.setCacheEnabled(cacheEnabled);
  }

  @ReactProp(name = "loadingEnabled", defaultBoolean = false)
  public void setLoadingEnabled(AirMapView view, boolean loadingEnabled) {
    view.enableMapLoading(loadingEnabled);
  }

  @ReactProp(name = "moveOnMarkerPress", defaultBoolean = true)
  public void setMoveOnMarkerPress(AirMapView view, boolean moveOnPress) {
    view.setMoveOnMarkerPress(moveOnPress);
  }

  @ReactProp(name = "loadingBackgroundColor", customType = "Color")
  public void setLoadingBackgroundColor(AirMapView view, @Nullable Integer loadingBackgroundColor) {
    view.setLoadingBackgroundColor(loadingBackgroundColor);
  }

  @ReactProp(name = "loadingIndicatorColor", customType = "Color")
  public void setLoadingIndicatorColor(AirMapView view, @Nullable Integer loadingIndicatorColor) {
    view.setLoadingIndicatorColor(loadingIndicatorColor);
  }

  @ReactProp(name = "pitchEnabled", defaultBoolean = false)
  public void setPitchEnabled(AirMapView view, boolean pitchEnabled) {
    view.map.getUiSettings().setTiltGesturesEnabled(pitchEnabled);
  }

  @ReactProp(name = "minZoomLevel")
  public void setMinZoomLevel(AirMapView view, float minZoomLevel) {
    view.map.setMinZoomPreference(minZoomLevel);
  }

  @ReactProp(name = "maxZoomLevel")
  public void setMaxZoomLevel(AirMapView view, float maxZoomLevel) {
    view.map.setMaxZoomPreference(maxZoomLevel);
  }

  @ReactProp(name = "kmlSrc")
  public void setKmlSrc(AirMapView view, String kmlUrl) {
    if (kmlUrl != null) {
      view.setKmlSrc(kmlUrl);
    }
  }

  /**
   * Dispatches an imperative command from JS to the map view.
   *
   * <p>Every supported command carries arguments, so a null {@code args} is treated as a no-op
   * (the original code would have thrown a NullPointerException). Unknown command ids are
   * silently ignored, as before.
   */
  @Override
  public void receiveCommand(AirMapView view, int commandId, @Nullable ReadableArray args) {
    if (args == null) {
      return;
    }
    switch (commandId) {
      case SET_CAMERA:
        // Duration 0 == jump immediately.
        view.animateToCamera(args.getMap(0), 0);
        break;

      case ANIMATE_CAMERA:
        view.animateToCamera(args.getMap(0), args.getInt(1));
        break;

      case ANIMATE_TO_NAVIGATION: {
        ReadableMap region = args.getMap(0);
        LatLng location =
            new LatLng(region.getDouble("latitude"), region.getDouble("longitude"));
        view.animateToNavigation(
            location, (float) args.getDouble(1), (float) args.getDouble(2), args.getInt(3));
        break;
      }

      case ANIMATE_TO_REGION: {
        ReadableMap region = args.getMap(0);
        int duration = args.getInt(1);
        double lng = region.getDouble("longitude");
        double lat = region.getDouble("latitude");
        double lngDelta = region.getDouble("longitudeDelta");
        double latDelta = region.getDouble("latitudeDelta");
        // Convert center + span deltas into the corner-based bounds GoogleMap expects.
        LatLngBounds bounds = new LatLngBounds(
            new LatLng(lat - latDelta / 2, lng - lngDelta / 2), // southwest
            new LatLng(lat + latDelta / 2, lng + lngDelta / 2)  // northeast
        );
        view.animateToRegion(bounds, duration);
        break;
      }

      case ANIMATE_TO_COORDINATE: {
        ReadableMap region = args.getMap(0);
        LatLng target =
            new LatLng(region.getDouble("latitude"), region.getDouble("longitude"));
        view.animateToCoordinate(target, args.getInt(1));
        break;
      }

      case ANIMATE_TO_VIEWING_ANGLE:
        view.animateToViewingAngle((float) args.getDouble(0), args.getInt(1));
        break;

      case ANIMATE_TO_BEARING:
        view.animateToBearing((float) args.getDouble(0), args.getInt(1));
        break;

      case FIT_TO_ELEMENTS:
        view.fitToElements(args.getBoolean(0));
        break;

      case FIT_TO_SUPPLIED_MARKERS:
        view.fitToSuppliedMarkers(args.getArray(0), args.getMap(1), args.getBoolean(2));
        break;

      case FIT_TO_COORDINATES:
        view.fitToCoordinates(args.getArray(0), args.getMap(1), args.getBoolean(2));
        break;

      case SET_MAP_BOUNDARIES:
        view.setMapBoundaries(args.getMap(0), args.getMap(1));
        break;

      case SET_INDOOR_ACTIVE_LEVEL_INDEX:
        view.setIndoorActiveLevelIndex(args.getInt(0));
        break;

      default:
        // Unknown command ids are ignored (matches the original behavior).
        break;
    }
  }

  /**
   * Exports the direct event names this view emits. MapBuilder.of() accepts at most seven
   * pairs, hence the chained putAll() calls. The raw return type is kept to match the
   * ViewGroupManager override.
   */
  @Override
  @Nullable
  public Map getExportedCustomDirectEventTypeConstants() {
    Map<String, Map<String, String>> map = MapBuilder.of(
        "onMapReady", MapBuilder.of("registrationName", "onMapReady"),
        "onPress", MapBuilder.of("registrationName", "onPress"),
        "onLongPress", MapBuilder.of("registrationName", "onLongPress"),
        "onMarkerPress", MapBuilder.of("registrationName", "onMarkerPress"),
        "onMarkerSelect", MapBuilder.of("registrationName", "onMarkerSelect"),
        "onMarkerDeselect", MapBuilder.of("registrationName", "onMarkerDeselect"),
        "onCalloutPress", MapBuilder.of("registrationName", "onCalloutPress")
    );
    map.putAll(MapBuilder.of(
        "onUserLocationChange", MapBuilder.of("registrationName", "onUserLocationChange"),
        "onMarkerDragStart", MapBuilder.of("registrationName", "onMarkerDragStart"),
        "onMarkerDrag", MapBuilder.of("registrationName", "onMarkerDrag"),
        "onMarkerDragEnd", MapBuilder.of("registrationName", "onMarkerDragEnd"),
        "onPanDrag", MapBuilder.of("registrationName", "onPanDrag"),
        "onKmlReady", MapBuilder.of("registrationName", "onKmlReady"),
        "onPoiClick", MapBuilder.of("registrationName", "onPoiClick")
    ));
    map.putAll(MapBuilder.of(
        "onIndoorLevelActivated", MapBuilder.of("registrationName", "onIndoorLevelActivated"),
        "onIndoorBuildingFocused", MapBuilder.of("registrationName", "onIndoorBuildingFocused"),
        "onDoublePress", MapBuilder.of("registrationName", "onDoublePress"),
        "onMapLoaded", MapBuilder.of("registrationName", "onMapLoaded")
    ));
    return map;
  }

  /** Exports the JS command-name -> command-id mapping used by receiveCommand(). */
  @Nullable
  @Override
  public Map<String, Integer> getCommandsMap() {
    // CreateMap is static; don't invoke it through `this`.
    Map<String, Integer> map = CreateMap(
        "setCamera", SET_CAMERA,
        "animateCamera", ANIMATE_CAMERA,
        "animateToRegion", ANIMATE_TO_REGION,
        "animateToCoordinate", ANIMATE_TO_COORDINATE,
        "animateToViewingAngle", ANIMATE_TO_VIEWING_ANGLE,
        "animateToBearing", ANIMATE_TO_BEARING,
        "fitToElements", FIT_TO_ELEMENTS,
        "fitToSuppliedMarkers", FIT_TO_SUPPLIED_MARKERS,
        "fitToCoordinates", FIT_TO_COORDINATES,
        "animateToNavigation", ANIMATE_TO_NAVIGATION
    );
    map.putAll(MapBuilder.of(
        "setMapBoundaries", SET_MAP_BOUNDARIES,
        "setIndoorActiveLevelIndex", SET_INDOOR_ACTIVE_LEVEL_INDEX
    ));
    return map;
  }

  /**
   * Builds a mutable map from ten key/value pairs — MapBuilder.of() caps out at seven.
   * The local now uses the generic type instead of the raw {@code Map} the original used.
   */
  public static <K, V> Map<K, V> CreateMap(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8, K k9, V v9, K k10, V v10) {
    Map<K, V> map = new HashMap<>();
    map.put(k1, v1);
    map.put(k2, v2);
    map.put(k3, v3);
    map.put(k4, v4);
    map.put(k5, v5);
    map.put(k6, v6);
    map.put(k7, v7);
    map.put(k8, v8);
    map.put(k9, v9);
    map.put(k10, v10);
    return map;
  }

  @Override
  public LayoutShadowNode createShadowNodeInstance() {
    // A custom shadow node is needed in order to pass back the width/height of the map to the
    // view manager so that it can start applying camera moves with bounds.
    return new SizeReportingShadowNode();
  }

  @Override
  public void addView(AirMapView parent, View child, int index) {
    // Children are map features (markers, polylines, ...), not Android child views.
    parent.addFeature(child, index);
  }

  @Override
  public int getChildCount(AirMapView view) {
    return view.getFeatureCount();
  }

  @Override
  public View getChildAt(AirMapView view, int index) {
    return view.getFeatureAt(index);
  }

  @Override
  public void removeViewAt(AirMapView parent, int index) {
    parent.removeFeatureAt(index);
  }

  @Override
  public void updateExtraData(AirMapView view, Object extraData) {
    view.updateExtraData(extraData);
  }

  /** Sends a view-scoped event (by view id) back to JS. */
  void pushEvent(ThemedReactContext context, View view, String name, WritableMap data) {
    context.getJSModule(RCTEventEmitter.class)
        .receiveEvent(view.getId(), name, data);
  }

  @Override
  public void onDropViewInstance(AirMapView view) {
    // Release map resources before the framework tears the view down.
    view.doDestroy();
    super.onDropViewInstance(view);
  }
}
| |
package com.ambrosoft.exercises;
/**
* Copyright Ambrosoft, Inc. 2020
* User: jacek
* Date: Jul 11, 2006
* Time: 4:15:23 PM
*/
/*
File: LinkedQueue.java
Originally written by Doug Lea and released into the public domain.
This may be used for any purposes whatsoever without acknowledgment.
Thanks for the assistance and support of Sun Microsystems Labs,
and everyone contributing, testing, and using this code.
History:
Date Who What
11Jun1998 dl Create public version
25aug1998 dl added peek
10dec1998 dl added isEmpty
10oct1999 dl lock on node object to ensure visibility
*/
/**
* A linked list based channel implementation.
* The algorithm avoids contention between puts
* and takes when the queue is not empty.
* Normally a put and a take can proceed simultaneously.
* (Although it does not allow multiple concurrent puts or takes.)
* This class tends to perform more efficently than
* other Channel implementations in producer/consumer
* applications.
* <p>[<a href="http://gee.cs.oswego.edu/dl/classes/EDU/oswego/cs/dl/util/concurrent/intro.html"> Introduction to this package. </a>]
*/
final class LinkedQueue<E> implements Channel<E> {
  /**
   * A standard linked list node used in various queue classes *
   */
  private static class LinkedNode<E> {
    E value;
    LinkedNode<E> next;

    LinkedNode(E x) {
      value = x;
    }

    LinkedNode(E x, LinkedNode<E> n) {
      value = x;
      next = n;
    }
  }

  /**
   * Dummy header node of list. The first actual node, if it exists, is always
   * at head_.next. After each take, the old first node becomes the head.
   */
  protected LinkedNode<E> head_;

  /**
   * Helper monitor for managing access to last node.
   * Serializes producers; consumers serialize on `this` (extract is synchronized)
   * and also wait on this monitor when the queue is empty.
   */
  protected final Object putLock_ = new Object();

  /**
   * The last node of list. Put() appends to list, so modifies last_
   */
  protected LinkedNode<E> last_;

  /**
   * The number of threads waiting for a take.
   * Notifications are provided in put only if greater than zero.
   * The bookkeeping is worth it here since in reasonably balanced
   * usages, the notifications will hardly ever be necessary, so
   * the call overhead to notify can be eliminated.
   */
  protected int waitingForTake_ = 0;

  public LinkedQueue() {
    // The dummy header carries no value; an empty queue is head_ == last_.
    head_ = new LinkedNode<E>(null);
    last_ = head_;
  }

  /**
   * Main mechanics for put/offer *
   *
   * Appends x to the tail. Locking the last node itself (in addition to
   * putLock_) publishes the new link to a concurrent taker: when the queue
   * goes from empty to non-empty, head_ == last_, so putter and taker lock
   * the same node object, which guarantees visibility (see 10oct1999 note
   * in the file header). Never blocks.
   */
  protected void insert(final E x) {
    synchronized (putLock_) {
      final LinkedNode<E> p = new LinkedNode<E>(x);
      synchronized (last_) {
        last_.next = p;
        last_ = p;
      }
      // Only signal when someone is actually waiting; avoids the cost of
      // notify() in the common, uncontended case.
      if (waitingForTake_ > 0) {
        putLock_.notify();
      }
    }
  }

  /**
   * Main mechanics for take/poll *
   *
   * Non-blocking removal. Returns the first element or null if empty.
   * The `synchronized` modifier serializes consumers on `this`; locking
   * head_ pairs with insert()'s lock on last_ for visibility at the
   * empty/non-empty boundary. The old first node becomes the new dummy
   * header (its value is nulled to avoid retaining the element).
   */
  protected synchronized E extract() {
    synchronized (head_) {
      E x = null;
      final LinkedNode<E> first = head_.next;
      if (first != null) {
        x = first.value;
        first.value = null;
        head_ = first;
      }
      return x;
    }
  }

  /**
   * Appends x to the queue. Never blocks (the list is unbounded), but
   * honors a pending interrupt before doing any work.
   *
   * @throws IllegalArgumentException if x is null
   * @throws InterruptedException if the calling thread was interrupted
   */
  public void put(final E x) throws InterruptedException {
    if (x == null) {
      throw new IllegalArgumentException();
    }
    if (Thread.interrupted()) {
      throw new InterruptedException();
    }
    insert(x);
  }

  /**
   * Offers x to the queue. Always succeeds: since puts never block,
   * the msecs timeout is intentionally unused.
   *
   * @return always true
   * @throws IllegalArgumentException if x is null
   * @throws InterruptedException if the calling thread was interrupted
   */
  public boolean offer(final E x, long msecs) throws InterruptedException {
    if (x == null) {
      throw new IllegalArgumentException();
    }
    if (Thread.interrupted()) {
      throw new InterruptedException();
    }
    insert(x);
    return true;
  }

  /**
   * Removes and returns the first element, blocking until one is available.
   */
  public E take() throws InterruptedException {
    if (Thread.interrupted()) {
      throw new InterruptedException();
    } else {
      // try to extract. If fail, then enter wait-based retry loop
      E x = extract();
      if (x != null) {
        return x;
      } else {
        synchronized (putLock_) {
          try {
            // Register as a waiter so insert() knows to notify().
            ++waitingForTake_;
            for (; ; ) {
              if ((x = extract()) != null) {
                --waitingForTake_;
                return x;
              } else {
                putLock_.wait();
              }
            }
          } catch (InterruptedException ex) {
            --waitingForTake_;
            // Relay the notification: the interrupt may have consumed a
            // signal intended for another waiting taker.
            putLock_.notify();
            throw ex;
          }
        }
      }
    }
  }

  /**
   * Returns the first element without removing it, or null if empty.
   */
  public E peek() {
    synchronized (head_) {
      final LinkedNode<E> first = head_.next;
      return first != null ? first.value : null;
    }
  }

  /**
   * Returns true if the queue currently holds no elements.
   */
  public boolean isEmpty() {
    synchronized (head_) {
      return head_.next == null;
    }
  }

  /**
   * Removes and returns the first element, waiting up to msecs milliseconds.
   * A non-positive msecs makes this a non-blocking poll. Returns null on
   * timeout.
   */
  public E poll(final long msecs) throws InterruptedException {
    if (Thread.interrupted()) {
      throw new InterruptedException();
    } else {
      E x = extract();
      if (x != null) {
        return x;
      } else {
        synchronized (putLock_) {
          try {
            long waitTime = msecs;
            long start = msecs <= 0 ? 0 : System.currentTimeMillis();
            ++waitingForTake_;
            for (; ; ) {
              x = extract();
              if (x != null || waitTime <= 0) {
                --waitingForTake_;
                return x;
              } else {
                putLock_.wait(waitTime);
                // Recompute the remaining budget; wait() can wake spuriously
                // or via a notify for an element another taker already claimed.
                waitTime = msecs - (System.currentTimeMillis() - start);
              }
            }
          } catch (InterruptedException ex) {
            --waitingForTake_;
            // Relay a possibly-consumed signal to another waiter (see take()).
            putLock_.notify();
            throw ex;
          }
        }
      }
    }
  }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1p3beta1/video_intelligence.proto
package com.google.cloud.videointelligence.v1p3beta1;
/**
*
*
* <pre>
* Config for TEXT_DETECTION.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p3beta1.TextDetectionConfig}
*/
public final class TextDetectionConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p3beta1.TextDetectionConfig)
TextDetectionConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use TextDetectionConfig.newBuilder() to construct.
// Generated protobuf code — construction goes through TextDetectionConfig.newBuilder().
private TextDetectionConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default instance: empty language_hints list, empty model string.
private TextDetectionConfig() {
  languageHints_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  model_ = "";
}

// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new TextDetectionConfig();
}

// Fields seen on the wire but not in this message's schema are preserved here.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
// Generated wire-format parser: reads tag/value pairs until end of stream,
// accumulating language_hints (field 1) and model (field 2); everything else
// goes into unknownFields.
private TextDetectionConfig(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // tag 0 == end of input
          done = true;
          break;
        case 10: // field 1 (language_hints), wire type 2: length-delimited UTF-8 string
          {
            java.lang.String s = input.readStringRequireUtf8();
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              // Lazily allocate a mutable list on the first element; bit 0
              // records that this instance owns a mutable copy.
              languageHints_ = new com.google.protobuf.LazyStringArrayList();
              mutable_bitField0_ |= 0x00000001;
            }
            languageHints_.add(s);
            break;
          }
        case 18: // field 2 (model), wire type 2: length-delimited UTF-8 string
          {
            java.lang.String s = input.readStringRequireUtf8();
            model_ = s;
            break;
          }
        default:
          {
            // Unknown field: preserve it; a false return means end of group/stream.
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Seal the repeated field and attach whatever unknown fields were collected,
    // even if parsing failed part-way (setUnfinishedMessage carries this instance).
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      languageHints_ = languageHints_.getUnmodifiableView();
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Generated descriptor plumbing: links this class to the message definition
// in video_intelligence.proto via the service proto holder class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
      .internal_static_google_cloud_videointelligence_v1p3beta1_TextDetectionConfig_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
      .internal_static_google_cloud_videointelligence_v1p3beta1_TextDetectionConfig_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig.class,
          com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig.Builder.class);
}
// --- Field 1: repeated string language_hints --------------------------------
public static final int LANGUAGE_HINTS_FIELD_NUMBER = 1;
// Immutable view after parsing (see the parsing constructor's finally block).
private com.google.protobuf.LazyStringList languageHints_;
/**
 *
 *
 * <pre>
 * Language hint can be specified if the language to be detected is known a
 * priori. It can increase the accuracy of the detection. Language hint must
 * be language code in BCP-47 format.
 * Automatic language detection is performed if no hint is provided.
 * </pre>
 *
 * <code>repeated string language_hints = 1;</code>
 *
 * @return A list containing the languageHints.
 */
public com.google.protobuf.ProtocolStringList getLanguageHintsList() {
  return languageHints_;
}
/**
 *
 *
 * <pre>
 * Language hint can be specified if the language to be detected is known a
 * priori. It can increase the accuracy of the detection. Language hint must
 * be language code in BCP-47 format.
 * Automatic language detection is performed if no hint is provided.
 * </pre>
 *
 * <code>repeated string language_hints = 1;</code>
 *
 * @return The count of languageHints.
 */
public int getLanguageHintsCount() {
  return languageHints_.size();
}
/**
 *
 *
 * <pre>
 * Language hint can be specified if the language to be detected is known a
 * priori. It can increase the accuracy of the detection. Language hint must
 * be language code in BCP-47 format.
 * Automatic language detection is performed if no hint is provided.
 * </pre>
 *
 * <code>repeated string language_hints = 1;</code>
 *
 * @param index The index of the element to return.
 * @return The languageHints at the given index.
 */
public java.lang.String getLanguageHints(int index) {
  return languageHints_.get(index);
}
/**
 *
 *
 * <pre>
 * Language hint can be specified if the language to be detected is known a
 * priori. It can increase the accuracy of the detection. Language hint must
 * be language code in BCP-47 format.
 * Automatic language detection is performed if no hint is provided.
 * </pre>
 *
 * <code>repeated string language_hints = 1;</code>
 *
 * @param index The index of the value to return.
 * @return The bytes of the languageHints at the given index.
 */
public com.google.protobuf.ByteString getLanguageHintsBytes(int index) {
  return languageHints_.getByteString(index);
}
// --- Field 2: string model ---------------------------------------------------
public static final int MODEL_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; lazily decoded/encoded by the
// accessors below (standard generated-protobuf caching scheme).
private volatile java.lang.Object model_;
/**
 *
 *
 * <pre>
 * Model to use for text detection.
 * Supported values: "builtin/stable" (the default if unset) and
 * "builtin/latest".
 * </pre>
 *
 * <code>string model = 2;</code>
 *
 * @return The model.
 */
@java.lang.Override
public java.lang.String getModel() {
  java.lang.Object ref = model_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First String access after wire parsing: decode once and cache.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    model_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Model to use for text detection.
 * Supported values: "builtin/stable" (the default if unset) and
 * "builtin/latest".
 * </pre>
 *
 * <code>string model = 2;</code>
 *
 * @return The bytes for model.
 */
@java.lang.Override
public com.google.protobuf.ByteString getModelBytes() {
  java.lang.Object ref = model_;
  if (ref instanceof java.lang.String) {
    // First bytes access: encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    model_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

// This message has no required fields, so initialization always succeeds.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

// Serializes set fields in field-number order; default values are skipped.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < languageHints_.size(); i++) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, languageHints_.getRaw(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, model_);
  }
  unknownFields.writeTo(output);
}

// Computes (and memoizes) the exact serialized byte size; must mirror writeTo().
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  {
    int dataSize = 0;
    for (int i = 0; i < languageHints_.size(); i++) {
      dataSize += computeStringSizeNoTag(languageHints_.getRaw(i));
    }
    size += dataSize;
    // One 1-byte tag per repeated element (field 1, wire type 2).
    size += 1 * getLanguageHintsList().size();
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, model_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Structural equality over both fields plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig)) {
    return super.equals(obj);
  }
  com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig other =
      (com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig) obj;
  if (!getLanguageHintsList().equals(other.getLanguageHintsList())) return false;
  if (!getModel().equals(other.getModel())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}

// Memoized hash consistent with equals(); mixes descriptor, set fields, and
// unknown fields using the generated-code prime scheme.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getLanguageHintsCount() > 0) {
    hash = (37 * hash) + LANGUAGE_HINTS_FIELD_NUMBER;
    hash = (53 * hash) + getLanguageHintsList().hashCode();
  }
  hash = (37 * hash) + MODEL_FIELD_NUMBER;
  hash = (53 * hash) + getModel().hashCode();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
/** Parses a {@code TextDetectionConfig} directly from a protobuf {@code CodedInputStream}. */
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
/** Parses a {@code TextDetectionConfig} from {@code input}, resolving extensions via {@code extensionRegistry}. */
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Instance-level hook required by the Message interface; delegates to the static factory.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
/** Creates a new empty {@link Builder} for this message type. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
/** Creates a {@link Builder} pre-populated with the field values of {@code prototype}. */
public static Builder newBuilder(
    com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
/**
 * Converts this message into a {@link Builder} carrying the same field values.
 */
@java.lang.Override
public Builder toBuilder() {
  // The default instance holds no field data, so a fresh Builder is
  // equivalent to (and cheaper than) merging from it.
  if (this == DEFAULT_INSTANCE) {
    return new Builder();
  }
  return new Builder().mergeFrom(this);
}
/**
 * Creates a {@link Builder} attached to {@code parent} so that changes
 * propagate to the enclosing builder tree.
 */
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  return new Builder(parent);
}
/**
 *
 *
 * <pre>
 * Config for TEXT_DETECTION.
 * </pre>
 *
 * Protobuf type {@code google.cloud.videointelligence.v1p3beta1.TextDetectionConfig}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p3beta1.TextDetectionConfig)
    com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfigOrBuilder {
  /** Returns the protobuf descriptor describing this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p3beta1_TextDetectionConfig_descriptor;
  }

  // Wires the descriptor's fields to the generated message/builder classes
  // for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p3beta1_TextDetectionConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig.class,
            com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig.Builder.class);
  }

  // Construct using com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  // Constructor used when this builder is nested under a parent builder tree.
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // No-op here: this message has no sub-message fields, so there are no
  // nested field builders to eagerly initialize.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
  }

  /** Resets every field to its default value and returns this builder. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    languageHints_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    // Clear the "languageHints is mutable" bit (bit 0 of bitField0_).
    bitField0_ = (bitField0_ & ~0x00000001);
    model_ = "";
    return this;
  }

  /** Returns the descriptor for the message type this builder produces. */
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p3beta1_TextDetectionConfig_descriptor;
  }

  /** Returns the immutable singleton default instance of the message type. */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig
      getDefaultInstanceForType() {
    return com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig.getDefaultInstance();
  }

  /**
   * Builds the message, throwing if any required field is unset.
   * (This proto3 message has no required fields, so the check always passes.)
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig build() {
    com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  /** Builds the message without the initialization check. */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig buildPartial() {
    com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig result =
        new com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig(this);
    int from_bitField0_ = bitField0_;
    if (((bitField0_ & 0x00000001) != 0)) {
      // Freeze the list so the built message shares it immutably; the builder
      // will re-copy on the next mutation (copy-on-write via the mutable bit).
      languageHints_ = languageHints_.getUnmodifiableView();
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.languageHints_ = languageHints_;
    result.model_ = model_;
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dispatches to the type-specific merge when possible; otherwise falls back
  // to the reflective merge in the superclass.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig) {
      return mergeFrom((com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /** Merges the non-default fields of {@code other} into this builder. */
  public Builder mergeFrom(
      com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig other) {
    if (other
        == com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig.getDefaultInstance())
      return this;
    if (!other.languageHints_.isEmpty()) {
      if (languageHints_.isEmpty()) {
        // Nothing local yet: share other's immutable list (mutable bit stays clear).
        languageHints_ = other.languageHints_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureLanguageHintsIsMutable();
        languageHints_.addAll(other.languageHints_);
      }
      onChanged();
    }
    if (!other.getModel().isEmpty()) {
      model_ = other.model_;
      onChanged();
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  // Proto3 messages have no required fields, so a builder is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Parses a message from the wire and merges it into this builder. On a
  // parse error, any partially parsed data is still merged before rethrowing.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage =
          (com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig)
              e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  // Bit 0 tracks whether languageHints_ is a private mutable copy.
  private int bitField0_;

  private com.google.protobuf.LazyStringList languageHints_ =
      com.google.protobuf.LazyStringArrayList.EMPTY;

  // Copy-on-write helper: replace a shared/immutable list with a private
  // mutable copy before the first mutation.
  private void ensureLanguageHintsIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      languageHints_ = new com.google.protobuf.LazyStringArrayList(languageHints_);
      bitField0_ |= 0x00000001;
    }
  }
  /**
   *
   *
   * <pre>
   * Language hint can be specified if the language to be detected is known a
   * priori. It can increase the accuracy of the detection. Language hint must
   * be language code in BCP-47 format.
   * Automatic language detection is performed if no hint is provided.
   * </pre>
   *
   * <code>repeated string language_hints = 1;</code>
   *
   * @return A list containing the languageHints.
   */
  public com.google.protobuf.ProtocolStringList getLanguageHintsList() {
    return languageHints_.getUnmodifiableView();
  }
  /**
   *
   *
   * <pre>
   * Language hint can be specified if the language to be detected is known a
   * priori. It can increase the accuracy of the detection. Language hint must
   * be language code in BCP-47 format.
   * Automatic language detection is performed if no hint is provided.
   * </pre>
   *
   * <code>repeated string language_hints = 1;</code>
   *
   * @return The count of languageHints.
   */
  public int getLanguageHintsCount() {
    return languageHints_.size();
  }
  /**
   *
   *
   * <pre>
   * Language hint can be specified if the language to be detected is known a
   * priori. It can increase the accuracy of the detection. Language hint must
   * be language code in BCP-47 format.
   * Automatic language detection is performed if no hint is provided.
   * </pre>
   *
   * <code>repeated string language_hints = 1;</code>
   *
   * @param index The index of the element to return.
   * @return The languageHints at the given index.
   */
  public java.lang.String getLanguageHints(int index) {
    return languageHints_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Language hint can be specified if the language to be detected is known a
   * priori. It can increase the accuracy of the detection. Language hint must
   * be language code in BCP-47 format.
   * Automatic language detection is performed if no hint is provided.
   * </pre>
   *
   * <code>repeated string language_hints = 1;</code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the languageHints at the given index.
   */
  public com.google.protobuf.ByteString getLanguageHintsBytes(int index) {
    return languageHints_.getByteString(index);
  }
  /**
   *
   *
   * <pre>
   * Language hint can be specified if the language to be detected is known a
   * priori. It can increase the accuracy of the detection. Language hint must
   * be language code in BCP-47 format.
   * Automatic language detection is performed if no hint is provided.
   * </pre>
   *
   * <code>repeated string language_hints = 1;</code>
   *
   * @param index The index to set the value at.
   * @param value The languageHints to set.
   * @return This builder for chaining.
   */
  public Builder setLanguageHints(int index, java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureLanguageHintsIsMutable();
    languageHints_.set(index, value);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Language hint can be specified if the language to be detected is known a
   * priori. It can increase the accuracy of the detection. Language hint must
   * be language code in BCP-47 format.
   * Automatic language detection is performed if no hint is provided.
   * </pre>
   *
   * <code>repeated string language_hints = 1;</code>
   *
   * @param value The languageHints to add.
   * @return This builder for chaining.
   */
  public Builder addLanguageHints(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureLanguageHintsIsMutable();
    languageHints_.add(value);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Language hint can be specified if the language to be detected is known a
   * priori. It can increase the accuracy of the detection. Language hint must
   * be language code in BCP-47 format.
   * Automatic language detection is performed if no hint is provided.
   * </pre>
   *
   * <code>repeated string language_hints = 1;</code>
   *
   * @param values The languageHints to add.
   * @return This builder for chaining.
   */
  public Builder addAllLanguageHints(java.lang.Iterable<java.lang.String> values) {
    ensureLanguageHintsIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, languageHints_);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Language hint can be specified if the language to be detected is known a
   * priori. It can increase the accuracy of the detection. Language hint must
   * be language code in BCP-47 format.
   * Automatic language detection is performed if no hint is provided.
   * </pre>
   *
   * <code>repeated string language_hints = 1;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearLanguageHints() {
    languageHints_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Language hint can be specified if the language to be detected is known a
   * priori. It can increase the accuracy of the detection. Language hint must
   * be language code in BCP-47 format.
   * Automatic language detection is performed if no hint is provided.
   * </pre>
   *
   * <code>repeated string language_hints = 1;</code>
   *
   * @param value The bytes of the languageHints to add.
   * @return This builder for chaining.
   */
  public Builder addLanguageHintsBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    ensureLanguageHintsIsMutable();
    languageHints_.add(value);
    onChanged();
    return this;
  }

  // Stored as Object: either a String or a ByteString, converted lazily.
  private java.lang.Object model_ = "";
  /**
   *
   *
   * <pre>
   * Model to use for text detection.
   * Supported values: "builtin/stable" (the default if unset) and
   * "builtin/latest".
   * </pre>
   *
   * <code>string model = 2;</code>
   *
   * @return The model.
   */
  public java.lang.String getModel() {
    java.lang.Object ref = model_;
    if (!(ref instanceof java.lang.String)) {
      // Decode the cached ByteString once and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      model_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Model to use for text detection.
   * Supported values: "builtin/stable" (the default if unset) and
   * "builtin/latest".
   * </pre>
   *
   * <code>string model = 2;</code>
   *
   * @return The bytes for model.
   */
  public com.google.protobuf.ByteString getModelBytes() {
    java.lang.Object ref = model_;
    if (ref instanceof String) {
      // Encode the cached String once and memoize the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      model_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Model to use for text detection.
   * Supported values: "builtin/stable" (the default if unset) and
   * "builtin/latest".
   * </pre>
   *
   * <code>string model = 2;</code>
   *
   * @param value The model to set.
   * @return This builder for chaining.
   */
  public Builder setModel(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    model_ = value;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Model to use for text detection.
   * Supported values: "builtin/stable" (the default if unset) and
   * "builtin/latest".
   * </pre>
   *
   * <code>string model = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearModel() {
    model_ = getDefaultInstance().getModel();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Model to use for text detection.
   * Supported values: "builtin/stable" (the default if unset) and
   * "builtin/latest".
   * </pre>
   *
   * <code>string model = 2;</code>
   *
   * @param value The bytes for model to set.
   * @return This builder for chaining.
   */
  public Builder setModelBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    model_ = value;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p3beta1.TextDetectionConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.TextDetectionConfig)
// Singleton default instance; all fields hold their proto3 default values.
private static final com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig();
}
/** Returns the shared immutable default instance of this message type. */
public static com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser; delegates to the parsing constructor of the message.
private static final com.google.protobuf.Parser<TextDetectionConfig> PARSER =
    new com.google.protobuf.AbstractParser<TextDetectionConfig>() {
      @java.lang.Override
      public TextDetectionConfig parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TextDetectionConfig(input, extensionRegistry);
      }
    };
/** Returns the static parser for this message type. */
public static com.google.protobuf.Parser<TextDetectionConfig> parser() {
  return PARSER;
}
// Instance-level hook required by the Message interface; same parser as parser().
@java.lang.Override
public com.google.protobuf.Parser<TextDetectionConfig> getParserForType() {
  return PARSER;
}
// Instance-level hook required by the Message interface; same singleton as
// getDefaultInstance().
@java.lang.Override
public com.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.util.Apps;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public interface MRJobConfig {
// Put all of the attribute names in here so that Job and JobContext are
// consistent.
public static final String INPUT_FORMAT_CLASS_ATTR = "mapreduce.job.inputformat.class";
public static final String MAP_CLASS_ATTR = "mapreduce.job.map.class";
public static final String MAP_OUTPUT_COLLECTOR_CLASS_ATTR
= "mapreduce.job.map.output.collector.class";
public static final String COMBINE_CLASS_ATTR = "mapreduce.job.combine.class";
public static final String REDUCE_CLASS_ATTR = "mapreduce.job.reduce.class";
public static final String OUTPUT_FORMAT_CLASS_ATTR = "mapreduce.job.outputformat.class";
public static final String PARTITIONER_CLASS_ATTR = "mapreduce.job.partitioner.class";
public static final String SETUP_CLEANUP_NEEDED = "mapreduce.job.committer.setup.cleanup.needed";
public static final String TASK_CLEANUP_NEEDED = "mapreduce.job.committer.task.cleanup.needed";
public static final String JAR = "mapreduce.job.jar";
public static final String ID = "mapreduce.job.id";
public static final String JOB_NAME = "mapreduce.job.name";
public static final String JAR_UNPACK_PATTERN = "mapreduce.job.jar.unpack.pattern";
public static final String USER_NAME = "mapreduce.job.user.name";
public static final String PRIORITY = "mapreduce.job.priority";
public static final String QUEUE_NAME = "mapreduce.job.queuename";
public static final String RESERVATION_ID = "mapreduce.job.reservation.id";
public static final String JOB_TAGS = "mapreduce.job.tags";
public static final String JVM_NUMTASKS_TORUN = "mapreduce.job.jvm.numtasks";
public static final String SPLIT_FILE = "mapreduce.job.splitfile";
public static final String SPLIT_METAINFO_MAXSIZE = "mapreduce.job.split.metainfo.maxsize";
public static final long DEFAULT_SPLIT_METAINFO_MAXSIZE = 10000000L;
public static final String NUM_MAPS = "mapreduce.job.maps";
public static final String MAX_TASK_FAILURES_PER_TRACKER = "mapreduce.job.maxtaskfailures.per.tracker";
public static final String COMPLETED_MAPS_FOR_REDUCE_SLOWSTART = "mapreduce.job.reduce.slowstart.completedmaps";
public static final String NUM_REDUCES = "mapreduce.job.reduces";
public static final String SKIP_RECORDS = "mapreduce.job.skiprecords";
public static final String SKIP_OUTDIR = "mapreduce.job.skip.outdir";
// SPECULATIVE_SLOWNODE_THRESHOLD is obsolete and will be deleted in the future
@Deprecated
public static final String SPECULATIVE_SLOWNODE_THRESHOLD = "mapreduce.job.speculative.slownodethreshold";
public static final String SPECULATIVE_SLOWTASK_THRESHOLD = "mapreduce.job.speculative.slowtaskthreshold";
// SPECULATIVECAP is obsolete and will be deleted in the future
@Deprecated
public static final String SPECULATIVECAP = "mapreduce.job.speculative.speculativecap";
public static final String SPECULATIVECAP_RUNNING_TASKS =
"mapreduce.job.speculative.speculative-cap-running-tasks";
public static final double DEFAULT_SPECULATIVECAP_RUNNING_TASKS =
0.1;
public static final String SPECULATIVECAP_TOTAL_TASKS =
"mapreduce.job.speculative.speculative-cap-total-tasks";
public static final double DEFAULT_SPECULATIVECAP_TOTAL_TASKS =
0.01;
public static final String SPECULATIVE_MINIMUM_ALLOWED_TASKS =
"mapreduce.job.speculative.minimum-allowed-tasks";
public static final int DEFAULT_SPECULATIVE_MINIMUM_ALLOWED_TASKS =
10;
public static final String SPECULATIVE_RETRY_AFTER_NO_SPECULATE =
"mapreduce.job.speculative.retry-after-no-speculate";
public static final long DEFAULT_SPECULATIVE_RETRY_AFTER_NO_SPECULATE =
1000L;
public static final String SPECULATIVE_RETRY_AFTER_SPECULATE =
"mapreduce.job.speculative.retry-after-speculate";
public static final long DEFAULT_SPECULATIVE_RETRY_AFTER_SPECULATE =
15000L;
public static final String JOB_LOCAL_DIR = "mapreduce.job.local.dir";
public static final String OUTPUT_KEY_CLASS = "mapreduce.job.output.key.class";
public static final String OUTPUT_VALUE_CLASS = "mapreduce.job.output.value.class";
public static final String KEY_COMPARATOR = "mapreduce.job.output.key.comparator.class";
public static final String COMBINER_GROUP_COMPARATOR_CLASS = "mapreduce.job.combiner.group.comparator.class";
public static final String GROUP_COMPARATOR_CLASS = "mapreduce.job.output.group.comparator.class";
public static final String WORKING_DIR = "mapreduce.job.working.dir";
public static final String CLASSPATH_ARCHIVES = "mapreduce.job.classpath.archives";
public static final String CLASSPATH_FILES = "mapreduce.job.classpath.files";
public static final String CACHE_FILES = "mapreduce.job.cache.files";
public static final String CACHE_ARCHIVES = "mapreduce.job.cache.archives";
public static final String CACHE_FILES_SIZES = "mapreduce.job.cache.files.filesizes"; // internal use only
public static final String CACHE_ARCHIVES_SIZES = "mapreduce.job.cache.archives.filesizes"; // ditto
public static final String CACHE_LOCALFILES = "mapreduce.job.cache.local.files";
public static final String CACHE_LOCALARCHIVES = "mapreduce.job.cache.local.archives";
public static final String CACHE_FILE_TIMESTAMPS = "mapreduce.job.cache.files.timestamps";
public static final String CACHE_ARCHIVES_TIMESTAMPS = "mapreduce.job.cache.archives.timestamps";
public static final String CACHE_FILE_VISIBILITIES = "mapreduce.job.cache.files.visibilities";
public static final String CACHE_ARCHIVES_VISIBILITIES = "mapreduce.job.cache.archives.visibilities";
/**
* @deprecated Symlinks are always on and cannot be disabled.
*/
@Deprecated
public static final String CACHE_SYMLINK = "mapreduce.job.cache.symlink.create";
public static final String USER_LOG_RETAIN_HOURS = "mapreduce.job.userlog.retain.hours";
public static final String MAPREDUCE_JOB_USER_CLASSPATH_FIRST = "mapreduce.job.user.classpath.first";
public static final String MAPREDUCE_JOB_CLASSLOADER = "mapreduce.job.classloader";
/**
* A comma-separated list of services that function as ShuffleProvider aux-services
* (in addition to the built-in ShuffleHandler).
* These services can serve shuffle requests from reducetasks.
*/
public static final String MAPREDUCE_JOB_SHUFFLE_PROVIDER_SERVICES = "mapreduce.job.shuffle.provider.services";
public static final String MAPREDUCE_JOB_CLASSLOADER_SYSTEM_CLASSES = "mapreduce.job.classloader.system.classes";
public static final String IO_SORT_FACTOR = "mapreduce.task.io.sort.factor";
public static final String IO_SORT_MB = "mapreduce.task.io.sort.mb";
public static final String INDEX_CACHE_MEMORY_LIMIT = "mapreduce.task.index.cache.limit.bytes";
public static final String PRESERVE_FAILED_TASK_FILES = "mapreduce.task.files.preserve.failedtasks";
public static final String PRESERVE_FILES_PATTERN = "mapreduce.task.files.preserve.filepattern";
public static final String TASK_DEBUGOUT_LINES = "mapreduce.task.debugout.lines";
public static final String RECORDS_BEFORE_PROGRESS = "mapreduce.task.merge.progress.records";
public static final String SKIP_START_ATTEMPTS = "mapreduce.task.skip.start.attempts";
public static final String TASK_ATTEMPT_ID = "mapreduce.task.attempt.id";
public static final String TASK_ISMAP = "mapreduce.task.ismap";
public static final boolean DEFAULT_TASK_ISMAP = true;
public static final String TASK_PARTITION = "mapreduce.task.partition";
public static final String TASK_PROFILE = "mapreduce.task.profile";
public static final String TASK_PROFILE_PARAMS = "mapreduce.task.profile.params";
public static final String DEFAULT_TASK_PROFILE_PARAMS =
"-agentlib:hprof=cpu=samples,heap=sites,force=n,thread=y,"
+ "verbose=n,file=%s";
public static final String NUM_MAP_PROFILES = "mapreduce.task.profile.maps";
public static final String NUM_REDUCE_PROFILES = "mapreduce.task.profile.reduces";
public static final String TASK_MAP_PROFILE_PARAMS = "mapreduce.task.profile.map.params";
public static final String TASK_REDUCE_PROFILE_PARAMS = "mapreduce.task.profile.reduce.params";
public static final String TASK_TIMEOUT = "mapreduce.task.timeout";
public static final String TASK_TIMEOUT_CHECK_INTERVAL_MS = "mapreduce.task.timeout.check-interval-ms";
public static final String TASK_ID = "mapreduce.task.id";
public static final String TASK_OUTPUT_DIR = "mapreduce.task.output.dir";
public static final String TASK_USERLOG_LIMIT = "mapreduce.task.userlog.limit.kb";
public static final String MAP_SORT_SPILL_PERCENT = "mapreduce.map.sort.spill.percent";
public static final String MAP_INPUT_FILE = "mapreduce.map.input.file";
public static final String MAP_INPUT_PATH = "mapreduce.map.input.length";
public static final String MAP_INPUT_START = "mapreduce.map.input.start";
public static final String MAP_MEMORY_MB = "mapreduce.map.memory.mb";
public static final int DEFAULT_MAP_MEMORY_MB = 1024;
public static final String MAP_CPU_VCORES = "mapreduce.map.cpu.vcores";
public static final int DEFAULT_MAP_CPU_VCORES = 1;
public static final String MAP_ENV = "mapreduce.map.env";
public static final String MAP_JAVA_OPTS = "mapreduce.map.java.opts";
public static final String MAP_MAX_ATTEMPTS = "mapreduce.map.maxattempts";
public static final String MAP_DEBUG_SCRIPT = "mapreduce.map.debug.script";
public static final String MAP_SPECULATIVE = "mapreduce.map.speculative";
public static final String MAP_FAILURES_MAX_PERCENT = "mapreduce.map.failures.maxpercent";
public static final String MAP_SKIP_INCR_PROC_COUNT = "mapreduce.map.skip.proc-count.auto-incr";
public static final String MAP_SKIP_MAX_RECORDS = "mapreduce.map.skip.maxrecords";
public static final String MAP_COMBINE_MIN_SPILLS = "mapreduce.map.combine.minspills";
public static final String MAP_OUTPUT_COMPRESS = "mapreduce.map.output.compress";
public static final String MAP_OUTPUT_COMPRESS_CODEC = "mapreduce.map.output.compress.codec";
public static final String MAP_OUTPUT_KEY_CLASS = "mapreduce.map.output.key.class";
public static final String MAP_OUTPUT_VALUE_CLASS = "mapreduce.map.output.value.class";
public static final String MAP_OUTPUT_KEY_FIELD_SEPERATOR = "mapreduce.map.output.key.field.separator";
public static final String MAP_LOG_LEVEL = "mapreduce.map.log.level";
public static final String REDUCE_LOG_LEVEL = "mapreduce.reduce.log.level";
public static final String DEFAULT_LOG_LEVEL = "INFO";
public static final String REDUCE_MERGE_INMEM_THRESHOLD = "mapreduce.reduce.merge.inmem.threshold";
public static final String REDUCE_INPUT_BUFFER_PERCENT = "mapreduce.reduce.input.buffer.percent";
public static final String REDUCE_MARKRESET_BUFFER_PERCENT = "mapreduce.reduce.markreset.buffer.percent";
public static final String REDUCE_MARKRESET_BUFFER_SIZE = "mapreduce.reduce.markreset.buffer.size";
public static final String REDUCE_MEMORY_MB = "mapreduce.reduce.memory.mb";
public static final int DEFAULT_REDUCE_MEMORY_MB = 1024;
public static final String REDUCE_CPU_VCORES = "mapreduce.reduce.cpu.vcores";
public static final int DEFAULT_REDUCE_CPU_VCORES = 1;
public static final String REDUCE_MEMORY_TOTAL_BYTES = "mapreduce.reduce.memory.totalbytes";
public static final String SHUFFLE_INPUT_BUFFER_PERCENT = "mapreduce.reduce.shuffle.input.buffer.percent";
public static final float DEFAULT_SHUFFLE_INPUT_BUFFER_PERCENT = 0.70f;
public static final String SHUFFLE_MEMORY_LIMIT_PERCENT
= "mapreduce.reduce.shuffle.memory.limit.percent";
public static final String SHUFFLE_MERGE_PERCENT = "mapreduce.reduce.shuffle.merge.percent";
public static final String REDUCE_FAILURES_MAXPERCENT = "mapreduce.reduce.failures.maxpercent";
public static final String REDUCE_ENV = "mapreduce.reduce.env";
public static final String REDUCE_JAVA_OPTS = "mapreduce.reduce.java.opts";
public static final String MAPREDUCE_JOB_DIR = "mapreduce.job.dir";
public static final String REDUCE_MAX_ATTEMPTS = "mapreduce.reduce.maxattempts";
public static final String SHUFFLE_PARALLEL_COPIES = "mapreduce.reduce.shuffle.parallelcopies";
public static final String REDUCE_DEBUG_SCRIPT = "mapreduce.reduce.debug.script";
public static final String REDUCE_SPECULATIVE = "mapreduce.reduce.speculative";
public static final String SHUFFLE_CONNECT_TIMEOUT = "mapreduce.reduce.shuffle.connect.timeout";
public static final String SHUFFLE_READ_TIMEOUT = "mapreduce.reduce.shuffle.read.timeout";
public static final String SHUFFLE_FETCH_FAILURES = "mapreduce.reduce.shuffle.maxfetchfailures";
public static final String MAX_ALLOWED_FETCH_FAILURES_FRACTION = "mapreduce.reduce.shuffle.max-fetch-failures-fraction";
public static final float DEFAULT_MAX_ALLOWED_FETCH_FAILURES_FRACTION = 0.5f;
public static final String MAX_FETCH_FAILURES_NOTIFICATIONS = "mapreduce.reduce.shuffle.max-fetch-failures-notifications";
public static final int DEFAULT_MAX_FETCH_FAILURES_NOTIFICATIONS = 3;
public static final String SHUFFLE_FETCH_RETRY_INTERVAL_MS = "mapreduce.reduce.shuffle.fetch.retry.interval-ms";
/** Default interval that fetcher retry to fetch during NM restart.*/
public final static int DEFAULT_SHUFFLE_FETCH_RETRY_INTERVAL_MS = 1000;
public static final String SHUFFLE_FETCH_RETRY_TIMEOUT_MS = "mapreduce.reduce.shuffle.fetch.retry.timeout-ms";
public static final String SHUFFLE_FETCH_RETRY_ENABLED = "mapreduce.reduce.shuffle.fetch.retry.enabled";
public static final String SHUFFLE_NOTIFY_READERROR = "mapreduce.reduce.shuffle.notify.readerror";
public static final String MAX_SHUFFLE_FETCH_RETRY_DELAY = "mapreduce.reduce.shuffle.retry-delay.max.ms";
public static final long DEFAULT_MAX_SHUFFLE_FETCH_RETRY_DELAY = 60000;
public static final String MAX_SHUFFLE_FETCH_HOST_FAILURES = "mapreduce.reduce.shuffle.max-host-failures";
public static final int DEFAULT_MAX_SHUFFLE_FETCH_HOST_FAILURES = 5;
public static final String REDUCE_SKIP_INCR_PROC_COUNT = "mapreduce.reduce.skip.proc-count.auto-incr";
public static final String REDUCE_SKIP_MAXGROUPS = "mapreduce.reduce.skip.maxgroups";
public static final String REDUCE_MEMTOMEM_THRESHOLD = "mapreduce.reduce.merge.memtomem.threshold";
public static final String REDUCE_MEMTOMEM_ENABLED = "mapreduce.reduce.merge.memtomem.enabled";
public static final String COMBINE_RECORDS_BEFORE_PROGRESS = "mapreduce.task.combine.progress.records";
public static final String JOB_NAMENODES = "mapreduce.job.hdfs-servers";
public static final String JOB_JOBTRACKER_ID = "mapreduce.job.kerberos.jtprinicipal";
public static final String JOB_CANCEL_DELEGATION_TOKEN = "mapreduce.job.complete.cancel.delegation.tokens";
public static final String JOB_ACL_VIEW_JOB = "mapreduce.job.acl-view-job";
public static final String DEFAULT_JOB_ACL_VIEW_JOB = " ";
public static final String JOB_ACL_MODIFY_JOB = "mapreduce.job.acl-modify-job";
public static final String DEFAULT_JOB_ACL_MODIFY_JOB = " ";
public static final String JOB_RUNNING_MAP_LIMIT =
"mapreduce.job.running.map.limit";
public static final int DEFAULT_JOB_RUNNING_MAP_LIMIT = 0;
public static final String JOB_RUNNING_REDUCE_LIMIT =
"mapreduce.job.running.reduce.limit";
public static final int DEFAULT_JOB_RUNNING_REDUCE_LIMIT = 0;
/* config for tracking the local file where all the credentials for the job
* credentials.
*/
public static final String MAPREDUCE_JOB_CREDENTIALS_BINARY =
"mapreduce.job.credentials.binary";
/* Configs for tracking ids of tokens used by a job */
public static final String JOB_TOKEN_TRACKING_IDS_ENABLED =
"mapreduce.job.token.tracking.ids.enabled";
public static final boolean DEFAULT_JOB_TOKEN_TRACKING_IDS_ENABLED = false;
public static final String JOB_TOKEN_TRACKING_IDS =
"mapreduce.job.token.tracking.ids";
public static final String JOB_SUBMITHOST =
"mapreduce.job.submithostname";
public static final String JOB_SUBMITHOSTADDR =
"mapreduce.job.submithostaddress";
public static final String COUNTERS_MAX_KEY = "mapreduce.job.counters.max";
public static final int COUNTERS_MAX_DEFAULT = 120;
public static final String COUNTER_GROUP_NAME_MAX_KEY = "mapreduce.job.counters.group.name.max";
public static final int COUNTER_GROUP_NAME_MAX_DEFAULT = 128;
public static final String COUNTER_NAME_MAX_KEY = "mapreduce.job.counters.counter.name.max";
public static final int COUNTER_NAME_MAX_DEFAULT = 64;
public static final String COUNTER_GROUPS_MAX_KEY = "mapreduce.job.counters.groups.max";
public static final int COUNTER_GROUPS_MAX_DEFAULT = 50;
public static final String JOB_UBERTASK_ENABLE =
"mapreduce.job.ubertask.enable";
public static final String JOB_UBERTASK_MAXMAPS =
"mapreduce.job.ubertask.maxmaps";
public static final String JOB_UBERTASK_MAXREDUCES =
"mapreduce.job.ubertask.maxreduces";
public static final String JOB_UBERTASK_MAXBYTES =
"mapreduce.job.ubertask.maxbytes";
public static final String MAPREDUCE_JOB_EMIT_TIMELINE_DATA =
"mapreduce.job.emit-timeline-data";
public static final boolean DEFAULT_MAPREDUCE_JOB_EMIT_TIMELINE_DATA =
false;
public static final String MR_PREFIX = "yarn.app.mapreduce.";
public static final String MR_AM_PREFIX = MR_PREFIX + "am.";
/** The number of client retries to the AM - before reconnecting to the RM
* to fetch Application State.
*/
public static final String MR_CLIENT_TO_AM_IPC_MAX_RETRIES =
MR_PREFIX + "client-am.ipc.max-retries";
public static final int DEFAULT_MR_CLIENT_TO_AM_IPC_MAX_RETRIES = 3;
/** The number of client retries on socket timeouts to the AM - before
* reconnecting to the RM to fetch Application Status.
*/
public static final String MR_CLIENT_TO_AM_IPC_MAX_RETRIES_ON_TIMEOUTS =
MR_PREFIX + "client-am.ipc.max-retries-on-timeouts";
public static final int
DEFAULT_MR_CLIENT_TO_AM_IPC_MAX_RETRIES_ON_TIMEOUTS = 3;
/**
* The number of client retries to the RM/HS before throwing exception.
*/
public static final String MR_CLIENT_MAX_RETRIES =
MR_PREFIX + "client.max-retries";
public static final int DEFAULT_MR_CLIENT_MAX_RETRIES = 3;
/**
* How many times to retry jobclient calls (via getjob)
*/
public static final String MR_CLIENT_JOB_MAX_RETRIES =
MR_PREFIX + "client.job.max-retries";
public static final int DEFAULT_MR_CLIENT_JOB_MAX_RETRIES = 0;
/**
* How long to wait between jobclient retries on failure
*/
public static final String MR_CLIENT_JOB_RETRY_INTERVAL =
MR_PREFIX + "client.job.retry-interval";
public static final long DEFAULT_MR_CLIENT_JOB_RETRY_INTERVAL =
2000;
/** The staging directory for map reduce.*/
public static final String MR_AM_STAGING_DIR =
MR_AM_PREFIX+"staging-dir";
public static final String DEFAULT_MR_AM_STAGING_DIR =
"/tmp/hadoop-yarn/staging";
/** The amount of memory the MR app master needs.*/
public static final String MR_AM_VMEM_MB =
MR_AM_PREFIX+"resource.mb";
public static final int DEFAULT_MR_AM_VMEM_MB = 1536;
/** The number of virtual cores the MR app master needs.*/
public static final String MR_AM_CPU_VCORES =
MR_AM_PREFIX+"resource.cpu-vcores";
public static final int DEFAULT_MR_AM_CPU_VCORES = 1;
/** Command line arguments passed to the MR app master.*/
public static final String MR_AM_COMMAND_OPTS =
MR_AM_PREFIX+"command-opts";
public static final String DEFAULT_MR_AM_COMMAND_OPTS = "-Xmx1024m";
/** Admin command opts passed to the MR app master.*/
public static final String MR_AM_ADMIN_COMMAND_OPTS =
MR_AM_PREFIX+"admin-command-opts";
public static final String DEFAULT_MR_AM_ADMIN_COMMAND_OPTS = "";
/** Root Logging level passed to the MR app master.*/
public static final String MR_AM_LOG_LEVEL =
MR_AM_PREFIX+"log.level";
public static final String DEFAULT_MR_AM_LOG_LEVEL = "INFO";
public static final String MR_AM_LOG_KB =
MR_AM_PREFIX + "container.log.limit.kb";
public static final int DEFAULT_MR_AM_LOG_KB = 0; // don't roll
public static final String MR_AM_LOG_BACKUPS =
MR_AM_PREFIX + "container.log.backups";
public static final int DEFAULT_MR_AM_LOG_BACKUPS = 0;
/**The number of splits when reporting progress in MR*/
public static final String MR_AM_NUM_PROGRESS_SPLITS =
MR_AM_PREFIX+"num-progress-splits";
public static final int DEFAULT_MR_AM_NUM_PROGRESS_SPLITS = 12;
/**
* Upper limit on the number of threads user to launch containers in the app
* master. Expect level config, you shouldn't be needing it in most cases.
*/
public static final String MR_AM_CONTAINERLAUNCHER_THREAD_COUNT_LIMIT =
MR_AM_PREFIX+"containerlauncher.thread-count-limit";
public static final int DEFAULT_MR_AM_CONTAINERLAUNCHER_THREAD_COUNT_LIMIT =
500;
/**
* The initial size of thread pool to launch containers in the app master
*/
public static final String MR_AM_CONTAINERLAUNCHER_THREADPOOL_INITIAL_SIZE =
MR_AM_PREFIX+"containerlauncher.threadpool-initial-size";
public static final int DEFAULT_MR_AM_CONTAINERLAUNCHER_THREADPOOL_INITIAL_SIZE =
10;
/** Number of threads to handle job client RPC requests.*/
public static final String MR_AM_JOB_CLIENT_THREAD_COUNT =
MR_AM_PREFIX + "job.client.thread-count";
public static final int DEFAULT_MR_AM_JOB_CLIENT_THREAD_COUNT = 1;
/**
* Range of ports that the MapReduce AM can use when binding. Leave blank
* if you want all possible ports.
*/
public static final String MR_AM_JOB_CLIENT_PORT_RANGE =
MR_AM_PREFIX + "job.client.port-range";
/** Enable blacklisting of nodes in the job.*/
public static final String MR_AM_JOB_NODE_BLACKLISTING_ENABLE =
MR_AM_PREFIX + "job.node-blacklisting.enable";
/** Ignore blacklisting if a certain percentage of nodes have been blacklisted */
public static final String MR_AM_IGNORE_BLACKLISTING_BLACKLISTED_NODE_PERECENT =
MR_AM_PREFIX + "job.node-blacklisting.ignore-threshold-node-percent";
public static final int DEFAULT_MR_AM_IGNORE_BLACKLISTING_BLACKLISTED_NODE_PERCENT =
33;
/** Enable job recovery.*/
public static final String MR_AM_JOB_RECOVERY_ENABLE =
MR_AM_PREFIX + "job.recovery.enable";
public static final boolean MR_AM_JOB_RECOVERY_ENABLE_DEFAULT = true;
/**
* Limit on the number of reducers that can be preempted to ensure that at
* least one map task can run if it needs to. Percentage between 0.0 and 1.0
*/
public static final String MR_AM_JOB_REDUCE_PREEMPTION_LIMIT =
MR_AM_PREFIX + "job.reduce.preemption.limit";
public static final float DEFAULT_MR_AM_JOB_REDUCE_PREEMPTION_LIMIT = 0.5f;
/** AM ACL disabled. **/
public static final String JOB_AM_ACCESS_DISABLED =
"mapreduce.job.am-access-disabled";
public static final boolean DEFAULT_JOB_AM_ACCESS_DISABLED = false;
/**
* Limit reduces starting until a certain percentage of maps have finished.
* Percentage between 0.0 and 1.0
*/
public static final String MR_AM_JOB_REDUCE_RAMPUP_UP_LIMIT =
MR_AM_PREFIX + "job.reduce.rampup.limit";
public static final float DEFAULT_MR_AM_JOB_REDUCE_RAMP_UP_LIMIT = 0.5f;
/** The class that should be used for speculative execution calculations.*/
public static final String MR_AM_JOB_SPECULATOR =
MR_AM_PREFIX + "job.speculator.class";
/** Class used to estimate task resource needs.*/
public static final String MR_AM_TASK_ESTIMATOR =
MR_AM_PREFIX + "job.task.estimator.class";
/** The lambda value in the smoothing function of the task estimator.*/
public static final String MR_AM_TASK_ESTIMATOR_SMOOTH_LAMBDA_MS =
MR_AM_PREFIX
+ "job.task.estimator.exponential.smooth.lambda-ms";
public static final long DEFAULT_MR_AM_TASK_ESTIMATOR_SMOOTH_LAMBDA_MS =
1000L * 60;
/** true if the smoothing rate should be exponential.*/
public static final String MR_AM_TASK_ESTIMATOR_EXPONENTIAL_RATE_ENABLE =
MR_AM_PREFIX + "job.task.estimator.exponential.smooth.rate";
/** The number of threads used to handle task RPC calls.*/
public static final String MR_AM_TASK_LISTENER_THREAD_COUNT =
MR_AM_PREFIX + "job.task.listener.thread-count";
public static final int DEFAULT_MR_AM_TASK_LISTENER_THREAD_COUNT = 30;
/** How often the AM should send heartbeats to the RM.*/
public static final String MR_AM_TO_RM_HEARTBEAT_INTERVAL_MS =
MR_AM_PREFIX + "scheduler.heartbeat.interval-ms";
public static final int DEFAULT_MR_AM_TO_RM_HEARTBEAT_INTERVAL_MS = 1000;
/**
* If contact with RM is lost, the AM will wait MR_AM_TO_RM_WAIT_INTERVAL_MS
* milliseconds before aborting. During this interval, AM will still try
* to contact the RM.
*/
public static final String MR_AM_TO_RM_WAIT_INTERVAL_MS =
MR_AM_PREFIX + "scheduler.connection.wait.interval-ms";
public static final int DEFAULT_MR_AM_TO_RM_WAIT_INTERVAL_MS = 360000;
/**
* How long to wait in milliseconds for the output committer to cancel
* an operation when the job is being killed
*/
public static final String MR_AM_COMMITTER_CANCEL_TIMEOUT_MS =
MR_AM_PREFIX + "job.committer.cancel-timeout";
public static final int DEFAULT_MR_AM_COMMITTER_CANCEL_TIMEOUT_MS =
60 * 1000;
/**
* Defines a time window in milliseconds for output committer operations.
* If contact with the RM has occurred within this window then commit
* operations are allowed, otherwise the AM will not allow output committer
* operations until contact with the RM has been re-established.
*/
public static final String MR_AM_COMMIT_WINDOW_MS =
MR_AM_PREFIX + "job.committer.commit-window";
public static final int DEFAULT_MR_AM_COMMIT_WINDOW_MS = 10 * 1000;
/**
* Boolean. Create the base dirs in the JobHistoryEventHandler
* Set to false for multi-user clusters. This is an internal config that
* is set by the MR framework and read by it too.
*/
public static final String MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR =
MR_AM_PREFIX + "create-intermediate-jh-base-dir";
public static final String MR_AM_HISTORY_MAX_UNFLUSHED_COMPLETE_EVENTS =
MR_AM_PREFIX + "history.max-unflushed-events";
public static final int DEFAULT_MR_AM_HISTORY_MAX_UNFLUSHED_COMPLETE_EVENTS =
200;
public static final String MR_AM_HISTORY_JOB_COMPLETE_UNFLUSHED_MULTIPLIER =
MR_AM_PREFIX + "history.job-complete-unflushed-multiplier";
public static final int DEFAULT_MR_AM_HISTORY_JOB_COMPLETE_UNFLUSHED_MULTIPLIER =
30;
public static final String MR_AM_HISTORY_COMPLETE_EVENT_FLUSH_TIMEOUT_MS =
MR_AM_PREFIX + "history.complete-event-flush-timeout";
public static final long DEFAULT_MR_AM_HISTORY_COMPLETE_EVENT_FLUSH_TIMEOUT_MS =
30 * 1000l;
public static final String MR_AM_HISTORY_USE_BATCHED_FLUSH_QUEUE_SIZE_THRESHOLD =
MR_AM_PREFIX + "history.use-batched-flush.queue-size.threshold";
public static final int DEFAULT_MR_AM_HISTORY_USE_BATCHED_FLUSH_QUEUE_SIZE_THRESHOLD =
50;
public static final String MR_AM_HARD_KILL_TIMEOUT_MS =
MR_AM_PREFIX + "hard-kill-timeout-ms";
public static final long DEFAULT_MR_AM_HARD_KILL_TIMEOUT_MS =
10 * 1000l;
/**
* Duration to wait before forcibly preempting a reducer to allow
* allocating new mappers, even when YARN reports positive headroom.
*/
public static final String MR_JOB_REDUCER_UNCONDITIONAL_PREEMPT_DELAY_SEC =
"mapreduce.job.reducer.unconditional-preempt.delay.sec";
public static final int
DEFAULT_MR_JOB_REDUCER_UNCONDITIONAL_PREEMPT_DELAY_SEC = 5 * 60;
/**
* Duration to wait before preempting a reducer, when there is no headroom
* to allocate new mappers.
*/
public static final String MR_JOB_REDUCER_PREEMPT_DELAY_SEC =
"mapreduce.job.reducer.preempt.delay.sec";
public static final int DEFAULT_MR_JOB_REDUCER_PREEMPT_DELAY_SEC = 0;
public static final String MR_AM_ENV =
MR_AM_PREFIX + "env";
public static final String MR_AM_ADMIN_USER_ENV =
MR_AM_PREFIX + "admin.user.env";
// although the AM admin user env default should be the same as the task user
// env default, there are problems in making it work on Windows currently
// MAPREDUCE-6588 should address the issue and set it to a proper non-empty
// value
public static final String DEFAULT_MR_AM_ADMIN_USER_ENV =
Shell.WINDOWS ?
"" :
"LD_LIBRARY_PATH=" + Apps.crossPlatformify("HADOOP_COMMON_HOME") +
"/lib/native";
public static final String MR_AM_PROFILE = MR_AM_PREFIX + "profile";
public static final boolean DEFAULT_MR_AM_PROFILE = false;
public static final String MR_AM_PROFILE_PARAMS = MR_AM_PREFIX
+ "profile.params";
public static final String MAPRED_MAP_ADMIN_JAVA_OPTS =
"mapreduce.admin.map.child.java.opts";
public static final String MAPRED_REDUCE_ADMIN_JAVA_OPTS =
"mapreduce.admin.reduce.child.java.opts";
public static final String DEFAULT_MAPRED_ADMIN_JAVA_OPTS =
"-Djava.net.preferIPv4Stack=true " +
"-Dhadoop.metrics.log.level=WARN ";
public static final String MAPRED_ADMIN_USER_SHELL =
"mapreduce.admin.user.shell";
public static final String DEFAULT_SHELL = "/bin/bash";
public static final String MAPRED_ADMIN_USER_ENV =
"mapreduce.admin.user.env";
// the "%...%" macros can be expanded prematurely and are probably not OK
// this should be addressed by MAPREDUCE-6588
public static final String DEFAULT_MAPRED_ADMIN_USER_ENV =
Shell.WINDOWS ?
"PATH=%PATH%;%HADOOP_COMMON_HOME%\\bin" :
"LD_LIBRARY_PATH=" + Apps.crossPlatformify("HADOOP_COMMON_HOME") +
"/lib/native";
public static final String WORKDIR = "work";
public static final String OUTPUT = "output";
public static final String HADOOP_WORK_DIR = "HADOOP_WORK_DIR";
// Environment variables used by Pipes. (TODO: these
// do not appear to be used by current pipes source code!)
public static final String STDOUT_LOGFILE_ENV = "STDOUT_LOGFILE_ENV";
public static final String STDERR_LOGFILE_ENV = "STDERR_LOGFILE_ENV";
// This should be the directory where splits file gets localized on the node
// running ApplicationMaster.
public static final String JOB_SUBMIT_DIR = "jobSubmitDir";
// This should be the name of the localized job-configuration file on the node
// running ApplicationMaster and Task
public static final String JOB_CONF_FILE = "job.xml";
// This should be the name of the localized job-jar file on the node running
// individual containers/tasks.
public static final String JOB_JAR = "job.jar";
public static final String JOB_SPLIT = "job.split";
public static final String JOB_SPLIT_METAINFO = "job.splitmetainfo";
public static final String APPLICATION_MASTER_CLASS =
"org.apache.hadoop.mapreduce.v2.app.MRAppMaster";
public static final String MAPREDUCE_V2_CHILD_CLASS =
"org.apache.hadoop.mapred.YarnChild";
public static final String APPLICATION_ATTEMPT_ID =
"mapreduce.job.application.attempt.id";
/**
* Job end notification.
*/
public static final String MR_JOB_END_NOTIFICATION_URL =
"mapreduce.job.end-notification.url";
public static final String MR_JOB_END_NOTIFICATION_PROXY =
"mapreduce.job.end-notification.proxy";
public static final String MR_JOB_END_NOTIFICATION_TIMEOUT =
"mapreduce.job.end-notification.timeout";
public static final String MR_JOB_END_RETRY_ATTEMPTS =
"mapreduce.job.end-notification.retry.attempts";
public static final String MR_JOB_END_RETRY_INTERVAL =
"mapreduce.job.end-notification.retry.interval";
public static final String MR_JOB_END_NOTIFICATION_MAX_ATTEMPTS =
"mapreduce.job.end-notification.max.attempts";
public static final String MR_JOB_END_NOTIFICATION_MAX_RETRY_INTERVAL =
"mapreduce.job.end-notification.max.retry.interval";
public static final int DEFAULT_MR_JOB_END_NOTIFICATION_TIMEOUT =
5000;
/*
* MR AM Service Authorization
*/
public static final String
MR_AM_SECURITY_SERVICE_AUTHORIZATION_TASK_UMBILICAL =
"security.job.task.protocol.acl";
public static final String
MR_AM_SECURITY_SERVICE_AUTHORIZATION_CLIENT =
"security.job.client.protocol.acl";
/**
* CLASSPATH for all YARN MapReduce applications.
*/
public static final String MAPREDUCE_APPLICATION_CLASSPATH =
"mapreduce.application.classpath";
public static final String MAPREDUCE_JOB_LOG4J_PROPERTIES_FILE =
"mapreduce.job.log4j-properties-file";
/**
* Path to MapReduce framework archive
*/
public static final String MAPREDUCE_APPLICATION_FRAMEWORK_PATH =
"mapreduce.application.framework.path";
/**
* Default CLASSPATH for all YARN MapReduce applications constructed with
* platform-agnostic syntax.
*/
@Public
@Unstable
public final String DEFAULT_MAPREDUCE_CROSS_PLATFORM_APPLICATION_CLASSPATH = Apps
.crossPlatformify("HADOOP_MAPRED_HOME")
+ "/share/hadoop/mapreduce/*,"
+ Apps.crossPlatformify("HADOOP_MAPRED_HOME")
+ "/share/hadoop/mapreduce/lib/*";
/**
* Default platform-specific CLASSPATH for all YARN MapReduce applications
* constructed based on client OS syntax.
* <p>
* Note: Use {@link DEFAULT_MAPREDUCE_CROSS_PLATFORM_APPLICATION_CLASSPATH}
* for cross-platform practice i.e. submit an application from a Windows
* client to a Linux/Unix server or vice versa.
* </p>
*/
public final String DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH =
Shell.WINDOWS ? "%HADOOP_MAPRED_HOME%\\share\\hadoop\\mapreduce\\*,"
+ "%HADOOP_MAPRED_HOME%\\share\\hadoop\\mapreduce\\lib\\*"
: "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,"
+ "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*";
public static final String WORKFLOW_ID = "mapreduce.workflow.id";
public static final String TASK_LOG_BACKUPS =
MR_PREFIX + "task.container.log.backups";
public static final int DEFAULT_TASK_LOG_BACKUPS = 0; // don't roll
public static final String REDUCE_SEPARATE_SHUFFLE_LOG =
MR_PREFIX + "shuffle.log.separate";
public static final boolean DEFAULT_REDUCE_SEPARATE_SHUFFLE_LOG = true;
public static final String SHUFFLE_LOG_BACKUPS =
MR_PREFIX + "shuffle.log.backups";
public static final int DEFAULT_SHUFFLE_LOG_BACKUPS = 0; // don't roll
public static final String SHUFFLE_LOG_KB =
MR_PREFIX + "shuffle.log.limit.kb";
public static final long DEFAULT_SHUFFLE_LOG_KB = 0L;
public static final String WORKFLOW_NAME = "mapreduce.workflow.name";
public static final String WORKFLOW_NODE_NAME =
"mapreduce.workflow.node.name";
public static final String WORKFLOW_ADJACENCY_PREFIX_STRING =
"mapreduce.workflow.adjacency.";
public static final String WORKFLOW_ADJACENCY_PREFIX_PATTERN =
"^mapreduce\\.workflow\\.adjacency\\..+";
public static final String WORKFLOW_TAGS = "mapreduce.workflow.tags";
/**
* The maximum number of application attempts.
* It is a application-specific setting.
*/
public static final String MR_AM_MAX_ATTEMPTS = "mapreduce.am.max-attempts";
public static final int DEFAULT_MR_AM_MAX_ATTEMPTS = 2;
public static final String MR_APPLICATION_TYPE = "MAPREDUCE";
public static final String MR_ENCRYPTED_INTERMEDIATE_DATA =
"mapreduce.job.encrypted-intermediate-data";
public static final boolean DEFAULT_MR_ENCRYPTED_INTERMEDIATE_DATA = false;
public static final String MR_ENCRYPTED_INTERMEDIATE_DATA_KEY_SIZE_BITS =
"mapreduce.job.encrypted-intermediate-data-key-size-bits";
public static final int DEFAULT_MR_ENCRYPTED_INTERMEDIATE_DATA_KEY_SIZE_BITS =
128;
public static final String MR_ENCRYPTED_INTERMEDIATE_DATA_BUFFER_KB =
"mapreduce.job.encrypted-intermediate-data.buffer.kb";
public static final int DEFAULT_MR_ENCRYPTED_INTERMEDIATE_DATA_BUFFER_KB =
128;
}
| |
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.demo;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import android.content.Intent;
import android.net.Uri;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.offline.DownloadRequest;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
/** Util to read from and populate an intent. */
public class IntentUtil {

  /** A tag to hold custom playback configuration attributes. */
  public static class Tag {

    /** Whether the stream is a live stream. */
    public final boolean isLive;
    /** The spherical stereo mode or null. */
    @Nullable public final String sphericalStereoMode;

    /**
     * Creates an instance.
     *
     * @param isLive Whether the stream is a live stream.
     * @param sphericalStereoMode The spherical stereo mode, or null if not spherical.
     */
    public Tag(boolean isLive, @Nullable String sphericalStereoMode) {
      this.isLive = isLive;
      this.sphericalStereoMode = sphericalStereoMode;
    }
  }

  // Actions.
  public static final String ACTION_VIEW = "com.google.android.exoplayer.demo.action.VIEW";
  public static final String ACTION_VIEW_LIST =
      "com.google.android.exoplayer.demo.action.VIEW_LIST";

  // Activity extras.
  public static final String SPHERICAL_STEREO_MODE_EXTRA = "spherical_stereo_mode";
  public static final String SPHERICAL_STEREO_MODE_MONO = "mono";
  public static final String SPHERICAL_STEREO_MODE_TOP_BOTTOM = "top_bottom";
  public static final String SPHERICAL_STEREO_MODE_LEFT_RIGHT = "left_right";

  // Player configuration extras.
  public static final String ABR_ALGORITHM_EXTRA = "abr_algorithm";
  public static final String ABR_ALGORITHM_DEFAULT = "default";
  public static final String ABR_ALGORITHM_RANDOM = "random";

  // Media item configuration extras. For ACTION_VIEW_LIST each key is suffixed
  // with "_<index>" to address the corresponding item in the list.
  public static final String URI_EXTRA = "uri";
  public static final String IS_LIVE_EXTRA = "is_live";
  public static final String MIME_TYPE_EXTRA = "mime_type";
  // For backwards compatibility only.
  public static final String EXTENSION_EXTRA = "extension";
  public static final String DRM_SCHEME_EXTRA = "drm_scheme";
  public static final String DRM_LICENSE_URL_EXTRA = "drm_license_url";
  public static final String DRM_KEY_REQUEST_PROPERTIES_EXTRA = "drm_key_request_properties";
  public static final String DRM_SESSION_FOR_CLEAR_TYPES_EXTRA = "drm_session_for_clear_types";
  public static final String DRM_MULTI_SESSION_EXTRA = "drm_multi_session";
  public static final String DRM_FORCE_DEFAULT_LICENSE_URI_EXTRA = "drm_force_default_license_uri";
  public static final String AD_TAG_URI_EXTRA = "ad_tag_uri";
  public static final String SUBTITLE_URI_EXTRA = "subtitle_uri";
  public static final String SUBTITLE_MIME_TYPE_EXTRA = "subtitle_mime_type";
  public static final String SUBTITLE_LANGUAGE_EXTRA = "subtitle_language";
  // For backwards compatibility only.
  public static final String DRM_SCHEME_UUID_EXTRA = "drm_scheme_uuid";
  public static final String PREFER_EXTENSION_DECODERS_EXTRA = "prefer_extension_decoders";
  public static final String TUNNELING_EXTRA = "tunneling";

  /**
   * Creates a list of {@link MediaItem media items} from an {@link Intent}.
   *
   * <p>For {@link #ACTION_VIEW_LIST} the URIs are read from the {@code uri_0}, {@code uri_1}, ...
   * extras; otherwise a single item is built from the intent's data URI.
   *
   * @throws NullPointerException if the intent is single-item and carries no data URI.
   */
  public static List<MediaItem> createMediaItemsFromIntent(
      Intent intent, DownloadTracker downloadTracker) {
    List<MediaItem> mediaItems = new ArrayList<>();
    if (ACTION_VIEW_LIST.equals(intent.getAction())) {
      int index = 0;
      while (intent.hasExtra(URI_EXTRA + "_" + index)) {
        Uri uri = Uri.parse(intent.getStringExtra(URI_EXTRA + "_" + index));
        mediaItems.add(
            createMediaItemFromIntent(
                uri,
                intent,
                /* extrasKeySuffix= */ "_" + index,
                downloadTracker.getDownloadRequest(uri)));
        index++;
      }
    } else {
      // Fail fast with a clear message if the intent carries no data URI, rather
      // than propagating null into MediaItem.Builder/Util.inferContentType and
      // crashing with an uninformative NPE later.
      Uri uri = checkNotNull(intent.getData(), "Intent data URI must be set");
      mediaItems.add(
          createMediaItemFromIntent(
              uri, intent, /* extrasKeySuffix= */ "", downloadTracker.getDownloadRequest(uri)));
    }
    return mediaItems;
  }

  /**
   * Populates the intent with the given list of {@link MediaItem media items}.
   *
   * <p>Single items are written as {@link #ACTION_VIEW} with the URI as intent data; multiple
   * items are written as {@link #ACTION_VIEW_LIST} with "_<index>"-suffixed extras.
   */
  public static void addToIntent(List<MediaItem> mediaItems, Intent intent) {
    Assertions.checkArgument(!mediaItems.isEmpty());
    if (mediaItems.size() == 1) {
      MediaItem.PlaybackProperties playbackProperties =
          checkNotNull(mediaItems.get(0).playbackProperties);
      intent.setAction(IntentUtil.ACTION_VIEW).setData(playbackProperties.uri);
      addPlaybackPropertiesToIntent(playbackProperties, intent, /* extrasKeySuffix= */ "");
    } else {
      intent.setAction(IntentUtil.ACTION_VIEW_LIST);
      for (int i = 0; i < mediaItems.size(); i++) {
        MediaItem.PlaybackProperties playbackProperties =
            checkNotNull(mediaItems.get(i).playbackProperties);
        intent.putExtra(IntentUtil.URI_EXTRA + ("_" + i), playbackProperties.uri.toString());
        addPlaybackPropertiesToIntent(playbackProperties, intent, /* extrasKeySuffix= */ "_" + i);
      }
    }
  }

  /**
   * Makes a best guess to infer the MIME type from a {@link Uri} and an optional extension.
   *
   * @return The adaptive-streaming MIME type, or null for non-adaptive (TYPE_OTHER) content.
   */
  @Nullable
  public static String inferAdaptiveStreamMimeType(Uri uri, @Nullable String extension) {
    @C.ContentType int contentType = Util.inferContentType(uri, extension);
    switch (contentType) {
      case C.TYPE_DASH:
        return MimeTypes.APPLICATION_MPD;
      case C.TYPE_HLS:
        return MimeTypes.APPLICATION_M3U8;
      case C.TYPE_SS:
        return MimeTypes.APPLICATION_SS;
      case C.TYPE_OTHER:
      default:
        return null;
    }
  }

  /** Builds a single {@link MediaItem} from the suffixed extras of the given intent. */
  private static MediaItem createMediaItemFromIntent(
      Uri uri, Intent intent, String extrasKeySuffix, @Nullable DownloadRequest downloadRequest) {
    String mimeType = intent.getStringExtra(MIME_TYPE_EXTRA + extrasKeySuffix);
    if (mimeType == null) {
      // Try to use extension for backwards compatibility.
      String extension = intent.getStringExtra(EXTENSION_EXTRA + extrasKeySuffix);
      mimeType = inferAdaptiveStreamMimeType(uri, extension);
    }
    MediaItem.Builder builder =
        new MediaItem.Builder()
            .setUri(uri)
            .setStreamKeys(downloadRequest != null ? downloadRequest.streamKeys : null)
            .setCustomCacheKey(downloadRequest != null ? downloadRequest.customCacheKey : null)
            .setMimeType(mimeType)
            .setAdTagUri(intent.getStringExtra(AD_TAG_URI_EXTRA + extrasKeySuffix))
            .setSubtitles(createSubtitlesFromIntent(intent, extrasKeySuffix));
    return populateDrmPropertiesFromIntent(builder, intent, extrasKeySuffix).build();
  }

  /** Reads at most one subtitle track from the suffixed subtitle extras, if present. */
  private static List<MediaItem.Subtitle> createSubtitlesFromIntent(
      Intent intent, String extrasKeySuffix) {
    if (!intent.hasExtra(SUBTITLE_URI_EXTRA + extrasKeySuffix)) {
      return Collections.emptyList();
    }
    return Collections.singletonList(
        new MediaItem.Subtitle(
            Uri.parse(intent.getStringExtra(SUBTITLE_URI_EXTRA + extrasKeySuffix)),
            checkNotNull(intent.getStringExtra(SUBTITLE_MIME_TYPE_EXTRA + extrasKeySuffix)),
            intent.getStringExtra(SUBTITLE_LANGUAGE_EXTRA + extrasKeySuffix),
            C.SELECTION_FLAG_DEFAULT));
  }

  /**
   * Copies DRM configuration from the suffixed DRM extras into the builder. Returns the builder
   * unchanged if neither the scheme nor the legacy scheme-UUID extra is set.
   */
  private static MediaItem.Builder populateDrmPropertiesFromIntent(
      MediaItem.Builder builder, Intent intent, String extrasKeySuffix) {
    String schemeKey = DRM_SCHEME_EXTRA + extrasKeySuffix;
    String schemeUuidKey = DRM_SCHEME_UUID_EXTRA + extrasKeySuffix;
    if (!intent.hasExtra(schemeKey) && !intent.hasExtra(schemeUuidKey)) {
      return builder;
    }
    String drmSchemeExtra =
        intent.hasExtra(schemeKey)
            ? intent.getStringExtra(schemeKey)
            : intent.getStringExtra(schemeUuidKey);
    String[] drmSessionForClearTypesExtra =
        intent.getStringArrayExtra(DRM_SESSION_FOR_CLEAR_TYPES_EXTRA + extrasKeySuffix);
    // Key request properties arrive as a flat [key0, value0, key1, value1, ...] array.
    Map<String, String> headers = new HashMap<>();
    String[] keyRequestPropertiesArray =
        intent.getStringArrayExtra(DRM_KEY_REQUEST_PROPERTIES_EXTRA + extrasKeySuffix);
    if (keyRequestPropertiesArray != null) {
      for (int i = 0; i < keyRequestPropertiesArray.length; i += 2) {
        headers.put(keyRequestPropertiesArray[i], keyRequestPropertiesArray[i + 1]);
      }
    }
    builder
        .setDrmUuid(Util.getDrmUuid(Util.castNonNull(drmSchemeExtra)))
        .setDrmLicenseUri(intent.getStringExtra(DRM_LICENSE_URL_EXTRA + extrasKeySuffix))
        .setDrmSessionForClearTypes(toTrackTypeList(drmSessionForClearTypesExtra))
        .setDrmMultiSession(
            intent.getBooleanExtra(DRM_MULTI_SESSION_EXTRA + extrasKeySuffix, false))
        .setDrmForceDefaultLicenseUri(
            intent.getBooleanExtra(DRM_FORCE_DEFAULT_LICENSE_URI_EXTRA + extrasKeySuffix, false))
        .setDrmLicenseRequestHeaders(headers);
    return builder;
  }

  /**
   * Maps "audio"/"video" strings to {@link C} track-type constants, de-duplicating.
   *
   * @throws IllegalArgumentException on any other track-type string.
   */
  private static List<Integer> toTrackTypeList(@Nullable String[] trackTypeStringsArray) {
    if (trackTypeStringsArray == null) {
      return Collections.emptyList();
    }
    HashSet<Integer> trackTypes = new HashSet<>();
    for (String trackTypeString : trackTypeStringsArray) {
      switch (Util.toLowerInvariant(trackTypeString)) {
        case "audio":
          trackTypes.add(C.TRACK_TYPE_AUDIO);
          break;
        case "video":
          trackTypes.add(C.TRACK_TYPE_VIDEO);
          break;
        default:
          throw new IllegalArgumentException("Invalid track type: " + trackTypeString);
      }
    }
    return new ArrayList<>(trackTypes);
  }

  /** Writes a single item's playback properties into the intent under the given key suffix. */
  private static void addPlaybackPropertiesToIntent(
      MediaItem.PlaybackProperties playbackProperties, Intent intent, String extrasKeySuffix) {
    boolean isLive = false;
    String sphericalStereoMode = null;
    if (playbackProperties.tag instanceof Tag) {
      Tag tag = (Tag) playbackProperties.tag;
      isLive = tag.isLive;
      sphericalStereoMode = tag.sphericalStereoMode;
    }
    intent
        .putExtra(MIME_TYPE_EXTRA + extrasKeySuffix, playbackProperties.mimeType)
        .putExtra(
            AD_TAG_URI_EXTRA + extrasKeySuffix,
            playbackProperties.adTagUri != null ? playbackProperties.adTagUri.toString() : null)
        .putExtra(IS_LIVE_EXTRA + extrasKeySuffix, isLive)
        // NOTE(review): written without the suffix — presumably an activity-level
        // extra, so for multi-item lists the last item wins; confirm intended.
        .putExtra(SPHERICAL_STEREO_MODE_EXTRA, sphericalStereoMode);
    if (playbackProperties.drmConfiguration != null) {
      addDrmConfigurationToIntent(playbackProperties.drmConfiguration, intent, extrasKeySuffix);
    }
    if (!playbackProperties.subtitles.isEmpty()) {
      // Only a single subtitle track round-trips through the intent.
      checkState(playbackProperties.subtitles.size() == 1);
      MediaItem.Subtitle subtitle = playbackProperties.subtitles.get(0);
      intent.putExtra(SUBTITLE_URI_EXTRA + extrasKeySuffix, subtitle.uri.toString());
      intent.putExtra(SUBTITLE_MIME_TYPE_EXTRA + extrasKeySuffix, subtitle.mimeType);
      intent.putExtra(SUBTITLE_LANGUAGE_EXTRA + extrasKeySuffix, subtitle.language);
    }
  }

  /** Writes a DRM configuration into the intent under the given key suffix. */
  private static void addDrmConfigurationToIntent(
      MediaItem.DrmConfiguration drmConfiguration, Intent intent, String extrasKeySuffix) {
    intent.putExtra(DRM_SCHEME_EXTRA + extrasKeySuffix, drmConfiguration.uuid.toString());
    intent.putExtra(
        DRM_LICENSE_URL_EXTRA + extrasKeySuffix,
        checkNotNull(drmConfiguration.licenseUri).toString());
    intent.putExtra(DRM_MULTI_SESSION_EXTRA + extrasKeySuffix, drmConfiguration.multiSession);
    intent.putExtra(
        DRM_FORCE_DEFAULT_LICENSE_URI_EXTRA + extrasKeySuffix,
        drmConfiguration.forceDefaultLicenseUri);
    // Flatten request headers into a [key0, value0, key1, value1, ...] array.
    String[] drmKeyRequestProperties = new String[drmConfiguration.requestHeaders.size() * 2];
    int index = 0;
    for (Map.Entry<String, String> entry : drmConfiguration.requestHeaders.entrySet()) {
      drmKeyRequestProperties[index++] = entry.getKey();
      drmKeyRequestProperties[index++] = entry.getValue();
    }
    intent.putExtra(DRM_KEY_REQUEST_PROPERTIES_EXTRA + extrasKeySuffix, drmKeyRequestProperties);
    ArrayList<String> typeStrings = new ArrayList<>();
    for (int type : drmConfiguration.sessionForClearTypes) {
      // Only audio and video are supported.
      Assertions.checkState(type == C.TRACK_TYPE_AUDIO || type == C.TRACK_TYPE_VIDEO);
      typeStrings.add(type == C.TRACK_TYPE_AUDIO ? "audio" : "video");
    }
    intent.putExtra(
        DRM_SESSION_FOR_CLEAR_TYPES_EXTRA + extrasKeySuffix, typeStrings.toArray(new String[0]));
  }
}
| |
package us.kbase.kbasenetworks;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Generated;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
/**
* <p>Original spec-file type: Edge</p>
* <pre>
* Represents an edge in a network.
* string id - A unique identifier of an edge
* string name - String representation of an edge. It should be a concise but informative representation that is easy for a person to read.
* string node_id1 - Identifier of the first node (source node, if the edge is directed) connected by a given edge
* string node_id2 - Identifier of the second node (target node, if the edge is directed) connected by a given edge
* boolean directed - Specify whether the edge is directed or not. 1 if it is directed, 0 if it is not directed
* float confidence - Value from 0 to 1 representing a probability that the interaction represented by a given edge is a true interaction
* float strength - Value from 0 to 1 representing a strength of an interaction represented by a given edge
* string dataset_id - The identifier of a dataset that provided an interaction represented by a given edge
* mapping<string,string> properties - Other edge properties
* mapping<string,string> user_annotations - User annotations of an edge
* </pre>
*
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
@Generated("com.googlecode.jsonschema2pojo")
@JsonPropertyOrder({
    "id",
    "name",
    "node_id1",
    "node_id2",
    "directed",
    "confidence",
    "strength",
    "dataset_id",
    "properties",
    "user_annotations"
})
public class Edge {

    // Unique identifier of the edge.
    @JsonProperty("id")
    private String id;
    // Concise, human-readable representation of the edge.
    @JsonProperty("name")
    private String name;
    // Identifier of the first node (source node, if the edge is directed).
    @JsonProperty("node_id1")
    private String nodeId1;
    // Identifier of the second node (target node, if the edge is directed).
    @JsonProperty("node_id2")
    private String nodeId2;
    // NOTE(review): the spec documents this as boolean (1/0), but the generated
    // binding carries it as a String — confirm against the wire format before changing.
    @JsonProperty("directed")
    private String directed;
    // Probability (0..1) that the interaction represented by this edge is real.
    @JsonProperty("confidence")
    private Double confidence;
    // Strength (0..1) of the interaction represented by this edge.
    @JsonProperty("strength")
    private Double strength;
    // Identifier of the dataset that provided this interaction.
    @JsonProperty("dataset_id")
    private String datasetId;
    // Other edge properties.
    @JsonProperty("properties")
    private Map<String, String> properties;
    // User annotations of the edge.
    @JsonProperty("user_annotations")
    private Map<String, String> userAnnotations;
    // Catch-all for JSON fields not declared above.
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();

    @JsonProperty("id")
    public String getId() {
        return id;
    }

    @JsonProperty("id")
    public void setId(String id) {
        this.id = id;
    }

    /** Fluent setter; returns {@code this} for chaining. */
    public Edge withId(String id) {
        setId(id);
        return this;
    }

    @JsonProperty("name")
    public String getName() {
        return name;
    }

    @JsonProperty("name")
    public void setName(String name) {
        this.name = name;
    }

    /** Fluent setter; returns {@code this} for chaining. */
    public Edge withName(String name) {
        setName(name);
        return this;
    }

    @JsonProperty("node_id1")
    public String getNodeId1() {
        return nodeId1;
    }

    @JsonProperty("node_id1")
    public void setNodeId1(String nodeId1) {
        this.nodeId1 = nodeId1;
    }

    /** Fluent setter; returns {@code this} for chaining. */
    public Edge withNodeId1(String nodeId1) {
        setNodeId1(nodeId1);
        return this;
    }

    @JsonProperty("node_id2")
    public String getNodeId2() {
        return nodeId2;
    }

    @JsonProperty("node_id2")
    public void setNodeId2(String nodeId2) {
        this.nodeId2 = nodeId2;
    }

    /** Fluent setter; returns {@code this} for chaining. */
    public Edge withNodeId2(String nodeId2) {
        setNodeId2(nodeId2);
        return this;
    }

    @JsonProperty("directed")
    public String getDirected() {
        return directed;
    }

    @JsonProperty("directed")
    public void setDirected(String directed) {
        this.directed = directed;
    }

    /** Fluent setter; returns {@code this} for chaining. */
    public Edge withDirected(String directed) {
        setDirected(directed);
        return this;
    }

    @JsonProperty("confidence")
    public Double getConfidence() {
        return confidence;
    }

    @JsonProperty("confidence")
    public void setConfidence(Double confidence) {
        this.confidence = confidence;
    }

    /** Fluent setter; returns {@code this} for chaining. */
    public Edge withConfidence(Double confidence) {
        setConfidence(confidence);
        return this;
    }

    @JsonProperty("strength")
    public Double getStrength() {
        return strength;
    }

    @JsonProperty("strength")
    public void setStrength(Double strength) {
        this.strength = strength;
    }

    /** Fluent setter; returns {@code this} for chaining. */
    public Edge withStrength(Double strength) {
        setStrength(strength);
        return this;
    }

    @JsonProperty("dataset_id")
    public String getDatasetId() {
        return datasetId;
    }

    @JsonProperty("dataset_id")
    public void setDatasetId(String datasetId) {
        this.datasetId = datasetId;
    }

    /** Fluent setter; returns {@code this} for chaining. */
    public Edge withDatasetId(String datasetId) {
        setDatasetId(datasetId);
        return this;
    }

    @JsonProperty("properties")
    public Map<String, String> getProperties() {
        return properties;
    }

    @JsonProperty("properties")
    public void setProperties(Map<String, String> properties) {
        this.properties = properties;
    }

    /** Fluent setter; returns {@code this} for chaining. */
    public Edge withProperties(Map<String, String> properties) {
        setProperties(properties);
        return this;
    }

    @JsonProperty("user_annotations")
    public Map<String, String> getUserAnnotations() {
        return userAnnotations;
    }

    @JsonProperty("user_annotations")
    public void setUserAnnotations(Map<String, String> userAnnotations) {
        this.userAnnotations = userAnnotations;
    }

    /** Fluent setter; returns {@code this} for chaining. */
    public Edge withUserAnnotations(Map<String, String> userAnnotations) {
        setUserAnnotations(userAnnotations);
        return this;
    }

    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @JsonAnySetter
    public void setAdditionalProperties(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        // Produces exactly the same text as the generated concatenation chain did.
        StringBuilder sb = new StringBuilder("Edge [id=");
        sb.append(id);
        sb.append(", name=").append(name);
        sb.append(", nodeId1=").append(nodeId1);
        sb.append(", nodeId2=").append(nodeId2);
        sb.append(", directed=").append(directed);
        sb.append(", confidence=").append(confidence);
        sb.append(", strength=").append(strength);
        sb.append(", datasetId=").append(datasetId);
        sb.append(", properties=").append(properties);
        sb.append(", userAnnotations=").append(userAnnotations);
        sb.append(", additionalProperties=").append(additionalProperties);
        sb.append("]");
        return sb.toString();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* Copyright 2013 Josh Elser
*
*/
package cosmos.mapred;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.accumulo.core.client.BatchScanner;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;
import org.mediawiki.xml.export_0.MediaWikiType;
import org.mediawiki.xml.export_0.PageType;
import com.google.common.base.Function;
import com.google.common.base.Stopwatch;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.protobuf.InvalidProtocolBufferException;
import cosmos.Cosmos;
import cosmos.impl.CosmosImpl;
import cosmos.mediawiki.MediawikiPage.Page;
import cosmos.mediawiki.MediawikiPage.Page.Revision;
import cosmos.mediawiki.MediawikiPage.Page.Revision.Contributor;
import cosmos.options.Defaults;
import cosmos.options.Index;
import cosmos.records.impl.MultimapRecord;
import cosmos.records.values.RecordValue;
import cosmos.results.CloseableIterable;
import cosmos.results.Column;
import cosmos.results.integration.CosmosIntegrationSetup;
import cosmos.store.Store;
import cosmos.util.IdentitySet;
/**
 * Load/stress driver for Cosmos: each iteration ingests a random slice of Mediawiki
 * page records from the "sortswiki" Accumulo table, runs a random mix of fetch and
 * group-by queries over the indexed results, verifies ordering and cardinalities,
 * logs per-phase timings, and deletes the result set.
 */
public class MediawikiQueries {
  public static final boolean preloadData = false;

  /** Prefix used on stderr timing lines so they can be grepped out of the output. */
  public static final String TIMINGS = "[TIMINGS] ";
  public static final int MAX_SIZE = 8000;
  // MAX_OFFSET is a little misleading because the max pageID is 33928886
  // Don't have contiguous pageIDs
  public static final int MAX_OFFSET = 11845576 - MAX_SIZE;

  public static final int MAX_ROW = 999999999;

  public static final ColumnVisibility cv = new ColumnVisibility("en");

  public static final Column PAGE_ID = Column.create("PAGE_ID"), REVISION_ID = Column.create("REVISION_ID"), REVISION_TIMESTAMP = Column
      .create("REVISION_TIMESTAMP"), CONTRIBUTOR_USERNAME = Column.create("CONTRIBUTOR_USERNAME"), CONTRIBUTOR_ID = Column.create("CONTRIBUTOR_ID");

  /** Emits one machine-parsable timing line: result count, duration (ms), action name. */
  public static void logTiming(long numResults, long duration, String action) {
    System.err.println(TIMINGS + numResults + " " + duration + " " + action);
  }

  /**
   * Converts a protobuf {@link Page} into a {@link MultimapRecord} keyed by page id.
   * Revision and contributor columns are only populated when present; a contributor
   * id of 0 is treated as "absent".
   */
  public static MultimapRecord pagesToQueryResult(Page p) {
    HashMultimap<Column,RecordValue<?>> data = HashMultimap.create();

    String pageId = Long.toString(p.getId());

    data.put(PAGE_ID, RecordValue.create(pageId, cv));

    Revision r = p.getRevision();
    if (null != r) {
      data.put(REVISION_ID, RecordValue.create(Long.toString(r.getId()), cv));
      data.put(REVISION_TIMESTAMP, RecordValue.create(r.getTimestamp(), cv));
      Contributor c = r.getContributor();
      if (null != c) {
        if (null != c.getUsername()) {
          data.put(CONTRIBUTOR_USERNAME, RecordValue.create(c.getUsername(), cv));
        }

        if (0L != c.getId()) {
          data.put(CONTRIBUTOR_ID, RecordValue.create(Long.toString(c.getId()), cv));
        }
      }
    }

    return new MultimapRecord(data, pageId, cv);
  }

  protected final Connector con;
  protected final Cosmos sorts;

  public MediawikiQueries() throws Exception {
    ZooKeeperInstance zk = new ZooKeeperInstance("accumulo", "localhost");
    this.con = zk.getConnector("root", new PasswordToken("secret"));
    this.sorts = new CosmosImpl("localhost");
  }

  /**
   * Runs {@code numIterations} ingest/query/delete cycles. Each cycle reads a random
   * window of up to {@link #MAX_SIZE} records, registers them under a fresh Store,
   * then executes a random number (1..10) of randomly-chosen queries against them.
   *
   * @param numIterations number of full cycles to execute
   */
  public void run(int numIterations) throws Exception {
    final Random offsetR = new Random(), cardinalityR = new Random();

    int iters = 0;

    while (iters < numIterations) {
      Store id = Store.create(this.con, this.con.securityOperations().getUserAuthorizations(this.con.whoami()), IdentitySet.<Index> create());

      int offset = offsetR.nextInt(MAX_OFFSET);
      int numRecords = cardinalityR.nextInt(MAX_SIZE) + 1;

      BatchScanner bs = this.con.createBatchScanner("sortswiki", new Authorizations(), 4);

      bs.setRanges(Collections.singleton(new Range(Integer.toString(offset), Integer.toString(MAX_ROW))));

      Iterable<Entry<Key,Value>> inputIterable = Iterables.limit(bs, numRecords);

      this.sorts.register(id);

      System.out.println(Thread.currentThread().getName() + ": " + id.uuid() + " - Iteration " + iters);
      long recordsReturned = 0L;
      Function<Entry<Key,Value>,MultimapRecord> func = new Function<Entry<Key,Value>,MultimapRecord>() {
        @Override
        public MultimapRecord apply(Entry<Key,Value> input) {
          Page p;
          try {
            p = Page.parseFrom(input.getValue().get());
          } catch (InvalidProtocolBufferException e) {
            // Corrupt value in the source table is unrecoverable for this run.
            throw new RuntimeException(e);
          }
          return pagesToQueryResult(p);
        }
      };

      Map<Column,Long> counts = Maps.newHashMap();
      ArrayList<MultimapRecord> tformSource = Lists.newArrayListWithCapacity(20000);

      Stopwatch sw = new Stopwatch();
      Stopwatch tformSw = new Stopwatch();

      // Transform each scanned entry, timing only the transformation itself.
      for (Entry<Key,Value> input : inputIterable) {
        tformSw.start();

        MultimapRecord r = func.apply(input);
        tformSource.add(r);

        tformSw.stop();

        loadCountsForRecord(counts, r);
        recordsReturned++;
      }

      sw.start();
      this.sorts.addResults(id, tformSource);
      sw.stop();

      long actualNumResults = tformSource.size();

      System.out.println(Thread.currentThread().getName() + ": Took " + tformSw + " transforming and " + sw + " to store " + recordsReturned + " records");
      logTiming(actualNumResults, tformSw.elapsed(TimeUnit.MILLISECONDS), "transformInput");
      logTiming(actualNumResults, sw.elapsed(TimeUnit.MILLISECONDS), "ingest");

      bs.close();

      Random r = new Random();
      int max = r.nextInt(10) + 1;

      // Run a random number of randomly-chosen queries. Each query method verifies
      // its own results (ordering / expected counts) and logs its own timing, so
      // nothing needs to be collected here.
      for (int count = 0; count < max; count++) {
        int i = r.nextInt(9);
        if (0 == i) {
          docIdFetch(id, counts, actualNumResults);
        } else if (1 == i) {
          columnFetch(id, REVISION_ID, counts, actualNumResults);
        } else if (2 == i) {
          columnFetch(id, PAGE_ID, counts, actualNumResults);
        } else if (3 == i) {
          groupBy(id, REVISION_ID, counts, actualNumResults);
        } else if (4 == i) {
          // Was mislabeled "groupByRevisionId" in an earlier revision; this is the PAGE_ID group-by.
          groupBy(id, PAGE_ID, counts, actualNumResults);
        } else if (5 == i) {
          columnFetch(id, CONTRIBUTOR_USERNAME, counts, actualNumResults);
        } else if (6 == i) {
          groupBy(id, CONTRIBUTOR_USERNAME, counts, actualNumResults);
        } else if (7 == i) {
          columnFetch(id, CONTRIBUTOR_ID, counts, actualNumResults);
        } else { // 8 == i
          groupBy(id, CONTRIBUTOR_ID, counts, actualNumResults);
        }
      }

      System.out.println(Thread.currentThread().getName() + ": deleting " + id);

      // Delete the results
      sw = new Stopwatch();

      sw.start();
      this.sorts.delete(id);
      sw.stop();

      System.out.println(Thread.currentThread().getName() + ": Took " + sw.toString() + " to delete results");
      logTiming(actualNumResults, sw.elapsed(TimeUnit.MILLISECONDS), "deleteResults");

      iters++;
    }

    this.sorts.close();
  }

  /**
   * Accumulates, per column, how many values {@code r} contributes into {@code counts}.
   * Used later to validate the cardinality of column fetches.
   */
  public void loadCountsForRecord(Map<Column,Long> counts, MultimapRecord r) {
    for (Entry<Column,RecordValue<?>> entry : r.columnValues()) {
      Column c = entry.getKey();
      if (counts.containsKey(c)) {
        counts.put(c, counts.get(c) + 1);
      } else {
        counts.put(c, 1L);
      }
    }
  }

  /**
   * Fetches all records sorted by document id and verifies the ids come back in
   * non-decreasing order. Exits the JVM on a verification failure (this is a test
   * driver, not production code).
   *
   * @return the number of records seen
   */
  public long docIdFetch(Store id, Map<Column,Long> counts, long totalResults) throws Exception {
    Stopwatch sw = new Stopwatch();

    // This is dumb, I didn't pad the docids...
    String prev = "!";
    long resultCount = 0L;
    sw.start();

    final CloseableIterable<MultimapRecord> results = this.sorts.fetch(id, Index.define(Defaults.DOCID_FIELD_NAME));

    for (MultimapRecord r : results) {
      // Stop the clock while verifying so only fetch time is measured.
      sw.stop();

      resultCount++;

      String current = r.docId();
      if (prev.compareTo(current) > 0) {
        System.out.println("WOAH, got " + current + " docid which was greater than the previous " + prev);
        results.close();
        System.exit(1);
      }

      prev = current;

      sw.start();
    }

    sw.stop();

    System.out.println(Thread.currentThread().getName() + ": docIdFetch - Took " + sw.toString() + " to fetch results");
    logTiming(totalResults, sw.elapsed(TimeUnit.MILLISECONDS), "docIdFetch");

    results.close();

    return resultCount;
  }

  /**
   * Fetches records ordered by {@code colToFetch} and verifies that (a) successive
   * records are plausibly ordered by some value of that column and (b) the total
   * number of values matches the pre-computed count. Exits the JVM on failure.
   *
   * @return the number of records seen
   */
  public long columnFetch(Store id, Column colToFetch, Map<Column,Long> counts, long totalResults) throws Exception {
    Stopwatch sw = new Stopwatch();
    String prev = null;
    String lastDocId = null;
    long resultCount = 0L;
    sw.start();

    final CloseableIterable<MultimapRecord> results = this.sorts.fetch(id, Index.define(colToFetch));
    Iterator<MultimapRecord> resultsIter = results.iterator();

    for (; resultsIter.hasNext();) {
      MultimapRecord r = resultsIter.next();

      // Stop the clock while verifying so only fetch time is measured.
      sw.stop();

      resultCount++;

      // A record can carry multiple values for the column; ordering is "plausible"
      // if at least one of them is >= the previous record's smallest value.
      Collection<RecordValue<?>> values = r.get(colToFetch);
      TreeSet<RecordValue<?>> sortedValues = Sets.newTreeSet(values);

      if (null == prev) {
        prev = sortedValues.first().value().toString();
      } else {
        boolean plausible = false;
        Iterator<RecordValue<?>> iter = sortedValues.iterator();
        for (; !plausible && iter.hasNext();) {
          String val = iter.next().value().toString();
          if (prev.compareTo(val) <= 0) {
            plausible = true;
          }
        }

        if (!plausible) {
          System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - " + lastDocId + " shouldn't have come before " + r.docId());
          System.out.println(prev + " compared to " + sortedValues);
          results.close();
          System.exit(1);
        }
      }

      lastDocId = r.docId();

      sw.start();
    }

    sw.stop();

    System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - Took " + sw.toString() + " to fetch results");
    logTiming(totalResults, sw.elapsed(TimeUnit.MILLISECONDS), "fetch:" + colToFetch);

    results.close();

    long expected = counts.containsKey(colToFetch) ? counts.get(colToFetch) : -1;

    if (resultCount != expected) {
      System.out.println(Thread.currentThread().getName() + " " + colToFetch + ": Expected to get " + expected + " records but got " + resultCount);
      System.exit(1);
    }

    return resultCount;
  }

  /**
   * Groups results by {@code colToFetch}, then re-fetches the raw records and
   * recomputes the same grouping to verify the group cardinalities agree.
   * Exits the JVM on any mismatch.
   */
  public void groupBy(Store id, Column colToFetch, Map<Column,Long> columnCounts, long totalResults) throws Exception {
    Stopwatch sw = new Stopwatch();
    sw.start();

    final CloseableIterable<Entry<RecordValue<?>,Long>> results = this.sorts.groupResults(id, colToFetch);
    TreeMap<RecordValue<?>,Long> counts = Maps.newTreeMap();

    for (Entry<RecordValue<?>,Long> entry : results) {
      counts.put(entry.getKey(), entry.getValue());
    }

    results.close();
    sw.stop();

    System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - Took " + sw.toString() + " to group results");
    logTiming(totalResults, sw.elapsed(TimeUnit.MILLISECONDS), "groupBy:" + colToFetch);

    // Recompute the grouping independently from the raw records.
    final CloseableIterable<MultimapRecord> verifyResults = this.sorts.fetch(id, Index.define(colToFetch));
    TreeMap<RecordValue<?>,Long> records = Maps.newTreeMap();
    for (MultimapRecord r : verifyResults) {
      if (r.containsKey(colToFetch)) {
        for (RecordValue<?> val : r.get(colToFetch)) {
          if (records.containsKey(val)) {
            records.put(val, records.get(val) + 1);
          } else {
            records.put(val, 1L);
          }
        }
      }
    }

    verifyResults.close();

    if (counts.size() != records.size()) {
      System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - Expected " + records.size() + " groups but found " + counts.size());
      System.exit(1);
    }

    Set<RecordValue<?>> countKeys = counts.keySet(), recordKeys = records.keySet();
    for (RecordValue<?> k : countKeys) {
      if (!recordKeys.contains(k)) {
        System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - Expected to have count for " + k);
        System.exit(1);
      }

      Long actual = counts.get(k), expected = records.get(k);

      if (!actual.equals(expected)) {
        System.out.println(Thread.currentThread().getName() + ": " + colToFetch + " - Expected " + expected + " value(s) but found " + actual + " value(s) for " + k.value());
        System.exit(1);
      }
    }
  }

  /** Wraps a full query run in a Runnable so multiple drivers can share an executor. */
  public static Runnable runQueries(final int numQueries) {
    return new Runnable() {
      public void run() {
        try {
          (new MediawikiQueries()).run(numQueries);
        } catch (Exception e) {
          e.printStackTrace();
          throw new RuntimeException(e);
        }
      }
    };
  }

  /**
   * Optionally preloads the "sortswiki" table from the bundled wiki fixtures
   * (when {@link #preloadData} is true), then runs four concurrent query drivers
   * on a pool of three threads.
   */
  public static void main(String[] args) throws Exception {
    if (preloadData) {
      CosmosIntegrationSetup.initializeJaxb();

      MediawikiQueries queries = new MediawikiQueries();
      MediawikiMapper mapper = new MediawikiMapper();
      mapper.setup(null);

      List<MediaWikiType> results = Lists.newArrayList(CosmosIntegrationSetup.getWiki1(), CosmosIntegrationSetup.getWiki2(), CosmosIntegrationSetup.getWiki3(), CosmosIntegrationSetup.getWiki4(), CosmosIntegrationSetup.getWiki5());

      try {
        queries.con.tableOperations().create("sortswiki");
      } catch (TableExistsException e) {
        // Table already exists from a previous run; reuse it as-is.
      }

      BatchWriter bw = queries.con.createBatchWriter("sortswiki", new BatchWriterConfig());

      // Row key is a monotonically increasing counter; the page protobuf is the value.
      int i = 0;
      for (MediaWikiType wiki : results) {
        for (PageType pageType : wiki.getPage()) {
          Page page = mapper.pageTypeToPage(pageType);
          Value v = new Value(page.toByteArray());
          Mutation m = new Mutation(Integer.toString(i));
          m.put(new Text(), new Text(), v);
          bw.addMutation(m);
          i++;
        }
        bw.flush();
      }

      bw.close();
    }

    ExecutorService runner = Executors.newFixedThreadPool(3);
    for (int i = 0; i < 4; i++) {
      runner.execute(runQueries(200));
    }

    runner.shutdown();
    runner.awaitTermination(Long.MAX_VALUE, TimeUnit.HOURS);
  }
}
| |
/*
* Copyright (C) 2013 Fairphone Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fairphone.peaceofmind;
import com.flurry.android.FlurryAgent;
import org.fairphone.fairphonepeaceofmindapp.R;
import org.fairphone.peaceofmind.data.PeaceOfMindRun;
import org.fairphone.peaceofmind.data.PeaceOfMindStats;
import org.fairphone.peaceofmind.utils.FlurryHelper;
import org.fairphone.peaceofmind.widget.WidgetProvider;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.TaskStackBuilder;
import android.appwidget.AppWidgetManager;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.app.NotificationCompat;
/**
 * Receives the broadcasts that drive Peace of Mind: target-time updates from the
 * activity, system airplane-mode changes, shutdown, and the periodic time tick.
 * Persists run state in SharedPreferences and mirrors it to the widget and the
 * notification bar.
 */
public class PeaceOfMindBroadCastReceiver extends BroadcastReceiver {

    private static final String TAG = PeaceOfMindBroadCastReceiver.class.getSimpleName();

    /** Notification id for the sticky "Peace of Mind is on" notification. */
    private static final int PEACE_OF_MIND_ON_NOTIFICATION = 0;
    /** Notification id for the one-shot "Peace of Mind was interrupted" notification. */
    private static final int PEACE_OF_MIND_INTERRUPTED_NOTIFICATION = 1;

    private PeaceOfMindStats mCurrentStats;
    private SharedPreferences mSharedPreferences;
    private Context mContext;
    private IDeviceController mDeviceController;

    @Override
    public void onReceive(Context context, Intent intent) {
        mContext = context;
        setupDeviceController();

        String action = intent.getAction();
        if (action == null) {
            // No action: nothing changed, so skip the widget refresh as well.
            return;
        }

        // Obtain the Peace of Mind state from shared preferences.
        mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
        mCurrentStats = PeaceOfMindStats.getStatsFromSharedPreferences(mSharedPreferences);

        if (action.equals(PeaceOfMindActivity.UPDATE_PEACE_OF_MIND)) {
            updateTargetTime(intent);
        } else if (Intent.ACTION_AIRPLANE_MODE_CHANGED.equals(action)) {
            // Only react to this if the app is running.
            if (mCurrentStats.mIsOnPeaceOfMind) {
                Bundle extras = intent.getExtras();
                // FIX: extras can legitimately be null; treat that the same as a
                // system-originated change (our own toggler always sets its marker key).
                if (extras == null || !extras.containsKey(AirplaneModeToggler.PEACE_OF_MIND_TOGGLE)) {
                    endPeaceOfMind(true);
                }
            }
        } else if (Intent.ACTION_SHUTDOWN.equals(action) && mCurrentStats.mIsOnPeaceOfMind) {
            endPeaceOfMind(true);
        } else {
            performTimeTick();
        }

        // Update the widgets.
        updateWidget(context);
    }

    /** Creates the controller used to toggle airplane mode on and off. */
    private void setupDeviceController() {
        mDeviceController = new AirplaneModeDeviceController(mContext);
    }

    /** Forces an update of every placed widget, if any. */
    private void updateWidget(Context context) {
        AppWidgetManager appWidgetManager = AppWidgetManager.getInstance(context);
        int[] appWidgetIds = appWidgetManager.getAppWidgetIds(new ComponentName(context, WidgetProvider.class));
        if (appWidgetIds.length > 0) {
            new WidgetProvider().onUpdate(context, appWidgetManager, appWidgetIds);
        }
    }

    /**
     * Advances the elapsed-time accounting. Ends the run if the clock went
     * backwards (e.g. manual clock change) or the target has been reached;
     * otherwise persists state and broadcasts a tick to the application.
     */
    private void performTimeTick() {
        long currentTime = System.currentTimeMillis();
        long passedTime = 0;

        if (mCurrentStats.mLastTimePinged != 0) {
            if (mCurrentStats.mLastTimePinged >= currentTime) {
                // Clock moved backwards; bail out rather than accrue bogus time.
                endPeaceOfMind(false);
                return;
            } else {
                passedTime += currentTime - mCurrentStats.mLastTimePinged;
            }
        }

        mCurrentStats.mLastTimePinged = currentTime;

        if (mCurrentStats.mIsOnPeaceOfMind) {
            mCurrentStats.mCurrentRun.mPastTime += passedTime;

            if (mCurrentStats.mCurrentRun.mPastTime >= mCurrentStats.mCurrentRun.mTargetTime) {
                endPeaceOfMind(false);
                return;
            }
        }

        PeaceOfMindStats.saveToSharedPreferences(mCurrentStats, mSharedPreferences);

        // Send broadcast to application receiver.
        if (mCurrentStats.mIsOnPeaceOfMind) {
            Intent tickIntent = new Intent(PeaceOfMindApplicationBroadcastReceiver.PEACE_OF_MIND_TICK);
            tickIntent.putExtra(PeaceOfMindApplicationBroadcastReceiver.PEACE_OF_MIND_TARGET_TIME, mCurrentStats.mCurrentRun.mTargetTime);
            tickIntent.putExtra(PeaceOfMindApplicationBroadcastReceiver.PEACE_OF_MIND_PAST_TIME, mCurrentStats.mCurrentRun.mPastTime);
            mContext.sendBroadcast(tickIntent);
        }
    }

    /**
     * Starts a new Peace of Mind run with the given duration, persists it,
     * switches the device into airplane mode and notifies the application.
     *
     * @param targetTime run duration in milliseconds
     */
    private void startPeaceOfMind(long targetTime) {
        long currentTime = System.currentTimeMillis();

        mCurrentStats.mIsOnPeaceOfMind = true;
        mCurrentStats.mLastTimePinged = currentTime;
        mCurrentStats.mCurrentRun = new PeaceOfMindRun();
        mCurrentStats.mCurrentRun.mTimeStarted = currentTime;
        mCurrentStats.mCurrentRun.mPastTime = 0;
        mCurrentStats.mCurrentRun.mTargetTime = targetTime;

        PeaceOfMindStats.saveToSharedPreferences(mCurrentStats, mSharedPreferences);

        mDeviceController.startPeaceOfMind();

        // Send broadcast to application receiver.
        Intent intent = new Intent(PeaceOfMindApplicationBroadcastReceiver.PEACE_OF_MIND_STARTED);
        intent.putExtra(PeaceOfMindApplicationBroadcastReceiver.PEACE_OF_MIND_TARGET_TIME, targetTime);
        mContext.sendBroadcast(intent);
    }

    /**
     * Applies a new target time coming from the activity. A target of 0 ends a
     * running Peace of Mind; a target at or below the already-elapsed time also
     * ends it; otherwise the run is extended/shortened, or started if none is active.
     */
    private void updateTargetTime(Intent intent) {
        // FIX: use getLongExtra instead of getExtras().getLong(...) so a missing
        // extras bundle yields the documented "0 = stop" value instead of an NPE.
        long newTargetTime = intent.getLongExtra(PeaceOfMindActivity.BROADCAST_TARGET_PEACE_OF_MIND, 0);

        if (newTargetTime == 0) {
            if (mCurrentStats.mIsOnPeaceOfMind) {
                endPeaceOfMind(false);
            }
        } else if (mCurrentStats.mIsOnPeaceOfMind) {
            if (mCurrentStats.mCurrentRun.mPastTime < newTargetTime) {
                mCurrentStats.mCurrentRun.mTargetTime = newTargetTime;

                PeaceOfMindStats.saveToSharedPreferences(mCurrentStats, mSharedPreferences);

                Intent updateIntent = new Intent(PeaceOfMindApplicationBroadcastReceiver.PEACE_OF_MIND_UPDATED);
                updateIntent.putExtra(PeaceOfMindApplicationBroadcastReceiver.PEACE_OF_MIND_TARGET_TIME, mCurrentStats.mCurrentRun.mTargetTime);
                updateIntent.putExtra(PeaceOfMindApplicationBroadcastReceiver.PEACE_OF_MIND_PAST_TIME, mCurrentStats.mCurrentRun.mPastTime);
                mContext.sendBroadcast(updateIntent);
            } else {
                endPeaceOfMind(false);
            }
        } else {
            startPeaceOfMind(newTargetTime);
            setPeaceOfMindIconInNotificationBar(true, false);
        }
    }

    /**
     * Sets the Peace of mind icon on the notification bar
     * @param putIcon if true the icon is put otherwise it is removed
     * @param wasInterrupted when true, an extra notification is sent to inform the user that Peace of mind was ended
     */
    private void setPeaceOfMindIconInNotificationBar(boolean putIcon, boolean wasInterrupted) {
        NotificationManager manager = (NotificationManager) mContext.getSystemService(Context.NOTIFICATION_SERVICE);

        if (putIcon) {
            // Just in case the user didn't clear it.
            manager.cancel(PEACE_OF_MIND_INTERRUPTED_NOTIFICATION);

            NotificationCompat.Builder builder =
                    new NotificationCompat.Builder(mContext)
                            .setSmallIcon(R.drawable.peace_system_bar_icon)
                            .setContentTitle(mContext.getResources().getString(R.string.app_name))
                            .setContentText(mContext.getResources().getString(R.string.peace_on_notification));

            Intent resultIntent = new Intent(mContext, PeaceOfMindActivity.class);
            TaskStackBuilder stackBuilder = TaskStackBuilder.create(mContext);
            // Adds the back stack for the Intent (but not the Intent itself)
            stackBuilder.addParentStack(PeaceOfMindActivity.class);
            // Adds the Intent that starts the Activity to the top of the stack
            stackBuilder.addNextIntent(resultIntent);
            PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(0, PendingIntent.FLAG_UPDATE_CURRENT);
            builder.setContentIntent(resultPendingIntent);

            Notification notificationWhileRunning = builder.build();
            // Keep it until the run ends; the user cannot swipe it away.
            notificationWhileRunning.flags |= Notification.FLAG_NO_CLEAR;

            // Add notification
            manager.notify(PEACE_OF_MIND_ON_NOTIFICATION, notificationWhileRunning);
        } else {
            manager.cancel(PEACE_OF_MIND_ON_NOTIFICATION);

            // Send a notification saying that the peace was ended.
            if (wasInterrupted) {
                NotificationCompat.Builder builder =
                        new NotificationCompat.Builder(mContext)
                                .setSmallIcon(R.drawable.peace_system_bar_icon)
                                .setAutoCancel(true)
                                .setContentTitle(mContext.getResources().getString(R.string.app_name))
                                .setContentText(mContext.getResources().getString(R.string.peace_off_notification))
                                .setTicker(mContext.getResources().getString(R.string.peace_off_notification));

                manager.notify(PEACE_OF_MIND_INTERRUPTED_NOTIFICATION, builder.build());
            }
        }
    }

    /**
     * Ends the current run (if any), restores connectivity, records analytics
     * events and broadcasts the end of Peace of Mind.
     *
     * @param wasInterrupted true when the run was cut short (system airplane-mode
     *                       change or shutdown) rather than completing naturally
     */
    private void endPeaceOfMind(boolean wasInterrupted) {
        mCurrentStats.mIsOnPeaceOfMind = false;
        mCurrentStats.mLastTimePinged = 0;

        if (mCurrentStats.mCurrentRun != null) {
            mCurrentStats.mCurrentRun.mTimeStarted = 0;
            mCurrentStats.mCurrentRun.mPastTime = 0;
            mCurrentStats.mCurrentRun.mTargetTime = 0;
            mCurrentStats.mCurrentRun = null;
        }

        PeaceOfMindStats.saveToSharedPreferences(mCurrentStats, mSharedPreferences);

        mDeviceController.endPeaceOfMind();

        FlurryHelper.startFlurrySession(mContext);
        FlurryAgent.endTimedEvent(FlurryHelper.PEACE_OF_MIND_STARTED);
        if (wasInterrupted) {
            FlurryAgent.logEvent(FlurryHelper.PEACE_OF_MIND_STOPPED);
        }
        FlurryHelper.endFlurrySession(mContext);

        Intent endIntent = new Intent(PeaceOfMindApplicationBroadcastReceiver.PEACE_OF_MIND_ENDED);
        mContext.sendBroadcast(endIntent);

        setPeaceOfMindIconInNotificationBar(false, wasInterrupted);
    }
}
| |
/*
* Copyright 2007 Alin Dreghiciu.
* Copyright 2007 Peter Kriens.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric8.maven;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.ops4j.net.URLUtils;
import org.ops4j.pax.swissbox.bnd.BndUtils;
import org.ops4j.pax.swissbox.bnd.OverwriteMode;
/**
 * Parser for wrap: protocol.
 * <p>
 * Splits the path part of a {@code wrap:} URL into a wrapped jar URL, an optional
 * BND instructions-file URL and optional inline wrapping instructions, per {@link #SYNTAX}.
 *
 * @author Alin Dreghiciu
 * @see Connection
 * @since September 09, 2007
 */
public class WrapUrlParser
{
    /**
     * Syntax for the url; to be shown on exception messages.
     */
    private static final String SYNTAX = "wrap:wrapped-jar-uri[,wrapping-instr-uri][$wrapping-instructions]";
    /**
     * Separator between wrapped jar url and instructions.
     */
    private static final String INSTRUCTIONS_SEPARATOR = "$";
    /**
     * Separator between wrapped jar url and instructions file url.
     */
    private static final String INSTRUCTIONS_FILE_SEPARATOR = ",";
    /**
     * Regexp pattern for matching jar, wrapping file and instructions.
     * The "\\" below regex-escapes the "$" separator; reluctant groups keep the
     * first "," and first "$" as the split points.
     */
    private static final Pattern SYNTAX_JAR_BND_INSTR =
        Pattern.compile( "(.+?)" + INSTRUCTIONS_FILE_SEPARATOR + "(.+?)\\" + INSTRUCTIONS_SEPARATOR + "(.+?)" );
    /**
     * Regexp pattern for matching jar and instructions.
     */
    private static final Pattern SYNTAX_JAR_INSTR =
        Pattern.compile( "(.+?)\\" + INSTRUCTIONS_SEPARATOR + "(.+?)" );
    /**
     * Regexp pattern for matching jar and wrapping file.
     */
    private static final Pattern SYNTAX_JAR_BND =
        Pattern.compile( "(.+?)" + INSTRUCTIONS_FILE_SEPARATOR + "(.+?)" );
    /**
     * Wrapped jar URL.
     */
    private final URL m_wrappedJarURL;
    /**
     * Wrapping instructions, merged from the instructions file (if any) and the
     * inline instructions (if any).
     */
    private final Properties m_wrappingProperties;
    /**
     * Manifest overwrite mode.
     */
    private final OverwriteMode m_overwriteMode;
    /**
     * Creates a new protocol parser.
     *
     * @param path the path part of the url (without starting wrap:)
     *
     * @throws MalformedURLException if provided path does not comply to expected syntax or has malformed urls
     */
    public WrapUrlParser( final String path )
        throws MalformedURLException
    {
        if( path == null || path.trim().length() == 0 )
        {
            throw new MalformedURLException( "Path cannot be null or empty. Syntax " + SYNTAX );
        }
        if( path.startsWith( INSTRUCTIONS_SEPARATOR ) || path.endsWith( INSTRUCTIONS_SEPARATOR ) )
        {
            throw new MalformedURLException(
                "Path cannot start or end with " + INSTRUCTIONS_SEPARATOR + ". Syntax " + SYNTAX
            );
        }
        m_wrappingProperties = new Properties();
        // Match the most specific form first; the ordering of these attempts is
        // significant (a path with both "," and "$" must not fall through to the
        // two-part patterns).
        Matcher matcher = SYNTAX_JAR_BND_INSTR.matcher( path );
        if( matcher.matches() )
        {
            // we have all the parts
            m_wrappedJarURL = new URL( matcher.group( 1 ) );
            parseInstructionsFile( new URL( matcher.group( 2 ) ) );
            // Inline instructions are merged after the file, so they can override it.
            m_wrappingProperties.putAll( BndUtils.parseInstructions( matcher.group( 3 ) ) );
        }
        else if( ( matcher = SYNTAX_JAR_INSTR.matcher( path ) ).matches() )
        {
            // we have a wrapped jar and instructions
            m_wrappedJarURL = new URL( matcher.group( 1 ) );
            m_wrappingProperties.putAll( BndUtils.parseInstructions( matcher.group( 2 ) ) );
        }
        else if( ( matcher = SYNTAX_JAR_BND.matcher( path ) ).matches() )
        {
            // we have a wrapped jar and a wrapping instructions file
            m_wrappedJarURL = new URL( matcher.group( 1 ) );
            parseInstructionsFile( new URL( matcher.group( 2 ) ) );
        }
        else
        {
            //we have only a wrapped jar
            m_wrappedJarURL = new URL( path );
        }
        // Resolve the manifest overwrite mode from the "overwrite" instruction;
        // any missing or unrecognized value silently falls back to KEEP.
        OverwriteMode overwriteMode;
        try
        {
            overwriteMode = OverwriteMode.valueOf(
                m_wrappingProperties.getProperty( "overwrite", OverwriteMode.KEEP.name() ).toUpperCase()
            );
        }
        catch( Exception e )
        {
            overwriteMode = OverwriteMode.KEEP;
        }
        m_overwriteMode = overwriteMode;
    }
    /**
     * Loads the properties out of an url, merging them into
     * {@code m_wrappingProperties}.
     *
     * @param bndFileURL url of the file containing the instructions
     *
     * @throws MalformedURLException if the file could not be read
     */
    private void parseInstructionsFile( final URL bndFileURL )
        throws MalformedURLException
    {
        // TODO use the certificate check property from the handler instead of true below
        try
        {
            InputStream is = null;
            try
            {
                is = URLUtils.prepareInputStream( bndFileURL, true );
                m_wrappingProperties.load( is );
            }
            finally
            {
                if( is != null )
                {
                    is.close();
                }
            }
        }
        catch( IOException e )
        {
            // Re-thrown as MalformedURLException to honor this method's contract.
            throwAsMalformedURLException( "Could not retrieve the instructions from [" + bndFileURL + "]", e );
        }
    }
    /**
     * Returns the wrapped URL if present, null otherwise
     *
     * @return wrapped jar URL
     */
    public URL getWrappedJarURL()
    {
        return m_wrappedJarURL;
    }
    /**
     * Returns the wrapping instructions as Properties.
     *
     * @return wrapping instructions as Properties
     */
    public Properties getWrappingProperties()
    {
        return m_wrappingProperties;
    }
    /**
     * Returns the overwrite mode.
     *
     * @return overwrite mode
     */
    public OverwriteMode getOverwriteMode()
    {
        return m_overwriteMode;
    }
    /**
     * Creates an MalformedURLException with a message and a cause.
     * (MalformedURLException has no (String, Throwable) constructor, hence initCause.)
     *
     * @param message exception message
     * @param cause exception cause
     *
     * @throws MalformedURLException the created MalformedURLException
     */
    private static void throwAsMalformedURLException( final String message, final Exception cause )
        throws MalformedURLException
    {
        final MalformedURLException exception = new MalformedURLException( message );
        exception.initCause( cause );
        throw exception;
    }
}
| |
package com.ragdroid.rxify.logic;
import com.ragdroid.rxify.codelab.CodeLabContract;
import com.ragdroid.rxify.codelab.list.CodeLabListPresenter;
import com.ragdroid.rxify.codelab.misc.TimeTurnerPresenter;
import com.ragdroid.rxify.codelab.presenter2.RelayPresenter;
import com.ragdroid.rxify.codelab.presenter2.SubjectPresenter;
import com.ragdroid.rxify.codelab.presenter2.ThreadingPresenter;
import com.ragdroid.rxify.codelab.presenter2.AssignmentPresenter;
import com.ragdroid.rxify.codelab.presenter2.BattleFlowPresenter;
import com.ragdroid.rxify.codelab.presenter2.BattlePresenter;
import com.ragdroid.rxify.codelab.ChillPresenter;
import com.ragdroid.rxify.codelab.presenter2.DistinctPresenter;
import com.ragdroid.rxify.codelab.presenter2.FilterPresenter;
import com.ragdroid.rxify.codelab.presenter2.FlatMapPresenter;
import com.ragdroid.rxify.codelab.presenter2.MapPresenter;
import com.ragdroid.rxify.codelab.presenter2.ReducePresenter;
import com.ragdroid.rxify.codelab.presenter2.SkipPresenter;
import com.ragdroid.rxify.codelab.presenter2.TakePresenter;
import com.ragdroid.rxify.codelab.presenter2.TakeUntilPresenter;
import com.ragdroid.rxify.codelab.presenter.EmptyPresenter;
import com.ragdroid.rxify.codelab.presenter.ErrorPresenter;
import com.ragdroid.rxify.codelab.presenter.FromPresenter;
import com.ragdroid.rxify.codelab.presenter.IntervalPresenter;
import com.ragdroid.rxify.codelab.presenter.IntervalRangePresenter;
import com.ragdroid.rxify.codelab.presenter.JustPresenter;
import com.ragdroid.rxify.codelab.presenter.NeverPresenter;
import com.ragdroid.rxify.codelab.presenter.RangePresenter;
import com.ragdroid.rxify.codelab.presenter.TimerPresenter;
import com.ragdroid.rxify.core.BaseSchedulerProvider;
import com.ragdroid.rxify.dagger.ActivityScope;
import com.ragdroid.rxify.dagger.CLEnumKey;
import com.ragdroid.rxify.entity.CodeLabData;
import com.ragdroid.rxify.home.HomeContract;
import com.ragdroid.rxify.home.HomePresenter;
import com.ragdroid.rxify.library.LibraryContract;
import com.ragdroid.rxify.library.LibraryPresenter;
import com.ragdroid.rxify.zip.ZipContract;
import com.ragdroid.rxify.zip.ZipPresenter;
import javax.inject.Provider;
import dagger.Module;
import dagger.Provides;
import dagger.multibindings.IntoMap;
/**
 * Dagger module providing every presenter used at activity scope.
 * <p>
 * The code-lab lesson presenters are contributed into a multibound map keyed by
 * {@code CodeLabData} via {@code @IntoMap}/{@code @CLEnumKey} — presumably
 * consumed as an injected {@code Map<CodeLabData, Provider<CodeLabContract.Presenter>>};
 * verify against the injection site.
 * <p>
 * Created by garimajain on 05/11/16.
 */
@Module
public class ActivityModule {
    // ---- Top-level screen presenters (bound to their contract interfaces) ----
    @ActivityScope
    @Provides
    public HomeContract.Presenter provideHomePresenter(HomePresenter presenter) {
        return presenter;
    }
    @ActivityScope
    @Provides
    public ZipContract.Presenter provideZipPresenter(ZipPresenter presenter) {
        return presenter;
    }
    @ActivityScope
    @Provides
    public LibraryContract.Presenter provideLibraryPresenter(LibraryPresenter presenter) {
        return presenter;
    }
    // ---- Code-lab lesson presenters, one map entry per CodeLabData key ----
    // ChillPresenter is constructor-injected; the rest are built manually from
    // the scheduler provider.
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.CHILL)
    public static CodeLabContract.Presenter provideChillPresenter(ChillPresenter chillPresenter) {
        return chillPresenter;
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.JUST)
    public static CodeLabContract.Presenter provideJustPresenter(BaseSchedulerProvider provider) {
        return new JustPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.EMPTY)
    public static CodeLabContract.Presenter provideEmptyPresenter(BaseSchedulerProvider provider) {
        return new EmptyPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.NEVER)
    public static CodeLabContract.Presenter provideNeverPresenter(BaseSchedulerProvider provider) {
        return new NeverPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.ERROR)
    public static CodeLabContract.Presenter provideErrorPresenter(BaseSchedulerProvider provider) {
        return new ErrorPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.RANGE)
    public static CodeLabContract.Presenter provideRangePresenter(BaseSchedulerProvider provider) {
        return new RangePresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.INTERVAL)
    public static CodeLabContract.Presenter provideIntervalPresenter(BaseSchedulerProvider provider) {
        return new IntervalPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.INTERVAL_RANGE)
    public static CodeLabContract.Presenter provideIntervalRangePresenter(BaseSchedulerProvider provider) {
        return new IntervalRangePresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.TIMER)
    public static CodeLabContract.Presenter provideTimerPresenter(BaseSchedulerProvider provider) {
        return new TimerPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.FROM)
    public static CodeLabContract.Presenter provideFromPresenter(BaseSchedulerProvider provider) {
        return new FromPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.FILTER)
    public static CodeLabContract.Presenter provideFilterPresenter(BaseSchedulerProvider provider) {
        return new FilterPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.DISTINCT)
    public static CodeLabContract.Presenter provideDistinctPresenter(BaseSchedulerProvider provider) {
        return new DistinctPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.TAKE)
    public static CodeLabContract.Presenter provideTakePresenter(BaseSchedulerProvider provider) {
        return new TakePresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.SKIP)
    public static CodeLabContract.Presenter provideSkipPresenter(BaseSchedulerProvider provider) {
        return new SkipPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.TAKE_UNTIL)
    public static CodeLabContract.Presenter provideTakeUntilPresenter(BaseSchedulerProvider provider) {
        return new TakeUntilPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.REDUCE)
    public static CodeLabContract.Presenter provideReducePresenter(BaseSchedulerProvider provider) {
        return new ReducePresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.MAP)
    public static CodeLabContract.Presenter provideMapPresenter(BaseSchedulerProvider provider) {
        return new MapPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.FLATMAP)
    public static CodeLabContract.Presenter provideFlatMapPresenter(BaseSchedulerProvider provider) {
        return new FlatMapPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.ASSIGNMENT)
    public static CodeLabContract.Presenter provideAssignmentPresenter(BaseSchedulerProvider provider) {
        return new AssignmentPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.BATTLE)
    public static CodeLabContract.Presenter provideBattlePresenter(BaseSchedulerProvider provider) {
        return new BattlePresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.BATTLE_FLOW)
    public static CodeLabContract.Presenter provideBattleFlowPresenter(BaseSchedulerProvider provider) {
        return new BattleFlowPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.THREAD)
    public static CodeLabContract.Presenter provideThreadingPresenter(BaseSchedulerProvider provider) {
        return new ThreadingPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.TIME_TURNER)
    public static CodeLabContract.Presenter provideTimeTurnerPresenter(BaseSchedulerProvider provider) {
        return new TimeTurnerPresenter(provider);
    }
    // NOTE(review): method name is missing the 'c' in "Subject". Harmless to
    // Dagger (the binding key is the @CLEnumKey), but worth renaming to
    // provideSubjectPresenter in a follow-up since renaming changes the
    // module's public API.
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.SUBJECT)
    public static CodeLabContract.Presenter provideSubjetPresenter(BaseSchedulerProvider provider) {
        return new SubjectPresenter(provider);
    }
    @ActivityScope
    @Provides
    @IntoMap
    @CLEnumKey(CodeLabData.RELAY)
    public static CodeLabContract.Presenter provideRelayPresenter(BaseSchedulerProvider provider) {
        return new RelayPresenter(provider);
    }
    // Unqualified (non-map) binding of CodeLabContract.Presenter — presumably
    // the fallback when no lesson-specific presenter is requested; confirm
    // against the injection sites.
    @ActivityScope
    @Provides
    public CodeLabContract.Presenter provideDefaultPresenter(BaseSchedulerProvider provider) {
        return new ChillPresenter(provider);
    }
    @ActivityScope
    @Provides
    public CodeLabListPresenter provideCLListPresenter(BaseSchedulerProvider provider) {
        return new CodeLabListPresenter(provider);
    }
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
* Copyright (C) 2011 Jake Wharton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helen.andbase.widget.viewpagerindicator;
import android.content.Context;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v4.view.ViewPager.OnPageChangeListener;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.widget.HorizontalScrollView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.helen.andbase.R;
import static android.view.ViewGroup.LayoutParams.MATCH_PARENT;
import static android.view.ViewGroup.LayoutParams.WRAP_CONTENT;
/**
 * This widget implements the dynamic action bar tab behavior that can change
 * across different configurations or circumstances.
 */
public class TabPageIndicator extends HorizontalScrollView implements PageIndicator {
    /** Title text used when no title is provided by the adapter. */
    private static final CharSequence EMPTY_TITLE = "";
    /**
     * Interface for a callback when the selected tab has been reselected.
     */
    public interface OnTabReselectedListener {
        /**
         * Callback when the selected tab has been reselected.
         *
         * @param position Position of the current center item.
         */
        void onTabReselected(int position);
    }
    /** Pending runnable that smooth-scrolls the selected tab into view. */
    private Runnable mTabSelector;
    private final OnClickListener mTabClickListener = new OnClickListener() {
        public void onClick(View view) {
            TabView tabView = (TabView)view;
            final int oldSelected = mViewPager.getCurrentItem();
            final int newSelected = tabView.getIndex();
            mViewPager.setCurrentItem(newSelected);
            // Tapping the tab that is already selected fires the reselection callback.
            if (oldSelected == newSelected && mTabReselectedListener != null) {
                mTabReselectedListener.onTabReselected(newSelected);
            }
        }
    };
    private final IcsLinearLayout mTabLayout;
    private ViewPager mViewPager;
    private OnPageChangeListener mListener;
    /** Maximum width of a single tab, or -1 when unconstrained. */
    private int mMaxTabWidth;
    private int mSelectedTabIndex;
    private OnTabReselectedListener mTabReselectedListener;
    public TabPageIndicator(Context context) {
        this(context, null);
    }
    public TabPageIndicator(Context context, AttributeSet attrs) {
        super(context, attrs);
        setHorizontalScrollBarEnabled(false);
        mTabLayout = new IcsLinearLayout(context, R.attr.vpiTabPageIndicatorStyle);
        addView(mTabLayout, new ViewGroup.LayoutParams(WRAP_CONTENT, MATCH_PARENT));
    }
    public void setOnTabReselectedListener(OnTabReselectedListener listener) {
        mTabReselectedListener = listener;
    }
    @Override
    public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        final int widthMode = MeasureSpec.getMode(widthMeasureSpec);
        final boolean lockedExpanded = widthMode == MeasureSpec.EXACTLY;
        setFillViewport(lockedExpanded);
        final int childCount = mTabLayout.getChildCount();
        // Cap per-tab width when there are multiple tabs and a bounded width:
        // 40% of the available width for 3+ tabs, half of it for exactly 2.
        if (childCount > 1 && (widthMode == MeasureSpec.EXACTLY || widthMode == MeasureSpec.AT_MOST)) {
            if (childCount > 2) {
                mMaxTabWidth = (int)(MeasureSpec.getSize(widthMeasureSpec) * 0.4f);
            } else {
                mMaxTabWidth = MeasureSpec.getSize(widthMeasureSpec) / 2;
            }
        } else {
            mMaxTabWidth = -1;
        }
        final int oldWidth = getMeasuredWidth();
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        final int newWidth = getMeasuredWidth();
        if (lockedExpanded && oldWidth != newWidth) {
            // Recenter the tab display if we're at a new (scrollable) size.
            setCurrentItem(mSelectedTabIndex);
        }
    }
    /**
     * Posts a runnable that smooth-scrolls the tab at {@code position} to the
     * horizontal center of this view. Replaces any previously pending scroll.
     */
    private void animateToTab(final int position) {
        final View tabView = mTabLayout.getChildAt(position);
        // FIX: guard against an out-of-range position (e.g. tabs not yet built);
        // previously this would NPE inside the posted runnable.
        if (tabView == null) {
            return;
        }
        if (mTabSelector != null) {
            removeCallbacks(mTabSelector);
        }
        mTabSelector = new Runnable() {
            public void run() {
                final int scrollPos = tabView.getLeft() - (getWidth() - tabView.getWidth()) / 2;
                smoothScrollTo(scrollPos, 0);
                mTabSelector = null;
            }
        };
        post(mTabSelector);
    }
    @Override
    public void onAttachedToWindow() {
        super.onAttachedToWindow();
        if (mTabSelector != null) {
            // Re-post the selector we saved
            post(mTabSelector);
        }
    }
    @Override
    public void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        if (mTabSelector != null) {
            removeCallbacks(mTabSelector);
        }
    }
    /** Builds one tab view and appends it to the tab strip with equal weight. */
    private void addTab(int index, CharSequence text, int iconResId) {
        final TabView tabView = new TabView(getContext());
        tabView.mIndex = index;
        tabView.setFocusable(true);
        tabView.setOnClickListener(mTabClickListener);
        tabView.setText(text);
        if (iconResId != 0) {
            tabView.setCompoundDrawablesWithIntrinsicBounds(iconResId, 0, 0, 0);
        }
        mTabLayout.addView(tabView, new LinearLayout.LayoutParams(0, MATCH_PARENT, 1));
    }
    @Override
    public void onPageScrollStateChanged(int arg0) {
        if (mListener != null) {
            mListener.onPageScrollStateChanged(arg0);
        }
    }
    @Override
    public void onPageScrolled(int arg0, float arg1, int arg2) {
        if (mListener != null) {
            mListener.onPageScrolled(arg0, arg1, arg2);
        }
    }
    @Override
    public void onPageSelected(int arg0) {
        setCurrentItem(arg0);
        if (mListener != null) {
            mListener.onPageSelected(arg0);
        }
    }
    @Override
    public void setViewPager(ViewPager view) {
        if (mViewPager == view) {
            return;
        }
        if (mViewPager != null) {
            mViewPager.setOnPageChangeListener(null);
        }
        final PagerAdapter adapter = view.getAdapter();
        if (adapter == null) {
            throw new IllegalStateException("ViewPager does not have adapter instance.");
        }
        mViewPager = view;
        view.setOnPageChangeListener(this);
        notifyDataSetChanged();
    }
    /** Rebuilds the tab strip from the current adapter contents. */
    public void notifyDataSetChanged() {
        mTabLayout.removeAllViews();
        PagerAdapter adapter = mViewPager.getAdapter();
        IconPagerAdapter iconAdapter = null;
        if (adapter instanceof IconPagerAdapter) {
            iconAdapter = (IconPagerAdapter)adapter;
        }
        final int count = adapter.getCount();
        for (int i = 0; i < count; i++) {
            CharSequence title = adapter.getPageTitle(i);
            if (title == null) {
                title = EMPTY_TITLE;
            }
            int iconResId = 0;
            if (iconAdapter != null) {
                iconResId = iconAdapter.getIconResId(i);
            }
            addTab(i, title, iconResId);
        }
        // FIX: was "mSelectedTabIndex > count", an off-by-one that let an index
        // equal to count (one past the last valid position, valid range is
        // 0..count-1) survive a shrinking data set. Clamp to the last tab.
        if (count > 0 && mSelectedTabIndex >= count) {
            mSelectedTabIndex = count - 1;
        }
        setCurrentItem(mSelectedTabIndex);
        requestLayout();
    }
    @Override
    public void setViewPager(ViewPager view, int initialPosition) {
        setViewPager(view);
        setCurrentItem(initialPosition);
    }
    @Override
    public void setCurrentItem(int item) {
        if (mViewPager == null) {
            throw new IllegalStateException("ViewPager has not been bound.");
        }
        mSelectedTabIndex = item;
        mViewPager.setCurrentItem(item);
        final int tabCount = mTabLayout.getChildCount();
        for (int i = 0; i < tabCount; i++) {
            final View child = mTabLayout.getChildAt(i);
            final boolean isSelected = (i == item);
            child.setSelected(isSelected);
            if (isSelected) {
                animateToTab(item);
            }
        }
    }
    @Override
    public void setOnPageChangeListener(OnPageChangeListener listener) {
        mListener = listener;
    }
    /** Single tab: a TextView whose measured width is capped at mMaxTabWidth. */
    private class TabView extends TextView {
        private int mIndex;
        public TabView(Context context) {
            super(context, null, R.attr.vpiTabPageIndicatorStyle);
        }
        @Override
        public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
            super.onMeasure(widthMeasureSpec, heightMeasureSpec);
            // Re-measure if we went beyond our maximum size.
            if (mMaxTabWidth > 0 && getMeasuredWidth() > mMaxTabWidth) {
                super.onMeasure(MeasureSpec.makeMeasureSpec(mMaxTabWidth, MeasureSpec.EXACTLY),
                        heightMeasureSpec);
            }
        }
        public int getIndex() {
            return mIndex;
        }
    }
}
| |
/*
* ***** BEGIN LICENSE BLOCK *****
* Zimbra Collaboration Suite Server
* Copyright (C) 2008, 2009, 2010 Zimbra, Inc.
*
* The contents of this file are subject to the Zimbra Public License
* Version 1.3 ("License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.zimbra.com/license.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
* ***** END LICENSE BLOCK *****
*/
package com.zimbra.cs.mailbox.calendar;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import com.zimbra.common.calendar.Attach;
import com.zimbra.common.calendar.Geo;
import com.zimbra.common.calendar.ICalTimeZone;
import com.zimbra.common.calendar.TZIDMapper;
import com.zimbra.common.calendar.TimeZoneMap;
import com.zimbra.common.calendar.WellKnownTimeZones;
import com.zimbra.common.calendar.ZCalendar.ICalTok;
import com.zimbra.common.calendar.ZCalendar.ZParameter;
import com.zimbra.common.calendar.ZCalendar.ZProperty;
import com.zimbra.common.localconfig.DebugConfig;
import com.zimbra.common.service.ServiceException;
import com.zimbra.common.util.ZimbraLog;
import com.zimbra.cs.account.Account;
import com.zimbra.cs.account.Provisioning;
import com.zimbra.cs.mailbox.Metadata;
/**
 * Helpers for persisting calendar data (iCalendar x-props/x-params, time
 * zones, time zone maps, ATTACH and GEO values) to and from Zimbra
 * {@link Metadata}.
 */
public class Util {
    // Metadata field names; kept short because they are persisted.
    private static final String FN_NAME = "n";
    private static final String FN_NUM_XPROPS_OR_XPARAMS = "numX";
    private static final String FN_VALUE = "v";
    private static final String FN_XPROP_OR_XPARAM = "x";
    private static final String FN_TZID = "tzid";
    private static final String FN_STANDARD_OFFSET = "so";
    private static final String FN_DAYLIGHT_OFFSET = "do";
    // "d2ss": DTSTART stored alongside the standard observance (see encodeAsMetadata),
    // i.e. the daylight-to-standard transition; "s2ds" pairs with the daylight observance.
    private static final String FN_DAYTOSTD_DTSTART = "d2ss";
    private static final String FN_STDTODAY_DTSTART = "s2ds";
    private static final String FN_DAYTOSTD_RULE = "d2sr";
    private static final String FN_STDTODAY_RULE = "s2dr";
    private static final String FN_STANDARD_TZNAME = "sn";
    private static final String FN_DAYLIGHT_TZNAME = "dn";
    private static final String FN_CONTENT_TYPE = "ct";
    private static final String FN_URI = "uri";
    private static final String FN_BINARY = "bin";
    private static final String FN_LATITUDE = "lat";
    private static final String FN_LONGITUDE = "lon";
    /**
     * Encodes the given x-params into {@code meta} under keys "x0", "x1", ...
     * and records the count under {@code FN_NUM_XPROPS_OR_XPARAMS} (only when
     * at least one param was written). Params with a null name are skipped.
     */
    public static void encodeXParamsAsMetadata(Metadata meta, Iterator<ZParameter> xparamsIter) {
        int xparamCount = 0;
        for (; xparamsIter.hasNext(); ) {
            ZParameter xparam = xparamsIter.next();
            String paramName = xparam.getName();
            if (paramName == null) continue;
            Metadata paramMeta = new Metadata();
            paramMeta.put(FN_NAME, paramName);
            String paramValue = xparam.getValue();
            if (paramValue != null)
                paramMeta.put(FN_VALUE, paramValue);
            meta.put(FN_XPROP_OR_XPARAM + xparamCount, paramMeta);
            xparamCount++;
        }
        if (xparamCount > 0)
            meta.put(FN_NUM_XPROPS_OR_XPARAMS, xparamCount);
    }
    /**
     * Encodes the given x-props (with their nested x-params) into {@code meta},
     * using the same "x<i>" keying scheme as
     * {@link #encodeXParamsAsMetadata(Metadata, Iterator)}.
     */
    public static void encodeXPropsAsMetadata(Metadata meta, Iterator<ZProperty> xpropsIter) {
        int xpropCount = 0;
        for (; xpropsIter.hasNext(); ) {
            ZProperty xprop = xpropsIter.next();
            String propName = xprop.getName();
            if (propName == null) continue;
            // Never persist the transport-only special x-prop X-ZIMBRA-CHANGES.
            if (propName.equalsIgnoreCase(ICalTok.X_ZIMBRA_CHANGES.toString())) continue;
            Metadata propMeta = new Metadata();
            propMeta.put(FN_NAME, propName);
            String propValue = xprop.getValue();
            if (propValue != null)
                propMeta.put(FN_VALUE, propValue);
            // Nested x-params of this x-prop are encoded inside propMeta.
            encodeXParamsAsMetadata(propMeta, xprop.parameterIterator());
            meta.put(FN_XPROP_OR_XPARAM + xpropCount, propMeta);
            xpropCount++;
        }
        if (xpropCount > 0)
            meta.put(FN_NUM_XPROPS_OR_XPARAMS, xpropCount);
    }
    /**
     * Decodes x-params previously written by encodeXParamsAsMetadata.
     *
     * @return the decoded params, or null when none were stored (callers
     *         null-check this; do not change to an empty list without auditing them)
     */
    public static List<ZParameter> decodeXParamsFromMetadata(Metadata meta) throws ServiceException {
        int xparamCount = (int) meta.getLong(FN_NUM_XPROPS_OR_XPARAMS, 0);
        if (xparamCount > 0) {
            List<ZParameter> list = new ArrayList<ZParameter>(xparamCount);
            for (int paramNum = 0; paramNum < xparamCount; paramNum++) {
                Metadata paramMeta = meta.getMap(FN_XPROP_OR_XPARAM + paramNum, true);
                if (paramMeta == null) continue;
                String paramName = paramMeta.get(FN_NAME, null);
                if (paramName == null) continue;
                String paramValue = paramMeta.get(FN_VALUE, null);
                ZParameter xparam = new ZParameter(paramName, paramValue);
                list.add(xparam);
            }
            return list;
        }
        return null;
    }
    /**
     * Decodes x-props (and their nested x-params) previously written by
     * encodeXPropsAsMetadata.
     *
     * @return the decoded props, or null when none were stored
     */
    public static List<ZProperty> decodeXPropsFromMetadata(Metadata meta) throws ServiceException {
        int xpropCount = (int) meta.getLong(FN_NUM_XPROPS_OR_XPARAMS, 0);
        if (xpropCount > 0) {
            List<ZProperty> list = new ArrayList<ZProperty>(xpropCount);
            for (int propNum = 0; propNum < xpropCount; propNum++) {
                Metadata propMeta = meta.getMap(FN_XPROP_OR_XPARAM + propNum, true);
                if (propMeta == null) continue;
                String propName = propMeta.get(FN_NAME, null);
                if (propName == null) continue;
                // Never persist the transport-only special x-prop X-ZIMBRA-CHANGES.
                if (propName.equalsIgnoreCase(ICalTok.X_ZIMBRA_CHANGES.toString())) continue;
                ZProperty xprop = new ZProperty(propName);
                String propValue = propMeta.get(FN_VALUE, null);
                if (propValue != null)
                    xprop.setValue(propValue);
                List<ZParameter> xparams = decodeXParamsFromMetadata(propMeta);
                if (xparams != null) {
                    for (ZParameter xparam : xparams) {
                        xprop.addParameter(xparam);
                    }
                }
                list.add(xprop);
            }
            return list;
        }
        return null;
    }
    /**
     * Returns the time zone for the given account, falling back to UTC when
     * the account's preferred TZID is unknown.
     */
    public static ICalTimeZone getAccountTimeZone(Account account) {
        String tzid = account.getAttr(Provisioning.A_zimbraPrefTimeZoneId);
        tzid = TZIDMapper.canonicalize(tzid);
        ICalTimeZone timeZone = WellKnownTimeZones.getTimeZoneById(tzid);
        if (timeZone == null) {
            return ICalTimeZone.getUTC();
        }
        return timeZone;
    }
    /**
     * Encodes a time zone. Well-known zones are stored by TZID only; custom
     * zones also carry their offsets, transition DTSTARTs, rules and names.
     */
    public static Metadata encodeAsMetadata(ICalTimeZone tz) {
        Metadata meta = new Metadata();
        String tzid = tz.getID();
        meta.put(FN_TZID, tzid);
        // For well-known time zone we only need the TZID.
        if (ICalTimeZone.lookupByTZID(tzid) != null)
            return meta;
        meta.put(FN_STANDARD_OFFSET, tz.getStandardOffset());
        meta.put(FN_DAYTOSTD_DTSTART, tz.getStandardDtStart());
        meta.put(FN_DAYTOSTD_RULE, tz.getStandardRule());
        meta.put(FN_STANDARD_TZNAME, tz.getStandardTzname());
        meta.put(FN_DAYLIGHT_OFFSET, tz.getDaylightOffset());
        meta.put(FN_STDTODAY_DTSTART, tz.getDaylightDtStart());
        meta.put(FN_STDTODAY_RULE, tz.getDaylightRule());
        meta.put(FN_DAYLIGHT_TZNAME, tz.getDaylightTzname());
        return meta;
    }
    /**
     * Decodes a time zone, preferring a well-known zone matched by TZID (unless
     * disabled via DebugConfig), then falling back to the full stored definition,
     * and finally to a rule-based lookup for an equivalent known zone.
     */
    public static ICalTimeZone decodeTimeZoneFromMetadata(Metadata m) throws ServiceException {
        String tzid;
        if (m.containsKey(FN_TZID)) {
            tzid = m.get(FN_TZID);
            // hasDef == true means a full custom definition was persisted.
            boolean hasDef = m.containsKey(FN_STANDARD_OFFSET);
            if (!DebugConfig.disableCalendarTZMatchByID || !hasDef) {
                ICalTimeZone tz = WellKnownTimeZones.getTimeZoneById(tzid);
                if (tz != null) {
                    return tz;
                } else if (!hasDef) {
                    ZimbraLog.calendar.debug("Unknown time zone \"" + tzid + "\" in metadata; using UTC instead");
                    return ICalTimeZone.getUTC().cloneWithNewTZID(tzid);
                }
            }
        } else
            tzid = "unknown time zone";
        ICalTimeZone newTz = newICalTimeZone(tzid, m);
        ICalTimeZone tz = ICalTimeZone.lookupByRule(newTz, false);
        return tz;
    }
    /** Reconstructs a custom time zone from its persisted definition fields. */
    private static ICalTimeZone newICalTimeZone(String tzId, Metadata meta) throws ServiceException {
        int standardOffset = (int) meta.getLong(FN_STANDARD_OFFSET, 0);
        String dayToStdDtStart = meta.get(FN_DAYTOSTD_DTSTART, null);
        String dayToStdRule = meta.get(FN_DAYTOSTD_RULE, null);
        String standardTzname = meta.get(FN_STANDARD_TZNAME, null);
        int daylightOffset = (int) meta.getLong(FN_DAYLIGHT_OFFSET, 0);
        String stdToDayDtStart = meta.get(FN_STDTODAY_DTSTART, ICalTimeZone.DEFAULT_DTSTART);
        String stdToDayRule = meta.get(FN_STDTODAY_RULE, null);
        String daylightTzname = meta.get(FN_DAYLIGHT_TZNAME, null);
        ICalTimeZone tz = new ICalTimeZone(tzId, standardOffset, dayToStdDtStart, dayToStdRule, standardTzname,
                daylightOffset, stdToDayDtStart, stdToDayRule, daylightTzname);
        tz.initFromICalData(true);
        return tz;
    }
    /**
     * Encodes a TimeZoneMap. Distinct zones are stored under "#0", "#1", ...
     * (deduplicated by real TZID); alias entries map an alias TZID to the
     * index of the real zone.
     */
    public static Metadata encodeAsMetadata(TimeZoneMap tzmap) {
        Metadata meta = new Metadata();
        Map<String /* real TZID */, Integer /* index */> tzIndex = new HashMap<String, Integer>();
        int nextIndex = 0;
        for (Iterator<Entry<String, ICalTimeZone>> iter = tzmap.getMap().entrySet().iterator(); iter.hasNext(); ) {
            Entry<String, ICalTimeZone> entry = iter.next();
            String tzid = entry.getKey();
            if (tzid == null || tzid.length() < 1) // ignore null/empty TZIDs (bug 25183)
                continue;
            ICalTimeZone zone = entry.getValue();
            String realTzid = zone.getID();
            if (!tzIndex.containsKey(realTzid)) {
                meta.put("#" + nextIndex, encodeAsMetadata(zone));
                tzIndex.put(realTzid, nextIndex);
                ++nextIndex;
            }
        }
        for (Iterator<Entry<String, String>> iter = tzmap.getAliasMap().entrySet().iterator(); iter.hasNext(); ) {
            Entry<String, String> entry = iter.next();
            String alias = entry.getKey();
            String realTzid = entry.getValue();
            if (tzIndex.containsKey(realTzid)) {
                int index = tzIndex.get(realTzid);
                meta.put(alias, index);
            }
        }
        return meta;
    }
    /**
     * Decodes a TimeZoneMap written by {@link #encodeAsMetadata(TimeZoneMap)}.
     * First pass decodes the "#<idx>" zone entries (canonicalizing TZIDs and
     * recording aliases when canonicalization renames a zone); second pass
     * resolves the alias-to-index entries.
     *
     * @param meta
     * @param localTZ local time zone of user account
     * @return the reconstructed map
     * @throws ServiceException
     */
    public static TimeZoneMap decodeFromMetadata(Metadata meta, ICalTimeZone localTZ) throws ServiceException {
        Map<String, ?> map = meta.asMap();
        Map<String, String> aliasMap = new HashMap<String, String>();
        // Sized to the whole map (zones + aliases), so it may be oversized; unused
        // slots stay null and are skipped below.
        ICalTimeZone[] tzlist = new ICalTimeZone[map.size()];
        // first time, find the tz's
        for (Map.Entry<String, ?> entry : map.entrySet()) {
            String key = entry.getKey();
            if (key != null && key.length() > 0) { // ignore null/empty TZIDs (bug 25183)
                if (key.charAt(0) == '#') {
                    int idx = Integer.parseInt(key.substring(1));
                    Metadata tzMeta = (Metadata) entry.getValue();
                    String tzidMeta = tzMeta.get(FN_TZID, null);
                    if (tzidMeta != null) {
                        ICalTimeZone tz = decodeTimeZoneFromMetadata(tzMeta);
                        if (tz != null) {
                            String tzid = tz.getID();
                            if (!DebugConfig.disableCalendarTZMatchByID)
                                tzid = TZIDMapper.canonicalize(tzid);
                            if (!tzidMeta.equals(tzid)) {
                                // Canonicalization changed the id: remember the alias
                                // and switch to the well-known zone for the new id.
                                aliasMap.put(tzidMeta, tzid);
                                tz = WellKnownTimeZones.getTimeZoneById(tzid);
                            }
                            tzlist[idx] = tz;
                        }
                    }
                }
            }
        }
        Map<String, ICalTimeZone> tzmap = new HashMap<String, ICalTimeZone>();
        for (ICalTimeZone tz : tzlist) {
            if (tz != null)
                tzmap.put(tz.getID(), tz);
        }
        // second time, build the real map
        for (Map.Entry<String, ?> entry : map.entrySet()) {
            String tzid = entry.getKey();
            if (tzid != null && tzid.length() > 0) { // ignore null/empty TZIDs (bug 25183)
                if (tzid.charAt(0) != '#') {
                    int idx = -1;
                    try {
                        idx = Integer.parseInt(entry.getValue().toString());
                    } catch (NumberFormatException e) {}
                    if (idx >= 0 && idx < tzlist.length) {
                        ICalTimeZone tz = tzlist[idx];
                        if (tz != null) {
                            String realId = tz.getID();
                            if (!realId.equals(tzid))
                                aliasMap.put(tzid, realId);
                        }
                    }
                }
            }
        }
        return new TimeZoneMap(tzmap, aliasMap, localTZ);
    }
    /**
     * Encodes an ATTACH value: URI + content type when URI-based, otherwise
     * the base64 binary payload.
     */
    public static Metadata encodeMetadata(Attach att) {
        Metadata meta = new Metadata();
        if (att.getUri() != null) {
            meta.put(FN_URI, att.getUri());
            meta.put(FN_CONTENT_TYPE, att.getContentType());
        } else {
            meta.put(FN_BINARY, att.getBinaryB64Data());
        }
        return meta;
    }
    /** Decodes an ATTACH value written by {@link #encodeMetadata(Attach)}. */
    public static Attach decodeAttachFromMetadata(Metadata meta) {
        String uri = meta.get(FN_URI, null);
        if (uri != null) {
            String ct = meta.get(FN_CONTENT_TYPE, null);
            return new Attach(uri, ct);
        } else {
            String binary = meta.get(FN_BINARY, null);
            return new Attach(binary);
        }
    }
    /** Decodes a GEO value; missing coordinates default to "0". */
    public static Geo decodeGeoFromMetadata(Metadata meta) {
        String lat = meta.get(FN_LATITUDE, "0");
        String lon = meta.get(FN_LONGITUDE, "0");
        return new Geo(lat, lon);
    }
    /** Encodes a GEO value as latitude/longitude strings. */
    public static Metadata encodeMetadata(Geo geo) {
        Metadata meta = new Metadata();
        meta.put(Util.FN_LATITUDE, geo.getLatitude());
        meta.put(Util.FN_LONGITUDE, geo.getLongitude());
        return meta;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.cloudtasks.v2beta3.model;
/**
* A unit of scheduled work.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Tasks API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Task extends com.google.api.client.json.GenericJson {

  /**
   * HTTP request that is sent to the App Engine app handler. An App Engine task is a task that has
   * AppEngineHttpRequest set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private AppEngineHttpRequest appEngineHttpRequest;

  /**
   * Output only. The time that the task was created. `create_time` will be truncated to the nearest
   * second.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String createTime;

  /**
   * Output only. The number of attempts dispatched. This count includes attempts which have been
   * dispatched but haven't received a response.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer dispatchCount;

  /**
   * The deadline for requests sent to the worker. If the worker does not respond by this deadline
   * then the request is cancelled and the attempt is marked as a `DEADLINE_EXCEEDED` failure. Cloud
   * Tasks will retry the task according to the RetryConfig. Note that when the request is
   * cancelled, Cloud Tasks will stop listening for the response, but whether the worker stops
   * processing depends on the worker. For example, if the worker is stuck, it may not react to
   * cancelled requests. The default and maximum values depend on the type of request: * For HTTP
   * tasks, the default is 10 minutes. The deadline must be in the interval [15 seconds, 30
   * minutes]. * For App Engine tasks, 0 indicates that the request has the default deadline. The
   * default deadline depends on the [scaling
   * type](https://cloud.google.com/appengine/docs/standard/go/how-instances-are-
   * managed#instance_scaling) of the service: 10 minutes for standard apps with automatic scaling,
   * 24 hours for standard apps with manual and basic scaling, and 60 minutes for flex apps. If the
   * request deadline is set, it must be in the interval [15 seconds, 24 hours 15 seconds].
   * Regardless of the task's `dispatch_deadline`, the app handler will not run for longer than
   * the service's timeout. We recommend setting the `dispatch_deadline` to at most a few seconds
   * more than the app handler's timeout. For more information see
   * [Timeouts](https://cloud.google.com/tasks/docs/creating-appengine-handlers#timeouts).
   * `dispatch_deadline` will be truncated to the nearest millisecond. The deadline is an
   * approximate deadline.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String dispatchDeadline;

  /**
   * Output only. The status of the task's first attempt. Only dispatch_time will be set. The other
   * Attempt information is not retained by Cloud Tasks.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Attempt firstAttempt;

  /**
   * HTTP request that is sent to the task's target. An HTTP task is a task that has HttpRequest
   * set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private HttpRequest httpRequest;

  /**
   * Output only. The status of the task's last attempt.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Attempt lastAttempt;

  /**
   * Optionally caller-specified in CreateTask. The task name. The task name must have the following
   * format: `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` *
   * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), or
   * periods (.). For more information, see [Identifying projects](https://cloud.google.com
   * /resource-manager/docs/creating-managing-projects#identifying_projects) * `LOCATION_ID` is the
   * canonical ID for the task's location. The list of available locations can be obtained by
   * calling ListLocations. For more information, see https://cloud.google.com/about/locations/. *
   * `QUEUE_ID` can contain letters ([A-Za-z]), numbers ([0-9]), or hyphens (-). The maximum length
   * is 100 characters. * `TASK_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens
   * (-), or underscores (_). The maximum length is 500 characters.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Output only. The number of attempts which have received a response.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer responseCount;

  /**
   * The time when the task is scheduled to be attempted. For App Engine queues, this is when the
   * task will be attempted or retried. `schedule_time` will be truncated to the nearest
   * microsecond.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String scheduleTime;

  /**
   * Output only. The view specifies which subset of the Task has been returned.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String view;

  /**
   * HTTP request that is sent to the App Engine app handler. An App Engine task is a task that has
   * AppEngineHttpRequest set.
   * @return value or {@code null} for none
   */
  public AppEngineHttpRequest getAppEngineHttpRequest() {
    return appEngineHttpRequest;
  }

  /**
   * HTTP request that is sent to the App Engine app handler. An App Engine task is a task that has
   * AppEngineHttpRequest set.
   * @param appEngineHttpRequest appEngineHttpRequest or {@code null} for none
   */
  public Task setAppEngineHttpRequest(AppEngineHttpRequest appEngineHttpRequest) {
    this.appEngineHttpRequest = appEngineHttpRequest;
    return this;
  }

  /**
   * Output only. The time that the task was created. `create_time` will be truncated to the nearest
   * second.
   * @return value or {@code null} for none
   */
  public String getCreateTime() {
    return createTime;
  }

  /**
   * Output only. The time that the task was created. `create_time` will be truncated to the nearest
   * second.
   * @param createTime createTime or {@code null} for none
   */
  public Task setCreateTime(String createTime) {
    this.createTime = createTime;
    return this;
  }

  /**
   * Output only. The number of attempts dispatched. This count includes attempts which have been
   * dispatched but haven't received a response.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getDispatchCount() {
    return dispatchCount;
  }

  /**
   * Output only. The number of attempts dispatched. This count includes attempts which have been
   * dispatched but haven't received a response.
   * @param dispatchCount dispatchCount or {@code null} for none
   */
  public Task setDispatchCount(java.lang.Integer dispatchCount) {
    this.dispatchCount = dispatchCount;
    return this;
  }

  /**
   * The deadline for requests sent to the worker. If the worker does not respond by this deadline
   * then the request is cancelled and the attempt is marked as a `DEADLINE_EXCEEDED` failure. Cloud
   * Tasks will retry the task according to the RetryConfig. Note that when the request is
   * cancelled, Cloud Tasks will stop listening for the response, but whether the worker stops
   * processing depends on the worker. For example, if the worker is stuck, it may not react to
   * cancelled requests. The default and maximum values depend on the type of request: * For HTTP
   * tasks, the default is 10 minutes. The deadline must be in the interval [15 seconds, 30
   * minutes]. * For App Engine tasks, 0 indicates that the request has the default deadline. The
   * default deadline depends on the [scaling
   * type](https://cloud.google.com/appengine/docs/standard/go/how-instances-are-
   * managed#instance_scaling) of the service: 10 minutes for standard apps with automatic scaling,
   * 24 hours for standard apps with manual and basic scaling, and 60 minutes for flex apps. If the
   * request deadline is set, it must be in the interval [15 seconds, 24 hours 15 seconds].
   * Regardless of the task's `dispatch_deadline`, the app handler will not run for longer than
   * the service's timeout. We recommend setting the `dispatch_deadline` to at most a few seconds
   * more than the app handler's timeout. For more information see
   * [Timeouts](https://cloud.google.com/tasks/docs/creating-appengine-handlers#timeouts).
   * `dispatch_deadline` will be truncated to the nearest millisecond. The deadline is an
   * approximate deadline.
   * @return value or {@code null} for none
   */
  public String getDispatchDeadline() {
    return dispatchDeadline;
  }

  /**
   * The deadline for requests sent to the worker. If the worker does not respond by this deadline
   * then the request is cancelled and the attempt is marked as a `DEADLINE_EXCEEDED` failure. Cloud
   * Tasks will retry the task according to the RetryConfig. Note that when the request is
   * cancelled, Cloud Tasks will stop listening for the response, but whether the worker stops
   * processing depends on the worker. For example, if the worker is stuck, it may not react to
   * cancelled requests. The default and maximum values depend on the type of request: * For HTTP
   * tasks, the default is 10 minutes. The deadline must be in the interval [15 seconds, 30
   * minutes]. * For App Engine tasks, 0 indicates that the request has the default deadline. The
   * default deadline depends on the [scaling
   * type](https://cloud.google.com/appengine/docs/standard/go/how-instances-are-
   * managed#instance_scaling) of the service: 10 minutes for standard apps with automatic scaling,
   * 24 hours for standard apps with manual and basic scaling, and 60 minutes for flex apps. If the
   * request deadline is set, it must be in the interval [15 seconds, 24 hours 15 seconds].
   * Regardless of the task's `dispatch_deadline`, the app handler will not run for longer than
   * the service's timeout. We recommend setting the `dispatch_deadline` to at most a few seconds
   * more than the app handler's timeout. For more information see
   * [Timeouts](https://cloud.google.com/tasks/docs/creating-appengine-handlers#timeouts).
   * `dispatch_deadline` will be truncated to the nearest millisecond. The deadline is an
   * approximate deadline.
   * @param dispatchDeadline dispatchDeadline or {@code null} for none
   */
  public Task setDispatchDeadline(String dispatchDeadline) {
    this.dispatchDeadline = dispatchDeadline;
    return this;
  }

  /**
   * Output only. The status of the task's first attempt. Only dispatch_time will be set. The other
   * Attempt information is not retained by Cloud Tasks.
   * @return value or {@code null} for none
   */
  public Attempt getFirstAttempt() {
    return firstAttempt;
  }

  /**
   * Output only. The status of the task's first attempt. Only dispatch_time will be set. The other
   * Attempt information is not retained by Cloud Tasks.
   * @param firstAttempt firstAttempt or {@code null} for none
   */
  public Task setFirstAttempt(Attempt firstAttempt) {
    this.firstAttempt = firstAttempt;
    return this;
  }

  /**
   * HTTP request that is sent to the task's target. An HTTP task is a task that has HttpRequest
   * set.
   * @return value or {@code null} for none
   */
  public HttpRequest getHttpRequest() {
    return httpRequest;
  }

  /**
   * HTTP request that is sent to the task's target. An HTTP task is a task that has HttpRequest
   * set.
   * @param httpRequest httpRequest or {@code null} for none
   */
  public Task setHttpRequest(HttpRequest httpRequest) {
    this.httpRequest = httpRequest;
    return this;
  }

  /**
   * Output only. The status of the task's last attempt.
   * @return value or {@code null} for none
   */
  public Attempt getLastAttempt() {
    return lastAttempt;
  }

  /**
   * Output only. The status of the task's last attempt.
   * @param lastAttempt lastAttempt or {@code null} for none
   */
  public Task setLastAttempt(Attempt lastAttempt) {
    this.lastAttempt = lastAttempt;
    return this;
  }

  /**
   * Optionally caller-specified in CreateTask. The task name. The task name must have the following
   * format: `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` *
   * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), or
   * periods (.). For more information, see [Identifying projects](https://cloud.google.com
   * /resource-manager/docs/creating-managing-projects#identifying_projects) * `LOCATION_ID` is the
   * canonical ID for the task's location. The list of available locations can be obtained by
   * calling ListLocations. For more information, see https://cloud.google.com/about/locations/. *
   * `QUEUE_ID` can contain letters ([A-Za-z]), numbers ([0-9]), or hyphens (-). The maximum length
   * is 100 characters. * `TASK_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens
   * (-), or underscores (_). The maximum length is 500 characters.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * Optionally caller-specified in CreateTask. The task name. The task name must have the following
   * format: `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` *
   * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), or
   * periods (.). For more information, see [Identifying projects](https://cloud.google.com
   * /resource-manager/docs/creating-managing-projects#identifying_projects) * `LOCATION_ID` is the
   * canonical ID for the task's location. The list of available locations can be obtained by
   * calling ListLocations. For more information, see https://cloud.google.com/about/locations/. *
   * `QUEUE_ID` can contain letters ([A-Za-z]), numbers ([0-9]), or hyphens (-). The maximum length
   * is 100 characters. * `TASK_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens
   * (-), or underscores (_). The maximum length is 500 characters.
   * @param name name or {@code null} for none
   */
  public Task setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * Output only. The number of attempts which have received a response.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getResponseCount() {
    return responseCount;
  }

  /**
   * Output only. The number of attempts which have received a response.
   * @param responseCount responseCount or {@code null} for none
   */
  public Task setResponseCount(java.lang.Integer responseCount) {
    this.responseCount = responseCount;
    return this;
  }

  /**
   * The time when the task is scheduled to be attempted. For App Engine queues, this is when the
   * task will be attempted or retried. `schedule_time` will be truncated to the nearest
   * microsecond.
   * @return value or {@code null} for none
   */
  public String getScheduleTime() {
    return scheduleTime;
  }

  /**
   * The time when the task is scheduled to be attempted. For App Engine queues, this is when the
   * task will be attempted or retried. `schedule_time` will be truncated to the nearest
   * microsecond.
   * @param scheduleTime scheduleTime or {@code null} for none
   */
  public Task setScheduleTime(String scheduleTime) {
    this.scheduleTime = scheduleTime;
    return this;
  }

  /**
   * Output only. The view specifies which subset of the Task has been returned.
   * @return value or {@code null} for none
   */
  public java.lang.String getView() {
    return view;
  }

  /**
   * Output only. The view specifies which subset of the Task has been returned.
   * @param view view or {@code null} for none
   */
  public Task setView(java.lang.String view) {
    this.view = view;
    return this;
  }

  // Covariant overrides narrow the GenericJson return type for fluent use.
  @Override
  public Task set(String fieldName, Object value) {
    return (Task) super.set(fieldName, value);
  }

  @Override
  public Task clone() {
    return (Task) super.clone();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.xsort;
import java.io.IOException;
import java.util.Iterator;
import java.util.concurrent.TimeUnit;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.cache.VectorAccessibleSerializable;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.SchemaUtil;
import org.apache.drill.exec.record.TransferPair;
import org.apache.drill.exec.record.TypedFieldId;
import org.apache.drill.exec.record.VectorAccessible;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.record.WritableBatch;
import org.apache.drill.exec.record.selection.SelectionVector2;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.base.Stopwatch;
/**
 * Holds one group of batches for the external sort: either a purely in-memory
 * container (optionally filtered through a {@link SelectionVector2}), or a
 * container whose overflow batches have been spilled to a file on the supplied
 * {@link FileSystem} and are read back one at a time.
 */
public class BatchGroup implements VectorAccessible, AutoCloseable {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BatchGroup.class);

  private VectorContainer currentContainer;
  private SelectionVector2 sv2;
  // Index of the next record to hand out from currentContainer.
  private int pointer = 0;
  private FSDataInputStream inputStream;
  private FSDataOutputStream outputStream;
  private Path path;
  private FileSystem fs;
  private BufferAllocator allocator;
  // Number of batches written to the spill file but not yet read back.
  private int spilledBatches = 0;
  private OperatorContext context;
  private BatchSchema schema;

  /**
   * Creates an in-memory group; records live in {@code container}, optionally
   * reordered/filtered by {@code sv2}.
   */
  public BatchGroup(VectorContainer container, SelectionVector2 sv2, OperatorContext context) {
    this.sv2 = sv2;
    this.currentContainer = container;
    this.context = context;
  }

  /**
   * Creates a spillable group whose batches are serialized to {@code path} on
   * {@code fs}. Allocation for deserialized batches uses the operator's allocator.
   */
  public BatchGroup(VectorContainer container, FileSystem fs, String path, OperatorContext context) {
    currentContainer = container;
    this.fs = fs;
    this.path = new Path(path);
    this.allocator = context.getAllocator();
    this.context = context;
  }

  public SelectionVector2 getSv2() {
    return sv2;
  }

  /**
   * Updates the schema for this batch group. The current as well as any deserialized batches
   * will be coerced to this schema.
   * @param schema the schema all batches in this group are coerced to
   */
  public void setSchema(BatchSchema schema) {
    currentContainer = SchemaUtil.coerceContainer(currentContainer, schema, context);
    this.schema = schema;
  }

  /**
   * Serializes {@code newContainer} to the spill file (creating the output
   * stream lazily on first use) and releases the container's buffers.
   *
   * @param newContainer batch to spill; zeroed after writing
   * @throws IOException if the spill file cannot be created or written
   */
  public void addBatch(VectorContainer newContainer) throws IOException {
    assert fs != null;
    assert path != null;
    if (outputStream == null) {
      outputStream = fs.create(path);
    }
    int recordCount = newContainer.getRecordCount();
    WritableBatch batch = WritableBatch.getBatchNoHVWrap(recordCount, newContainer, false);
    VectorAccessibleSerializable outputBatch = new VectorAccessibleSerializable(batch, allocator);
    Stopwatch watch = new Stopwatch();
    watch.start();
    outputBatch.writeToStream(outputStream);
    newContainer.zeroVectors();
    logger.debug("Took {} us to spill {} records", watch.elapsed(TimeUnit.MICROSECONDS), recordCount);
    spilledBatches++;
  }

  /**
   * Reads the next spilled batch back from disk (opening the input stream
   * lazily), coerces it to the group's schema if one was set, and transfers its
   * vectors into {@code currentContainer}.
   *
   * @return the (now emptied) deserialized container
   * @throws IOException if the spill file cannot be opened or read
   */
  private VectorContainer getBatch() throws IOException {
    assert fs != null;
    assert path != null;
    if (inputStream == null) {
      inputStream = fs.open(path);
    }
    VectorAccessibleSerializable vas = new VectorAccessibleSerializable(allocator);
    vas.readFromStream(inputStream);
    VectorContainer c = vas.get();
    if (schema != null) {
      c = SchemaUtil.coerceContainer(c, schema, context);
    }
    spilledBatches--;
    currentContainer.zeroVectors();
    // Transfer the deserialized vectors into the live container pairwise;
    // both containers are assumed to hold the same schema/ordering.
    Iterator<VectorWrapper<?>> wrapperIterator = c.iterator();
    for (VectorWrapper<?> w : currentContainer) {
      TransferPair pair = wrapperIterator.next().getValueVector().makeTransferPair(w.getValueVector());
      pair.transfer();
    }
    currentContainer.setRecordCount(c.getRecordCount());
    c.zeroVectors();
    return c;
  }

  /**
   * Returns the index of the next record to read, loading the next spilled
   * batch when the current one is exhausted, or -1 when no records remain.
   */
  public int getNextIndex() {
    int val;
    if (pointer == getRecordCount()) {
      if (spilledBatches == 0) {
        return -1;
      }
      try {
        currentContainer.zeroVectors();
        getBatch();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
      // First record of the freshly loaded batch; advance past it.
      pointer = 1;
      return 0;
    }
    if (sv2 == null) {
      val = pointer;
      pointer++;
      assert val < currentContainer.getRecordCount();
    } else {
      val = pointer;
      pointer++;
      assert val < currentContainer.getRecordCount();
      // Indirect through the selection vector to get the physical index.
      val = sv2.getIndex(val);
    }
    return val;
  }

  public VectorContainer getContainer() {
    return currentContainer;
  }

  /**
   * Releases buffers and removes the spill file. Fixed so that a failure while
   * closing the output stream no longer leaks the input stream or leaves the
   * spill file behind: each cleanup step runs even if an earlier one throws
   * (the first exception propagates).
   */
  @Override
  public void close() throws IOException {
    currentContainer.zeroVectors();
    if (sv2 != null) {
      sv2.clear();
    }
    try {
      if (outputStream != null) {
        outputStream.close();
      }
    } finally {
      try {
        if (inputStream != null) {
          inputStream.close();
        }
      } finally {
        if (fs != null && fs.exists(path)) {
          fs.delete(path, false);
        }
      }
    }
  }

  /** Closes just the spill output stream, e.g. once spilling is finished. */
  public void closeOutputStream() throws IOException {
    if (outputStream != null) {
      outputStream.close();
    }
  }

  @Override
  public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids) {
    return currentContainer.getValueAccessorById(clazz, ids);
  }

  @Override
  public TypedFieldId getValueVectorId(SchemaPath path) {
    return currentContainer.getValueVectorId(path);
  }

  @Override
  public BatchSchema getSchema() {
    return currentContainer.getSchema();
  }

  @Override
  public int getRecordCount() {
    // With a selection vector the logical record count is the SV's count.
    if (sv2 != null) {
      return sv2.getCount();
    } else {
      return currentContainer.getRecordCount();
    }
  }

  @Override
  public Iterator<VectorWrapper<?>> iterator() {
    return currentContainer.iterator();
  }
}
| |
/**
*/
package CIM.IEC61970.Informative.MarketOperations.impl;
import CIM.IEC61970.Core.CorePackage;
import CIM.IEC61970.Core.Terminal;
import CIM.IEC61970.Informative.MarketOperations.MarketOperationsPackage;
import CIM.IEC61970.Informative.MarketOperations.TerminalConstraintTerm;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Terminal Constraint Term</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link CIM.IEC61970.Informative.MarketOperations.impl.TerminalConstraintTermImpl#getTerminal <em>Terminal</em>}</li>
* </ul>
*
* @generated
*/
public class TerminalConstraintTermImpl extends ConstraintTermImpl implements TerminalConstraintTerm {
  /**
   * The cached value of the '{@link #getTerminal() <em>Terminal</em>}' reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getTerminal()
   * @generated
   * @ordered
   */
  protected Terminal terminal;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected TerminalConstraintTermImpl() {
    super();
  }

  /**
   * <!-- begin-user-doc -->
   * Identifies this object's EClass within the MarketOperations package.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass() {
    return MarketOperationsPackage.Literals.TERMINAL_CONSTRAINT_TERM;
  }

  /**
   * <!-- begin-user-doc -->
   * Returns the referenced Terminal, resolving an EMF proxy first if needed and
   * firing a RESOLVE notification when resolution replaced the cached value.
   * <!-- end-user-doc -->
   * @generated
   */
  public Terminal getTerminal() {
    if (terminal != null && terminal.eIsProxy()) {
      InternalEObject oldTerminal = (InternalEObject)terminal;
      terminal = (Terminal)eResolveProxy(oldTerminal);
      if (terminal != oldTerminal) {
        if (eNotificationRequired())
          eNotify(new ENotificationImpl(this, Notification.RESOLVE, MarketOperationsPackage.TERMINAL_CONSTRAINT_TERM__TERMINAL, oldTerminal, terminal));
      }
    }
    return terminal;
  }

  /**
   * <!-- begin-user-doc -->
   * Returns the cached Terminal without proxy resolution.
   * <!-- end-user-doc -->
   * @generated
   */
  public Terminal basicGetTerminal() {
    return terminal;
  }

  /**
   * <!-- begin-user-doc -->
   * Sets the reference without touching the inverse end; queues a SET
   * notification on the given chain.
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetTerminal(Terminal newTerminal, NotificationChain msgs) {
    Terminal oldTerminal = terminal;
    terminal = newTerminal;
    if (eNotificationRequired()) {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.TERMINAL_CONSTRAINT_TERM__TERMINAL, oldTerminal, newTerminal);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * Sets the Terminal reference, maintaining the bidirectional inverse
   * (TERMINAL__TERMINAL_CONSTRAINTS) on both the old and new Terminal.
   * <!-- end-user-doc -->
   * @generated
   */
  public void setTerminal(Terminal newTerminal) {
    if (newTerminal != terminal) {
      NotificationChain msgs = null;
      if (terminal != null)
        msgs = ((InternalEObject)terminal).eInverseRemove(this, CorePackage.TERMINAL__TERMINAL_CONSTRAINTS, Terminal.class, msgs);
      if (newTerminal != null)
        msgs = ((InternalEObject)newTerminal).eInverseAdd(this, CorePackage.TERMINAL__TERMINAL_CONSTRAINTS, Terminal.class, msgs);
      msgs = basicSetTerminal(newTerminal, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, MarketOperationsPackage.TERMINAL_CONSTRAINT_TERM__TERMINAL, newTerminal, newTerminal));
  }

  /**
   * <!-- begin-user-doc -->
   * Framework hook: detaches any previously referenced Terminal before
   * installing the incoming inverse reference.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
      case MarketOperationsPackage.TERMINAL_CONSTRAINT_TERM__TERMINAL:
        if (terminal != null)
          msgs = ((InternalEObject)terminal).eInverseRemove(this, CorePackage.TERMINAL__TERMINAL_CONSTRAINTS, Terminal.class, msgs);
        return basicSetTerminal((Terminal)otherEnd, msgs);
    }
    return super.eInverseAdd(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * Framework hook: clears the reference when the inverse end is removed.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
      case MarketOperationsPackage.TERMINAL_CONSTRAINT_TERM__TERMINAL:
        return basicSetTerminal(null, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective get; honors the resolve flag for the proxy-resolving getter.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
      case MarketOperationsPackage.TERMINAL_CONSTRAINT_TERM__TERMINAL:
        if (resolve) return getTerminal();
        return basicGetTerminal();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective set for the Terminal feature.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eSet(int featureID, Object newValue) {
    switch (featureID) {
      case MarketOperationsPackage.TERMINAL_CONSTRAINT_TERM__TERMINAL:
        setTerminal((Terminal)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective unset: restores the Terminal feature to its default (null).
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID) {
    switch (featureID) {
      case MarketOperationsPackage.TERMINAL_CONSTRAINT_TERM__TERMINAL:
        setTerminal((Terminal)null);
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective isSet: the Terminal feature is set when non-null.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID) {
    switch (featureID) {
      case MarketOperationsPackage.TERMINAL_CONSTRAINT_TERM__TERMINAL:
        return terminal != null;
    }
    return super.eIsSet(featureID);
  }

} //TerminalConstraintTermImpl
| |
/*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel;
import io.netty.buffer.BufType;
import io.netty.buffer.BufUtil;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.MessageBuf;
import io.netty.util.Signal;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
/**
* Utility methods for use within your {@link ChannelHandler} implementation.
*/
public final class ChannelHandlerUtil {
public static final Signal ABORT = new Signal(ChannelHandlerUtil.class.getName() + ".ABORT");
private static final InternalLogger logger = InternalLoggerFactory.getInstance(ChannelHandlerUtil.class);
/**
 * Drains the inbound message buffer, dispatching each message the handler
 * accepts to {@code messageReceived} and forwarding rejected ones to the next
 * inbound buffer. The {@link #ABORT} signal stops processing without failing;
 * {@code endMessageReceived} always runs, and a forward triggers
 * {@code fireInboundBufferUpdated()}.
 */
public static <T> void handleInboundBufferUpdated(
        ChannelHandlerContext ctx, SingleInboundMessageHandler<T> handler) throws Exception {
    MessageBuf<Object> inbound = ctx.inboundMessageBuffer();
    if (inbound.isEmpty() || !handler.beginMessageReceived(ctx)) {
        return;
    }
    MessageBuf<Object> forwarded = ctx.nextInboundMessageBuffer();
    int sizeBefore = forwarded.size();
    try {
        Object msg;
        while ((msg = inbound.poll()) != null) {
            if (!handler.acceptInboundMessage(msg)) {
                // Not a message type this handler understands - pass it along.
                forwarded.add(msg);
                continue;
            }
            @SuppressWarnings("unchecked")
            T accepted = (T) msg;
            try {
                handler.messageReceived(ctx, accepted);
            } finally {
                BufUtil.release(accepted);
            }
        }
    } catch (Signal abort) {
        // Only ABORT is a legal control-flow signal here; anything else rethrows.
        abort.expect(ABORT);
    } finally {
        if (sizeBefore != forwarded.size()) {
            ctx.fireInboundBufferUpdated();
        }
        handler.endMessageReceived(ctx);
    }
}
/**
 * Convenience overload of
 * {@link #handleFlush(ChannelHandlerContext, ChannelPromise, boolean, SingleOutboundMessageHandler)}
 * that closes the channel when the flush fails.
 */
public static <T> void handleFlush(
        ChannelHandlerContext ctx, ChannelPromise promise,
        SingleOutboundMessageHandler<T> handler) throws Exception {
    handleFlush(ctx, promise, true, handler);
}
/**
 * Drains the outbound message buffer through {@code handler} and then flushes
 * the channel. Messages the handler does not accept are forwarded to the next
 * outbound buffer. If the handler rejects the flush, throws, or
 * {@code endFlush()} fails, the promise is failed and - when
 * {@code closeOnFailedFlush} is {@code true} - the channel is closed.
 *
 * @param ctx                the handler context whose outbound buffer is drained
 * @param promise            completed on success, failed on error
 * @param closeOnFailedFlush whether to close the channel after a failed flush
 * @param handler            processes each accepted outbound message
 */
public static <T> void handleFlush(
        ChannelHandlerContext ctx, ChannelPromise promise, boolean closeOnFailedFlush,
        SingleOutboundMessageHandler<T> handler) throws Exception {
    MessageBuf<Object> in = ctx.outboundMessageBuffer();
    final int inSize = in.size();
    if (inSize == 0) {
        ctx.flush(promise);
        return;
    }
    boolean failed = false;
    int processed = 0;
    try {
        if (!handler.beginFlush(ctx)) {
            // Fixed typo in the diagnostic: "fulshed" -> "flushed".
            throw new IncompleteFlushException(
                    "beginFlush(..) rejected the flush request by returning false. " +
                    "none of " + inSize + " message(s) flushed.");
        }
        for (;;) {
            Object msg = in.poll();
            if (msg == null) {
                break;
            }
            if (!handler.acceptOutboundMessage(msg)) {
                // Not ours - pass it through but still count it as handled.
                addToNextOutboundBuffer(ctx, msg);
                processed ++;
                continue;
            }
            @SuppressWarnings("unchecked")
            T imsg = (T) msg;
            try {
                handler.flush(ctx, imsg);
                processed ++;
            } finally {
                BufUtil.release(imsg);
            }
        }
    } catch (Throwable t) {
        failed = true;
        // Normalize every failure into an IncompleteFlushException that reports
        // how many messages made it through before the error.
        IncompleteFlushException pfe;
        if (t instanceof IncompleteFlushException) {
            pfe = (IncompleteFlushException) t;
        } else {
            String msg = processed + " out of " + inSize + " message(s) flushed";
            if (t instanceof Signal) {
                Signal abort = (Signal) t;
                abort.expect(ABORT);
                pfe = new IncompleteFlushException("aborted: " + msg);
            } else {
                pfe = new IncompleteFlushException(msg, t);
            }
        }
        fail(ctx, promise, closeOnFailedFlush, pfe);
    }
    // endFlush() must run whether or not draining failed.
    try {
        handler.endFlush(ctx);
    } catch (Throwable t) {
        failed = true;
        fail(ctx, promise, closeOnFailedFlush, t);
    }
    if (!failed) {
        ctx.flush(promise);
    }
}
/**
 * Fails {@code promise} with {@code cause}; if the promise was already done,
 * the cause would otherwise be lost, so it is logged instead. Optionally
 * closes the channel after a successfully recorded failure.
 */
private static void fail(
        ChannelHandlerContext ctx, ChannelPromise promise, boolean closeOnFailedFlush, Throwable cause) {
    if (!promise.tryFailure(cause)) {
        logger.warn("endFlush() raised a masked exception due to failed flush().", cause);
        return;
    }
    if (closeOnFailedFlush) {
        ctx.close();
    }
}
/**
 * Allocates a {@link ByteBuf} with default capacity, honouring the channel's
 * {@link ChannelConfig#getDefaultHandlerByteBufType()} preference.
 */
public static ByteBuf allocate(ChannelHandlerContext ctx) {
    switch (ctx.channel().config().getDefaultHandlerByteBufType()) {
        case HEAP:
            return ctx.alloc().heapBuffer();
        case DIRECT:
            return ctx.alloc().directBuffer();
        case PREFER_DIRECT:
            return ctx.alloc().ioBuffer();
        default:
            // The enum gained a value this method does not know about.
            throw new IllegalStateException();
    }
}
/**
 * Allocates a {@link ByteBuf} with the given initial capacity, honouring the
 * channel's {@link ChannelConfig#getDefaultHandlerByteBufType()} preference.
 */
public static ByteBuf allocate(ChannelHandlerContext ctx, int initialCapacity) {
    switch (ctx.channel().config().getDefaultHandlerByteBufType()) {
        case HEAP:
            return ctx.alloc().heapBuffer(initialCapacity);
        case DIRECT:
            return ctx.alloc().directBuffer(initialCapacity);
        case PREFER_DIRECT:
            return ctx.alloc().ioBuffer(initialCapacity);
        default:
            // The enum gained a value this method does not know about.
            throw new IllegalStateException();
    }
}
/**
 * Allocates a {@link ByteBuf} with the given initial and maximum capacity,
 * honouring the channel's {@link ChannelConfig#getDefaultHandlerByteBufType()}
 * preference.
 */
public static ByteBuf allocate(ChannelHandlerContext ctx, int initialCapacity, int maxCapacity) {
    switch (ctx.channel().config().getDefaultHandlerByteBufType()) {
        case HEAP:
            return ctx.alloc().heapBuffer(initialCapacity, maxCapacity);
        case DIRECT:
            return ctx.alloc().directBuffer(initialCapacity, maxCapacity);
        case PREFER_DIRECT:
            return ctx.alloc().ioBuffer(initialCapacity, maxCapacity);
        default:
            // The enum gained a value this method does not know about.
            throw new IllegalStateException();
    }
}
/**
 * Adds {@code msg} to the next outbound buffer in the {@link ChannelPipeline}.
 * A {@link ByteBuf} headed for a byte-typed buffer is copied into the next
 * outbound byte buffer; everything else goes into the message buffer.
 */
public static boolean addToNextOutboundBuffer(ChannelHandlerContext ctx, Object msg) {
    boolean bytesToByteBuffer =
            msg instanceof ByteBuf && ctx.nextOutboundBufferType() == BufType.BYTE;
    if (bytesToByteBuffer) {
        ctx.nextOutboundByteBuffer().writeBytes((ByteBuf) msg);
        return true;
    }
    return ctx.nextOutboundMessageBuffer().add(msg);
}
/**
 * Add the msg to the next inbound buffer in the {@link ChannelPipeline}. This takes special care of
 * msgs that are of type {@link ByteBuf}.
 *
 * @return {@code true} if the message was consumed as bytes or accepted by the message buffer
 */
public static boolean addToNextInboundBuffer(ChannelHandlerContext ctx, Object msg) {
    // A ByteBuf headed into a byte-oriented buffer is copied byte-wise rather than
    // queued as an opaque message.
    if (msg instanceof ByteBuf && ctx.nextInboundBufferType() == BufType.BYTE) {
        ctx.nextInboundByteBuffer().writeBytes((ByteBuf) msg);
        return true;
    }
    return ctx.nextInboundMessageBuffer().add(msg);
}
private ChannelHandlerUtil() { }
/**
 * Contract for handlers that process inbound messages one at a time.
 *
 * @param <T> the type of message this handler processes
 */
public interface SingleInboundMessageHandler<T> {
    /**
     * Returns {@code true} if and only if the specified message can be handled by this handler.
     *
     * @param msg the message
     */
    boolean acceptInboundMessage(Object msg) throws Exception;
    /**
     * Will get notified once {@link ChannelStateHandler#inboundBufferUpdated(ChannelHandlerContext)} was called.
     *
     * If this method returns {@code false}, no further processing of the {@link MessageBuf}
     * will be done until the next call of {@link ChannelStateHandler#inboundBufferUpdated(ChannelHandlerContext)}.
     *
     * Implementations typically return {@code true}; returning {@code false} may be used
     * for special handling.
     *
     * @param ctx the {@link ChannelHandlerContext} which this {@link ChannelHandler} belongs to
     */
    boolean beginMessageReceived(ChannelHandlerContext ctx) throws Exception;
    /**
     * Is called once a message was received.
     *
     * @param ctx the {@link ChannelHandlerContext} which this {@link ChannelHandler} belongs to
     * @param msg the message to handle
     */
    void messageReceived(ChannelHandlerContext ctx, T msg) throws Exception;
    /**
     * Is called when {@link #messageReceived(ChannelHandlerContext, Object)} returns.
     *
     * Implementations may override this for special handling.
     *
     * @param ctx the {@link ChannelHandlerContext} which this {@link ChannelHandler} belongs to
     */
    void endMessageReceived(ChannelHandlerContext ctx) throws Exception;
}
/**
 * Contract for handlers that flush outbound messages one at a time.
 *
 * @param <T> the type of message this handler flushes
 */
public interface SingleOutboundMessageHandler<T> {
    /**
     * Returns {@code true} if and only if the specified message can be handled by this handler.
     *
     * @param msg the message
     */
    boolean acceptOutboundMessage(Object msg) throws Exception;
    /**
     * Will get notified once {@link ChannelOperationHandler#flush(ChannelHandlerContext, ChannelPromise)}
     * was called.
     *
     * @param ctx the {@link ChannelHandlerContext} which this {@link ChannelHandler} belongs to
     *
     * @return {@code true} to accept the flush request. {@code false} to reject the flush request and
     *         to fail the promise associated with the flush request with {@link IncompleteFlushException}.
     */
    boolean beginFlush(ChannelHandlerContext ctx) throws Exception;
    /**
     * Is called once a message is being flushed.
     *
     * @param ctx the {@link ChannelHandlerContext} which this {@link ChannelHandler} belongs to
     * @param msg the message to handle
     */
    void flush(ChannelHandlerContext ctx, T msg) throws Exception;
    /**
     * Is called when {@link ChannelOperationHandler#flush(ChannelHandlerContext, ChannelPromise)} returns.
     *
     * Implementations may override this for special handling.
     *
     * @param ctx the {@link ChannelHandlerContext} which this {@link ChannelHandler} belongs to
     */
    void endFlush(ChannelHandlerContext ctx) throws Exception;
}
}
| |
package org.hl7.fhir.r4.conformance;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hl7.fhir.r4.context.IWorkerContext;
import org.hl7.fhir.r4.formats.IParser;
import org.hl7.fhir.r4.model.Base;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.ElementDefinition;
import org.hl7.fhir.r4.model.ElementDefinition.ElementDefinitionBindingComponent;
import org.hl7.fhir.r4.model.ElementDefinition.ElementDefinitionConstraintComponent;
import org.hl7.fhir.r4.model.ElementDefinition.ElementDefinitionMappingComponent;
import org.hl7.fhir.r4.model.ElementDefinition.TypeRefComponent;
import org.hl7.fhir.r4.model.Enumerations.BindingStrength;
import org.hl7.fhir.r4.model.Enumerations.PublicationStatus;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.PrimitiveType;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.StructureDefinition;
import org.hl7.fhir.r4.model.StructureDefinition.TypeDerivationRule;
import org.hl7.fhir.r4.model.Type;
import org.hl7.fhir.r4.model.UriType;
import org.hl7.fhir.r4.model.ValueSet;
import org.hl7.fhir.r4.model.ValueSet.ConceptReferenceComponent;
import org.hl7.fhir.r4.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.r4.model.ValueSet.ValueSetExpansionContainsComponent;
import org.hl7.fhir.r4.terminologies.ValueSetExpander.ValueSetExpansionOutcome;
import org.hl7.fhir.r4.utils.DefinitionNavigator;
import org.hl7.fhir.r4.utils.ToolingExtensions;
import org.hl7.fhir.exceptions.DefinitionException;
import org.hl7.fhir.utilities.CommaSeparatedStringBuilder;
import org.hl7.fhir.utilities.Utilities;
import org.hl7.fhir.utilities.validation.ValidationMessage;
import org.hl7.fhir.utilities.validation.ValidationMessage.Source;
/**
* An engine that generates difference analysis between two sets of structure
* definitions, typically from 2 different implementation guides.
*
* How this class works is that you create it with access to a bunch of underlying
* resources that includes all the structure definitions from both implementation
* guides
*
* Once the class is created, you repeatedly pass pairs of structure definitions,
* one from each IG, building up a web of difference analyses. This class will
* automatically process any internal comparisons that it encounters
*
* When all the comparisons have been performed, you can then generate a variety
* of output formats
*
* @author Grahame Grieve
*
*/
public class ProfileComparer {
/** Worker context used to resolve, parse and expand resources during comparison. */
private IWorkerContext context;

/**
 * Create a comparer.
 *
 * @param context provides access to the structure definitions and terminology
 *                services needed while comparing profiles
 */
public ProfileComparer(IWorkerContext context) {
    this.context = context;
}
// Null-handling modes for ruleCompares: BOTH_NULL means the pair is acceptable only
// when both values are null together; EITHER_NULL means a single null operand is
// acceptable on its own.
private static final int BOTH_NULL = 0;
private static final int EITHER_NULL = 1;
/**
 * The outcome of comparing one pair of structure definitions: the two inputs, the
 * messages generated along the way, and the computed subset (intersection) and
 * superset (union) structures.
 */
public class ProfileComparison {
    // Sequential identifier assigned when the comparison is registered (1-based).
    private String id;
    /**
     * the first of two structures that were compared to generate this comparison
     *
     * In a few cases - selection of example content and value sets - left gets
     * preference over right
     */
    private StructureDefinition left;
    /**
     * the second of two structures that were compared to generate this comparison
     *
     * In a few cases - selection of example content and value sets - left gets
     * preference over right
     */
    private StructureDefinition right;
    public String getId() {
        return id;
    }
    // Convenience accessors used when building message text.
    private String leftName() {
        return left.getName();
    }
    private String rightName() {
        return right.getName();
    }
    /**
     * messages generated during the comparison. There are 4 grades of messages:
     * information - a list of differences between structures
     * warnings - notifies that the comparer is unable to fully compare the structures (constraints differ, open value sets)
     * errors - where the structures are incompatible
     * fatal errors - some error that prevented full analysis
     */
    private List<ValidationMessage> messages = new ArrayList<ValidationMessage>();
    /**
     * The structure that describes all instances that will conform to both structures
     */
    private StructureDefinition subset;
    /**
     * The structure that describes all instances that will conform to either structures
     */
    private StructureDefinition superset;
    public StructureDefinition getLeft() {
        return left;
    }
    public StructureDefinition getRight() {
        return right;
    }
    public List<ValidationMessage> getMessages() {
        return messages;
    }
    public StructureDefinition getSubset() {
        return subset;
    }
    public StructureDefinition getSuperset() {
        return superset;
    }
    /**
     * Check two string values for equality, recording an ERROR message (and marking the
     * element) when they differ.
     *
     * NOTE(review): this method always returns true, even after recording an error, so
     * callers that test the return value never actually bail out here - confirm intended.
     * Also, when both values are null and nullOK is false, the first branch records an
     * error and then falls through into the second check, which records a second error
     * for the same condition.
     */
    private boolean ruleEqual(String path, ElementDefinition ed, String vLeft, String vRight, String description, boolean nullOK) {
        if (vLeft == null && vRight == null && nullOK)
            return true;
        if (vLeft == null && vRight == null) {
            messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, description+" and not null (null/null)", ValidationMessage.IssueSeverity.ERROR));
            if (ed != null)
                status(ed, ProfileUtilities.STATUS_ERROR);
        }
        if (vLeft == null || !vLeft.equals(vRight)) {
            messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, description+" ("+vLeft+"/"+vRight+")", ValidationMessage.IssueSeverity.ERROR));
            if (ed != null)
                status(ed, ProfileUtilities.STATUS_ERROR);
        }
        return true;
    }
    /**
     * Check two typed values for deep equality, recording an ERROR when they differ.
     * nullStatus (BOTH_NULL / EITHER_NULL) controls which null combinations are acceptable.
     *
     * NOTE(review): like ruleEqual above, this always returns true regardless of whether
     * an error was recorded.
     */
    private boolean ruleCompares(ElementDefinition ed, Type vLeft, Type vRight, String path, int nullStatus) throws IOException {
        if (vLeft == null && vRight == null && nullStatus == BOTH_NULL)
            return true;
        if (vLeft == null && vRight == null) {
            messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "Must be the same and not null (null/null)", ValidationMessage.IssueSeverity.ERROR));
            status(ed, ProfileUtilities.STATUS_ERROR);
        }
        if (vLeft == null && nullStatus == EITHER_NULL)
            return true;
        if (vRight == null && nullStatus == EITHER_NULL)
            return true;
        if (vLeft == null || vRight == null || !Base.compareDeep(vLeft, vRight, false)) {
            messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "Must be the same ("+toString(vLeft)+"/"+toString(vRight)+")", ValidationMessage.IssueSeverity.ERROR));
            status(ed, ProfileUtilities.STATUS_ERROR);
        }
        return true;
    }
    /**
     * Record an ERROR (and mark the element) when test is false; returns the test value
     * so callers can bail out on failure.
     */
    private boolean rule(ElementDefinition ed, boolean test, String path, String message) {
        if (!test) {
            messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, message, ValidationMessage.IssueSeverity.ERROR));
            status(ed, ProfileUtilities.STATUS_ERROR);
        }
        return test;
    }
    /**
     * Check two boolean flags for equality, recording an ERROR when they differ.
     * Always returns true - failures are reported via messages only.
     */
    private boolean ruleEqual(ElementDefinition ed, boolean vLeft, boolean vRight, String path, String elementName) {
        if (vLeft != vRight) {
            messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, elementName+" must be the same ("+vLeft+"/"+vRight+")", ValidationMessage.IssueSeverity.ERROR));
            status(ed, ProfileUtilities.STATUS_ERROR);
        }
        return true;
    }
    /**
     * Render a value for inclusion in a message: primitives as a quoted string,
     * anything else serialized to JSON.
     */
    private String toString(Type val) throws IOException {
        if (val instanceof PrimitiveType)
            return "\"" + ((PrimitiveType) val).getValueAsString()+"\"";
        IParser jp = context.newJsonParser();
        return jp.composeString(val, "value");
    }
    /** @return the number of ERROR-level messages, rendered as a string */
    public String getErrorCount() {
        int c = 0;
        for (ValidationMessage vm : messages)
            if (vm.getLevel() == ValidationMessage.IssueSeverity.ERROR)
                c++;
        return Integer.toString(c);
    }
    /** @return the number of WARNING-level messages, rendered as a string */
    public String getWarningCount() {
        int c = 0;
        for (ValidationMessage vm : messages)
            if (vm.getLevel() == ValidationMessage.IssueSeverity.WARNING)
                c++;
        return Integer.toString(c);
    }
    /** @return the number of INFORMATION-level messages, rendered as a string */
    public String getHintCount() {
        int c = 0;
        for (ValidationMessage vm : messages)
            if (vm.getLevel() == ValidationMessage.IssueSeverity.INFORMATION)
                c++;
        return Integer.toString(c);
    }
}
/**
 * Value sets used in the subset and superset
 */
private List<ValueSet> valuesets = new ArrayList<ValueSet>();
// All comparisons performed so far; also acts as a cache keyed by (left URL, right URL).
private List<ProfileComparison> comparisons = new ArrayList<ProfileComparison>();
// Report/identity metadata for the comparison run as a whole.
private String id;
private String title;
private String leftLink;
private String leftName;
private String rightLink;
private String rightName;
/** @return the value sets accumulated while building subsets and supersets */
public List<ValueSet> getValuesets() {
    return valuesets;
}
/**
 * Record an error-status marker on an element definition, keeping the worst
 * (numerically highest) status seen so far.
 *
 * @param ed    the element to mark
 * @param value the status level (e.g. {@code ProfileUtilities.STATUS_ERROR})
 */
public void status(ElementDefinition ed, int value) {
    // Read and write under the same key. The original read the string literal
    // "error-status" while writing under ProfileUtilities.UD_ERROR_STATUS, which
    // silently diverges if the constant's value ever changes.
    ed.setUserData(ProfileUtilities.UD_ERROR_STATUS, Math.max(value, ed.getUserInt(ProfileUtilities.UD_ERROR_STATUS)));
}
/** @return all comparisons performed so far, in the order they were registered */
public List<ProfileComparison> getComparisons() {
    return comparisons;
}
/**
 * Compare left and right structure definitions to see whether they are consistent or not
 *
 * Note that left and right are arbitrary choices. In one respect, left
 * is 'preferred' - the left's example value and data sets will be selected
 * over the right ones in the common structure definition
 *
 * @param left  the first profile; must be non-null with a non-empty snapshot
 * @param right the second profile; must be non-null with a non-empty snapshot
 * @return the comparison outcome; a cached result is returned if this URL pair
 *         was compared before
 * @throws DefinitionException if either input is null or lacks a usable snapshot
 * @throws IOException
 */
public ProfileComparison compareProfiles(StructureDefinition left, StructureDefinition right) throws DefinitionException, IOException {
    ProfileComparison outcome = new ProfileComparison();
    outcome.left = left;
    outcome.right = right;
    // Fail fast on unusable inputs; after this point problems are reported via messages.
    if (left == null)
        throw new DefinitionException("No StructureDefinition provided (left)");
    if (right == null)
        throw new DefinitionException("No StructureDefinition provided (right)");
    if (!left.hasSnapshot())
        throw new DefinitionException("StructureDefinition has no snapshot (left: "+outcome.leftName()+")");
    if (!right.hasSnapshot())
        throw new DefinitionException("StructureDefinition has no snapshot (right: "+outcome.rightName()+")");
    if (left.getSnapshot().getElement().isEmpty())
        throw new DefinitionException("StructureDefinition snapshot is empty (left: "+outcome.leftName()+")");
    if (right.getSnapshot().getElement().isEmpty())
        throw new DefinitionException("StructureDefinition snapshot is empty (right: "+outcome.rightName()+")");
    // Reuse an existing comparison of the same pair of profile URLs.
    for (ProfileComparison pc : comparisons)
        if (pc.left.getUrl().equals(left.getUrl()) && pc.right.getUrl().equals(right.getUrl()))
            return pc;
    outcome.id = Integer.toString(comparisons.size()+1);
    comparisons.add(outcome);
    DefinitionNavigator ln = new DefinitionNavigator(context, left);
    DefinitionNavigator rn = new DefinitionNavigator(context, right);
    // from here on in, any issues go in messages
    outcome.superset = new StructureDefinition();
    outcome.subset = new StructureDefinition();
    if (outcome.ruleEqual(ln.path(), null, ln.path(), rn.path(), "Base Type is not compatible", false)) {
        if (compareElements(outcome, ln.path(), ln, rn)) {
            outcome.subset.setName("intersection of "+outcome.leftName()+" and "+outcome.rightName());
            outcome.subset.setStatus(PublicationStatus.DRAFT);
            outcome.subset.setKind(outcome.left.getKind());
            outcome.subset.setType(outcome.left.getType());
            outcome.subset.setBaseDefinition("http://hl7.org/fhir/StructureDefinition/"+outcome.subset.getType());
            outcome.subset.setDerivation(TypeDerivationRule.CONSTRAINT);
            outcome.subset.setAbstract(false);
            outcome.superset.setName("union of "+outcome.leftName()+" and "+outcome.rightName());
            outcome.superset.setStatus(PublicationStatus.DRAFT);
            outcome.superset.setKind(outcome.left.getKind());
            outcome.superset.setType(outcome.left.getType());
            // Fix: derive the superset's base definition from the superset's own type
            // (previously read outcome.subset.getType(); the value is identical since
            // both are set from left.getType(), but the reference was inconsistent).
            outcome.superset.setBaseDefinition("http://hl7.org/fhir/StructureDefinition/"+outcome.superset.getType());
            outcome.superset.setAbstract(false);
            outcome.superset.setDerivation(TypeDerivationRule.CONSTRAINT);
        } else {
            // Incompatible: discard the partially built structures.
            outcome.subset = null;
            outcome.superset = null;
        }
    }
    return outcome;
}
/**
 * left and right refer to the same element. Are they compatible?
 *
 * Builds a subset (intersection) and superset (union) ElementDefinition for this
 * element, appends them to the outcome's structures, then recurses into children.
 *
 * @param outcome accumulates messages and the subset/superset structures
 * @param path    the dotted path of the element being compared (used in messages)
 * @param left    navigator positioned on the left profile's element
 * @param right   navigator positioned on the right profile's element
 * @return true if the elements and all their children are compatible
 * @throws DefinitionException if there's a problem that needs fixing in this code
 * @throws IOException
 */
private boolean compareElements(ProfileComparison outcome, String path, DefinitionNavigator left, DefinitionNavigator right) throws DefinitionException, IOException {
    // preconditions:
    assert(path != null);
    assert(left != null);
    assert(right != null);
    assert(left.path().equals(right.path()));
    // we ignore slicing right now - we're going to clone the root one anyway, and then think about clones
    // simple stuff
    ElementDefinition subset = new ElementDefinition();
    subset.setPath(left.path());
    // not allowed to be different:
    subset.getRepresentation().addAll(left.current().getRepresentation()); // can't be bothered even testing this one
    if (!outcome.ruleCompares(subset, left.current().getDefaultValue(), right.current().getDefaultValue(), path+".defaultValue[x]", BOTH_NULL))
        return false;
    subset.setDefaultValue(left.current().getDefaultValue());
    if (!outcome.ruleEqual(path, subset, left.current().getMeaningWhenMissing(), right.current().getMeaningWhenMissing(), "meaningWhenMissing Must be the same", true))
        return false;
    subset.setMeaningWhenMissing(left.current().getMeaningWhenMissing());
    if (!outcome.ruleEqual(subset, left.current().getIsModifier(), right.current().getIsModifier(), path, "isModifier"))
        return false;
    subset.setIsModifier(left.current().getIsModifier());
    if (!outcome.ruleEqual(subset, left.current().getIsSummary(), right.current().getIsSummary(), path, "isSummary"))
        return false;
    subset.setIsSummary(left.current().getIsSummary());
    // descriptive properties from ElementDefinition - merge them:
    subset.setLabel(mergeText(subset, outcome, path, "label", left.current().getLabel(), right.current().getLabel()));
    subset.setShort(mergeText(subset, outcome, path, "short", left.current().getShort(), right.current().getShort()));
    subset.setDefinition(mergeText(subset, outcome, path, "definition", left.current().getDefinition(), right.current().getDefinition()));
    subset.setComment(mergeText(subset, outcome, path, "comments", left.current().getComment(), right.current().getComment()));
    subset.setRequirements(mergeText(subset, outcome, path, "requirements", left.current().getRequirements(), right.current().getRequirements()));
    subset.getCode().addAll(mergeCodings(left.current().getCode(), right.current().getCode()));
    subset.getAlias().addAll(mergeStrings(left.current().getAlias(), right.current().getAlias()));
    subset.getMapping().addAll(mergeMappings(left.current().getMapping(), right.current().getMapping()));
    // left will win for example
    subset.setExample(left.current().hasExample() ? left.current().getExample() : right.current().getExample());
    subset.setMustSupport(left.current().getMustSupport() || right.current().getMustSupport());
    // The superset starts as a copy of the merged descriptive content, then gets the
    // union cardinalities/types while the subset gets the intersections.
    ElementDefinition superset = subset.copy();
    // compare and intersect
    superset.setMin(unionMin(left.current().getMin(), right.current().getMin()));
    superset.setMax(unionMax(left.current().getMax(), right.current().getMax()));
    subset.setMin(intersectMin(left.current().getMin(), right.current().getMin()));
    subset.setMax(intersectMax(left.current().getMax(), right.current().getMax()));
    outcome.rule(subset, subset.getMax().equals("*") || Integer.parseInt(subset.getMax()) >= subset.getMin(), path, "Cardinality Mismatch: "+card(left)+"/"+card(right));
    superset.getType().addAll(unionTypes(path, left.current().getType(), right.current().getType()));
    subset.getType().addAll(intersectTypes(subset, outcome, path, left.current().getType(), right.current().getType()));
    outcome.rule(subset, !subset.getType().isEmpty() || (!left.current().hasType() && !right.current().hasType()), path, "Type Mismatch:\r\n  "+typeCode(left)+"\r\n  "+typeCode(right));
    // <fixed[x]><!-- ?? 0..1 * Value must be exactly this --></fixed[x]>
    // <pattern[x]><!-- ?? 0..1 * Value must have at least these property values --></pattern[x]>
    superset.setMaxLengthElement(unionMaxLength(left.current().getMaxLength(), right.current().getMaxLength()));
    subset.setMaxLengthElement(intersectMaxLength(left.current().getMaxLength(), right.current().getMaxLength()));
    if (left.current().hasBinding() || right.current().hasBinding()) {
        // NOTE(review): the return value of compareBindings is ignored here - confirm intended.
        compareBindings(outcome, subset, superset, path, left.current(), right.current());
    }
    // note these are backwards
    superset.getConstraint().addAll(intersectConstraints(path, left.current().getConstraint(), right.current().getConstraint()));
    subset.getConstraint().addAll(unionConstraints(subset, outcome, path, left.current().getConstraint(), right.current().getConstraint()));
    // now process the slices
    if (left.current().hasSlicing() || right.current().hasSlicing()) {
        // Only extension slicing is supported; anything else is a hard stop.
        if (isExtension(left.path()))
            return compareExtensions(outcome, path, superset, subset, left, right);
        // return true;
        else
            throw new DefinitionException("Slicing is not handled yet");
        // todo: name
    }
    // add the children
    outcome.subset.getSnapshot().getElement().add(subset);
    outcome.superset.getSnapshot().getElement().add(superset);
    return compareChildren(subset, outcome, path, left, right);
}
/**
 * Tracks how one extension (identified by its profile URL) is used across the two
 * profiles being compared, carrying both intersection (subset) and union (superset)
 * cardinalities.
 */
private class ExtensionUsage {
    private DefinitionNavigator defn;
    private int minSuperset;
    private int minSubset;
    private String maxSuperset;
    private String maxSubset;
    // Set to true once the same extension URL has been seen on both sides.
    private boolean both = false;

    public ExtensionUsage(DefinitionNavigator defn, int min, String max) {
        this.defn = defn;
        // Until the other side is seen, subset and superset bounds coincide.
        this.minSubset = min;
        this.minSuperset = min;
        this.maxSubset = max;
        this.maxSuperset = max;
    }
}
/**
 * Merge the extension slices from both profiles. Slices are collated by extension
 * profile URL; for URLs present on both sides the cardinalities are intersected
 * (subset) and united (superset). Extensions seen on only one side appear in the
 * superset only.
 *
 * @return true (incompatibilities are reported via outcome.rule, not the return value)
 * @throws DefinitionException if the same extension URL is sliced twice on one side
 */
private boolean compareExtensions(ProfileComparison outcome, String path, ElementDefinition superset, ElementDefinition subset, DefinitionNavigator left, DefinitionNavigator right) throws DefinitionException {
    // for now, we don't handle sealed (or ordered) extensions
    // for an extension the superset is all extensions, and the subset is.. all extensions - well, unless thay are sealed.
    // but it's not useful to report that. instead, we collate the defined ones, and just adjust the cardinalities
    Map<String, ExtensionUsage> map = new HashMap<String, ExtensionUsage>();
    if (left.slices() != null)
        for (DefinitionNavigator ex : left.slices()) {
            String url = ex.current().getType().get(0).getProfile();
            if (map.containsKey(url))
                throw new DefinitionException("Duplicate Extension "+url+" at "+path);
            else
                map.put(url, new ExtensionUsage(ex, ex.current().getMin(), ex.current().getMax()));
        }
    if (right.slices() != null)
        for (DefinitionNavigator ex : right.slices()) {
            String url = ex.current().getType().get(0).getProfile();
            if (map.containsKey(url)) {
                // Seen on both sides: widen for the superset, narrow for the subset.
                ExtensionUsage exd = map.get(url);
                exd.minSuperset = unionMin(exd.defn.current().getMin(), ex.current().getMin());
                exd.maxSuperset = unionMax(exd.defn.current().getMax(), ex.current().getMax());
                exd.minSubset = intersectMin(exd.defn.current().getMin(), ex.current().getMin());
                exd.maxSubset = intersectMax(exd.defn.current().getMax(), ex.current().getMax());
                exd.both = true;
                outcome.rule(subset, exd.maxSubset.equals("*") || Integer.parseInt(exd.maxSubset) >= exd.minSubset, path, "Cardinality Mismatch on extension: "+card(exd.defn)+"/"+card(ex));
            } else {
                map.put(url, new ExtensionUsage(ex, ex.current().getMin(), ex.current().getMax()));
            }
        }
    // Emit in a deterministic (sorted-by-URL) order.
    List<String> names = new ArrayList<String>();
    names.addAll(map.keySet());
    Collections.sort(names);
    for (String name : names) {
        ExtensionUsage exd = map.get(name);
        // Subset only contains extensions defined on both sides; superset contains all.
        if (exd.both)
            outcome.subset.getSnapshot().getElement().add(exd.defn.current().copy().setMin(exd.minSubset).setMax(exd.maxSubset));
        outcome.superset.getSnapshot().getElement().add(exd.defn.current().copy().setMin(exd.minSuperset).setMax(exd.maxSuperset));
    }
    return true;
}
/** True if the path names an extension or modifierExtension element. */
private boolean isExtension(String path) {
    for (String suffix : new String[] { ".extension", ".modifierExtension" }) {
        if (path.endsWith(suffix)) {
            return true;
        }
    }
    return false;
}
/**
 * Recurse into the children of the two elements, pairing them positionally and
 * comparing each pair via compareElements. Fails (with an ERROR message and a
 * status mark on ed) when the child counts differ or a positional pair's paths
 * cannot be reconciled.
 *
 * @return true if every child pair compared successfully
 */
private boolean compareChildren(ElementDefinition ed, ProfileComparison outcome, String path, DefinitionNavigator left, DefinitionNavigator right) throws DefinitionException, IOException {
    List<DefinitionNavigator> lc = left.children();
    List<DefinitionNavigator> rc = right.children();
    // it's possible that one of these profiles walks into a data type and the other doesn't
    // if it does, we have to load the children for that data into the profile that doesn't
    // walk into it
    if (lc.isEmpty() && !rc.isEmpty() && right.current().getType().size() == 1 && left.hasTypeChildren(right.current().getType().get(0)))
        lc = left.childrenFromType(right.current().getType().get(0));
    if (rc.isEmpty() && !lc.isEmpty() && left.current().getType().size() == 1 && right.hasTypeChildren(left.current().getType().get(0)))
        rc = right.childrenFromType(left.current().getType().get(0));
    if (lc.size() != rc.size()) {
        outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "Different number of children at "+path+" ("+Integer.toString(lc.size())+"/"+Integer.toString(rc.size())+")", ValidationMessage.IssueSeverity.ERROR));
        status(ed, ProfileUtilities.STATUS_ERROR);
        return false;
    } else {
        // Children are matched by position, not by name - comparePaths then checks the
        // tails actually correspond (including choice "[x]" renderings).
        for (int i = 0; i < lc.size(); i++) {
            DefinitionNavigator l = lc.get(i);
            DefinitionNavigator r = rc.get(i);
            String cpath = comparePaths(l.path(), r.path(), path, l.nameTail(), r.nameTail());
            if (cpath != null) {
                if (!compareElements(outcome, cpath, l, r))
                    return false;
            } else {
                outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "Different path at "+path+"["+Integer.toString(i)+"] ("+l.path()+"/"+r.path()+")", ValidationMessage.IssueSeverity.ERROR));
                status(ed, ProfileUtilities.STATUS_ERROR);
                return false;
            }
        }
    }
    return true;
}
/**
 * Work out the common child path for a pair of elements, or null when the tails
 * cannot be reconciled. A choice element tail ("value[x]") matches a concrete
 * rendering of that choice (e.g. "valueString") on the other side.
 */
private String comparePaths(String path1, String path2, String path, String tail1, String tail2) {
    if (tail1.equals(tail2)) {
        return path + "." + tail1;
    }
    if (tail1.endsWith("[x]")) {
        String stem = tail1.substring(0, tail1.length() - 3);
        if (tail2.startsWith(stem)) {
            return path + "." + tail1;
        }
    }
    if (tail2.endsWith("[x]")) {
        String stem = tail2.substring(0, tail2.length() - 3);
        if (tail1.startsWith(stem)) {
            return path + "." + tail2;
        }
    }
    return null;
}
/**
 * Compute the subset (intersection) and superset (union) bindings for an element
 * from the two profiles' bindings. Handles, in order: one side unbound; identical
 * bindings; both example/preferred; one side example/preferred; both
 * extensible/required (which requires intersecting the value sets themselves,
 * by definition or by expansion).
 *
 * NOTE(review): the return value is inconsistent - several branches return true,
 * but the final (both extensible/required, value sets intersected) path returns
 * false even on success, and the identical-bindings branch falls through rather
 * than returning. The only caller ignores the return value; confirm intended.
 */
private boolean compareBindings(ProfileComparison outcome, ElementDefinition subset, ElementDefinition superset, String path, ElementDefinition lDef, ElementDefinition rDef) {
    assert(lDef.hasBinding() || rDef.hasBinding());
    if (!lDef.hasBinding()) {
        subset.setBinding(rDef.getBinding());
        // technically, the super set is unbound, but that's not very useful - so we use the provided on as an example
        superset.setBinding(rDef.getBinding().copy());
        superset.getBinding().setStrength(BindingStrength.EXAMPLE);
        return true;
    }
    if (!rDef.hasBinding()) {
        subset.setBinding(lDef.getBinding());
        superset.setBinding(lDef.getBinding().copy());
        superset.getBinding().setStrength(BindingStrength.EXAMPLE);
        return true;
    }
    ElementDefinitionBindingComponent left = lDef.getBinding();
    ElementDefinitionBindingComponent right = rDef.getBinding();
    // Identical bindings: use them directly.
    // NOTE(review): no return here - execution continues into the branches below.
    if (Base.compareDeep(left, right, false)) {
        subset.setBinding(left);
        superset.setBinding(right);
    }
    // if they're both examples/preferred then:
    //   subset: left wins if they're both the same
    //   superset:
    if (isPreferredOrExample(left) && isPreferredOrExample(right)) {
        // preferred beats example: take the right side when it is the stronger one
        if (right.getStrength() == BindingStrength.PREFERRED && left.getStrength() == BindingStrength.EXAMPLE && !Base.compareDeep(left.getValueSet(), right.getValueSet(), false)) {
            outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "Example/preferred bindings differ at "+path+" using binding from "+outcome.rightName(), ValidationMessage.IssueSeverity.INFORMATION));
            status(subset, ProfileUtilities.STATUS_HINT);
            subset.setBinding(right);
            superset.setBinding(unionBindings(superset, outcome, path, left, right));
        } else {
            if ((right.getStrength() != BindingStrength.EXAMPLE || left.getStrength() != BindingStrength.EXAMPLE) && !Base.compareDeep(left.getValueSet(), right.getValueSet(), false) ) {
                outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "Example/preferred bindings differ at "+path+" using binding from "+outcome.leftName(), ValidationMessage.IssueSeverity.INFORMATION));
                status(subset, ProfileUtilities.STATUS_HINT);
            }
            subset.setBinding(left);
            superset.setBinding(unionBindings(superset, outcome, path, left, right));
        }
        return true;
    }
    // if either of them are extensible/required, then it wins
    if (isPreferredOrExample(left)) {
        subset.setBinding(right);
        superset.setBinding(unionBindings(superset, outcome, path, left, right));
        return true;
    }
    if (isPreferredOrExample(right)) {
        subset.setBinding(left);
        superset.setBinding(unionBindings(superset, outcome, path, left, right));
        return true;
    }
    // ok, both are extensible or required.
    ElementDefinitionBindingComponent subBinding = new ElementDefinitionBindingComponent();
    subset.setBinding(subBinding);
    ElementDefinitionBindingComponent superBinding = new ElementDefinitionBindingComponent();
    superset.setBinding(superBinding);
    subBinding.setDescription(mergeText(subset, outcome, path, "description", left.getDescription(), right.getDescription()));
    superBinding.setDescription(mergeText(subset, outcome, null, "description", left.getDescription(), right.getDescription()));
    // Subset takes the stronger strength, superset the weaker.
    if (left.getStrength() == BindingStrength.REQUIRED || right.getStrength() == BindingStrength.REQUIRED)
        subBinding.setStrength(BindingStrength.REQUIRED);
    else
        subBinding.setStrength(BindingStrength.EXTENSIBLE);
    if (left.getStrength() == BindingStrength.EXTENSIBLE || right.getStrength() == BindingStrength.EXTENSIBLE)
        superBinding.setStrength(BindingStrength.EXTENSIBLE);
    else
        superBinding.setStrength(BindingStrength.REQUIRED);
    if (Base.compareDeep(left.getValueSet(), right.getValueSet(), false)) {
        subBinding.setValueSet(left.getValueSet());
        superBinding.setValueSet(left.getValueSet());
        return true;
    } else if (!left.hasValueSet()) {
        outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "No left Value set at "+path, ValidationMessage.IssueSeverity.ERROR));
        return true;
    } else if (!right.hasValueSet()) {
        outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "No right Value set at "+path, ValidationMessage.IssueSeverity.ERROR));
        return true;
    } else {
        // ok, now we compare the value sets. This may be unresolvable.
        ValueSet lvs = resolveVS(outcome.left, left.getValueSet());
        ValueSet rvs = resolveVS(outcome.right, right.getValueSet());
        if (lvs == null) {
            outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "Unable to resolve left value set "+left.getValueSet().toString()+" at "+path, ValidationMessage.IssueSeverity.ERROR));
            return true;
        } else if (rvs == null) {
            outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "Unable to resolve right value set "+right.getValueSet().toString()+" at "+path, ValidationMessage.IssueSeverity.ERROR));
            return true;
        } else {
            // first, we'll try to do it by definition
            ValueSet cvs = intersectByDefinition(lvs, rvs);
            if(cvs == null) {
                // if that didn't work, we'll do it by expansion
                ValueSetExpansionOutcome le;
                ValueSetExpansionOutcome re;
                try {
                    le = context.expandVS(lvs, true, false);
                    re = context.expandVS(rvs, true, false);
                    if (!closed(le.getValueset()) || !closed(re.getValueset()))
                        throw new DefinitionException("unclosed value sets are not handled yet");
                    cvs = intersectByExpansion(lvs, rvs);
                    if (!cvs.getCompose().hasInclude()) {
                        outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "The value sets "+lvs.getUrl()+" and "+rvs.getUrl()+" do not intersect", ValidationMessage.IssueSeverity.ERROR));
                        status(subset, ProfileUtilities.STATUS_ERROR);
                        return false;
                    }
                } catch (Exception e){
                    outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "Unable to expand or process value sets "+lvs.getUrl()+" and "+rvs.getUrl()+": "+e.getMessage(), ValidationMessage.IssueSeverity.ERROR));
                    status(subset, ProfileUtilities.STATUS_ERROR);
                    return false;
                }
            }
            // Register the computed value sets as contained resources and reference them.
            subBinding.setValueSet(new Reference().setReference("#"+addValueSet(cvs)));
            superBinding.setValueSet(new Reference().setReference("#"+addValueSet(unite(superset, outcome, path, lvs, rvs))));
        }
    }
    return false;
}
/**
 * Build the union (superset) binding from two bindings: the strength whose enum
 * value compares lower is kept, descriptions are merged, and the value sets are
 * united when both resolve (falling back to whichever one does resolve).
 *
 * @return the union binding component (never null)
 */
private ElementDefinitionBindingComponent unionBindings(ElementDefinition ed, ProfileComparison outcome, String path, ElementDefinitionBindingComponent left, ElementDefinitionBindingComponent right) {
    ElementDefinitionBindingComponent union = new ElementDefinitionBindingComponent();
    if (left.getStrength().compareTo(right.getStrength()) < 0)
        union.setStrength(left.getStrength());
    else
        union.setStrength(right.getStrength());
    union.setDescription(mergeText(ed, outcome, path, "binding.description", left.getDescription(), right.getDescription()));
    if (Base.compareDeep(left.getValueSet(), right.getValueSet(), false))
        union.setValueSet(left.getValueSet());
    else {
        ValueSet lvs = resolveVS(outcome.left, left.getValueSet());
        // Fix: the right value set must be resolved against the right profile
        // (previously resolved against outcome.left - copy/paste error).
        ValueSet rvs = resolveVS(outcome.right, right.getValueSet());
        if (lvs != null && rvs != null)
            union.setValueSet(new Reference().setReference("#"+addValueSet(unite(ed, outcome, path, lvs, rvs))));
        else if (lvs != null)
            union.setValueSet(new Reference().setReference("#"+addValueSet(lvs)));
        else if (rvs != null)
            union.setValueSet(new Reference().setReference("#"+addValueSet(rvs)));
    }
    return union;
}
private ValueSet unite(ElementDefinition ed, ProfileComparison outcome, String path, ValueSet lvs, ValueSet rvs) {
ValueSet vs = new ValueSet();
if (lvs.hasCompose()) {
for (ConceptSetComponent inc : lvs.getCompose().getInclude())
vs.getCompose().getInclude().add(inc);
if (lvs.getCompose().hasExclude()) {
outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "The value sets "+lvs.getUrl()+" has exclude statements, and no union involving it can be correctly determined", ValidationMessage.IssueSeverity.ERROR));
status(ed, ProfileUtilities.STATUS_ERROR);
}
}
if (rvs.hasCompose()) {
for (ConceptSetComponent inc : rvs.getCompose().getInclude())
if (!mergeIntoExisting(vs.getCompose().getInclude(), inc))
vs.getCompose().getInclude().add(inc);
if (rvs.getCompose().hasExclude()) {
outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "The value sets "+lvs.getUrl()+" has exclude statements, and no union involving it can be correctly determined", ValidationMessage.IssueSeverity.ERROR));
status(ed, ProfileUtilities.STATUS_ERROR);
}
}
return vs;
}
  /**
   * Tries to fold the include statement {@code inc} into one of the existing
   * include statements in {@code include} (matching by code system).
   *
   * Returns true only when an existing entry is deep-equal to {@code inc};
   * in every other case it returns false, which tells the caller to add
   * {@code inc} as a parallel include.
   *
   * NOTE(review): when concepts from inc are merged into an existing entry the
   * method still returns false, so the caller adds inc again — confirm that
   * duplication is intended.
   * NOTE(review): dst.getSystem() is dereferenced without a hasSystem() check —
   * an include without a system would NPE here; confirm inputs always carry one.
   */
  private boolean mergeIntoExisting(List<ConceptSetComponent> include, ConceptSetComponent inc) {
    for (ConceptSetComponent dst : include) {
      if (Base.compareDeep(dst, inc, false))
        return true; // they're actually the same
      if (dst.getSystem().equals(inc.getSystem())) {
        if (inc.hasFilter() || dst.hasFilter()) {
          // filters cannot be merged safely; keep the statements separate
          return false; // just add the new one as a parallel
        } else if (inc.hasConcept() && dst.hasConcept()) {
          // same system, explicit concept lists on both sides: merge code by code
          for (ConceptReferenceComponent cc : inc.getConcept()) {
            boolean found = false;
            for (ConceptReferenceComponent dd : dst.getConcept()) {
              if (dd.getCode().equals(cc.getCode()))
                found = true;
              if (found) {
                // backfill a display from the incoming concept if the existing one lacks it
                if (cc.hasDisplay() && !dd.hasDisplay())
                  dd.setDisplay(cc.getDisplay());
                break;
              }
            }
            if (!found)
              dst.getConcept().add(cc.copy());
          }
        } else
          // an empty concept list means "the whole code system", which subsumes
          // any explicit list — drop the narrower enumeration
          dst.getConcept().clear(); // one of them includes the entire code system
      }
    }
    return false;
  }
private ValueSet resolveVS(StructureDefinition ctxtLeft, Type vsRef) {
if (vsRef == null)
return null;
if (vsRef instanceof UriType)
return null;
else {
Reference ref = (Reference) vsRef;
if (!ref.hasReference())
return null;
return context.fetchResource(ValueSet.class, ref.getReference());
}
}
  /**
   * Intersects two value sets by inspecting their definitions (compose) rather
   * than their expansions. Currently a stub that always returns null, signalling
   * "no definitional intersection computed" to the caller.
   */
  private ValueSet intersectByDefinition(ValueSet lvs, ValueSet rvs) {
    // this is just a stub. The idea is that we try to avoid expanding big open value sets from SCT, RxNorm, LOINC.
    // there's a bit of long hand logic coming here, but that's ok.
    return null;
  }
private ValueSet intersectByExpansion(ValueSet lvs, ValueSet rvs) {
// this is pretty straight forward - we intersect the lists, and build a compose out of the intersection
ValueSet vs = new ValueSet();
vs.setStatus(PublicationStatus.DRAFT);
Map<String, ValueSetExpansionContainsComponent> left = new HashMap<String, ValueSetExpansionContainsComponent>();
scan(lvs.getExpansion().getContains(), left);
Map<String, ValueSetExpansionContainsComponent> right = new HashMap<String, ValueSetExpansionContainsComponent>();
scan(rvs.getExpansion().getContains(), right);
Map<String, ConceptSetComponent> inc = new HashMap<String, ConceptSetComponent>();
for (String s : left.keySet()) {
if (right.containsKey(s)) {
ValueSetExpansionContainsComponent cc = left.get(s);
ConceptSetComponent c = inc.get(cc.getSystem());
if (c == null) {
c = vs.getCompose().addInclude().setSystem(cc.getSystem());
inc.put(cc.getSystem(), c);
}
c.addConcept().setCode(cc.getCode()).setDisplay(cc.getDisplay());
}
}
return vs;
}
private void scan(List<ValueSetExpansionContainsComponent> list, Map<String, ValueSetExpansionContainsComponent> map) {
for (ValueSetExpansionContainsComponent cc : list) {
if (cc.hasSystem() && cc.hasCode()) {
String s = cc.getSystem()+"::"+cc.getCode();
if (!map.containsKey(s))
map.put(s, cc);
}
if (cc.hasContains())
scan(cc.getContains(), map);
}
}
private boolean closed(ValueSet vs) {
return !ToolingExtensions.findBooleanExtension(vs.getExpansion(), ToolingExtensions.EXT_UNCLOSED);
}
private boolean isPreferredOrExample(ElementDefinitionBindingComponent binding) {
return binding.getStrength() == BindingStrength.EXAMPLE || binding.getStrength() == BindingStrength.PREFERRED;
}
private Collection<? extends TypeRefComponent> intersectTypes(ElementDefinition ed, ProfileComparison outcome, String path, List<TypeRefComponent> left, List<TypeRefComponent> right) throws DefinitionException, IOException {
List<TypeRefComponent> result = new ArrayList<TypeRefComponent>();
for (TypeRefComponent l : left) {
if (l.hasAggregation())
throw new DefinitionException("Aggregation not supported: "+path);
boolean pfound = false;
boolean tfound = false;
TypeRefComponent c = l.copy();
for (TypeRefComponent r : right) {
if (r.hasAggregation())
throw new DefinitionException("Aggregation not supported: "+path);
if (!l.hasProfile() && !r.hasProfile()) {
pfound = true;
} else if (!r.hasProfile()) {
pfound = true;
} else if (!l.hasProfile()) {
pfound = true;
c.setProfile(r.getProfile());
} else {
StructureDefinition sdl = resolveProfile(ed, outcome, path, l.getProfile(), outcome.leftName());
StructureDefinition sdr = resolveProfile(ed, outcome, path, r.getProfile(), outcome.rightName());
if (sdl != null && sdr != null) {
if (sdl == sdr) {
pfound = true;
} else if (derivesFrom(sdl, sdr)) {
pfound = true;
} else if (derivesFrom(sdr, sdl)) {
c.setProfile(r.getProfile());
pfound = true;
} else if (sdl.getType().equals(sdr.getType())) {
ProfileComparison comp = compareProfiles(sdl, sdr);
if (comp.getSubset() != null) {
pfound = true;
c.setProfile("#"+comp.id);
}
}
}
}
if (!l.hasTargetProfile() && !r.hasTargetProfile()) {
tfound = true;
} else if (!r.hasTargetProfile()) {
tfound = true;
} else if (!l.hasTargetProfile()) {
tfound = true;
c.setTargetProfile(r.getTargetProfile());
} else {
StructureDefinition sdl = resolveProfile(ed, outcome, path, l.getProfile(), outcome.leftName());
StructureDefinition sdr = resolveProfile(ed, outcome, path, r.getProfile(), outcome.rightName());
if (sdl != null && sdr != null) {
if (sdl == sdr) {
tfound = true;
} else if (derivesFrom(sdl, sdr)) {
tfound = true;
} else if (derivesFrom(sdr, sdl)) {
c.setTargetProfile(r.getTargetProfile());
tfound = true;
} else if (sdl.getType().equals(sdr.getType())) {
ProfileComparison comp = compareProfiles(sdl, sdr);
if (comp.getSubset() != null) {
tfound = true;
c.setTargetProfile("#"+comp.id);
}
}
}
}
}
if (pfound && tfound)
result.add(c);
}
return result;
}
private StructureDefinition resolveProfile(ElementDefinition ed, ProfileComparison outcome, String path, String url, String name) {
StructureDefinition res = context.fetchResource(StructureDefinition.class, url);
if (res == null) {
outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.INFORMATIONAL, path, "Unable to resolve profile "+url+" in profile "+name, ValidationMessage.IssueSeverity.WARNING));
status(ed, ProfileUtilities.STATUS_HINT);
}
return res;
}
private Collection<? extends TypeRefComponent> unionTypes(String path, List<TypeRefComponent> left, List<TypeRefComponent> right) throws DefinitionException, IOException {
List<TypeRefComponent> result = new ArrayList<TypeRefComponent>();
for (TypeRefComponent l : left)
checkAddTypeUnion(path, result, l);
for (TypeRefComponent r : right)
checkAddTypeUnion(path, result, r);
return result;
}
  /**
   * Adds the type {@code nw} to the union list {@code results}, first trying to
   * merge it into an existing entry with the same type code. For each of the two
   * profile axes (profile, targetProfile), merging succeeds when neither side
   * constrains, when one side's profile derives from the other's (the less
   * constrained wins — this is a union), or when a recursive comparison yields a
   * superset profile. If either axis cannot be merged, nw is appended as a
   * parallel entry.
   */
  private void checkAddTypeUnion(String path, List<TypeRefComponent> results, TypeRefComponent nw) throws DefinitionException, IOException {
    boolean pfound = false; // profile axis merged into an existing entry
    boolean tfound = false; // targetProfile axis merged into an existing entry
    nw = nw.copy(); // never mutate the caller's object
    if (nw.hasAggregation())
      throw new DefinitionException("Aggregation not supported: "+path);
    for (TypeRefComponent ex : results) {
      if (Utilities.equals(ex.getCode(), nw.getCode())) {
        if (!ex.hasProfile() && !nw.hasProfile())
          pfound = true;
        else if (!ex.hasProfile()) {
          pfound = true;
        } else if (!nw.hasProfile()) {
          // new entry is unconstrained: widen the existing entry to match
          pfound = true;
          ex.setProfile(null);
        } else {
          // both have profiles. Is one derived from the other?
          StructureDefinition sdex = context.fetchResource(StructureDefinition.class, ex.getProfile());
          StructureDefinition sdnw = context.fetchResource(StructureDefinition.class, nw.getProfile());
          if (sdex != null && sdnw != null) {
            if (sdex == sdnw) {
              pfound = true;
            } else if (derivesFrom(sdex, sdnw)) {
              // existing is more constrained: keep the broader (new) profile
              ex.setProfile(nw.getProfile());
              pfound = true;
            } else if (derivesFrom(sdnw, sdex)) {
              pfound = true;
            } else if (sdnw.getSnapshot().getElement().get(0).getPath().equals(sdex.getSnapshot().getElement().get(0).getPath())) {
              // same base type: a recursive comparison may yield a superset profile
              ProfileComparison comp = compareProfiles(sdex, sdnw);
              if (comp.getSuperset() != null) {
                pfound = true;
                ex.setProfile("#"+comp.id);
              }
            }
          }
        }
        if (!ex.hasTargetProfile() && !nw.hasTargetProfile())
          tfound = true;
        else if (!ex.hasTargetProfile()) {
          tfound = true;
        } else if (!nw.hasTargetProfile()) {
          tfound = true;
          ex.setTargetProfile(null);
        } else {
          // both have profiles. Is one derived from the other?
          StructureDefinition sdex = context.fetchResource(StructureDefinition.class, ex.getTargetProfile());
          StructureDefinition sdnw = context.fetchResource(StructureDefinition.class, nw.getTargetProfile());
          if (sdex != null && sdnw != null) {
            if (sdex == sdnw) {
              tfound = true;
            } else if (derivesFrom(sdex, sdnw)) {
              ex.setTargetProfile(nw.getTargetProfile());
              tfound = true;
            } else if (derivesFrom(sdnw, sdex)) {
              tfound = true;
            } else if (sdnw.getSnapshot().getElement().get(0).getPath().equals(sdex.getSnapshot().getElement().get(0).getPath())) {
              ProfileComparison comp = compareProfiles(sdex, sdnw);
              if (comp.getSuperset() != null) {
                tfound = true;
                ex.setTargetProfile("#"+comp.id);
              }
            }
          }
        }
      }
    }
    // NOTE(review): with || a partially-merged type is also appended as a new
    // parallel entry — confirm that is intended rather than &&.
    if (!tfound || !pfound)
      results.add(nw);
  }
private boolean derivesFrom(StructureDefinition left, StructureDefinition right) {
// left derives from right if it's base is the same as right
// todo: recursive...
return left.hasBaseDefinition() && left.getBaseDefinition().equals(right.getUrl());
}
// result.addAll(left);
// for (TypeRefComponent r : right) {
// boolean found = false;
// TypeRefComponent c = r.copy();
// for (TypeRefComponent l : left)
// if (Utilities.equals(l.getCode(), r.getCode())) {
//
// }
// if (l.getCode().equals("Reference") && r.getCode().equals("Reference")) {
// if (Base.compareDeep(l.getProfile(), r.getProfile(), false)) {
// found = true;
// }
// } else
// found = true;
// // todo: compare profiles
// // todo: compare aggregation values
// }
// if (!found)
// result.add(c);
// }
// }
private String mergeText(ElementDefinition ed, ProfileComparison outcome, String path, String name, String left, String right) {
if (left == null && right == null)
return null;
if (left == null)
return right;
if (right == null)
return left;
if (left.equalsIgnoreCase(right))
return left;
if (path != null) {
outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.INFORMATIONAL, path, "Elements differ in definition for "+name+":\r\n \""+left+"\"\r\n \""+right+"\"",
"Elements differ in definition for "+name+":<br/>\""+Utilities.escapeXml(left)+"\"<br/>\""+Utilities.escapeXml(right)+"\"", ValidationMessage.IssueSeverity.INFORMATION));
status(ed, ProfileUtilities.STATUS_HINT);
}
return "left: "+left+"; right: "+right;
}
private List<Coding> mergeCodings(List<Coding> left, List<Coding> right) {
List<Coding> result = new ArrayList<Coding>();
result.addAll(left);
for (Coding c : right) {
boolean found = false;
for (Coding ct : left)
if (Utilities.equals(c.getSystem(), ct.getSystem()) && Utilities.equals(c.getCode(), ct.getCode()))
found = true;
if (!found)
result.add(c);
}
return result;
}
private List<StringType> mergeStrings(List<StringType> left, List<StringType> right) {
List<StringType> result = new ArrayList<StringType>();
result.addAll(left);
for (StringType c : right) {
boolean found = false;
for (StringType ct : left)
if (Utilities.equals(c.getValue(), ct.getValue()))
found = true;
if (!found)
result.add(c);
}
return result;
}
private List<ElementDefinitionMappingComponent> mergeMappings(List<ElementDefinitionMappingComponent> left, List<ElementDefinitionMappingComponent> right) {
List<ElementDefinitionMappingComponent> result = new ArrayList<ElementDefinitionMappingComponent>();
result.addAll(left);
for (ElementDefinitionMappingComponent c : right) {
boolean found = false;
for (ElementDefinitionMappingComponent ct : left)
if (Utilities.equals(c.getIdentity(), ct.getIdentity()) && Utilities.equals(c.getLanguage(), ct.getLanguage()) && Utilities.equals(c.getMap(), ct.getMap()))
found = true;
if (!found)
result.add(c);
}
return result;
}
// we can't really know about constraints. We create warnings, and collate them
private List<ElementDefinitionConstraintComponent> unionConstraints(ElementDefinition ed, ProfileComparison outcome, String path, List<ElementDefinitionConstraintComponent> left, List<ElementDefinitionConstraintComponent> right) {
List<ElementDefinitionConstraintComponent> result = new ArrayList<ElementDefinitionConstraintComponent>();
for (ElementDefinitionConstraintComponent l : left) {
boolean found = false;
for (ElementDefinitionConstraintComponent r : right)
if (Utilities.equals(r.getId(), l.getId()) || (Utilities.equals(r.getXpath(), l.getXpath()) && r.getSeverity() == l.getSeverity()))
found = true;
if (!found) {
outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "StructureDefinition "+outcome.leftName()+" has a constraint that is not found in "+outcome.rightName()+" and it is uncertain whether they are compatible ("+l.getXpath()+")", ValidationMessage.IssueSeverity.INFORMATION));
status(ed, ProfileUtilities.STATUS_WARNING);
}
result.add(l);
}
for (ElementDefinitionConstraintComponent r : right) {
boolean found = false;
for (ElementDefinitionConstraintComponent l : left)
if (Utilities.equals(r.getId(), l.getId()) || (Utilities.equals(r.getXpath(), l.getXpath()) && r.getSeverity() == l.getSeverity()))
found = true;
if (!found) {
outcome.messages.add(new ValidationMessage(Source.ProfileComparer, ValidationMessage.IssueType.STRUCTURE, path, "StructureDefinition "+outcome.rightName()+" has a constraint that is not found in "+outcome.leftName()+" and it is uncertain whether they are compatible ("+r.getXpath()+")", ValidationMessage.IssueSeverity.INFORMATION));
status(ed, ProfileUtilities.STATUS_WARNING);
result.add(r);
}
}
return result;
}
private List<ElementDefinitionConstraintComponent> intersectConstraints(String path, List<ElementDefinitionConstraintComponent> left, List<ElementDefinitionConstraintComponent> right) {
List<ElementDefinitionConstraintComponent> result = new ArrayList<ElementDefinitionConstraintComponent>();
for (ElementDefinitionConstraintComponent l : left) {
boolean found = false;
for (ElementDefinitionConstraintComponent r : right)
if (Utilities.equals(r.getId(), l.getId()) || (Utilities.equals(r.getXpath(), l.getXpath()) && r.getSeverity() == l.getSeverity()))
found = true;
if (found)
result.add(l);
}
return result;
}
private String card(DefinitionNavigator defn) {
return Integer.toString(defn.current().getMin())+".."+defn.current().getMax();
}
private String typeCode(DefinitionNavigator defn) {
CommaSeparatedStringBuilder b = new CommaSeparatedStringBuilder();
for (TypeRefComponent t : defn.current().getType())
b.append(t.getCode()+(t.hasProfile() ? "("+t.getProfile()+")" : "")+(t.hasTargetProfile() ? "("+t.getTargetProfile()+")" : "")); // todo: other properties
return b.toString();
}
private int intersectMin(int left, int right) {
if (left > right)
return left;
else
return right;
}
private int unionMin(int left, int right) {
if (left > right)
return right;
else
return left;
}
private String intersectMax(String left, String right) {
int l = "*".equals(left) ? Integer.MAX_VALUE : Integer.parseInt(left);
int r = "*".equals(right) ? Integer.MAX_VALUE : Integer.parseInt(right);
if (l < r)
return left;
else
return right;
}
private String unionMax(String left, String right) {
int l = "*".equals(left) ? Integer.MAX_VALUE : Integer.parseInt(left);
int r = "*".equals(right) ? Integer.MAX_VALUE : Integer.parseInt(right);
if (l < r)
return right;
else
return left;
}
private IntegerType intersectMaxLength(int left, int right) {
if (left == 0)
left = Integer.MAX_VALUE;
if (right == 0)
right = Integer.MAX_VALUE;
if (left < right)
return left == Integer.MAX_VALUE ? null : new IntegerType(left);
else
return right == Integer.MAX_VALUE ? null : new IntegerType(right);
}
private IntegerType unionMaxLength(int left, int right) {
if (left == 0)
left = Integer.MAX_VALUE;
if (right == 0)
right = Integer.MAX_VALUE;
if (left < right)
return right == Integer.MAX_VALUE ? null : new IntegerType(right);
else
return left == Integer.MAX_VALUE ? null : new IntegerType(left);
}
public String addValueSet(ValueSet cvs) {
String id = Integer.toString(valuesets.size()+1);
cvs.setId(id);
valuesets.add(cvs);
return id;
}
  // ---- simple accessors for the comparison's identity and rendering metadata ----

  /** Technical id of this comparison (used when building internal "#" references). */
  public String getId() {
    return id;
  }
  public void setId(String id) {
    this.id = id;
  }
  /** Human-readable title for rendered comparison output. */
  public String getTitle() {
    return title;
  }
  public void setTitle(String title) {
    this.title = title;
  }
  /** Link to the left-hand profile's rendered page. */
  public String getLeftLink() {
    return leftLink;
  }
  public void setLeftLink(String leftLink) {
    this.leftLink = leftLink;
  }
  /** Display name of the left-hand profile. */
  public String getLeftName() {
    return leftName;
  }
  public void setLeftName(String leftName) {
    this.leftName = leftName;
  }
  /** Link to the right-hand profile's rendered page. */
  public String getRightLink() {
    return rightLink;
  }
  public void setRightLink(String rightLink) {
    this.rightLink = rightLink;
  }
  /** Display name of the right-hand profile. */
  public String getRightName() {
    return rightName;
  }
  public void setRightName(String rightName) {
    this.rightName = rightName;
  }
}
| |
package edu.purdue.safewalk.Fragments;
import java.util.ArrayList;
import org.apache.http.entity.StringEntity;
import android.app.ActionBar;
import android.app.ListFragment;
import android.app.ProgressDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ListView;
import android.widget.Toast;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import android.view.*;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import edu.purdue.safewalk.R;
import edu.purdue.safewalk.DataStructures.Requester;
import edu.purdue.safewalk.Interfaces.OnAllRequestsReceivedListener;
import edu.purdue.safewalk.Interfaces.OnRequestAcceptedHandler;
import edu.purdue.safewalk.Tasks.GetAllRequestsTask;
import edu.purdue.safewalk.Adapters.RequesterListAdapter;
import edu.purdue.safewalk.Widgets.PopupDialog;
/**
 * Fragment that shows all pending SafeWalk requests in a list. It fetches the
 * requests from the server (via {@link GetAllRequestsTask}), renders them with
 * {@link RequesterListAdapter}, and opens a {@link PopupDialog} when an entry
 * is tapped. It also listens for {@link #RESPONSE} broadcasts while visible.
 *
 * Fix: the broadcast handler compared the message String with {@code ==}
 * (reference equality); an extra delivered through an Intent is not guaranteed
 * to be the interned constant, so it now uses {@code equals}.
 */
public class ListViewRequesterFragment extends ListFragment implements OnAllRequestsReceivedListener, OnRequestAcceptedHandler {

    String[] NAMES;
    private final static String TAG = "ListViewRequesterFragment";
    // Broadcast actions / extras exchanged with the networking layer.
    public static final String SUCCESS = "edu.purdue.SafeWalk.SUCCESS";
    public static final String FAILURE = "edu.purdue.SafeWalk.FAILURE";
    public static final String RESPONSE = "edu.purdue.SafeWalk.RESPONCE_REQUESTS";
    PopupDialog dialog;
    // NOTE(review): these static mutable fields are written by the networking
    // task and read here; confirm single-threaded access via the main thread.
    public static StringEntity se = null;
    public static String httpResponse = null;
    static ArrayList<Requester> requests_ = new ArrayList<Requester>();
    Requester r;
    private ProgressDialog progDialog;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup parent,
            Bundle bundle) {
        return inflater.inflate(R.layout.list_view_requester_activity, null);
    }

    @Override
    public void onStart() {
        super.onStart();
        Log.d(TAG, "onStart()");
        progDialog = new ProgressDialog(getActivity());
        progDialog.show();
        getRequests();// starts to communicate to server.
        ActionBar actionBar = getActivity().getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        getActivity().getActionBar().setSubtitle("All pending requests");
    }

    @Override
    public void onResume() {
        super.onResume();
        // Register mMessageReceiver to receive messages.
        LocalBroadcastManager.getInstance(getActivity()).registerReceiver(
                mMessageReceiver,
                new IntentFilter(ListViewRequesterFragment.RESPONSE));
    }

    // handler for received Intents for the ListViewRequesterFragment.RESPONSE event
    private BroadcastReceiver mMessageReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            // Extract data included in the Intent
            String message = intent
                    .getStringExtra(ListViewRequesterFragment.RESPONSE);
            Log.d("receiver", "Got message: " + message);
            // Bug fix: was `message == SUCCESS` (reference comparison). Compare
            // by value; a null extra (missing key) is treated as a failure.
            if (ListViewRequesterFragment.SUCCESS.equals(message))
                onSuccess();
            else
                onFailure();
        }
    };

    @Override
    public void onPause() {
        // Unregister since the activity is not visible
        LocalBroadcastManager.getInstance(getActivity()).unregisterReceiver(
                mMessageReceiver);
        super.onPause();
    }

    public static void refreshRequests(){
    }

    /** Kicks off the background fetch of all pending requests. */
    public void getRequests() {
        final GetAllRequestsTask task = new GetAllRequestsTask(getActivity(), this);
        this.getActivity().runOnUiThread(new Runnable(){
            @Override
            public void run() {
                // execute() must be called on the UI thread
                task.execute();
            }
        });
    }

    /** Called when the server could not be reached. */
    public void onFailure() {
        Toast.makeText(getActivity(), "No connection to server",
                Toast.LENGTH_LONG).show();
    }

    /**
     * Called when a response arrived: parses {@link #httpResponse} as a JSON
     * array of requests and refreshes the list.
     */
    public void onSuccess() {
        // NOTE(review): httpResponse is assumed non-null here because SUCCESS
        // was broadcast — confirm the networking task guarantees that.
        Log.d("response", httpResponse);
        requests_.clear(); // Remove old received requests from list.
        JSONArray jArray;
        try {
            jArray = new JSONArray(httpResponse);
            for (int i = 0; i < jArray.length(); i++) {
                JSONObject j = jArray.getJSONObject(i);
                r = new Requester(j); //moved to inside class. Makes more sense.
                requests_.add(r);
            }
        } catch (JSONException e) {
            Log.e("JSON Parsing Exception", "JSON failed to parse");
            e.printStackTrace();
        }
        updateList(requests_);
    }

    /** Binds the given requests to the ListView and wires the item-click dialog. */
    private void updateList(ArrayList<Requester> requests) {
        // Create ArrayList of names to be put into ListItems
        ArrayList<String> stringList = new ArrayList<String>();
        for (int i = 0; i < requests.size(); i++) {
            stringList.add(requests.get(i).getName());
        }
        RequesterListAdapter listAdapter = new RequesterListAdapter(
                getActivity(), requests);
        this.setListAdapter(listAdapter);
        ListView lv = this.getListView();
        lv.setTextFilterEnabled(true);
        final OnRequestAcceptedHandler handler = (OnRequestAcceptedHandler) this;
        // Anon class for ListView OnClick
        lv.setOnItemClickListener(new OnItemClickListener() {
            public void onItemClick(AdapterView<?> parent, View view,
                    int position, long id) {
                dialog = PopupDialog.getInstance(((Requester) getListAdapter().getItem(position)), handler);
                dialog.show(getFragmentManager(), "PopUpDialogFragment");
            }
        });
    }

    /**
     * Callback from {@link GetAllRequestsTask}: parses the "results" array out
     * of the response, dismisses the progress dialog, and refreshes the list
     * on the UI thread.
     */
    @Override
    public void onAllRequestsReceived(String resp) {
        Log.d("debug", "onIT!");
        JSONObject jObject;
        final ArrayList<Requester> requests = new ArrayList<Requester>();
        try {
            jObject = new JSONObject(resp);
            JSONArray jArray;
            jArray = jObject.getJSONArray("results");
            for (int i = 0; i < jArray.length(); i++) {
                JSONObject j = jArray.getJSONObject(i);
                r = new Requester(j); //moved to inside class. Makes more sense.
                Log.d("name", r.getName());
                requests.add(r);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (progDialog.isShowing()) {
            progDialog.dismiss();
        }
        getActivity().runOnUiThread(new Runnable(){
            @Override
            public void run() {
                updateList(requests);
            }
        });
    }

    /** When a request is accepted from the dialog, re-fetch the pending list. */
    @Override
    public void onRequestAccepted() {
        GetAllRequestsTask task = new GetAllRequestsTask(this.getActivity(), this);
        task.execute();
    }
}
| |
/*
* Copyright (c) 2017 VMware Inc. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.hillview.dataset;
import com.google.common.net.HostAndPort;
import com.google.protobuf.ByteString;
import io.grpc.netty.NettyChannelBuilder;
import io.grpc.stub.StreamObserver;
import org.apache.commons.lang3.SerializationUtils;
import org.hillview.dataset.api.*;
import org.hillview.pb.Ack;
import org.hillview.pb.Command;
import org.hillview.pb.HillviewServerGrpc;
import org.hillview.pb.PartialResponse;
import org.hillview.remoting.*;
import org.hillview.utils.Converters;
import rx.Observable;
import rx.subjects.PublishSubject;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import static org.hillview.remoting.HillviewServer.DEFAULT_IDS_INDEX;
/**
* An IDataSet that is a proxy for a DataSet on a remote machine. The remote IDataSet
* is pointed to by (serverEndpoint, remoteHandle). Any RemoteDataSet instantiated
* with a wrong value for either entry of the tuple will result in an exception.
*/
public class RemoteDataSet<T> implements IDataSet<T> {
    // Per-call deadline in milliseconds applied to every gRPC invocation.
    private final static int TIMEOUT = 10000; // TODO: import via config file
    // Index of the dataset on the remote HillviewServer that this proxy points to.
    private final int remoteHandle;
    // Address of the remote server hosting the dataset.
    private final HostAndPort serverEndpoint;
    // Async gRPC stub over an unencrypted Netty channel to serverEndpoint.
    private final HillviewServerGrpc.HillviewServerStub stub;

    /** Proxy for the server's default dataset (DEFAULT_IDS_INDEX). */
    public RemoteDataSet(final HostAndPort serverEndpoint) {
        this(serverEndpoint, DEFAULT_IDS_INDEX);
    }

    /** Proxy for the dataset at {@code remoteHandle} on {@code serverEndpoint}. */
    public RemoteDataSet(final HostAndPort serverEndpoint, final int remoteHandle) {
        this.serverEndpoint = serverEndpoint;
        this.remoteHandle = remoteHandle;
        this.stub = HillviewServerGrpc.newStub(NettyChannelBuilder
                .forAddress(serverEndpoint.getHost(), serverEndpoint.getPort())
                .usePlaintext(true) // channel is unencrypted.
                .build());
    }

    /**
     * Map operations on a RemoteDataSet result in only one onNext
     * invocation that will return the final IDataSet.
     */
    @Override
    public <S> Observable<PartialResult<IDataSet<S>>> map(final IMap<T, S> mapper) {
        // Serialize the operation and wrap it in a Command addressed to our handle;
        // the RPC is only issued once a caller subscribes to the returned Observable.
        final MapOperation<T, S> mapOp = new MapOperation<T, S>(mapper);
        final byte[] serializedOp = SerializationUtils.serialize(mapOp);
        final Command command = Command.newBuilder()
                .setIdsIndex(this.remoteHandle)
                .setSerializedOp(ByteString.copyFrom(serializedOp))
                .build();
        final PublishSubject<PartialResult<IDataSet<S>>> subj = PublishSubject.create();
        final StreamObserver<PartialResponse> responseObserver = new NewDataSetObserver<S>(subj);
        return subj.doOnSubscribe(() -> this.stub.withDeadlineAfter(TIMEOUT, TimeUnit.MILLISECONDS)
                                                 .map(command, responseObserver))
                   .doOnUnsubscribe(() -> this.unsubscribe(mapOp.id));
    }

    /** Same wiring as {@link #map}, but invokes the server's flatMap RPC. */
    @Override
    public <S> Observable<PartialResult<IDataSet<S>>> flatMap(IMap<T, List<S>> mapper) {
        final FlatMapOperation<T, S> mapOp = new FlatMapOperation<T, S>(mapper);
        final byte[] serializedOp = SerializationUtils.serialize(mapOp);
        final Command command = Command.newBuilder()
                .setIdsIndex(this.remoteHandle)
                .setSerializedOp(ByteString.copyFrom(serializedOp))
                .build();
        final PublishSubject<PartialResult<IDataSet<S>>> subj = PublishSubject.create();
        final StreamObserver<PartialResponse> responseObserver = new NewDataSetObserver<S>(subj);
        return subj.doOnSubscribe(() -> this.stub.withDeadlineAfter(TIMEOUT, TimeUnit.MILLISECONDS)
                                                 .flatMap(command, responseObserver))
                   .doOnUnsubscribe(() -> this.unsubscribe(mapOp.id));
    }

    /**
     * Sketch operation that streams partial results from the server to the caller.
     */
    @Override
    public <R> Observable<PartialResult<R>> sketch(final ISketch<T, R> sketch) {
        final SketchOperation<T, R> sketchOp = new SketchOperation<>(sketch);
        final byte[] serializedOp = SerializationUtils.serialize(sketchOp);
        final Command command = Command.newBuilder()
                .setIdsIndex(this.remoteHandle)
                .setSerializedOp(ByteString.copyFrom(serializedOp))
                .build();
        final PublishSubject<PartialResult<R>> subj = PublishSubject.create();
        final StreamObserver<PartialResponse> responseObserver = new SketchObserver<>(subj);
        return subj.doOnSubscribe(() -> this.stub.withDeadlineAfter(TIMEOUT, TimeUnit.MILLISECONDS)
                                                 .sketch(command, responseObserver))
                   .doOnUnsubscribe(() -> this.unsubscribe(sketchOp.id));
    }

    /**
     * Zip operation on two IDataSet objects that need to reside on the same remote server.
     */
    @Override
    public <S> Observable<PartialResult<IDataSet<Pair<T, S>>>> zip(final IDataSet<S> other) {
        if (!(other instanceof RemoteDataSet<?>)) {
            throw new RuntimeException("Unexpected type in Zip " + other);
        }
        final RemoteDataSet<S> rds = (RemoteDataSet<S>) other;

        // zip commands are not valid if the RemoteDataSet instances point to different
        // actor systems or different nodes.
        final HostAndPort leftAddress = this.serverEndpoint;
        final HostAndPort rightAddress = rds.serverEndpoint;
        if (!leftAddress.equals(rightAddress)) {
            throw new RuntimeException("Zip command invalid for RemoteDataSets " +
                    "across different servers | left: " + leftAddress + ", right:" + rightAddress);
        }
        // The command carries the other dataset's handle; ours goes in idsIndex.
        final ZipOperation zip = new ZipOperation(rds.remoteHandle);
        final byte[] serializedOp = SerializationUtils.serialize(zip);
        final Command command = Command.newBuilder()
                .setIdsIndex(this.remoteHandle)
                .setSerializedOp(ByteString.copyFrom(serializedOp))
                .build();
        final PublishSubject<PartialResult<IDataSet<Pair<T, S>>>> subj = PublishSubject.create();
        final StreamObserver<PartialResponse> responseObserver =
                new NewDataSetObserver<Pair<T, S>>(subj);
        return subj.doOnSubscribe(() -> this.stub.withDeadlineAfter(TIMEOUT, TimeUnit.MILLISECONDS)
                                                 .zip(command, responseObserver))
                   .doOnUnsubscribe(() -> this.unsubscribe(zip.id));
    }

    /**
     * Unsubscribes an operation. This method is safe to invoke multiple times because the
     * logic on the remote end is idempotent.
     */
    private void unsubscribe(final UUID id) {
        final UnsubscribeOperation op = new UnsubscribeOperation(id);
        final byte[] serializedOp = SerializationUtils.serialize(op);
        final Command command = Command.newBuilder()
                .setIdsIndex(this.remoteHandle)
                .setSerializedOp(ByteString.copyFrom(serializedOp))
                .build();
        // Fire-and-forget: the ack and any error are deliberately ignored.
        this.stub.withDeadlineAfter(TIMEOUT, TimeUnit.MILLISECONDS)
                 .unsubscribe(command, new StreamObserver<Ack>() {
                     @Override
                     public void onNext(final Ack ack) {
                     }

                     @Override
                     public void onError(final Throwable throwable) {
                     }

                     @Override
                     public void onCompleted() {
                     }
                 });
    }

    /**
     * A StreamObserver that transfers incoming onNext, onError and onCompleted invocations
     * from a gRPC streaming call to that of a publish subject.
     */
    private abstract static class OperationObserver<T> implements StreamObserver<PartialResponse> {
        final PublishSubject<T> subject;

        public OperationObserver(final PublishSubject<T> subject) {
            this.subject = subject;
        }

        @Override
        public void onNext(final PartialResponse response) {
            // Each raw gRPC response is decoded by the subclass before emission.
            this.subject.onNext(processResponse(response));
        }

        @Override
        public void onError(final Throwable throwable) {
            this.subject.onError(throwable);
        }

        @Override
        public void onCompleted() {
            this.subject.onCompleted();
        }

        /** Decodes a wire-level PartialResponse into the subject's element type. */
        public abstract T processResponse(final PartialResponse response);
    }

    /**
     * StreamObserver used by map() and zip() implementations above to point to instantiate
     * a new RemoteDataSet that points to a dataset on a remote server.
     */
    // Intentionally a non-static inner class: it captures the enclosing
    // instance's serverEndpoint when building the new proxy.
    private class NewDataSetObserver<S> extends OperationObserver<PartialResult<IDataSet<S>>> {
        public NewDataSetObserver(PublishSubject<PartialResult<IDataSet<S>>> subject) {
            super(subject);
        }

        @Override
        public PartialResult<IDataSet<S>> processResponse(final PartialResponse response) {
            final OperationResponse op = SerializationUtils.deserialize(response
                    .getSerializedOp().toByteArray());
            // A null result means "no dataset yet" for this partial; otherwise the
            // payload is the remote handle of the newly created dataset.
            final IDataSet<S> ids = (op.result == null) ? null :
                    new RemoteDataSet<S>(RemoteDataSet.this.serverEndpoint, (int) op.result);
            return new PartialResult<IDataSet<S>>(ids);
        }
    }

    /**
     * StreamObserver used by sketch() implementation above.
     */
    private static class SketchObserver<S> extends OperationObserver<PartialResult<S>> {
        public SketchObserver(final PublishSubject<PartialResult<S>> subject) {
            super(subject);
        }

        @Override
        @SuppressWarnings("unchecked")
        public PartialResult<S> processResponse(final PartialResponse response) {
            final OperationResponse op = SerializationUtils.deserialize(response
                    .getSerializedOp().toByteArray());
            // Sketch responses carry the partial result itself (never null).
            return (PartialResult<S>) Converters.checkNull(op.result);
        }
    }
}
| |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.examples;
import com.google.api.services.datastore.DatastoreV1;
import com.google.api.services.datastore.DatastoreV1.Entity;
import com.google.api.services.datastore.DatastoreV1.Key;
import com.google.api.services.datastore.DatastoreV1.Property;
import com.google.api.services.datastore.DatastoreV1.Query;
import com.google.api.services.datastore.DatastoreV1.Value;
import com.google.api.services.datastore.client.DatastoreHelper;
import com.google.cloud.dataflow.sdk.Pipeline;
import com.google.cloud.dataflow.sdk.io.DatastoreIO;
import com.google.cloud.dataflow.sdk.io.TextIO;
import com.google.cloud.dataflow.sdk.options.Default;
import com.google.cloud.dataflow.sdk.options.Description;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.options.Validation;
import com.google.cloud.dataflow.sdk.runners.DirectPipelineRunner;
import com.google.cloud.dataflow.sdk.runners.PipelineRunner;
import com.google.cloud.dataflow.sdk.transforms.DoFn;
import com.google.cloud.dataflow.sdk.transforms.ParDo;
import java.util.Map;
/**
* A WordCount example using DatastoreIO.
*
* <p> This example shows how to use DatastoreIO to read from Datastore and
* write the results to Cloud Storage. Note that this example will write
* data to Datastore, which may incur charge for Datastore operations.
*
* <p> To run this example, users need to set up the environment and use gcloud
* to get credential for Datastore:
* <pre>
* $ export CLOUDSDK_EXTRA_SCOPES=https://www.googleapis.com/auth/datastore
* $ gcloud auth login
* </pre>
*
* <p> Note that the environment variable CLOUDSDK_EXTRA_SCOPES must be set
* to the same value when executing a Datastore pipeline, as the local auth
* cache is keyed by the requested scopes.
*
* <p> To run this pipeline locally, the following options must be provided:
* <pre>{@code
* --project=<PROJECT ID>
* --dataset=<DATASET ID>
* --output=[<LOCAL FILE> | gs://<OUTPUT PATH>]
* }</pre>
*
* <p> To run this example using Dataflow service, you must additionally
* provide either {@literal --stagingLocation} or {@literal --tempLocation}, and
* select one of the Dataflow pipeline runners, eg
* {@literal --runner=BlockingDataflowPipelineRunner}.
*/
public class DatastoreWordCount {
  /**
   * A DoFn that gets the content of an entity (one line in a
   * Shakespeare play) and converts it to a string.
   */
  static class GetContentFn extends DoFn<Entity, String> {
    private static final long serialVersionUID = 0;

    @Override
    public void processElement(ProcessContext c) {
      Map<String, Value> props = DatastoreHelper.getPropertyMap(c.element());
      DatastoreV1.Value value = props.get("content");
      // Entities without a "content" property are silently skipped.
      if (value != null) {
        c.output(DatastoreHelper.getString(value));
      }
    }
  }

  /**
   * A DoFn that creates an entity for every line in Shakespeare.
   */
  static class CreateEntityFn extends DoFn<String, Entity> {
    private static final long serialVersionUID = 0;

    /** Datastore kind under which the entities are written. */
    private final String kind;

    CreateEntityFn(String kind) {
      this.kind = kind;
    }

    /**
     * Builds a Datastore entity of the configured kind whose "content"
     * property holds the given line of text.
     */
    public Entity makeEntity(String content) {
      Entity.Builder entityBuilder = Entity.newBuilder();
      // Create entities with same ancestor Key.
      Key ancestorKey = DatastoreHelper.makeKey(kind, "root").build();
      Key key = DatastoreHelper.makeKey(ancestorKey, kind).build();
      entityBuilder.setKey(key);
      entityBuilder.addProperty(Property.newBuilder().setName("content")
          .setValue(Value.newBuilder().setStringValue(content)));
      return entityBuilder.build();
    }

    @Override
    public void processElement(ProcessContext c) {
      c.output(makeEntity(c.element()));
    }
  }

  /**
   * Options supported by {@link DatastoreWordCount}.
   * <p>
   * Inherits standard configuration options.
   */
  public interface Options extends PipelineOptions {
    @Description("Path of the file to read from and store to Datastore")
    @Default.String("gs://dataflow-samples/shakespeare/kinglear.txt")
    String getInput();
    void setInput(String value);

    @Description("Path of the file to write to")
    @Validation.Required
    String getOutput();
    void setOutput(String value);

    @Description("Dataset ID to read from datastore")
    @Validation.Required
    String getDataset();
    void setDataset(String value);

    @Description("Dataset entity kind")
    @Default.String("shakespeare-demo")
    String getKind();
    void setKind(String value);

    @Description("Read an existing dataset, do not write first")
    boolean isReadOnly();
    void setReadOnly(boolean value);

    @Description("Number of output shards")
    @Default.Integer(0) // If the system should choose automatically.
    int getNumShards();
    void setNumShards(int value);
  }

  /**
   * An example which creates a pipeline to populate DatastoreIO from a
   * text input. Forces use of DirectPipelineRunner for local execution mode.
   */
  public static void writeDataToDatastore(Options options) {
    // Storing the user-specified runner so it can be restored afterwards.
    Class<? extends PipelineRunner<?>> tempRunner = options.getRunner();
    try {
      // Runs locally via DirectPipelineRunner, as writing is not yet implemented
      // for the other runners.
      options.setRunner(DirectPipelineRunner.class);
      Pipeline p = Pipeline.create(options);
      p.apply(TextIO.Read.named("ReadLines").from(options.getInput()))
          .apply(ParDo.of(new CreateEntityFn(options.getKind())))
          .apply(DatastoreIO.write().to(options.getDataset()));
      p.run();
    } finally {
      // Resetting the runner to the user specified class.
      options.setRunner(tempRunner);
    }
  }

  /**
   * An example which creates a pipeline to do DatastoreIO.Read from Datastore.
   */
  public static void readDataFromDatastore(Options options) {
    // Build a query: read all entities of the specified kind.
    Query.Builder q = Query.newBuilder();
    q.addKindBuilder().setName(options.getKind());
    Query query = q.build();

    Pipeline p = Pipeline.create(options);
    p.apply(DatastoreIO.readFrom(options.getDataset(), query).named("ReadShakespeareFromDatastore"))
        .apply(ParDo.of(new GetContentFn()))
        .apply(new WordCount.CountWords())
        .apply(TextIO.Write.named("WriteLines")
            .to(options.getOutput())
            .withNumShards(options.getNumShards()));
    p.run();
  }

  /**
   * Main function.
   * An example to demo how to use DatastoreIO. The runner here is
   * customizable, which means users could pass either DirectPipelineRunner
   * or DataflowPipelineRunner in PipelineOptions.
   */
  public static void main(String[] args) {
    // The options are used in two places, for Dataflow service, and
    // building DatastoreIO.Read object
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    if (!options.isReadOnly()) {
      // First example: write data to Datastore for reading later.
      // Note: this will insert new entries with the given kind. Existing entries
      // should be cleared first, or the final counts will contain duplicates.
      // The Datastore Admin tool in the AppEngine console can be used to erase
      // all entries with a particular kind.
      DatastoreWordCount.writeDataToDatastore(options);
    }
    // Second example: do parallel read from Datastore.
    DatastoreWordCount.readDataFromDatastore(options);
  }
}
| |
package com.braintreepayments.api;
import androidx.annotation.Nullable;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * Request object used to configure a local payment. The configured values are
 * serialized by {@link #build(String, String)} into the JSON payload expected
 * by the create-payment endpoint.
 */
public class LocalPaymentRequest {

    private static final String INTENT_KEY = "intent";
    private static final String RETURN_URL_KEY = "returnUrl";
    private static final String CANCEL_URL_KEY = "cancelUrl";
    private static final String EXPERIENCE_PROFILE_KEY = "experienceProfile";
    private static final String NO_SHIPPING_KEY = "noShipping";
    private static final String DISPLAY_NAME_KEY = "brandName";
    private static final String FUNDING_SOURCE_KEY = "fundingSource";
    private static final String AMOUNT_KEY = "amount";
    private static final String CURRENCY_CODE_KEY = "currencyIsoCode";
    private static final String GIVEN_NAME_KEY = "firstName";
    private static final String SURNAME_KEY = "lastName";
    private static final String EMAIL_KEY = "payerEmail";
    private static final String PHONE_KEY = "phone";
    private static final String STREET_ADDRESS_KEY = "line1";
    private static final String EXTENDED_ADDRESS_KEY = "line2";
    private static final String LOCALITY_KEY = "city";
    private static final String REGION_KEY = "state";
    private static final String POSTAL_CODE_KEY = "postalCode";
    private static final String COUNTRY_CODE_KEY = "countryCode";
    private static final String MERCHANT_ACCOUNT_ID_KEY = "merchantAccountId";
    private static final String PAYMENT_TYPE_COUNTRY_CODE_KEY = "paymentTypeCountryCode";
    private static final String BIC_KEY = "bic";

    private PostalAddress address;
    private String amount;
    private String bankIdentificationCode;
    private String currencyCode;
    private String displayName;
    private String email;
    private String givenName;
    private String merchantAccountId;
    private String paymentType;
    private String paymentTypeCountryCode;
    private String phone;
    private boolean shippingAddressRequired;
    private String surname;

    /**
     * @param address Optional - The address of the customer. An error will occur if this address is not valid.
     */
    public void setAddress(@Nullable PostalAddress address) {
        this.address = address;
    }

    @Nullable
    public PostalAddress getAddress() {
        return address;
    }

    /**
     * @param amount Optional - The amount for the transaction.
     */
    public void setAmount(@Nullable String amount) {
        this.amount = amount;
    }

    @Nullable
    public String getAmount() {
        return amount;
    }

    /**
     * @param bankIdentificationCode Optional - the Bank Identification Code of the customer (specific to iDEAL transactions).
     */
    public void setBic(@Nullable String bankIdentificationCode) {
        this.bankIdentificationCode = bankIdentificationCode;
    }

    @Nullable
    public String getBic() {
        return bankIdentificationCode;
    }

    /**
     * @param currencyCode Optional - A valid ISO currency code to use for the transaction. Defaults to merchant
     *                     currency code if not set.
     */
    public void setCurrencyCode(@Nullable String currencyCode) {
        this.currencyCode = currencyCode;
    }

    @Nullable
    public String getCurrencyCode() {
        return currencyCode;
    }

    /**
     * @param displayName Optional - The merchant name displayed inside of the Local Payment flow
     *                    when starting the payment.
     */
    public void setDisplayName(@Nullable String displayName) {
        this.displayName = displayName;
    }

    @Nullable
    public String getDisplayName() {
        return displayName;
    }

    /**
     * @param email Optional - Payer email of the customer.
     */
    public void setEmail(@Nullable String email) {
        this.email = email;
    }

    @Nullable
    public String getEmail() {
        return email;
    }

    /**
     * @param givenName Optional - Given (first) name of the customer.
     */
    public void setGivenName(@Nullable String givenName) {
        this.givenName = givenName;
    }

    @Nullable
    public String getGivenName() {
        return givenName;
    }

    /**
     * @param merchantAccountId Optional - A non-default merchant account to use for tokenization.
     */
    public void setMerchantAccountId(@Nullable String merchantAccountId) {
        this.merchantAccountId = merchantAccountId;
    }

    @Nullable
    public String getMerchantAccountId() {
        return merchantAccountId;
    }

    /**
     * @param paymentType - The type of payment
     */
    public void setPaymentType(@Nullable String paymentType) {
        this.paymentType = paymentType;
    }

    @Nullable
    public String getPaymentType() {
        return paymentType;
    }

    /**
     * @param paymentTypeCountryCode The country code of the local payment. This value must be one of
     *                               the supported country codes for a given local payment type listed.
     *                               For local payments supported in multiple countries, this value
     *                               may determine which banks are presented to the customer.
     * @see <a href=https://developer.paypal.com/braintree/docs/guides/local-payment-methods/client-side-custom/android/v4#invoke-payment-flow>Supported Country Codes</a>
     */
    public void setPaymentTypeCountryCode(@Nullable String paymentTypeCountryCode) {
        this.paymentTypeCountryCode = paymentTypeCountryCode;
    }

    @Nullable
    public String getPaymentTypeCountryCode() {
        return paymentTypeCountryCode;
    }

    /**
     * @param phone Optional - Phone number of the customer.
     */
    public void setPhone(@Nullable String phone) {
        this.phone = phone;
    }

    @Nullable
    public String getPhone() {
        return phone;
    }

    /**
     * @param shippingAddressRequired - Indicates whether or not the payment needs to be shipped. For digital goods,
     *                                this should be false. Defaults to false.
     */
    public void setShippingAddressRequired(boolean shippingAddressRequired) {
        this.shippingAddressRequired = shippingAddressRequired;
    }

    public boolean isShippingAddressRequired() {
        return shippingAddressRequired;
    }

    /**
     * @param surname Optional - Surname (last name) of the customer.
     */
    public void setSurname(@Nullable String surname) {
        this.surname = surname;
    }

    @Nullable
    public String getSurname() {
        return surname;
    }

    /**
     * Serializes this request into the JSON payload sent to the local payment
     * create endpoint.
     *
     * @param returnUrl URL the customer returns to after approving the payment
     * @param cancelUrl URL the customer returns to after cancelling the payment
     * @return the JSON payload as a string; an empty JSON object on serialization failure
     */
    public String build(String returnUrl, String cancelUrl) {
        try {
            JSONObject payload = new JSONObject();
            payload.put(INTENT_KEY, "sale");
            payload.put(RETURN_URL_KEY, returnUrl);
            payload.put(CANCEL_URL_KEY, cancelUrl);
            payload.put(FUNDING_SOURCE_KEY, paymentType);
            payload.put(AMOUNT_KEY, amount);
            payload.put(CURRENCY_CODE_KEY, currencyCode);
            payload.put(GIVEN_NAME_KEY, givenName);
            payload.put(SURNAME_KEY, surname);
            payload.put(EMAIL_KEY, email);
            payload.put(PHONE_KEY, phone);
            payload.put(MERCHANT_ACCOUNT_ID_KEY, merchantAccountId);
            // putOpt() simply skips the key when the value is null.
            payload.putOpt(PAYMENT_TYPE_COUNTRY_CODE_KEY, paymentTypeCountryCode);
            payload.putOpt(BIC_KEY, bankIdentificationCode);

            if (address != null) {
                payload.put(STREET_ADDRESS_KEY, address.getStreetAddress());
                payload.put(EXTENDED_ADDRESS_KEY, address.getExtendedAddress());
                payload.put(LOCALITY_KEY, address.getLocality());
                payload.put(REGION_KEY, address.getRegion());
                payload.put(POSTAL_CODE_KEY, address.getPostalCode());
                payload.put(COUNTRY_CODE_KEY, address.getCountryCodeAlpha2());
            }

            JSONObject experienceProfile = new JSONObject();
            experienceProfile.put(NO_SHIPPING_KEY, !shippingAddressRequired);
            experienceProfile.put(DISPLAY_NAME_KEY, displayName);
            payload.put(EXPERIENCE_PROFILE_KEY, experienceProfile);

            return payload.toString();
        } catch (JSONException ignored) {
            // Fall through: an empty JSON object signals the payload could not be built.
        }
        return new JSONObject().toString();
    }
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.origin.jdbc.cdc.postgres;
import com.streamsets.pipeline.api.ConfigDef;
import com.streamsets.pipeline.api.ConfigDefBean;
import com.streamsets.pipeline.api.Dependency;
import com.streamsets.pipeline.api.MultiValueChooserModel;
import com.streamsets.pipeline.api.ValueChooserModel;
import com.streamsets.pipeline.config.TimeZoneChooserValues;
import com.streamsets.pipeline.lib.el.TimeEL;
import com.streamsets.pipeline.lib.jdbc.parser.sql.UnsupportedFieldTypeChooserValues;
import com.streamsets.pipeline.lib.jdbc.parser.sql.UnsupportedFieldTypeValues;
import com.streamsets.pipeline.stage.origin.jdbc.cdc.CDCSourceConfigBean;
import java.util.List;
/**
 * Configuration bean for the PostgreSQL CDC origin. Every option appears in the
 * "CDC" group of the stage UI through its @ConfigDef annotation; the field
 * values are injected by the Data Collector framework at pipeline start.
 */
public class PostgresCDCConfigBean {

  @ConfigDefBean
  public CDCSourceConfigBean baseConfigBean = new CDCSourceConfigBean();

  // Where to start reading the WAL from: latest, a date, or an explicit LSN.
  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      label = "Initial Change",
      description = "Determines where to start reading",
      displayPosition = 40,
      group = "CDC",
      defaultValue = "LATEST"
  )
  @ValueChooserModel(StartChooserValues.class)
  public StartValues startValue;

  // Only shown when startValue == DATE.
  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      label = "Start Date",
      description = "Datetime to use for the initial change. Use the following format: MM-DD-YYYY HH24:MM:SS.",
      displayPosition = 45,
      group = "CDC",
      dependsOn = "startValue",
      triggeredByValue = "DATE"
  )
  public String startDate;

  // Needed to interpret startDate correctly; only shown when startValue == DATE.
  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      label = "DB Time Zone",
      description = "Time Zone that the DB is operating in",
      displayPosition = 45,
      group = "CDC",
      dependsOn = "startValue",
      triggeredByValue = "DATE"
  )
  @ValueChooserModel(TimeZoneChooserValues.class)
  public String dbTimeZone;

  // Only shown when startValue == LSN.
  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      label = "Start LSN",
      description = "Logical Sequence Number to use for the initial change",
      displayPosition = 45,
      group = "CDC",
      dependsOn = "startValue",
      triggeredByValue = "LSN"
  )
  public String startLSN;

  @ConfigDef(
      required = false,
      type = ConfigDef.Type.BOOLEAN,
      label = "Remove Replication Slot on Close",
      description = "Removing on close means no WAL updates for that slot will be generated, but "
          + "system performance will not be impacted.",
      displayPosition = 50,
      group = "CDC",
      defaultValue = "false")
  public boolean removeSlotOnClose;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      label = "Replication Slot",
      description = "Name of slot to create.",
      defaultValue="sdc",
      displayPosition = 50,
      group = "CDC"
  )
  public String slot;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      label = "Minimum PostgreSQL Version",
      description = "Minimum PostgreSQL version to assume.",
      defaultValue ="NINEFOUR",
      displayPosition = 50,
      group = "CDC"
  )
  @ValueChooserModel(PgVerChooserValues.class)
  public PgVersionValues minVersion;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      defaultValue = "[\"INSERT\", \"UPDATE\", \"DELETE\"]",
      label = "Operations",
      description = "Operations to capture as records. All other operations are ignored.",
      displayPosition = 70,
      group = "CDC"
  )
  @MultiValueChooserModel(PostgresChangeTypesChooserValues.class)
  public List<PostgresChangeTypeValues> postgresChangeTypes;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.NUMBER,
      description = "Maximum amount of time to wait for data before sending a partial or empty batch",
      label = "Batch Wait Time (ms)",
      // Greater than poll time
      defaultValue = "15000",
      min = 1,
      group = "CDC",
      displayPosition = 90
  )
  // NOTE(review): the field initializer (1000L) disagrees with the annotation's
  // defaultValue ("15000"); the framework-injected value normally wins — confirm
  // the initializer is intentional.
  public long maxBatchWaitTime = 1000L;

  //HIDDEN - only choice supported today
  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      label = "Replication Type",
      description = "Database support",
      defaultValue ="database",
      displayPosition = 50,
      group = "CDC"
  )
  public String replicationType;

  //HIDDEN - only choice supported today
  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      label = "Output Decoder",
      description = "Output Decoder installed with Postgres",
      displayPosition = 50,
      group = "CDC",
      defaultValue = "WAL2JSON"
  )
  @ValueChooserModel(DecoderChooserValues.class)
  public DecoderValues decoderValue;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      label = "Unsupported Field Type",
      description = "Action to take if an unsupported field type is encountered.",
      displayPosition = 110,
      group = "CDC",
      defaultValue = "TO_ERROR"
  )
  @ValueChooserModel(UnsupportedFieldTypeChooserValues.class)
  public UnsupportedFieldTypeValues unsupportedFieldOp;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.BOOLEAN,
      label = "Add Unsupported Fields",
      description = "Add values of unsupported fields as unparsed strings to records",
      displayPosition = 115,
      group = "CDC",
      defaultValue = "false"
  )
  public boolean sendUnsupportedFields;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.BOOLEAN,
      defaultValue = "false",
      label = "Convert Timestamp To String",
      description = "Rather than representing timestamps as Data Collector DATETIME type, use String.",
      displayPosition = 120,
      group = "CDC"
  )
  public boolean convertTimestampToString;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.NUMBER,
      label = "Query Timeout",
      description = "Time to wait before timing out a WAL query and returning the batch.",
      displayPosition = 140,
      group = "CDC",
      elDefs = TimeEL.class,
      defaultValue = "${5 * MINUTES}"
  )
  public int queryTimeout;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.NUMBER,
      label = "Poll Interval",
      description = "Interval between checking for CDC updates when no data.",
      displayPosition = 140,
      group = "CDC",
      elDefs = TimeEL.class,
      defaultValue = "${1 * SECONDS}"
  )
  public int pollInterval;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.NUMBER,
      label = "Status Interval",
      description = "Interval between sending heart beats to Postgres. Should be less than wal_sender_timeout",
      displayPosition = 145,
      group = "CDC",
      elDefs = TimeEL.class,
      // Default wal_sender_timeout is 60 seconds
      defaultValue = "${30 * SECONDS}"
  )
  public int statusInterval;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.NUMBER,
      label = "CDC Generator Queue Size",
      description = "CDC Generator Queue Size.",
      displayPosition = 150,
      group = "CDC",
      min = 1,
      max = Integer.MAX_VALUE,
      defaultValue = "500"
  )
  public int generatorQueueMaxSize;

  //HIDDEN
  @ConfigDef(
      required = false,
      type = ConfigDef.Type.BOOLEAN,
      label = "Parse SQL Query",
      description = "",
      displayPosition = 150,
      group = "CDC",
      defaultValue = "false"
  )
  public boolean parseQuery;
}
| |
package au.com.icontacts.sync;
import android.accounts.Account;
import android.accounts.AccountManager;
import android.accounts.AccountManagerCallback;
import android.accounts.AccountManagerFuture;
import android.accounts.AuthenticatorException;
import android.accounts.OperationCanceledException;
import android.app.Activity;
import android.content.Context;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.HashMap;
import au.com.icontacts.R;
/**
 * Provides access to the iDashboard API.
 *
 * <p>Static utility: {@link #connect} must be called (and complete) before the
 * other request methods, as they rely on the cached account/auth-token state.
 */
public final class IDashApi {
    private static Context mContext;
    private static AccountManager mAccountManager;
    private static String mAuthToken;
    private static Account mConnectedAccount;

    private static final String BASE_URL = "http://www.idashboard.com.au/api/";
    private static final String CONTACTS = "contacts/";

    /** Static utility class — not instantiable. */
    private IDashApi() {
    }

    /**
     * Obtains an auth token for an account of the given type and caches the
     * token and account for later API calls.
     */
    public static void connect(Activity activity, final String accountType, final String authTokenType) {
        mContext = activity;
        mAccountManager = AccountManager.get(mContext);
        mAccountManager.getAuthTokenByFeatures(accountType, authTokenType, null, activity, null, null,
                new AccountManagerCallback<Bundle>() {
                    @Override
                    public void run(AccountManagerFuture<Bundle> future) {
                        try {
                            Bundle bundle = future.getResult();
                            mAuthToken = bundle.getString(AccountManager.KEY_AUTHTOKEN);
                            // String.valueOf() guards against Log throwing on a null token.
                            Log.i("connect", String.valueOf(mAuthToken));
                            if (mAuthToken != null) {
                                String accountName = bundle.getString(AccountManager.KEY_ACCOUNT_NAME);
                                mConnectedAccount = new Account(accountName, accountType);
                            }
                        } catch (OperationCanceledException e) {
                            // TODO: Handle exception
                        } catch (IOException e) {
                            // TODO: Handle exception
                        } catch (AuthenticatorException e) {
                            // TODO: Handle exception
                        }
                    }
                }
                , null);
    }

    /**
     * Logs in with the given credentials and returns the access token, or null
     * on failure. (authTokenType is currently unused — kept for API stability.)
     */
    public static String userLogin(String username, String password, String authTokenType) {
        String parameters = generateLoginParameters(username, password);
        try {
            return performLoginRequest(new URL(mContext.getString(R.string.auth_url)), parameters);
        } catch (MalformedURLException e) {
            // TODO: handle
        }
        // Failed to obtain an authToken
        return null;
    }

    /** Performs a login request to the API. */
    private static String performLoginRequest(URL url, String parameters) {
        HttpURLConnection connection = null;
        try {
            connection = (HttpURLConnection) url.openConnection();
            connection.setDoOutput(true);
            connection.setRequestProperty("Content-type", "application/x-www-form-urlencoded");
            connection.setRequestMethod("POST");
            OutputStreamWriter request = new OutputStreamWriter(connection.getOutputStream());
            request.write(parameters);
            request.flush();
            request.close();
            if (connection.getResponseCode() != 200) { return null; }
            JSONObject response = readResponse(connection);
            if (response.has("access_token")) {
                Log.i("accessToken", response.getString("access_token"));
                return response.getString("access_token");
            }
        } catch (IOException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
            // TODO: Handle properly.
        } catch (JSONException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
            // TODO: Handle properly.
        } finally {
            if (connection != null) { connection.disconnect(); }
        }
        return null;
    }

    /** Gets the JSON for a single page of contacts */
    public static JSONObject getContactPage(int page, int perPage) throws JSONException {
        HashMap<String, String> params = new HashMap<String, String>();
        params.put("page", String.valueOf(page));
        params.put("per_page", String.valueOf(perPage));
        HttpURLConnection connection = getApiConnection(BASE_URL + CONTACTS, params);
        return readResponse(connection).getJSONObject("results");
    }

    /** Gets full details for a single contact */
    public static JSONObject getContact(int id) throws JSONException {
        HttpURLConnection connection = getApiConnection(BASE_URL + CONTACTS + id);
        return readResponse(connection).getJSONObject("contact");
    }

    /** Gets an API connection for the desired URL */
    private static HttpURLConnection getApiConnection(String urlString) {
        return getApiConnection(urlString, null);
    }

    /** Gets an API connection for the desired URL with the desired parameters */
    @SuppressWarnings("deprecation")
    private static HttpURLConnection getApiConnection(String urlString, HashMap<String, String> params) {
        HttpURLConnection connection;
        Bundle future;
        try {
            if (params != null) { urlString += createParamString(params); }
            URL url = new URL(urlString);
            connection = (HttpURLConnection) url.openConnection();
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                // TODO: Either mAccountManager or mConnectedAccount is null here. WHYYY.
                future = mAccountManager.getAuthToken(mConnectedAccount, "Full Access", null, true, null, null).getResult();
            } else {
                future = mAccountManager.getAuthToken(mConnectedAccount, "Full Access", true, null, null).getResult();
            }
            mAuthToken = future.getString(AccountManager.KEY_AUTHTOKEN);
            // String.valueOf() guards against Log throwing on a null token.
            Log.i("getApiConnection", String.valueOf(mAuthToken));
            connection.setRequestProperty("Authorization", "Bearer " + mAuthToken);
            return connection;
        } catch (IOException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
            // TODO: Handle properly.
        } catch (OperationCanceledException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
            // TODO: Handle properly.
        } catch (AuthenticatorException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
            // TODO: Handle properly.
        }
    }

    /**
     * Turns an API response into a usable JSONObject
     * @param connection the connection from which to read the response
     * @return JSONObject parsed from the API response
     */
    private static JSONObject readResponse(HttpURLConnection connection) {
        try {
            BufferedReader rd = new BufferedReader(new InputStreamReader(connection.getInputStream()));
            String line;
            StringBuilder sb = new StringBuilder();
            while ((line = rd.readLine()) != null) {
                sb.append(line).append('\r');
            }
            rd.close();
            return new JSONObject(sb.toString());
        } catch (IOException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
            // TODO: Handle properly.
        } catch (JSONException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
            // TODO: Handle properly.
        } finally {
            if (connection != null) { connection.disconnect(); }
        }
    }

    /** Creates a URL query string from a HashMap, eg: ?param1=value1&param2=value2 */
    private static String createParamString(HashMap<String, String> params) {
        StringBuilder sb = new StringBuilder("?");
        for (String key : params.keySet()) {
            // Percent-encode both sides so special characters cannot corrupt the query.
            sb.append(urlEncode(key)).append("=").append(urlEncode(params.get(key))).append("&");
        }
        sb.deleteCharAt(sb.length() - 1);
        return sb.toString();
    }

    /** Builds the x-www-form-urlencoded body for the OAuth password-grant login. */
    private static String generateLoginParameters(String username, String password) {
        StringBuilder sb = new StringBuilder();
        // Credentials are percent-encoded so characters like '&' or '=' in a
        // password cannot break or inject extra form fields.
        sb.append("grant_type=password&username=").append(urlEncode(username))
                .append("&password=").append(urlEncode(password))
                .append("&client_id=").append(urlEncode(mContext.getString(R.string.client_id)))
                .append("&client_secret=").append(urlEncode(mContext.getString(R.string.client_secret)));
        return sb.toString();
    }

    /** Percent-encodes a single query-string component as UTF-8. */
    private static String urlEncode(String value) {
        try {
            return URLEncoder.encode(value, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 support is mandated by the Java platform; this cannot happen.
            throw new AssertionError(e);
        }
    }
}
| |
/*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gson;
import com.google.gson.internal.ConstructorConstructor;
import com.google.gson.internal.Excluder;
import com.google.gson.internal.GsonInternalAccess;
import com.google.gson.internal.Primitives;
import com.google.gson.internal.Streams;
import com.google.gson.internal.bind.ArrayTypeAdapter;
import com.google.gson.internal.bind.BigDecimalTypeAdapter;
import com.google.gson.internal.bind.BigIntegerTypeAdapter;
import com.google.gson.internal.bind.CollectionTypeAdapterFactory;
import com.google.gson.internal.bind.DateTypeAdapter;
import com.google.gson.internal.bind.JsonTreeReader;
import com.google.gson.internal.bind.JsonTreeWriter;
import com.google.gson.internal.bind.MapTypeAdapterFactory;
import com.google.gson.internal.bind.ObjectTypeAdapter;
import com.google.gson.internal.bind.ReflectiveTypeAdapterFactory;
import com.google.gson.internal.bind.SqlDateTypeAdapter;
import com.google.gson.internal.bind.TimeTypeAdapter;
import com.google.gson.internal.bind.TypeAdapters;
import com.google.gson.reflect.TypeToken;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import com.google.gson.stream.JsonWriter;
import com.google.gson.stream.MalformedJsonException;
import java.io.EOFException;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* This is the main class for using Gson. Gson is typically used by first constructing a
* Gson instance and then invoking {@link #toJson(Object)} or {@link #fromJson(String, Class)}
* methods on it.
*
* <p>You can create a Gson instance by invoking {@code new Gson()} if the default configuration
* is all you need. You can also use {@link GsonBuilder} to build a Gson instance with various
* configuration options such as versioning support, pretty printing, custom
* {@link JsonSerializer}s, {@link JsonDeserializer}s, and {@link InstanceCreator}s.</p>
*
* <p>Here is an example of how Gson is used for a simple Class:
*
* <pre>
* Gson gson = new Gson(); // Or use new GsonBuilder().create();
* MyType target = new MyType();
* String json = gson.toJson(target); // serializes target to Json
* MyType target2 = gson.fromJson(json, MyType.class); // deserializes json into target2
* </pre></p>
*
 * <p>If the object that you are serializing/deserializing is a {@code ParameterizedType}
* (i.e. contains at least one type parameter and may be an array) then you must use the
* {@link #toJson(Object, Type)} or {@link #fromJson(String, Type)} method. Here is an
 * example for serializing and deserializing a {@code ParameterizedType}:
*
* <pre>
* Type listType = new TypeToken<List<String>>() {}.getType();
* List<String> target = new LinkedList<String>();
* target.add("blah");
*
* Gson gson = new Gson();
* String json = gson.toJson(target, listType);
* List<String> target2 = gson.fromJson(json, listType);
* </pre></p>
*
* <p>See the <a href="https://sites.google.com/site/gson/gson-user-guide">Gson User Guide</a>
* for a more complete set of examples.</p>
*
* @see com.google.gson.reflect.TypeToken
*
* @author Inderjeet Singh
* @author Joel Leitch
*/
public final class Gson {
// Default for GsonBuilder#generateNonExecutableJson(): emit plain JSON with no prefix.
static final boolean DEFAULT_JSON_NON_EXECUTABLE = false;
// Prefix emitted ahead of the payload when non-executable JSON is enabled; it makes the
// output a syntax error if a browser sources it directly via a <script> tag.
private static final String JSON_NON_EXECUTABLE_PREFIX = ")]}'\n";
/**
 * This thread local guards against reentrant calls to getAdapter(). In
 * certain object graphs, creating an adapter for a type may recursively
 * require an adapter for the same type! Without intervention, the recursive
 * lookup would stack overflow. We cheat by returning a proxy type adapter.
 * The proxy is wired up once the initial adapter has been created.
 */
private final ThreadLocal<Map<TypeToken<?>, FutureTypeAdapter<?>>> calls
= new ThreadLocal<Map<TypeToken<?>, FutureTypeAdapter<?>>>() {
@Override protected Map<TypeToken<?>, FutureTypeAdapter<?>> initialValue() {
return new HashMap<TypeToken<?>, FutureTypeAdapter<?>>();
}
};
// Cache of fully-built adapters, shared across threads (synchronized wrapper).
private final Map<TypeToken<?>, TypeAdapter<?>> typeTokenCache
= Collections.synchronizedMap(new HashMap<TypeToken<?>, TypeAdapter<?>>());
// Ordered factory chain consulted by getAdapter(); the first factory to return
// a non-null adapter wins.
private final List<TypeAdapterFactory> factories;
// Builds object instances during deserialization, honoring user-registered InstanceCreators.
private final ConstructorConstructor constructorConstructor;
private final boolean serializeNulls;
private final boolean htmlSafe;
private final boolean generateNonExecutableJson;
private final boolean prettyPrinting;
// Context handed to custom JsonDeserializer implementations; simply delegates back to this Gson.
final JsonDeserializationContext deserializationContext = new JsonDeserializationContext() {
@SuppressWarnings("unchecked")
public <T> T deserialize(JsonElement json, Type typeOfT) throws JsonParseException {
return (T) fromJson(json, typeOfT);
}
};
// Context handed to custom JsonSerializer implementations; simply delegates back to this Gson.
final JsonSerializationContext serializationContext = new JsonSerializationContext() {
public JsonElement serialize(Object src) {
return toJsonTree(src);
}
public JsonElement serialize(Object src, Type typeOfSrc) {
return toJsonTree(src, typeOfSrc);
}
};
/**
 * Constructs a Gson object with default configuration. The default configuration has the
 * following settings:
 * <ul>
 * <li>The JSON generated by <code>toJson</code> methods is in compact representation. This
 * means that all the unneeded white-space is removed. You can change this behavior with
 * {@link GsonBuilder#setPrettyPrinting()}. </li>
 * <li>The generated JSON omits all the fields that are null. Note that nulls in arrays are
 * kept as is since an array is an ordered list. Moreover, if a field is not null, but its
 * generated JSON is empty, the field is kept. You can configure Gson to serialize null values
 * by setting {@link GsonBuilder#serializeNulls()}.</li>
 * <li>Gson provides default serialization and deserialization for Enums, {@link Map},
 * {@link java.net.URL}, {@link java.net.URI}, {@link java.util.Locale}, {@link java.util.Date},
 * {@link java.math.BigDecimal}, and {@link java.math.BigInteger} classes. If you would prefer
 * to change the default representation, you can do so by registering a type adapter through
 * {@link GsonBuilder#registerTypeAdapter(Type, Object)}. </li>
 * <li>The default Date format is same as {@link java.text.DateFormat#DEFAULT}. This format
 * ignores the millisecond portion of the date during serialization. You can change
 * this by invoking {@link GsonBuilder#setDateFormat(int)} or
 * {@link GsonBuilder#setDateFormat(String)}. </li>
 * <li>By default, Gson ignores the {@link com.google.gson.annotations.Expose} annotation.
 * You can enable Gson to serialize/deserialize only those fields marked with this annotation
 * through {@link GsonBuilder#excludeFieldsWithoutExposeAnnotation()}. </li>
 * <li>By default, Gson ignores the {@link com.google.gson.annotations.Since} annotation. You
 * can enable Gson to use this annotation through {@link GsonBuilder#setVersion(double)}.</li>
 * <li>The default field naming policy for the output Json is same as in Java. So, a Java class
 * field <code>versionNumber</code> will be output as <code>&quot;versionNumber&quot;</code> in
 * Json. The same rules are applied for mapping incoming Json to the Java classes. You can
 * change this policy through {@link GsonBuilder#setFieldNamingPolicy(FieldNamingPolicy)}.</li>
 * <li>By default, Gson excludes <code>transient</code> or <code>static</code> fields from
 * consideration for serialization and deserialization. You can change this behavior through
 * {@link GsonBuilder#excludeFieldsWithModifiers(int...)}.</li>
 * </ul>
 */
public Gson() {
this(Excluder.DEFAULT, FieldNamingPolicy.IDENTITY,
Collections.<Type, InstanceCreator<?>>emptyMap(), false, false, DEFAULT_JSON_NON_EXECUTABLE,
true, false, false, LongSerializationPolicy.DEFAULT,
Collections.<TypeAdapterFactory>emptyList());
}
/**
 * Full configuration constructor, invoked by {@link GsonBuilder#create()} and by the
 * no-args {@link #Gson()} constructor with default settings. Assembles the ordered
 * {@link TypeAdapterFactory} chain: non-overridable built-ins first, then user factories,
 * then overridable built-ins, with the reflective factory as the final fallback.
 */
Gson(final Excluder excluder, final FieldNamingStrategy fieldNamingPolicy,
    final Map<Type, InstanceCreator<?>> instanceCreators, boolean serializeNulls,
    boolean complexMapKeySerialization, boolean generateNonExecutableGson, boolean htmlSafe,
    boolean prettyPrinting, boolean serializeSpecialFloatingPointValues,
    LongSerializationPolicy longSerializationPolicy,
    List<TypeAdapterFactory> typeAdapterFactories) {
  this.constructorConstructor = new ConstructorConstructor(instanceCreators);
  this.serializeNulls = serializeNulls;
  this.generateNonExecutableJson = generateNonExecutableGson;
  this.htmlSafe = htmlSafe;
  this.prettyPrinting = prettyPrinting;
  TypeAdapterFactory reflectiveTypeAdapterFactory = new ReflectiveTypeAdapterFactory(
      constructorConstructor, fieldNamingPolicy, excluder);
  // BUG FIX: a local "ConstructorConstructor constructorConstructor = new ConstructorConstructor()"
  // used to shadow the field here. The collection and map factories below then received a
  // ConstructorConstructor built WITHOUT the user-supplied instanceCreators, so custom
  // InstanceCreators were silently ignored for collections and maps. All factories now share
  // the field initialized from instanceCreators above.
  List<TypeAdapterFactory> factories = new ArrayList<TypeAdapterFactory>();
  // built-in type adapters that cannot be overridden
  factories.add(TypeAdapters.STRING_FACTORY);
  factories.add(TypeAdapters.INTEGER_FACTORY);
  factories.add(TypeAdapters.BOOLEAN_FACTORY);
  factories.add(TypeAdapters.BYTE_FACTORY);
  factories.add(TypeAdapters.SHORT_FACTORY);
  factories.add(TypeAdapters.newFactory(long.class, Long.class,
      longAdapter(longSerializationPolicy)));
  factories.add(TypeAdapters.newFactory(double.class, Double.class,
      doubleAdapter(serializeSpecialFloatingPointValues)));
  factories.add(TypeAdapters.newFactory(float.class, Float.class,
      floatAdapter(serializeSpecialFloatingPointValues)));
  // The excluder participates as a factory so it can veto excluded types entirely.
  factories.add(excluder);
  factories.add(TypeAdapters.NUMBER_FACTORY);
  factories.add(TypeAdapters.CHARACTER_FACTORY);
  factories.add(TypeAdapters.STRING_BUILDER_FACTORY);
  factories.add(TypeAdapters.STRING_BUFFER_FACTORY);
  factories.add(TypeAdapters.newFactory(BigDecimal.class, new BigDecimalTypeAdapter()));
  factories.add(TypeAdapters.newFactory(BigInteger.class, new BigIntegerTypeAdapter()));
  factories.add(TypeAdapters.JSON_ELEMENT_FACTORY);
  factories.add(ObjectTypeAdapter.FACTORY);
  // user's type adapters
  factories.addAll(typeAdapterFactories);
  // built-in type adapters that can be overridden
  factories.add(new CollectionTypeAdapterFactory(constructorConstructor));
  factories.add(TypeAdapters.URL_FACTORY);
  factories.add(TypeAdapters.URI_FACTORY);
  factories.add(TypeAdapters.UUID_FACTORY);
  factories.add(TypeAdapters.LOCALE_FACTORY);
  factories.add(TypeAdapters.INET_ADDRESS_FACTORY);
  factories.add(TypeAdapters.BIT_SET_FACTORY);
  factories.add(DateTypeAdapter.FACTORY);
  factories.add(TypeAdapters.CALENDAR_FACTORY);
  factories.add(TimeTypeAdapter.FACTORY);
  factories.add(SqlDateTypeAdapter.FACTORY);
  factories.add(TypeAdapters.TIMESTAMP_FACTORY);
  factories.add(new MapTypeAdapterFactory(constructorConstructor, complexMapKeySerialization));
  factories.add(ArrayTypeAdapter.FACTORY);
  factories.add(TypeAdapters.ENUM_FACTORY);
  factories.add(TypeAdapters.CLASS_FACTORY);
  // Reflective fallback must come last: it can handle any type.
  factories.add(reflectiveTypeAdapterFactory);
  this.factories = Collections.unmodifiableList(factories);
}
/**
 * Returns the adapter used for {@code double}/{@code Double}. When special floating point
 * values are allowed, the permissive shared adapter is used; otherwise a strict adapter
 * that rejects NaN/Infinity on write (as required by the JSON specification).
 */
private TypeAdapter<Number> doubleAdapter(boolean serializeSpecialFloatingPointValues) {
  if (serializeSpecialFloatingPointValues) {
    return TypeAdapters.DOUBLE;
  }
  return new TypeAdapter<Number>() {
    @Override public Double read(JsonReader in) throws IOException {
      if (in.peek() != JsonToken.NULL) {
        return in.nextDouble();
      }
      in.nextNull();
      return null;
    }
    @Override public void write(JsonWriter out, Number value) throws IOException {
      if (value == null) {
        out.nullValue();
      } else {
        // Reject NaN/Infinity before anything is written to the stream.
        checkValidFloatingPoint(value.doubleValue());
        out.value(value);
      }
    }
  };
}
/**
 * Returns the adapter used for {@code float}/{@code Float}. Mirrors
 * {@code doubleAdapter}: permissive shared adapter when special values are allowed,
 * otherwise a strict adapter that rejects NaN/Infinity on write.
 */
private TypeAdapter<Number> floatAdapter(boolean serializeSpecialFloatingPointValues) {
  if (serializeSpecialFloatingPointValues) {
    return TypeAdapters.FLOAT;
  }
  return new TypeAdapter<Number>() {
    @Override public Float read(JsonReader in) throws IOException {
      if (in.peek() != JsonToken.NULL) {
        // JsonReader exposes doubles only; narrow to float on the way out.
        return (float) in.nextDouble();
      }
      in.nextNull();
      return null;
    }
    @Override public void write(JsonWriter out, Number value) throws IOException {
      if (value == null) {
        out.nullValue();
      } else {
        // Reject NaN/Infinity before anything is written to the stream.
        checkValidFloatingPoint(value.floatValue());
        out.value(value);
      }
    }
  };
}
/**
 * Rejects NaN and infinite values, which the JSON specification does not permit.
 *
 * @throws IllegalArgumentException if {@code value} is NaN or infinite
 */
private void checkValidFloatingPoint(double value) {
  if (Double.isNaN(value) || Double.isInfinite(value)) {
    // BUG FIX: the message previously pointed users at a nonexistent
    // GsonBuilder.serializeSpecialDoubleValues() method; the actual opt-out is
    // GsonBuilder.serializeSpecialFloatingPointValues().
    throw new IllegalArgumentException(value
        + " is not a valid double value as per JSON specification. To override this"
        + " behavior, use GsonBuilder.serializeSpecialFloatingPointValues() method.");
  }
}
/**
 * Returns the adapter used for {@code long}/{@code Long}. The DEFAULT policy writes
 * numeric longs via the shared adapter; any other policy serializes longs via
 * {@code value.toString()}, i.e. as JSON strings.
 */
private TypeAdapter<Number> longAdapter(LongSerializationPolicy longSerializationPolicy) {
  if (longSerializationPolicy == LongSerializationPolicy.DEFAULT) {
    return TypeAdapters.LONG;
  }
  return new TypeAdapter<Number>() {
    @Override public Number read(JsonReader in) throws IOException {
      if (in.peek() != JsonToken.NULL) {
        return in.nextLong();
      }
      in.nextNull();
      return null;
    }
    @Override public void write(JsonWriter out, Number value) throws IOException {
      if (value == null) {
        out.nullValue();
      } else {
        out.value(value.toString());
      }
    }
  };
}
/**
 * Returns the type adapter for {@code type}.
 *
 * @throws IllegalArgumentException if this GSON cannot serialize and
 * deserialize {@code type}.
 */
@SuppressWarnings("unchecked")
public <T> TypeAdapter<T> getAdapter(TypeToken<T> type) {
// Fast path: a fully-built adapter is already cached.
TypeAdapter<?> cached = typeTokenCache.get(type);
if (cached != null) {
return (TypeAdapter<T>) cached;
}
Map<TypeToken<?>, FutureTypeAdapter<?>> threadCalls = calls.get();
// the key and value type parameters always agree
FutureTypeAdapter<T> ongoingCall = (FutureTypeAdapter<T>) threadCalls.get(type);
if (ongoingCall != null) {
// Reentrant lookup: this thread is already building an adapter for this very
// type, so return the placeholder to break the recursion (see `calls` javadoc).
return ongoingCall;
}
// Record an in-progress placeholder before consulting the factories, since a
// factory may recursively call back into getAdapter() for the same type.
FutureTypeAdapter<T> call = new FutureTypeAdapter<T>();
threadCalls.put(type, call);
try {
for (TypeAdapterFactory factory : factories) {
TypeAdapter<T> candidate = factory.create(this, type);
if (candidate != null) {
// Wire the placeholder to the real adapter so recursive references resolve,
// then cache the result for all threads.
call.setDelegate(candidate);
typeTokenCache.put(type, candidate);
return candidate;
}
}
throw new IllegalArgumentException("GSON cannot handle " + type);
} finally {
// Always clear the in-progress marker, even when no factory matched.
threadCalls.remove(type);
}
}
static {
  // Publishes a package-internal hook that resolves the adapter which would have been
  // chosen if `skipPast` had returned null — i.e. the "next" adapter in the factory chain.
  GsonInternalAccess.INSTANCE = new GsonInternalAccess() {
    @Override public <T> TypeAdapter<T> getNextAdapter(
        Gson gson, TypeAdapterFactory skipPast, TypeToken<T> type) {
      boolean pastSkip = false;
      for (TypeAdapterFactory factory : gson.factories) {
        if (!pastSkip) {
          // Skip every factory up to and including `skipPast` itself.
          pastSkip = factory == skipPast;
          continue;
        }
        TypeAdapter<T> candidate = factory.create(gson, type);
        if (candidate != null) {
          return candidate;
        }
      }
      throw new IllegalArgumentException("GSON cannot serialize " + type);
    }
  };
}
/**
 * Returns the type adapter for {@code type}.
 *
 * @throws IllegalArgumentException if this GSON cannot serialize and
 * deserialize {@code type}.
 */
public <T> TypeAdapter<T> getAdapter(Class<T> type) {
// Convenience overload: wrap the raw class and defer to the TypeToken variant.
return getAdapter(TypeToken.get(type));
}
/**
 * Serializes {@code src} into an equivalent tree of {@link JsonElement}s. Use this method
 * only for non-generic objects: the runtime type is obtained via {@code src.getClass()},
 * which erases generic type information. For generic objects use
 * {@link #toJsonTree(Object, Type)} instead.
 *
 * @param src the object for which a Json tree representation is to be created
 * @return Json representation of {@code src}; {@link JsonNull#INSTANCE} if {@code src} is null
 * @since 1.4
 */
public JsonElement toJsonTree(Object src) {
  return src == null ? JsonNull.INSTANCE : toJsonTree(src, src.getClass());
}
/**
 * Serializes {@code src}, including objects of generic types, into an equivalent tree of
 * {@link JsonElement}s. This overload must be used when {@code src} is generic; for
 * non-generic objects {@link #toJsonTree(Object)} suffices.
 *
 * @param src the object for which a JSON tree representation is to be created
 * @param typeOfSrc the specific genericized type of {@code src}, typically obtained via
 * {@code new TypeToken<Collection<Foo>>(){}.getType()}
 * @return Json representation of {@code src}
 * @since 1.4
 */
public JsonElement toJsonTree(Object src, Type typeOfSrc) {
  // Serialize into an in-memory JsonElement sink instead of a character stream.
  JsonTreeWriter treeSink = new JsonTreeWriter();
  toJson(src, typeOfSrc, treeSink);
  return treeSink.get();
}
/**
 * Serializes {@code src} into its equivalent Json representation. Use only for non-generic
 * objects: the type is taken from {@code src.getClass()}, which loses generic type
 * information to erasure. For generic objects use {@link #toJson(Object, Type)}; to write
 * to a {@link Writer}, use {@link #toJson(Object, Appendable)}.
 *
 * @param src the object for which a Json representation is to be created
 * @return Json representation of {@code src} ("null" for a null {@code src})
 */
public String toJson(Object src) {
  return src == null ? toJson(JsonNull.INSTANCE) : toJson(src, src.getClass());
}
/**
 * Serializes {@code src}, including objects of generic types, into its equivalent Json
 * representation. This overload must be used when {@code src} is generic; for non-generic
 * objects use {@link #toJson(Object)}. To write to an {@link Appendable}, use
 * {@link #toJson(Object, Type, Appendable)}.
 *
 * @param src the object for which a JSON representation is to be created
 * @param typeOfSrc the specific genericized type of {@code src}, typically obtained via
 * {@code new TypeToken<Collection<Foo>>(){}.getType()}
 * @return Json representation of {@code src}
 */
public String toJson(Object src, Type typeOfSrc) {
  StringWriter buffer = new StringWriter();
  toJson(src, typeOfSrc, buffer);
  return buffer.toString();
}
/**
 * Serializes {@code src} and writes its Json representation to {@code writer}. Use only for
 * non-generic objects (type is taken from {@code src.getClass()}); for generic objects use
 * {@link #toJson(Object, Type, Appendable)}.
 *
 * @param src the object for which a Json representation is to be created
 * @param writer Writer to which the Json representation needs to be written
 * @throws JsonIOException if there was a problem writing to the writer
 * @since 1.2
 */
public void toJson(Object src, Appendable writer) throws JsonIOException {
  if (src == null) {
    // A null source is rendered as JSON null.
    toJson(JsonNull.INSTANCE, writer);
  } else {
    toJson(src, src.getClass(), writer);
  }
}
/**
 * Serializes {@code src}, including objects of generic types, and writes its Json
 * representation to {@code writer}. This overload must be used when {@code src} is generic;
 * for non-generic objects use {@link #toJson(Object, Appendable)}.
 *
 * @param src the object for which a JSON representation is to be created
 * @param typeOfSrc the specific genericized type of {@code src}, typically obtained via
 * {@code new TypeToken<Collection<Foo>>(){}.getType()}
 * @param writer Writer to which the Json representation of src needs to be written
 * @throws JsonIOException if there was a problem writing to the writer
 * @since 1.2
 */
public void toJson(Object src, Type typeOfSrc, Appendable writer) throws JsonIOException {
  try {
    // Wrap the Appendable in a JsonWriter configured for this Gson instance.
    JsonWriter sink = newJsonWriter(Streams.writerForAppendable(writer));
    toJson(src, typeOfSrc, sink);
  } catch (IOException e) {
    throw new JsonIOException(e);
  }
}
/**
 * Writes the JSON representation of {@code src} of type {@code typeOfSrc} to
 * {@code writer}, temporarily imposing this Gson instance's leniency, HTML-safety and
 * null-serialization settings on the writer and restoring its previous settings afterwards.
 *
 * @throws JsonIOException if there was a problem writing to the writer
 */
@SuppressWarnings("unchecked")
public void toJson(Object src, Type typeOfSrc, JsonWriter writer) throws JsonIOException {
  TypeAdapter<Object> adapter = (TypeAdapter<Object>) getAdapter(TypeToken.get(typeOfSrc));
  // Snapshot the writer configuration so it can be restored in the finally block.
  boolean prevLenient = writer.isLenient();
  boolean prevHtmlSafe = writer.isHtmlSafe();
  boolean prevSerializeNulls = writer.getSerializeNulls();
  writer.setLenient(true);
  writer.setHtmlSafe(htmlSafe);
  writer.setSerializeNulls(serializeNulls);
  try {
    adapter.write(writer, src);
  } catch (IOException e) {
    throw new JsonIOException(e);
  } finally {
    writer.setLenient(prevLenient);
    writer.setHtmlSafe(prevHtmlSafe);
    writer.setSerializeNulls(prevSerializeNulls);
  }
}
/**
 * Converts a tree of {@link JsonElement}s into its equivalent JSON representation.
 *
 * @param jsonElement root of a tree of {@link JsonElement}s
 * @return JSON String representation of the tree
 * @since 1.4
 */
public String toJson(JsonElement jsonElement) {
  StringWriter buffer = new StringWriter();
  toJson(jsonElement, buffer);
  return buffer.toString();
}
/**
 * Writes out the equivalent JSON for a tree of {@link JsonElement}s.
 *
 * @param jsonElement root of a tree of {@link JsonElement}s
 * @param writer Writer to which the Json representation needs to be written
 * @throws JsonIOException if there was a problem writing to the writer
 * @since 1.4
 */
public void toJson(JsonElement jsonElement, Appendable writer) throws JsonIOException {
  try {
    JsonWriter jsonWriter = newJsonWriter(Streams.writerForAppendable(writer));
    toJson(jsonElement, jsonWriter);
  } catch (IOException e) {
    // BUG FIX: previously wrapped as a raw RuntimeException, violating the documented
    // @throws contract and diverging from every other toJson overload (see the
    // Object/Type overload above, which throws JsonIOException).
    throw new JsonIOException(e);
  }
}
/**
 * Returns a new JSON writer configured for this GSON and with the non-execute
 * prefix if that is configured.
 */
private JsonWriter newJsonWriter(Writer writer) throws IOException {
  // The anti-script-inclusion prefix must hit the raw writer before any JSON tokens.
  if (generateNonExecutableJson) {
    writer.write(JSON_NON_EXECUTABLE_PREFIX);
  }
  JsonWriter result = new JsonWriter(writer);
  if (prettyPrinting) {
    result.setIndent(" ");
  }
  result.setSerializeNulls(serializeNulls);
  return result;
}
/**
 * Writes the JSON for {@code jsonElement} to {@code writer}, temporarily imposing this
 * Gson instance's leniency, HTML-safety and null-serialization settings on the writer and
 * restoring its previous settings afterwards.
 *
 * @throws JsonIOException if there was a problem writing to the writer
 */
public void toJson(JsonElement jsonElement, JsonWriter writer) throws JsonIOException {
  // Snapshot the writer configuration so it can be restored in the finally block.
  boolean prevLenient = writer.isLenient();
  boolean prevHtmlSafe = writer.isHtmlSafe();
  boolean prevSerializeNulls = writer.getSerializeNulls();
  writer.setLenient(true);
  writer.setHtmlSafe(htmlSafe);
  writer.setSerializeNulls(serializeNulls);
  try {
    Streams.write(jsonElement, writer);
  } catch (IOException e) {
    throw new JsonIOException(e);
  } finally {
    writer.setLenient(prevLenient);
    writer.setHtmlSafe(prevHtmlSafe);
    writer.setSerializeNulls(prevSerializeNulls);
  }
}
/**
 * Deserializes the specified Json string into an object of class {@code classOfT}. Not
 * suitable for generic types (erasure strips their type information) — use
 * {@link #fromJson(String, Type)} for those. If the Json is in a {@link Reader}, use
 * {@link #fromJson(Reader, Class)} instead.
 *
 * @param <T> the type of the desired object
 * @param json the string from which the object is to be deserialized
 * @param classOfT the class of T
 * @return an object of type T from the string
 * @throws JsonSyntaxException if json is not a valid representation for an object of type
 * classOfT
 */
public <T> T fromJson(String json, Class<T> classOfT) throws JsonSyntaxException {
  Object raw = fromJson(json, (Type) classOfT);
  // Primitives.wrap maps e.g. int.class to Integer.class so the cast is legal.
  return Primitives.wrap(classOfT).cast(raw);
}
/**
 * Deserializes the specified Json string into an object of the specified type. Use this
 * overload for generic targets; for non-generic ones {@link #fromJson(String, Class)}
 * suffices. If the Json is in a {@link Reader}, use {@link #fromJson(Reader, Type)} instead.
 *
 * @param <T> the type of the desired object
 * @param json the string from which the object is to be deserialized
 * @param typeOfT the specific genericized type of the target, typically obtained via
 * {@code new TypeToken<Collection<Foo>>(){}.getType()}
 * @return an object of type T from the string; null if {@code json} is null
 * @throws JsonParseException if json is not a valid representation for an object of type typeOfT
 * @throws JsonSyntaxException if json is not a valid representation for an object of type
 */
@SuppressWarnings("unchecked")
public <T> T fromJson(String json, Type typeOfT) throws JsonSyntaxException {
  if (json == null) {
    return null;
  }
  return (T) fromJson(new StringReader(json), typeOfT);
}
/**
 * Deserializes the Json read from {@code json} into an object of class {@code classOfT}.
 * Not suitable for generic types (erasure strips their type information) — use
 * {@link #fromJson(Reader, Type)} for those. If the Json is a String, use
 * {@link #fromJson(String, Class)} instead.
 *
 * @param <T> the type of the desired object
 * @param json the reader producing the Json from which the object is to be deserialized
 * @param classOfT the class of T
 * @return an object of type T from the reader
 * @throws JsonIOException if there was a problem reading from the Reader
 * @throws JsonSyntaxException if json is not a valid representation for an object of type
 * @since 1.2
 */
public <T> T fromJson(Reader json, Class<T> classOfT) throws JsonSyntaxException, JsonIOException {
  JsonReader parser = new JsonReader(json);
  Object raw = fromJson(parser, classOfT);
  // Reject trailing content after the first JSON value.
  assertFullConsumption(raw, parser);
  // Primitives.wrap maps e.g. int.class to Integer.class so the cast is legal.
  return Primitives.wrap(classOfT).cast(raw);
}
/**
 * Deserializes the Json read from {@code json} into an object of the specified type. Use
 * this overload for generic targets; for non-generic ones {@link #fromJson(Reader, Class)}
 * suffices. If the Json is a String, use {@link #fromJson(String, Type)} instead.
 *
 * @param <T> the type of the desired object
 * @param json the reader producing Json from which the object is to be deserialized
 * @param typeOfT the specific genericized type of the target, typically obtained via
 * {@code new TypeToken<Collection<Foo>>(){}.getType()}
 * @return an object of type T from the json
 * @throws JsonIOException if there was a problem reading from the Reader
 * @throws JsonSyntaxException if json is not a valid representation for an object of type
 * @since 1.2
 */
@SuppressWarnings("unchecked")
public <T> T fromJson(Reader json, Type typeOfT) throws JsonIOException, JsonSyntaxException {
  JsonReader parser = new JsonReader(json);
  T result = (T) fromJson(parser, typeOfT);
  // Reject trailing content after the first JSON value.
  assertFullConsumption(result, parser);
  return result;
}
/**
 * Fails if the reader still holds JSON content after a successful (non-null) parse,
 * i.e. the document contained trailing data beyond the first value.
 */
private static void assertFullConsumption(Object obj, JsonReader reader) {
  try {
    // Only probe the reader when something was actually parsed.
    boolean fullyConsumed = obj == null || reader.peek() == JsonToken.END_DOCUMENT;
    if (!fullyConsumed) {
      throw new JsonIOException("JSON document was not fully consumed.");
    }
  } catch (MalformedJsonException e) {
    // Must precede the IOException catch: MalformedJsonException is an IOException.
    throw new JsonSyntaxException(e);
  } catch (IOException e) {
    throw new JsonIOException(e);
  }
}
/**
 * Reads the next JSON value from {@code reader} and convert it to an object
 * of type {@code typeOfT}.
 * Since Type is not parameterized by T, this method is type unsafe and should be used carefully
 *
 * @throws JsonIOException if there was a problem writing to the Reader
 * @throws JsonSyntaxException if json is not a valid representation for an object of type
 */
@SuppressWarnings("unchecked")
public <T> T fromJson(JsonReader reader, Type typeOfT) throws JsonIOException, JsonSyntaxException {
boolean isEmpty = true;
boolean oldLenient = reader.isLenient();
reader.setLenient(true);
try {
// peek() throws EOFException on an empty document; isEmpty is still true at that
// point, which the EOFException handler below uses to return null instead of failing.
reader.peek();
isEmpty = false;
TypeAdapter<T> typeAdapter = (TypeAdapter<T>) getAdapter(TypeToken.get(typeOfT));
return typeAdapter.read(reader);
} catch (EOFException e) {
/*
 * For compatibility with JSON 1.5 and earlier, we return null for empty
 * documents instead of throwing.
 */
if (isEmpty) {
return null;
}
// EOF in the middle of a value (after peek succeeded) is a syntax error.
throw new JsonSyntaxException(e);
} catch (IllegalStateException e) {
throw new JsonSyntaxException(e);
} catch (IOException e) {
// TODO(inder): Figure out whether it is indeed right to rethrow this as JsonSyntaxException
throw new JsonSyntaxException(e);
} finally {
// Restore the caller's leniency setting.
reader.setLenient(oldLenient);
}
}
/**
 * Deserializes the Json parse tree {@code json} into an object of class {@code classOfT}.
 * Not suitable for generic types (erasure strips their type information) — use
 * {@link #fromJson(JsonElement, Type)} for those.
 *
 * @param <T> the type of the desired object
 * @param json the root of the parse tree of {@link JsonElement}s from which the object is to
 * be deserialized
 * @param classOfT The class of T
 * @return an object of type T from the json
 * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT
 * @since 1.3
 */
public <T> T fromJson(JsonElement json, Class<T> classOfT) throws JsonSyntaxException {
  Object raw = fromJson(json, (Type) classOfT);
  // Primitives.wrap maps e.g. int.class to Integer.class so the cast is legal.
  return Primitives.wrap(classOfT).cast(raw);
}
/**
 * Deserializes the Json parse tree {@code json} into an object of the specified type. Use
 * this overload for generic targets; for non-generic ones
 * {@link #fromJson(JsonElement, Class)} suffices.
 *
 * @param <T> the type of the desired object
 * @param json the root of the parse tree of {@link JsonElement}s from which the object is to
 * be deserialized
 * @param typeOfT the specific genericized type of the target, typically obtained via
 * {@code new TypeToken<Collection<Foo>>(){}.getType()}
 * @return an object of type T from the json; null if {@code json} is null
 * @throws JsonSyntaxException if json is not a valid representation for an object of type typeOfT
 * @since 1.3
 */
@SuppressWarnings("unchecked")
public <T> T fromJson(JsonElement json, Type typeOfT) throws JsonSyntaxException {
  // Read straight from the in-memory tree rather than re-serializing to text.
  return json == null ? null : (T) fromJson(new JsonTreeReader(json), typeOfT);
}
/**
 * Placeholder adapter handed out by getAdapter() during a reentrant lookup; it forwards
 * all calls to the real adapter once {@link #setDelegate} has wired it up.
 */
static class FutureTypeAdapter<T> extends TypeAdapter<T> {
  // Set exactly once, after the real adapter has been built.
  private TypeAdapter<T> delegate;

  public void setDelegate(TypeAdapter<T> typeAdapter) {
    if (delegate != null) {
      // A placeholder is only ever resolved once.
      throw new AssertionError();
    }
    delegate = typeAdapter;
  }

  @Override public T read(JsonReader in) throws IOException {
    return resolved().read(in);
  }

  @Override public void write(JsonWriter out, T value) throws IOException {
    resolved().write(out, value);
  }

  // Returns the delegate, failing fast if it has not been wired up yet.
  private TypeAdapter<T> resolved() {
    if (delegate == null) {
      throw new IllegalStateException();
    }
    return delegate;
  }
}
/**
 * Returns a short diagnostic description of this Gson instance's configuration.
 */
@Override
public String toString() {
  // BUG FIX: the separator before "factories" was missing, producing output like
  // "{serializeNulls:falsefactories:[...]}" — all fields are now comma-delimited.
  return new StringBuilder("{")
      .append("serializeNulls:").append(serializeNulls)
      .append(",factories:").append(factories)
      .append(",instanceCreators:").append(constructorConstructor)
      .append("}")
      .toString();
}
}
| |
/*
* Copyright (c) 2011-2021 VMware, Inc. or its affiliates, All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactor.netty.udp;
import java.net.SocketAddress;
import java.time.Duration;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Supplier;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.socket.InternetProtocolFamily;
import io.netty.handler.logging.LogLevel;
import io.netty.util.AttributeKey;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Mono;
import reactor.netty.Connection;
import reactor.netty.ConnectionObserver;
import reactor.netty.channel.ChannelMetricsRecorder;
import reactor.netty.resources.LoopResources;
import reactor.netty.transport.AddressUtils;
import reactor.netty.transport.Transport;
import reactor.util.Logger;
import reactor.util.Loggers;
import reactor.util.annotation.Nullable;
import static reactor.netty.ReactorNetty.format;
/**
* A UdpServer allows building, in a safe and immutable way, a UDP server that is
* materialized and bound when {@link #bind()} is ultimately called.
* <p>
* <p> Example:
* <pre>
* {@code
* UdpServer.create()
* .doOnBind(startMetrics)
* .doOnBound(startedMetrics)
* .doOnUnbind(stopMetrics)
* .host("127.0.0.1")
* .port(1234)
* .bind()
* .block()
* }
* </pre>
*
* @author Stephane Maldini
* @author Violeta Georgieva
*/
public abstract class UdpServer extends Transport<UdpServer, UdpServerConfig> {
/**
 * Prepares a new {@link UdpServer} instance ready to be configured and bound.
 *
 * @return a {@link UdpServer}
 */
public static UdpServer create() {
// Shared starting point; per the class contract, configuration methods operate immutably.
return UdpServerBind.INSTANCE;
}
/**
 * Updates a channel attribute for this server; delegates to the parent
 * {@link Transport#attr(AttributeKey, Object)} implementation, narrowing the return type.
 */
@Override
public final <A> UdpServer attr(AttributeKey<A> key, @Nullable A value) {
return super.attr(key, value);
}
/**
* Binds the {@link UdpServer} and returns a {@link Mono} of {@link Connection}. If
* {@link Mono} is cancelled, the underlying binding will be aborted. Once the {@link
* Connection} has been emitted and is not necessary anymore, disposing the main server
* loop must be done by the user via {@link Connection#dispose()}.
*
* @return a {@link Mono} of {@link Connection}
*/
public abstract Mono<? extends Connection> bind();
@Override
public final UdpServer bindAddress(Supplier<? extends SocketAddress> bindAddressSupplier) {
return super.bindAddress(bindAddressSupplier);
}
/**
* Starts the server in a blocking fashion, and waits for it to finish initializing
* or the startup timeout expires (the startup timeout is {@code 45} seconds). The
* returned {@link Connection} offers simple server API, including to {@link
* Connection#disposeNow()} shut it down in a blocking fashion.
*
* @return a {@link Connection}
*/
public final Connection bindNow() {
return bindNow(Duration.ofSeconds(45));
}
/**
* Start the server in a blocking fashion, and wait for it to finish initializing
* or the provided startup timeout expires. The returned {@link Connection}
* offers simple server API, including to {@link Connection#disposeNow()}
* shut it down in a blocking fashion.
*
* @param timeout max startup timeout (resolution: ns)
* @return a {@link Connection}
*/
public final Connection bindNow(Duration timeout) {
Objects.requireNonNull(timeout, "timeout");
try {
return Objects.requireNonNull(bind().block(timeout), "aborted");
}
catch (IllegalStateException e) {
if (e.getMessage().contains("blocking read")) {
throw new IllegalStateException("UdpServer couldn't be started within " + timeout.toMillis() + "ms");
}
throw e;
}
}
/**
* Set or add a callback called when {@link UdpServer} is about to start listening for incoming traffic.
*
* @param doOnBind a consumer observing connected events
* @return a new {@link UdpServer} reference
*/
public final UdpServer doOnBind(Consumer<? super UdpServerConfig> doOnBind) {
Objects.requireNonNull(doOnBind, "doOnBind");
UdpServer dup = duplicate();
@SuppressWarnings("unchecked")
Consumer<UdpServerConfig> current = (Consumer<UdpServerConfig>) dup.configuration().doOnBind;
dup.configuration().doOnBind = current == null ? doOnBind : current.andThen(doOnBind);
return dup;
}
/**
* Set or add a callback called after {@link UdpServer} has been started.
*
* @param doOnBound a consumer observing connected events
* @return a new {@link UdpServer} reference
*/
public final UdpServer doOnBound(Consumer<? super Connection> doOnBound) {
Objects.requireNonNull(doOnBound, "doOnBound");
UdpServer dup = duplicate();
@SuppressWarnings("unchecked")
Consumer<Connection> current = (Consumer<Connection>) dup.configuration().doOnBound;
dup.configuration().doOnBound = current == null ? doOnBound : current.andThen(doOnBound);
return dup;
}
/**
* Set or add a callback called after {@link UdpServer} has been shutdown.
*
* @param doOnUnbound a consumer observing unbound events
* @return a new {@link UdpServer} reference
*/
public final UdpServer doOnUnbound(Consumer<? super Connection> doOnUnbound) {
Objects.requireNonNull(doOnUnbound, "doOnUnbound");
UdpServer dup = duplicate();
@SuppressWarnings("unchecked")
Consumer<Connection> current = (Consumer<Connection>) dup.configuration().doOnUnbound;
dup.configuration().doOnUnbound = current == null ? doOnUnbound : current.andThen(doOnUnbound);
return dup;
}
/**
* Attach an IO handler to react on connected client
*
* @param handler an IO handler that can dispose underlying connection when {@link
* Publisher} terminates.
*
* @return a new {@link UdpServer}
*/
public final UdpServer handle(BiFunction<? super UdpInbound, ? super UdpOutbound, ? extends Publisher<Void>> handler) {
Objects.requireNonNull(handler, "handler");
return doOnBound(new OnBoundHandle(handler));
}
/**
* The host to which this server should bind.
*
* @param host the host to bind to.
* @return a new {@link UdpServer} reference
*/
public final UdpServer host(String host) {
return bindAddress(() -> AddressUtils.updateHost(configuration().bindAddress(), host));
}
@Override
public final UdpServer metrics(boolean enable) {
return super.metrics(enable);
}
@Override
public final UdpServer metrics(boolean enable, Supplier<? extends ChannelMetricsRecorder> recorder) {
return super.metrics(enable, recorder);
}
@Override
public final UdpServer observe(ConnectionObserver observer) {
return super.observe(observer);
}
@Override
public final <O> UdpServer option(ChannelOption<O> key, @Nullable O value) {
return super.option(key, value);
}
/**
* The port to which this server should bind.
*
* @param port The port to bind to.
* @return a new {@link UdpServer} reference
*/
public final UdpServer port(int port) {
return bindAddress(() -> AddressUtils.updatePort(configuration().bindAddress(), port));
}
@Override
public final UdpServer runOn(EventLoopGroup eventLoopGroup) {
return super.runOn(eventLoopGroup);
}
@Override
public final UdpServer runOn(LoopResources channelResources) {
return super.runOn(channelResources);
}
/**
* Run IO loops on a supplied {@link EventLoopGroup} from the {@link LoopResources} container.
*
* @param loopResources a new loop resources
* @param preferNative should prefer running on epoll, kqueue or similar instead of java NIO
* @return a new {@link UdpServer} reference
*/
@Override
public final UdpServer runOn(LoopResources loopResources, boolean preferNative) {
Objects.requireNonNull(loopResources, "loopResources");
UdpServer dup = super.runOn(loopResources, preferNative);
dup.configuration().family = null;
return dup;
}
/**
* Run IO loops on a supplied {@link EventLoopGroup} from the {@link LoopResources} container.
*
* @param loopResources a new loop resources
* @param family a specific {@link InternetProtocolFamily} to run with
* @return a new {@link UdpServer} reference
*/
public final UdpServer runOn(LoopResources loopResources, InternetProtocolFamily family) {
Objects.requireNonNull(loopResources, "loopResources");
Objects.requireNonNull(family, "family");
UdpServer dup = super.runOn(loopResources, false);
dup.configuration().family = family;
return dup;
}
/**
* Based on the actual configuration, returns a {@link Mono} that triggers:
* <ul>
* <li>an initialization of the event loop group</li>
* <li>loads the necessary native libraries for the transport</li>
* </ul>
* By default, when method is not used, the {@code bind operation} absorbs the extra time needed to load resources.
*
* @return a {@link Mono} representing the completion of the warmup
* @since 1.0.3
*/
public final Mono<Void> warmup() {
return Mono.fromRunnable(() -> configuration().eventLoopGroup());
}
@Override
public final UdpServer wiretap(boolean enable) {
return super.wiretap(enable);
}
@Override
public final UdpServer wiretap(String category) {
return super.wiretap(category);
}
@Override
public final UdpServer wiretap(String category, LogLevel level) {
return super.wiretap(category, level);
}
static final Logger log = Loggers.getLogger(UdpServer.class);
static final class OnBoundHandle implements Consumer<Connection> {
final BiFunction<? super UdpInbound, ? super UdpOutbound, ? extends Publisher<Void>> handler;
OnBoundHandle(BiFunction<? super UdpInbound, ? super UdpOutbound, ? extends Publisher<Void>> handler) {
this.handler = handler;
}
@Override
public void accept(Connection c) {
if (log.isDebugEnabled()) {
log.debug(format(c.channel(), "Handler is being applied: {}"), handler);
}
Mono.fromDirect(handler.apply((UdpInbound) c, (UdpOutbound) c))
.subscribe(c.disposeSubscriber());
}
}
}
| |
package com.github.thbrown.softballsim.optimizer.impl.montecarloadaptive;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.stream.Collectors;
import com.github.thbrown.softballsim.Msg;
import com.github.thbrown.softballsim.Result;
import com.github.thbrown.softballsim.ResultStatusEnum;
import com.github.thbrown.softballsim.data.gson.DataPlayer;
import com.github.thbrown.softballsim.data.gson.DataStats;
import com.github.thbrown.softballsim.datasource.ProgressTracker;
import com.github.thbrown.softballsim.lineup.BattingLineup;
import com.github.thbrown.softballsim.lineupindexer.BattingLineupIndexer;
import com.github.thbrown.softballsim.lineupindexer.LineupTypeEnum;
import com.github.thbrown.softballsim.optimizer.Optimizer;
import com.github.thbrown.softballsim.optimizer.impl.montecarloexhaustive.HitGenerator;
import com.github.thbrown.softballsim.optimizer.impl.montecarloexhaustive.MonteCarloGameSimulation;
import com.github.thbrown.softballsim.util.Logger;
/**
 * Lineup optimizer that scores candidate batting lineups with adaptive Monte Carlo
 * simulation: each lineup is simulated only until a t-test (at the configured alpha)
 * can distinguish it from the current best, so clearly-worse lineups are eliminated
 * cheaply while close contenders get more simulations.
 */
public class MonteCarloAdaptiveOptimizer implements Optimizer<MonteCarloAdaptiveResult> {
// Bounds on how many lineups a single t-test task may contain.
private static final int TASK_MAX_LINEUP_COUNT = 10;
private static final int TASK_MIN_LINEUP_COUNT = 1;
// Maximum number of tasks that should be queued up at once.
private static final int TASK_BUFFER_SIZE = 20000;
// Index of the next fresh lineup to pull from the indexer. Mutated by
// getLineupsToTest(), so a single optimizer instance is not re-entrant.
private long lineupIndex = 0;
/**
 * Runs (or resumes) the optimization.
 *
 * @param playersInLineup ids of the players to arrange
 * @param lineupType determines which lineup indexer enumerates candidate lineups
 * @param battingData historical stats used to simulate plate appearances
 * @param arguments raw optimizer arguments (alpha, innings, threads)
 * @param progressTracker receives an updated partial result after every completed task
 * @param existingResult optional partial result from a previous paused run; may be null
 * @return the completed optimization result
 */
@Override
public MonteCarloAdaptiveResult optimize(List<String> playersInLineup, LineupTypeEnum lineupType,
DataStats battingData, Map<String, String> arguments, ProgressTracker progressTracker,
MonteCarloAdaptiveResult existingResult) {
// Start the timer
long startTimestamp = System.currentTimeMillis();
// Check that the batting data we have is sufficient to run this optimizer
validateData(battingData, playersInLineup);
// Get the arguments as their expected types
MonteCarloAdaptiveArgumentParser parsedArguments = new MonteCarloAdaptiveArgumentParser(arguments);
final double ALPHA = parsedArguments.getAlpha();
final int INNINGS = parsedArguments.getInnings();
// Since this optimizer involves iterating over all possible lineups, we'll use
// the lineup indexer
BattingLineupIndexer indexer = lineupType.getLineupIndexer(battingData, playersInLineup);
// Seed the search with a best guess: sort batters by batting average so lineup 0
// is reasonably good. New lineups are compared against the incumbent best, and a
// larger gap in mean runs scored means fewer simulations are needed to detect it.
List<DataPlayer> firstLineup = indexer.getLineup(0).asList();
List<DataPlayer> modifiableList = new ArrayList<>(firstLineup);
Collections.sort(modifiableList, new Comparator<DataPlayer>() {
@Override
public int compare(DataPlayer o1, DataPlayer o2) {
// Descending by batting average (o2 before o1).
double diff = (o2.getBattingAverage() - o1.getBattingAverage());
if (diff < 0) {
return -1;
} else if (diff > 0) {
return 1;
}
return 0;
}
});
List<String> playerIdsSortedByBattingAverage = modifiableList.stream().map(v -> v.getId())
.collect(Collectors.toList());
// Re-create the indexer with the sorted player order so index 0 is the seeded guess.
indexer = lineupType.getLineupIndexer(battingData, playerIdsSortedByBattingAverage);
// Our optimizer is parallelizable so we want to take advantage of multiple cores
ExecutorService executor = Executors.newFixedThreadPool(parsedArguments.getThreads());
Queue<Future<TTestTaskResult>> results = new LinkedList<>();
/*
 * Build a hitGenerator that can be used across threads, this way we only have to parse the stats
 * data once. We're using the first lineup here (index 0) to get a list of players, but we could
 * have used any lineup.
 */
List<DataPlayer> someLineup = indexer.getLineup(0).asList();
HitGenerator hitGenerator = new HitGenerator(someLineup);
// Set up the simulation state, restoring a paused run from existingResult if present.
long simulationsRun = Optional.ofNullable(existingResult).map(v -> v.getCountCompleted()).orElse(0L);
BattingLineup startingLineup = Optional.ofNullable(existingResult).map(MonteCarloAdaptiveResult::getLineup)
// The serialized result does not save the players stats
.map(lineup -> {
lineup.populateStats(battingData);
return lineup;
}).orElse(indexer.getLineup(0));
LineupComposite startingLineupComposite = new LineupComposite(startingLineup, hitGenerator, 0L);
SynchronizedLineupCompositeWrapper bestLineupComposite = new SynchronizedLineupCompositeWrapper(
startingLineupComposite);
// This is where the best lineups for each task wait to be added to a new task.
Queue<LineupComposite> winnersPool = new LinkedList<>();
// candidateLineups holds every lineup before 'lineupIndex' that has not yet been
// eliminated; each one is either in the winners pool waiting for a task or already
// assigned to an in-flight task.
Set<LineupComposite> candidateLineups = new LinkedHashSet<>();
Set<Long> savedCandidateLineupIndexes = Optional.ofNullable(existingResult)
.map(MonteCarloAdaptiveResult::getCandidateLineups).orElse(Collections.emptySet());
for (Long linupIndex : savedCandidateLineupIndexes) {
LineupComposite composite = new LineupComposite(indexer.getLineup(linupIndex), hitGenerator, linupIndex);
candidateLineups.add(composite);
winnersPool.add(composite);
}
// Queue up a few tasks to process (number of tasks is capped by TASK_BUFFER_SIZE)
long startIndex = Optional.ofNullable(existingResult).map(v -> v.getCountCompleted()).orElse(1L); // lineup 0 was already seeded above
lineupIndex = startIndex;
for (int i = 0; i < TASK_BUFFER_SIZE; i++) {
int taskSize = getNumberOfLineupsToAddToTask(indexer.size() - lineupIndex, parsedArguments.getThreads());
long savedLineupIndexerIndex = this.lineupIndex;
List<LineupComposite> lineupsToTest = getLineupsToTest(taskSize, winnersPool, hitGenerator, indexer);
// getLineupsToTest advances lineupIndex; the delta tells us how many fresh lineups were consumed.
long newLineupsAdded = this.lineupIndex - savedLineupIndexerIndex;
if (lineupsToTest.size() > 0) {
TTestTask task = new TTestTaskWithBestLineup(bestLineupComposite, lineupsToTest, INNINGS, ALPHA,
newLineupsAdded);
results.add(executor.submit(task));
}
}
// Process results, in order of submission, as soon as the earliest submitted task finishes.
long progressCounter = startIndex;
while (!results.isEmpty()) {
// Wait for the result
TTestTaskResult result = null;
try {
Future<TTestTaskResult> future = results.poll();
if (future != null) {
result = future.get();
}
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException(e);
}
// Replace the best lineup if necessary
if (result.getBestLineupComposite() != null) { // Null means do nothing
// If the current bestLineup was eliminated by this task, promote the task's winner.
boolean wasReplaced = bestLineupComposite.replaceIfCurrentIsInCollection(result.getBestLineupComposite(),
result.getEliminatedLineupComposites());
if (!wasReplaced) {
// The task's winner hasn't been compared to the current bestLineup (this should
// only happen in multithreaded use cases). Enqueue it for further evaluation.
winnersPool.add(result.getBestLineupComposite());
}
}
// Maintain the list of active lineups
candidateLineups.removeAll(result.getEliminatedLineupComposites());
if (result.getBestLineupComposite() != null) {
candidateLineups.add(result.getBestLineupComposite());
}
// Keep track of the number of simulations run so far
simulationsRun += result.getSimulationsRequired();
// Update the progress tracker
LineupComposite bestLineupCopy = bestLineupComposite.getCopyOfBestLineupComposite();
// NOTE: cast relies on every submitted task being a TTestTaskWithBestLineup, whose
// result is a TTestTaskResultWithNewLineups.
progressCounter += ((TTestTaskResultWithNewLineups) result).getNewLineupsProcessed();
Set<Long> candidateLineupIndexes = candidateLineups.stream().map(LineupComposite::lineupIndex)
.collect(Collectors.toSet());
// Elapsed time accumulates across paused/resumed runs.
long elapsedTime = (System.currentTimeMillis() - startTimestamp)
+ Optional.ofNullable(existingResult).map(MonteCarloAdaptiveResult::getElapsedTimeMs).orElse(0l);
MonteCarloAdaptiveResult partialResult = new MonteCarloAdaptiveResult(bestLineupCopy.getLineup(),
bestLineupCopy.getStats().getMean(), indexer.size(), progressCounter - candidateLineups.size(), elapsedTime,
candidateLineupIndexes, simulationsRun, ResultStatusEnum.IN_PROGRESS);
progressTracker.updateProgress(partialResult);
// Add new tasks
int taskSize = getNumberOfLineupsToAddToTask(indexer.size() - lineupIndex, parsedArguments.getThreads());
long savedLineupIndexerIndex = this.lineupIndex;
List<LineupComposite> lineupsToTest = getLineupsToTest(taskSize, winnersPool, hitGenerator, indexer);
long newLineupsAdded = this.lineupIndex - savedLineupIndexerIndex;
if (lineupsToTest.size() > 0) {
TTestTask task = new TTestTaskWithBestLineup(bestLineupComposite, lineupsToTest, INNINGS, ALPHA,
newLineupsAdded);
results.add(executor.submit(task));
}
// Print a warning if the buffer gets low
ThreadPoolExecutor ex = (ThreadPoolExecutor) executor;
if (ex.getQueue().size() < parsedArguments.getThreads()
&& (indexer.size() - lineupIndex) > parsedArguments.getThreads()) {
Logger.log("WARNING: Task buffer is low and this may affect multithreaded performance. TaskSize "
+ ex.getQueue().size() + " " + (indexer.size() - lineupIndex));
}
}
executor.shutdown();
LineupComposite bestLineupCopy = bestLineupComposite.getCopyOfBestLineupComposite();
// Make sure we've run at least MAX_ITERATIONS games on our final result so the expected
// score is accurate. This is especially important when using a cached result: stats
// objects can't be serialized and cached, so a restored result would have a score of NaN.
if (bestLineupCopy.getStats().getN() < TTestTask.MAX_ITERATIONS) {
Logger.log("Top up iterations: " + (TTestTask.MAX_ITERATIONS - bestLineupCopy.getStats().getN()));
for (int i = 0; i < TTestTask.MAX_ITERATIONS - bestLineupCopy.getStats().getN(); i++) {
double score = MonteCarloGameSimulation.simulateGame(bestLineupCopy.getLineup(), INNINGS,
bestLineupCopy.getHitGenerator());
bestLineupCopy.addSample(score);
}
}
Set<Long> candidateLineupIndexes = candidateLineups.stream().map(v -> v.lineupIndex()).collect(Collectors.toSet());
long elapsedTime = (System.currentTimeMillis() - startTimestamp)
+ Optional.ofNullable(existingResult).map(v -> v.getElapsedTimeMs()).orElse(0l);
MonteCarloAdaptiveResult finalResult = new MonteCarloAdaptiveResult(bestLineupCopy.getLineup(),
bestLineupCopy.getStats().getMean(), indexer.size(), indexer.size(), elapsedTime, candidateLineupIndexes,
simulationsRun, ResultStatusEnum.COMPLETE);
progressTracker.updateProgress(finalResult);
return finalResult;
}
/**
 * Assembles up to taskSize lineups for one t-test task: queued winners from previous
 * tasks are drained first, then fresh lineups are pulled from the indexer (advancing
 * the shared lineupIndex cursor).
 *
 * @param taskSize maximum number of lineups to return
 * @param inProgressLineups pool of surviving lineups awaiting re-evaluation; drained first
 * @param hitGenerator shared hit generator for newly created composites
 * @param indexer source of fresh candidate lineups
 * @return the lineups to evaluate in one task (possibly empty)
 */
private List<LineupComposite> getLineupsToTest(int taskSize, Queue<LineupComposite> inProgressLineups,
HitGenerator hitGenerator, BattingLineupIndexer indexer) {
List<LineupComposite> lineupsToTest = new LinkedList<>(); // LinkedList because we only iterate and sometimes need
// to add elements to the beginning of the list
for (int j = 0; j < taskSize; j++) {
// First, get any in-progress lineups from the queue and add them to the task.
// Seeding each task with a good surviving lineup reduces runtime because we don't
// waste time comparing bad lineups only against other bad lineups.
if (!inProgressLineups.isEmpty()) {
LineupComposite toEnqueue = inProgressLineups.remove();
lineupsToTest.add(toEnqueue);
continue;
}
// Second, get fresh lineups
if (lineupIndex < indexer.size()) {
LineupComposite composite = new LineupComposite(indexer.getLineup(lineupIndex), hitGenerator, lineupIndex);
lineupsToTest.add(composite);
lineupIndex++;
}
if (lineupIndex >= indexer.size()) {
// All done, no more lineups to enqueue for simulation
break;
}
}
return lineupsToTest;
}
/**
 * Sizes the next task so work is spread across threads: remaining lineups divided by
 * thread count, clamped to [TASK_MIN_LINEUP_COUNT, TASK_MAX_LINEUP_COUNT].
 */
private int getNumberOfLineupsToAddToTask(long remainingLineups, int numberOfThreads) {
long dynamicCap = remainingLineups / numberOfThreads;
if (dynamicCap > TASK_MAX_LINEUP_COUNT) {
return TASK_MAX_LINEUP_COUNT;
} else if (dynamicCap < TASK_MIN_LINEUP_COUNT) {
return TASK_MIN_LINEUP_COUNT;
} else {
return Math.toIntExact(dynamicCap);
}
}
/**
 * Validates the batting data before optimization starts.
 *
 * @throws RuntimeException if any player in the lineup has zero plate appearances
 */
private void validateData(DataStats data, List<String> playersInLineup) {
// All players in the lineup must have at least one plate appearance
for (String playerId : playersInLineup) {
DataPlayer player = data.getPlayerById(playerId);
if (player.getPlateAppearanceCount() == 0) {
throw new RuntimeException(Msg.PLAYER_HAS_NO_PA.args(player.getName(), player.getId()));
}
}
}
/** Result type produced by this optimizer. */
@Override
public Class<? extends Result> getResultClass() {
return MonteCarloAdaptiveResult.class;
}
}
| |
package io.biblia.workflows.manager.decision;
import java.util.Map;
import java.util.Set;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Collection;
import java.util.Date;
/**
* The simplified workflow keeps data of the workflow that
* will be enough for the algorithm to take decisions.
*
* @author dearj019
*
*/
public class SimplifiedWorkflowHistory {
    /**
     * Adjacency data of the action-dependency graph. Each key is a
     * (workflowId, outputPath) pair and the value is the list of output
     * paths of the parent actions recorded for that execution.
     */
    private final Map<Tuple, List<String>> adjacencyList;
    /**
     * Map from an action's output path to the ids of the workflows in
     * which that action was executed.
     */
    private final Map<String, List<Long>> actionsWorkflows;
    /**
     * Contains the number of times an action happened, keyed by output path.
     */
    private final Map<String, Integer> actionsCount;
    /**
     * Map from an action output path to the rest of data that is needed
     * from that action by the decision algorithm (latest insertion wins).
     */
    private final Map<String, ActionData> actionsData;

    public SimplifiedWorkflowHistory() {
        this.adjacencyList = new HashMap<>();
        this.actionsData = new HashMap<>();
        this.actionsCount = new HashMap<>();
        this.actionsWorkflows = new HashMap<>();
    }

    /**
     * Records one execution of an action. If the output path has already been
     * inserted, the latest insertion wins for the per-action data; the execution
     * count and the per-action workflow list accumulate across insertions.
     *
     * @param outputPath the output dataset path of this action
     * @param parentsOutputData the output dataset paths of its parent actions
     * @param workflowId the id of the workflow to which the action belonged when it was submitted
     * @param sizeInMB the size of the output data of the action in MB
     * @param startTime the time when the action started being executed
     * @param endTime the time when the action finished being executed
     */
    public void addAction(String outputPath, List<String> parentsOutputData,
        Long workflowId, Double sizeInMB, Date startTime, Date endTime) {
        // Updating actionsData (latest insertion wins)
        ActionData actionData = new ActionData(sizeInMB, startTime, endTime,
            workflowId);
        this.actionsData.put(outputPath, actionData);
        // Updating actionsWorkflows
        if (!this.actionsWorkflows.containsKey(outputPath)) {
            List<Long> workflows = new LinkedList<>();
            workflows.add(workflowId);
            this.actionsWorkflows.put(outputPath, workflows);
        }
        else {
            this.actionsWorkflows.get(outputPath).add(workflowId);
        }
        // Updating adjacencyList (keyed by the (workflowId, outputPath) pair)
        Tuple tuple = new Tuple(workflowId, outputPath);
        this.adjacencyList.put(tuple, parentsOutputData);
        // Updating actionsCount
        if (!this.actionsCount.containsKey(outputPath)) {
            this.actionsCount.put(outputPath, Integer.valueOf(1));
        }
        else {
            this.actionsCount.put(outputPath, this.actionsCount.get(outputPath) + 1);
        }
    }

    /** Returns the output paths of all recorded actions. */
    public Set<String> getActions() {
        return this.actionsData.keySet();
    }

    /**
     * Returns the distinct parent output paths recorded for the given action
     * across all workflows, or {@code null} if the action was never recorded
     * with adjacency data.
     *
     * <p>BUG FIX: the previous implementation looked the String key up directly
     * in {@code adjacencyList}, whose keys are {@link Tuple}s, so it always
     * returned {@code null}. We now scan for tuples whose output path matches.
     * NOTE(review): the method name says "children" but the stored lists are the
     * parents passed to {@link #addAction} — confirm the intended semantics.
     */
    public List<String> getActionChildren(String actionOutputPath) {
        List<String> combined = null;
        for (Map.Entry<Tuple, List<String>> entry : this.adjacencyList.entrySet()) {
            if (java.util.Objects.equals(entry.getKey().getOutputPath(), actionOutputPath)) {
                if (combined == null) {
                    combined = new LinkedList<>();
                }
                if (entry.getValue() != null) {
                    for (String parent : entry.getValue()) {
                        if (!combined.contains(parent)) {
                            combined.add(parent);
                        }
                    }
                }
            }
        }
        return combined;
    }

    /** Returns how many times the action was recorded, or {@code null} if unknown. */
    public Integer getActionCount(String actionOutputPath) {
        return this.actionsCount.get(actionOutputPath);
    }

    /** Returns the latest recorded execution data, or {@code null} if unknown. */
    public ActionData getActionData(String actionOutputPath) {
        return this.actionsData.get(actionOutputPath);
    }

    /** Returns the ids of the workflows the action ran in, or {@code null} if unknown. */
    public List<Long> getActionWorkflows(String actionOutputPath) {
        return this.actionsWorkflows.get(actionOutputPath);
    }
}
class Tuple {
private final Long workflowId;
private final String outputPath;
public Tuple(Long workflowId, String outputPath) {
this.workflowId = workflowId;
this.outputPath = outputPath;
}
public Long getWorkflowId() {
return workflowId;
}
public String getOutputPath() {
return outputPath;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((outputPath == null) ? 0 : outputPath.hashCode());
result = prime * result + ((workflowId == null) ? 0 : workflowId.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Tuple other = (Tuple) obj;
if (outputPath == null) {
if (other.outputPath != null)
return false;
} else if (!outputPath.equals(other.outputPath))
return false;
if (workflowId == null) {
if (other.workflowId != null)
return false;
} else if (!workflowId.equals(other.workflowId))
return false;
return true;
}
}
/**
 * Immutable value holder describing one recorded execution of an action,
 * as needed by the decision algorithm.
 */
class ActionData {
    /** Id of the workflow this execution belonged to. */
    private final Long workflowId;
    /** Size of the action's output dataset, in MB. */
    private final double sizeInMB;
    /** Moment the computation started. */
    private final Date startTime;
    /** Moment the computation ended. */
    private final Date endTime;

    public ActionData(double sizeInMB, Date startTime,
        Date endTime, Long workflowId) {
        this.workflowId = workflowId;
        this.endTime = endTime;
        this.startTime = startTime;
        this.sizeInMB = sizeInMB;
    }

    public Long getWorkflowId() {
        return workflowId;
    }

    public double getSizeInMB() {
        return sizeInMB;
    }

    public Date getStartTime() {
        return startTime;
    }

    public Date getEndTime() {
        return endTime;
    }
}
| |
package binarybricks.com.yelpql.details;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.maps.CameraUpdate;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapView;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;
import com.squareup.picasso.Picasso;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Locale;
import binarybricks.com.yelpql.R;
import binarybricks.com.yelpql.network.BusinessDetailsAPI;
import binarybricks.com.yelpql.network.model.Business;
import binarybricks.com.yelpql.utils.YelpDataUtil;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.functions.Consumer;
import yelp.BusinessDetails;
public class BusinessDetailsActivity extends AppCompatActivity {
// Intent extra keys used by getBusinessDetailsIntent()/onCreate().
private static final String RESTAURANT_ID = "Restaurant_ID";
private static final String LATITUDE = "latitude";
private static final String LONGITUDE = "longitude";
// Views injected by ButterKnife.bind(this) in onCreate().
@BindView(R.id.svBusinessDetails)
ScrollView svBusinessDetails;
@BindView(R.id.ivRestaurant)
ImageView ivRestaurant;
@BindView(R.id.tvRestaurantName)
TextView tvRestaurantName;
@BindView(R.id.progressbar)
ProgressBar progressbar;
@BindView(R.id.ivRating)
ImageView ivRating;
@BindView(R.id.tvReviewsCount)
TextView tvReviewsCount;
@BindView(R.id.tvCost)
TextView tvCost;
@BindView(R.id.tvHours)
TextView tvHours;
@BindView(R.id.tvOpenToday)
TextView tvOpenToday;
@BindView(R.id.tvAddress)
TextView tvAddress;
@BindView(R.id.mvRestaurantLocation)
MapView mvRestaurantLocation;
@BindView(R.id.reviewsLayout)
ViewGroup reviewsLayout;
// Launch parameters, read from the intent extras in onCreate().
private String restaurantID;
private double latitude;
private double longitude;
// Loaded business details; null until loadRestaurantData() succeeds.
private Business business;
// Collects Rx subscriptions created by this activity.
// NOTE(review): never disposed in the visible code — confirm disposal in onDestroy().
private CompositeDisposable compositeDisposable;
/**
 * Builds the launch intent for this screen, carrying the restaurant id and
 * the caller's coordinates as extras.
 *
 * @param context context used to address the intent
 * @param restaurantID Yelp business id to show details for
 * @param latitude caller latitude, forwarded to the details API
 * @param longitude caller longitude, forwarded to the details API
 * @return an intent ready to be passed to startActivity
 */
public static Intent getBusinessDetailsIntent(@NonNull Context context, @NonNull String restaurantID, double latitude, double longitude) {
    final Intent launchIntent = new Intent(context, BusinessDetailsActivity.class)
        .putExtra(RESTAURANT_ID, restaurantID)
        .putExtra(LATITUDE, latitude)
        .putExtra(LONGITUDE, longitude);
    return launchIntent;
}
// Activity entry point: binds views, reads the launch extras and kicks off the
// network load. Order matters: ButterKnife.bind must run before any view field
// is used, and the MapView must be forwarded the lifecycle callback.
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_business_details);
ButterKnife.bind(this);
// NOTE(review): getSupportActionBar() can return null when no action bar is set — confirm the theme guarantees one.
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setDisplayShowHomeEnabled(true);
compositeDisposable = new CompositeDisposable();
// Launch parameters supplied by getBusinessDetailsIntent().
restaurantID = getIntent().getStringExtra(RESTAURANT_ID);
latitude = getIntent().getDoubleExtra(LATITUDE, 0);
longitude = getIntent().getDoubleExtra(LONGITUDE, 0);
// MapView requires manual lifecycle forwarding.
mvRestaurantLocation.onCreate(savedInstanceState);
loadRestaurantData(restaurantID, latitude, longitude);
}
// Fetches the business details asynchronously and binds them to the UI on the
// main thread. The progress bar is shown for the duration of the request and
// hidden on both success and failure; failures are surfaced as a toast.
private void loadRestaurantData(final String businessID, final double latitude, final double longitude) {
progressbar.setVisibility(View.VISIBLE);
// Subscription is tracked in compositeDisposable so it can be disposed with the activity.
compositeDisposable.add(BusinessDetailsAPI.getBusinessDetails(getString(R.string.apiKey), businessID, latitude, longitude)
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Consumer<Business>() {
@Override
public void accept(@io.reactivex.annotations.NonNull Business business) throws Exception {
progressbar.setVisibility(View.GONE);
bindData(business);
}
}
, new Consumer<Throwable>() {
@Override
public void accept(@io.reactivex.annotations.NonNull Throwable throwable) throws Exception {
progressbar.setVisibility(View.GONE);
// NOTE(review): throwable.getMessage() may be null, producing an empty toast — consider a fallback message.
Toast.makeText(BusinessDetailsActivity.this, throwable.getMessage(), Toast.LENGTH_LONG).show();
}
}));
}
// Populates every view on the screen from the loaded Business and triggers the
// map pin and review sections. Runs on the main thread (see loadRestaurantData).
private void bindData(Business business) {
// Keep a reference for the phone/direction click handlers.
this.business = business;
getSupportActionBar().setTitle(business.getName());
// NOTE(review): assumes getPhotos() is non-empty — confirm the API guarantees at least one photo.
Picasso.get().load(business.getPhotos().get(0)).into(ivRestaurant);
tvRestaurantName.setText(business.getName());
YelpDataUtil.showRatingLogo(ivRating, business.getRating());
tvReviewsCount.setText(business.getReviewCount() + " Reviews");
tvCost.setText(business.getPrice());
tvHours.setText(YelpDataUtil.getTodaysHours(business.getHourList()));
// Open/closed badge; closed is highlighted in red.
if (business.isOpenNow()) {
tvOpenToday.setText(R.string.open);
} else {
tvOpenToday.setText(R.string.closed);
tvOpenToday.setTextColor(getResources().getColor(android.R.color.holo_red_dark));
}
tvAddress.setText(business.getFormattedAddress());
showPointerOnMap(business.getLatitude(), business.getLongitude());
showTopReviews(business.getReviewList());
}
// Drops a flag marker at the business location and centers the map on it.
// The work happens asynchronously once the MapView reports it is ready.
private void showPointerOnMap(final double latitude, final double longitude) {
mvRestaurantLocation.getMapAsync(new OnMapReadyCallback() {
@Override
public void onMapReady(GoogleMap googleMap) {
LatLng latLng = new LatLng(latitude, longitude);
// Anchor (0,1) pins the bottom-left corner of the flag icon at the coordinate.
googleMap.addMarker(new MarkerOptions()
.icon(BitmapDescriptorFactory.fromResource(R.drawable.marker_flag))
.anchor(0.0f, 1.0f)
.position(latLng));
googleMap.getUiSettings().setMyLocationButtonEnabled(false);
googleMap.getUiSettings().setZoomControlsEnabled(true);
// Updates the location and zoom of the MapView
CameraUpdate cameraUpdate = CameraUpdateFactory.newLatLngZoom(latLng, 15);
googleMap.moveCamera(cameraUpdate);
}
});
}
/**
 * Inflates one review row per entry in {@code reviewList} and appends each to
 * the reviews container with a small top margin between rows.
 */
private void showTopReviews(List<BusinessDetails.Review> reviewList) {
    final LayoutInflater inflater = getLayoutInflater();
    // Format used by the API for review timestamps.
    final DateFormat reviewTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.getDefault());
    final LinearLayout.LayoutParams rowParams = new LinearLayout.LayoutParams(
            ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
    rowParams.topMargin = 32;
    for (BusinessDetails.Review review : reviewList) {
        reviewsLayout.addView(getUserReviewSection(review, inflater, reviewTimeFormat), rowParams);
    }
}
/**
 * Builds a single review row: reviewer avatar, name, star-rating image,
 * review text and a human readable "time ago" label.
 */
private View getUserReviewSection(BusinessDetails.Review review, LayoutInflater layoutInflater, DateFormat dateFormat) {
    final View row = layoutInflater.inflate(R.layout.business_reviews, null);
    ImageView avatar = (ImageView) row.findViewById(R.id.ivUserImage);
    Picasso.get().load(review.user().image_url()).into(avatar);
    TextView userName = (TextView) row.findViewById(R.id.tvUserName);
    userName.setText(review.user().name());
    YelpDataUtil.showRatingLogo((ImageView) row.findViewById(R.id.ivRating), String.valueOf(review.rating()));
    TextView reviewText = (TextView) row.findViewById(R.id.tvUserReview);
    reviewText.setText(review.text());
    try {
        long createdMillis = dateFormat.parse(review.time_created()).getTime();
        ((TextView) row.findViewById(R.id.tvUserRatingTime))
                .setText(YelpDataUtil.getDuration(this, createdMillis));
    } catch (ParseException e) {
        // Malformed timestamp from the API: log it and leave the label unset.
        e.printStackTrace();
    }
    return row;
}
/**
 * Handles the toolbar "up" affordance by closing this screen; every other
 * menu item is delegated to the superclass.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId() == android.R.id.home) {
        finish();
        return true;
    }
    return super.onOptionsItemSelected(item);
}
/**
 * Opens the dialer pre-filled with the restaurant's phone number. Does
 * nothing until the business details have loaded.
 */
@OnClick(R.id.tvPhoneNumber)
void openRestaurantPhoneNo(View view) {
    if (business == null) {
        return;
    }
    Intent dialIntent = new Intent(Intent.ACTION_DIAL);
    dialIntent.setData(Uri.fromParts("tel", business.getPhone(), null));
    startActivity(dialIntent);
}
/**
 * Launches the Google Maps app at the restaurant's coordinates. Does nothing
 * until the business details have loaded.
 *
 * @param view the clicked view (unused)
 */
@OnClick(R.id.tvDirection)
void openRestaurantDirection(View view) {
    if (business != null) {
        // Bug fix: the conversion was written "%2%f" (invalid — the longitude
        // was never formatted into the URI) instead of "%2$f". Locale.US keeps
        // the decimal separator a '.', which the cbll= parameter requires
        // regardless of the device locale (e.g. German locales format "48,14").
        // NOTE(review): this handler is named "Direction" but opens Street View
        // ("google.streetview:cbll="); if turn-by-turn navigation was intended,
        // the documented scheme is "google.navigation:q=%1$f,%2$f" — confirm.
        Uri gmmIntentUri = Uri.parse(String.format(Locale.US, "google.streetview:cbll=%1$f,%2$f",
                business.getLatitude(), business.getLongitude()));
        // Create an Intent from gmmIntentUri. Set the action to ACTION_VIEW.
        Intent mapIntent = new Intent(Intent.ACTION_VIEW, gmmIntentUri);
        // Make the intent explicit so only the Google Maps app handles it.
        mapIntent.setPackage("com.google.android.apps.maps");
        startActivity(mapIntent);
    }
}
/**
 * Opens the restaurant's web page in the browser. Does nothing until the
 * business details have loaded.
 */
@OnClick(R.id.tvWebsite)
void openRestaurantWebsite(View view) {
    if (business == null) {
        return;
    }
    Intent browseIntent = new Intent(Intent.ACTION_VIEW);
    browseIntent.setData(Uri.parse(business.getUrl()));
    startActivity(browseIntent);
}
// Scrolls the details page down to the top of the reviews section.
@OnClick(R.id.tvReviews)
void openRestaurantReviews(View view) {
    svBusinessDetails.smoothScrollTo(0, reviewsLayout.getTop());
}
@Override
protected void onStop() {
    super.onStop();
    // Cancel any in-flight Rx subscriptions so their callbacks cannot touch
    // views after the screen is no longer visible.
    // NOTE(review): dispose() permanently disables the composite — any
    // subscription added after a later onStart() would be silently disposed.
    // If requests can be re-issued after a stop/start cycle, clear() is the
    // safer call; confirm against the subscription sites.
    compositeDisposable.dispose();
}
@Override
public void onResume() {
    // Forward the lifecycle event to the embedded MapView, as required when
    // hosting a Google Maps MapView directly inside an activity.
    mvRestaurantLocation.onResume();
    super.onResume();
}
@Override
public void onPause() {
    super.onPause();
    // Forward the lifecycle event to the embedded MapView.
    mvRestaurantLocation.onPause();
}
@Override
public void onDestroy() {
    super.onDestroy();
    // Forward the lifecycle event to the embedded MapView so it can release
    // its resources.
    mvRestaurantLocation.onDestroy();
}
@Override
public void onLowMemory() {
    super.onLowMemory();
    // Let the embedded MapView trim its caches under memory pressure.
    mvRestaurantLocation.onLowMemory();
}
}
| |
/*
* Copyright (c) 2005, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.tools;
import java.io.File;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.MalformedURLException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.logging.Logger;
import java.util.logging.Level;
import static java.util.logging.Level.*;
/**
* Provides methods for locating tool providers, for example,
* providers of compilers. This class complements the
* functionality of {@link java.util.ServiceLoader}.
*
* @author Peter von der Ahé
* @since 1.6
*/
public class ToolProvider {

    // System property that, when defined (any value), turns on debug tracing.
    private static final String propertyName = "sun.tools.ToolProvider";
    // Logger namespace used for the debug tracing below.
    private static final String loggerName = "javax.tools";

    /*
     * Define the system property "sun.tools.ToolProvider" to enable
     * debugging:
     *
     *   java ... -Dsun.tools.ToolProvider ...
     */

    /**
     * Logs {@code reason} (a message or a Throwable) at the given level when
     * the debug system property is set, attributing the log record to the
     * caller's stack frame. Always returns {@code null} so failure paths can
     * be written as {@code return trace(WARNING, e);}.
     */
    static <T> T trace(Level level, Object reason) {
        // NOTE: do not make this method private as it affects stack traces
        try {
            if (System.getProperty(propertyName) != null) {
                StackTraceElement[] st = Thread.currentThread().getStackTrace();
                String method = "???";
                String cls = ToolProvider.class.getName();
                // st[0] is getStackTrace and st[1] is this frame, so st[2] is
                // the immediate caller — the frame we attribute the record to.
                if (st.length > 2) {
                    StackTraceElement frame = st[2];
                    method = String.format((Locale)null, "%s(%s:%s)",
                                           frame.getMethodName(),
                                           frame.getFileName(),
                                           frame.getLineNumber());
                    cls = frame.getClassName();
                }
                Logger logger = Logger.getLogger(loggerName);
                if (reason instanceof Throwable) {
                    logger.logp(level, cls, method,
                                reason.getClass().getName(), (Throwable)reason);
                } else {
                    logger.logp(level, cls, method, String.valueOf(reason));
                }
            }
        } catch (SecurityException ex) {
            // A security manager may forbid reading the property or logging;
            // fall back to stderr so the diagnostic is not lost entirely.
            System.err.format((Locale)null, "%s: %s; %s%n",
                              ToolProvider.class.getName(),
                              reason,
                              ex.getLocalizedMessage());
        }
        return null;
    }

    // Well-known implementation class of the platform's javac front end.
    private static final String defaultJavaCompilerName
        = "com.sun.tools.javac.api.JavacTool";

    /**
     * Gets the Java™ programming language compiler provided
     * with this platform.
     * @return the compiler provided with this platform or
     * {@code null} if no compiler is provided
     */
    public static JavaCompiler getSystemJavaCompiler() {
        return instance().getSystemTool(JavaCompiler.class, defaultJavaCompilerName);
    }

    /**
     * Returns the class loader for tools provided with this platform.
     * This does not include user-installed tools.  Use the
     * {@linkplain java.util.ServiceLoader service provider mechanism}
     * for locating user installed tools.
     *
     * @return the class loader for tools provided with this platform
     * or {@code null} if no tools are provided
     */
    public static ClassLoader getSystemToolClassLoader() {
        try {
            Class<? extends JavaCompiler> c =
                instance().getSystemToolClass(JavaCompiler.class, defaultJavaCompilerName);
            return c.getClassLoader();
        } catch (Throwable e) {
            // trace(...) always returns null — "no tools provided".
            return trace(WARNING, e);
        }
    }

    // Lazily-created singleton holding the tool class/classloader caches.
    private static ToolProvider instance;

    private static synchronized ToolProvider instance() {
        if (instance == null)
            instance = new ToolProvider();
        return instance;
    }

    // Cache for tool classes.
    // Use weak references to avoid keeping classes around unnecessarily
    private Map<String, Reference<Class<?>>> toolClasses = new HashMap<String, Reference<Class<?>>>();

    // Cache for tool classloader.
    // Use a weak reference to avoid keeping it around unnecessarily
    private Reference<ClassLoader> refToolClassLoader = null;

    private ToolProvider() { }

    /**
     * Loads and instantiates the named tool implementation; returns
     * {@code null} (after tracing the failure) if it cannot be created.
     */
    private <T> T getSystemTool(Class<T> clazz, String name) {
        Class<? extends T> c = getSystemToolClass(clazz, name);
        try {
            return c.asSubclass(clazz).newInstance();
        } catch (Throwable e) {
            trace(WARNING, e);
            return null;
        }
    }

    /**
     * Resolves the named tool class, consulting the weak-reference cache
     * first and repopulating it on a miss.
     */
    private <T> Class<? extends T> getSystemToolClass(Class<T> clazz, String name) {
        Reference<Class<?>> refClass = toolClasses.get(name);
        Class<?> c = (refClass == null ? null : refClass.get());
        if (c == null) {
            try {
                c = findSystemToolClass(name);
            } catch (Throwable e) {
                return trace(WARNING, e);
            }
            toolClasses.put(name, new WeakReference<Class<?>>(c));
        }
        return c.asSubclass(clazz);
    }

    // Path components of the tools jar relative to the JDK home: lib/tools.jar.
    private static final String[] defaultToolsLocation = { "lib", "tools.jar" };

    /**
     * Locates the tool class: first on the bootclasspath, then inside the
     * JDK's lib/tools.jar via a (weakly cached) URLClassLoader.
     */
    private Class<?> findSystemToolClass(String toolClassName)
        throws MalformedURLException, ClassNotFoundException
    {
        // try loading class directly, in case tool is on the bootclasspath
        try {
            return Class.forName(toolClassName, false, null);
        } catch (ClassNotFoundException e) {
            trace(FINE, e);
            // if tool not on bootclasspath, look in default tools location (tools.jar)
            ClassLoader cl = (refToolClassLoader == null ? null : refToolClassLoader.get());
            if (cl == null) {
                File file = new File(System.getProperty("java.home"));
                // If running from a JRE inside a JDK, step up to the JDK home.
                if (file.getName().equalsIgnoreCase("jre"))
                    file = file.getParentFile();
                for (String name : defaultToolsLocation)
                    file = new File(file, name);
                // if tools not found, no point in trying a URLClassLoader
                // so rethrow the original exception.
                if (!file.exists())
                    throw e;
                URL[] urls = { file.toURI().toURL() };
                trace(FINE, urls[0].toString());
                cl = URLClassLoader.newInstance(urls);
                refToolClassLoader = new WeakReference<ClassLoader>(cl);
            }
            return Class.forName(toolClassName, false, cl);
        }
    }
}
| |
package com.productlayer.rest.client.services;
import java.util.HashMap;
import java.util.Map;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestClientException;
import com.productlayer.core.beans.Opine;
import com.productlayer.core.beans.reports.ProblemReport;
import com.productlayer.core.error.PLYHttpException;
import com.productlayer.core.utils.StringUtils;
import com.productlayer.rest.client.PLYRestClient;
import com.productlayer.rest.client.helper.UrlHelper;
/**
* Methods for managing opines.
*/
public class OpineService {

    /**
     * Posts an opine. If the user earns points for this operation the
     * response headers 'X-ProductLayer-User-Points' and
     * 'X-ProductLayer-User-Points-Changed' will be present.
     *
     * @param client the REST client configured to handle communications with
     *            the ProductLayer API server
     * @param opine the opine to post
     * @return the newly created opine
     * @throws PLYHttpException on any HTTP status code indicating failure
     * @throws RestClientException on any client-side HTTP error
     */
    public static Opine createOpine(PLYRestClient client, Opine opine) {
        ResponseEntity<Opine> result = client.exchangeWithObject("/opines", HttpMethod.POST, opine,
                Opine.class, null);
        return result.getBody();
    }

    /**
     * Deletes an opine; only its owner may do so. If the user earns points
     * for this operation 'X-ProductLayer-User-Points' and
     * 'X-ProductLayer-User-Points-Changed' will be present in the response
     * header.
     *
     * @param client the REST client configured to handle communications with
     *            the ProductLayer API server
     * @param opineID the identifier of the opine
     * @return the deleted opine
     * @throws PLYHttpException on any HTTP status code indicating failure
     * @throws RestClientException on any client-side HTTP error
     */
    public static Opine deleteOpine(PLYRestClient client, String opineID) {
        return client.exchange("/opine/" + opineID, HttpMethod.DELETE, Opine.class, null).getBody();
    }

    /**
     * Downvotes a specific opine.
     *
     * @param client the REST client configured to handle communications with
     *            the ProductLayer API server
     * @param opineID the identifier of the opine
     * @return the opine with the new vote score
     * @throws PLYHttpException on any HTTP status code indicating failure
     * @throws RestClientException on any client-side HTTP error
     */
    public static Opine downVoteOpine(PLYRestClient client, String opineID) {
        return client.exchange("/opine/" + opineID + "/down_vote", HttpMethod.POST, Opine.class, null)
                .getBody();
    }

    /**
     * Gets a specific opine.
     *
     * @param client the REST client configured to handle communications with
     *            the ProductLayer API server
     * @param opineID the identifier of the opine
     * @return the identified opine
     * @throws PLYHttpException on any HTTP status code indicating failure
     * @throws RestClientException on any client-side HTTP error
     */
    public static Opine getOpine(PLYRestClient client, String opineID) {
        return client.exchange("/opine/" + opineID, HttpMethod.GET, Opine.class, null).getBody();
    }

    /**
     * Sends a report about copyright infringements or any other problem with
     * the opine.
     *
     * @param client the REST client configured to handle communications with
     *            the ProductLayer API server
     * @param opineID the identifier of the opine
     * @param report the report to submit
     * @return the problem report object
     * @throws PLYHttpException on any HTTP status code indicating failure
     * @throws RestClientException on any client-side HTTP error
     */
    public static ProblemReport reportOpine(PLYRestClient client, String opineID, ProblemReport report) {
        Map<String, String> query = new HashMap<String, String>();
        if (!StringUtils.isEmpty(opineID)) {
            query.put("opine_id", opineID);
        }
        // Expand the URL with placeholders for the collected query parameters.
        String url = UrlHelper.addQueryParameterPlaceholderToUrl("/opine/report_problem", query);
        return client.exchangeWithObject(url, HttpMethod.POST, report, ProblemReport.class, query)
                .getBody();
    }

    /**
     * Searches for opines. All filters are optional; omitted ones are simply
     * not sent.
     *
     * @param client the REST client configured to handle communications with
     *            the ProductLayer API server
     * @param page [Optional] the page to display starting with 0 - if no page
     *            has been provided, the first page will be shown
     * @param recordsPerPage [Optional] the amount of items per page, default:
     *            '200'
     * @param gtin [Optional] the GTIN (barcode) of the product
     * @param language [Optional] the preferred language (e.g.: 'en' or 'de')
     * @param showFriendsOnly [Optional] show only content created by friends
     *            (followed users), default: 'false'
     * @param nickname [Optional] the nickname of the user
     * @param userID [Optional] the identifier of the user
     * @param order_by [Optional] sort columns, semicolon-separated, each with
     *            an asc/desc prefix; default: pl-created-time_asc (the date
     *            the opine was created, ascending)
     * @return any opines matching the specified criteria
     * @throws PLYHttpException on any HTTP status code indicating failure
     * @throws RestClientException on any client-side HTTP error
     */
    public static Opine[] searchOpines(PLYRestClient client, Integer page, Integer recordsPerPage,
            String gtin, String language, Boolean showFriendsOnly, String nickname, String userID,
            String order_by) {
        Map<String, String> query = new HashMap<String, String>();
        // Only forward the filters the caller actually supplied.
        if (!StringUtils.isEmpty(page)) {
            query.put("page", page.toString());
        }
        if (!StringUtils.isEmpty(recordsPerPage)) {
            query.put("records_per_page", recordsPerPage.toString());
        }
        if (!StringUtils.isEmpty(gtin)) {
            query.put("gtin", gtin);
        }
        if (!StringUtils.isEmpty(language)) {
            query.put("language", language);
        }
        if (!StringUtils.isEmpty(showFriendsOnly)) {
            query.put("show_friends_only", showFriendsOnly.toString());
        }
        if (!StringUtils.isEmpty(nickname)) {
            query.put("nickname", nickname);
        }
        if (!StringUtils.isEmpty(userID)) {
            query.put("user_id", userID);
        }
        if (!StringUtils.isEmpty(order_by)) {
            query.put("order_by", order_by);
        }
        String url = UrlHelper.addQueryParameterPlaceholderToUrl("/opines", query);
        return client.exchange(url, HttpMethod.GET, Opine[].class, query).getBody();
    }

    /**
     * Upvotes a specific opine.
     *
     * @param client the REST client configured to handle communications with
     *            the ProductLayer API server
     * @param opineID the identifier of the opine
     * @return the opine with the new vote score
     * @throws PLYHttpException on any HTTP status code indicating failure
     * @throws RestClientException on any client-side HTTP error
     */
    public static Opine upVoteOpine(PLYRestClient client, String opineID) {
        return client.exchange("/opine/" + opineID + "/up_vote", HttpMethod.POST, Opine.class, null)
                .getBody();
    }
}
| |
package io.dropwizard.migrations;
import io.dropwizard.Configuration;
import io.dropwizard.db.DatabaseConfiguration;
import liquibase.CatalogAndSchema;
import liquibase.Liquibase;
import liquibase.database.Database;
import liquibase.diff.DiffGeneratorFactory;
import liquibase.diff.DiffResult;
import liquibase.diff.compare.CompareControl;
import liquibase.diff.output.DiffOutputControl;
import liquibase.diff.output.changelog.DiffToChangeLog;
import liquibase.exception.DatabaseException;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.snapshot.DatabaseSnapshot;
import liquibase.snapshot.InvalidExampleException;
import liquibase.snapshot.SnapshotControl;
import liquibase.snapshot.SnapshotGeneratorFactory;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.Column;
import liquibase.structure.core.Data;
import liquibase.structure.core.ForeignKey;
import liquibase.structure.core.Index;
import liquibase.structure.core.PrimaryKey;
import liquibase.structure.core.Sequence;
import liquibase.structure.core.Table;
import liquibase.structure.core.UniqueConstraint;
import liquibase.structure.core.View;
import net.sourceforge.argparse4j.impl.Arguments;
import net.sourceforge.argparse4j.inf.ArgumentGroup;
import net.sourceforge.argparse4j.inf.Namespace;
import net.sourceforge.argparse4j.inf.Subparser;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;
/**
 * Liquibase-backed command that snapshots the current database schema and
 * emits it as a change log ("dump"), optionally filtered by object type.
 */
public class DbDumpCommand<T extends Configuration> extends AbstractLiquibaseCommand<T> {

    // Destination of the generated change log; defaults to stdout and is
    // overridable for tests via setOutputStream.
    private PrintStream outputStream = System.out;

    public DbDumpCommand(DatabaseConfiguration<T> strategy, Class<T> configurationClass, String migrationsFileName) {
        super("dump",
              "Generate a dump of the existing database state.",
              strategy,
              configurationClass,
              migrationsFileName);
    }

    // Test hook: redirect output away from System.out.
    void setOutputStream(PrintStream outputStream) {
        this.outputStream = outputStream;
    }

    @Override
    public void configure(Subparser subparser) {
        super.configure(subparser);
        subparser.addArgument("-o", "--output")
                .dest("output")
                .help("Write output to <file> instead of stdout");
        // Each database-object type gets a "--<name>" / "--ignore-<name>"
        // argument pair writing true/false into the same destination;
        // extracted into a helper to avoid repeating the pattern eight times.
        addCompareTypeGroup(subparser, "Tables", "tables",
                "Check for added or removed tables (default)",
                "Ignore tables");
        addCompareTypeGroup(subparser, "Columns", "columns",
                "Check for added, removed, or modified columns (default)",
                "Ignore columns");
        addCompareTypeGroup(subparser, "Views", "views",
                "Check for added, removed, or modified views (default)",
                "Ignore views");
        addCompareTypeGroup(subparser, "Primary Keys", "primary-keys",
                "Check for changed primary keys (default)",
                "Ignore primary keys");
        addCompareTypeGroup(subparser, "Unique Constraints", "unique-constraints",
                "Check for changed unique constraints (default)",
                "Ignore unique constraints");
        addCompareTypeGroup(subparser, "Indexes", "indexes",
                "Check for changed indexes (default)",
                "Ignore indexes");
        addCompareTypeGroup(subparser, "Foreign Keys", "foreign-keys",
                "Check for changed foreign keys (default)",
                "Ignore foreign keys");
        addCompareTypeGroup(subparser, "Sequences", "sequences",
                "Check for changed sequences (default)",
                "Ignore sequences");
        // Data is the one type that is OFF by default, so both of its
        // arguments carry an explicit Boolean.FALSE default and it stays
        // spelled out rather than going through the helper.
        final ArgumentGroup data = subparser.addArgumentGroup("Data");
        data.addArgument("--data")
                .action(Arguments.storeTrue())
                .dest("data")
                .help("Check for changed data")
                .setDefault(Boolean.FALSE);
        data.addArgument("--ignore-data")
                .action(Arguments.storeFalse())
                .dest("data")
                .help("Ignore data (default)")
                .setDefault(Boolean.FALSE);
    }

    /**
     * Adds an argument group named {@code title} containing an enabling
     * "--&lt;name&gt;" flag and a disabling "--ignore-&lt;name&gt;" flag,
     * both stored under the destination {@code name}.
     */
    private static void addCompareTypeGroup(Subparser subparser, String title, String name,
                                            String checkHelp, String ignoreHelp) {
        final ArgumentGroup group = subparser.addArgumentGroup(title);
        group.addArgument("--" + name)
                .action(Arguments.storeTrue())
                .dest(name)
                .help(checkHelp);
        group.addArgument("--ignore-" + name)
                .action(Arguments.storeFalse())
                .dest(name)
                .help(ignoreHelp);
    }

    @Override
    @SuppressWarnings("UseOfSystemOutOrSystemErr")
    public void run(Namespace namespace, Liquibase liquibase) throws Exception {
        // Collect the set of object types the user asked to compare.
        final Set<Class<? extends DatabaseObject>> compareTypes = new HashSet<>();
        addIfRequested(namespace, compareTypes, "columns", Column.class);
        addIfRequested(namespace, compareTypes, "data", Data.class);
        addIfRequested(namespace, compareTypes, "foreign-keys", ForeignKey.class);
        addIfRequested(namespace, compareTypes, "indexes", Index.class);
        addIfRequested(namespace, compareTypes, "primary-keys", PrimaryKey.class);
        addIfRequested(namespace, compareTypes, "sequences", Sequence.class);
        addIfRequested(namespace, compareTypes, "tables", Table.class);
        addIfRequested(namespace, compareTypes, "unique-constraints", UniqueConstraint.class);
        addIfRequested(namespace, compareTypes, "views", View.class);

        final DiffToChangeLog diffToChangeLog = new DiffToChangeLog(new DiffOutputControl());
        final Database database = liquibase.getDatabase();
        final String filename = namespace.getString("output");
        if (filename != null) {
            // Explicit output file: write UTF-8 and close the stream when done.
            try (PrintStream file = new PrintStream(filename, StandardCharsets.UTF_8.name())) {
                generateChangeLog(database, database.getDefaultSchema(), diffToChangeLog, file, compareTypes);
            }
        } else {
            generateChangeLog(database, database.getDefaultSchema(), diffToChangeLog, outputStream, compareTypes);
        }
    }

    /**
     * Adds {@code type} to {@code compareTypes} when the boolean stored under
     * {@code dest} in the parsed arguments is true (null counts as false).
     */
    private static void addIfRequested(Namespace namespace,
                                       Set<Class<? extends DatabaseObject>> compareTypes,
                                       String dest, Class<? extends DatabaseObject> type) {
        if (isTrue(namespace.getBoolean(dest))) {
            compareTypes.add(type);
        }
    }

    /**
     * Snapshots the given schema, diffs it against an empty comparison
     * snapshot and prints the resulting change log to {@code outputStream}.
     */
    private void generateChangeLog(final Database database, final CatalogAndSchema catalogAndSchema,
                                   final DiffToChangeLog changeLogWriter, PrintStream outputStream,
                                   final Set<Class<? extends DatabaseObject>> compareTypes)
            throws DatabaseException, IOException, ParserConfigurationException {
        @SuppressWarnings("unchecked")
        final SnapshotControl snapshotControl = new SnapshotControl(database,
                compareTypes.toArray(new Class[compareTypes.size()]));
        final CompareControl compareControl = new CompareControl(new CompareControl.SchemaComparison[]{
                new CompareControl.SchemaComparison(catalogAndSchema, catalogAndSchema)}, compareTypes);
        final CatalogAndSchema[] compareControlSchemas = compareControl
                .getSchemas(CompareControl.DatabaseRole.REFERENCE);
        try {
            final DatabaseSnapshot referenceSnapshot = SnapshotGeneratorFactory.getInstance()
                    .createSnapshot(compareControlSchemas, database, snapshotControl);
            // Comparison snapshot against a null database: effectively empty,
            // so every reference object appears as an addition in the diff.
            final DatabaseSnapshot comparisonSnapshot = SnapshotGeneratorFactory.getInstance()
                    .createSnapshot(compareControlSchemas, null, snapshotControl);
            final DiffResult diffResult = DiffGeneratorFactory.getInstance()
                    .compare(referenceSnapshot, comparisonSnapshot, compareControl);
            changeLogWriter.setDiffResult(diffResult);
            changeLogWriter.print(outputStream);
        } catch (InvalidExampleException e) {
            throw new UnexpectedLiquibaseException(e);
        }
    }

    // argparse4j returns null when a flag was never set; treat that as false.
    private static boolean isTrue(Boolean nullableCondition) {
        return nullableCondition != null && nullableCondition;
    }
}
| |
/**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mina.core;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import org.apache.mina.core.service.IoAcceptor;
import org.apache.mina.core.service.IoConnector;
import org.apache.mina.core.service.IoHandler;
import org.apache.mina.core.service.IoService;
import org.apache.mina.core.service.IoServiceListener;
import org.apache.mina.core.service.IoServiceListenerSupport;
import org.apache.mina.core.session.DummySession;
import org.easymock.EasyMock;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
/**
* Tests {@link IoServiceListenerSupport}.
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
*/
public class IoServiceListenerSupportTest {
    // Fixed address bound to the dummy sessions created below.
    private static final SocketAddress ADDRESS = new InetSocketAddress(8080);

    private final IoService mockService = EasyMock.createMock(IoService.class);

    @Test
    public void testServiceLifecycle() throws Exception {
        IoServiceListenerSupport support = new IoServiceListenerSupport(
                mockService);
        // Strict mock: verifies the exact number and order of listener callbacks.
        IoServiceListener listener = EasyMock.createStrictMock(IoServiceListener.class);
        // Test activation
        listener.serviceActivated(mockService);
        EasyMock.replay(listener);
        support.add(listener);
        support.fireServiceActivated();
        EasyMock.verify(listener);
        // Test deactivation & other side effects
        EasyMock.reset(listener);
        listener.serviceDeactivated(mockService);
        EasyMock.replay(listener);
        //// Activate more than once
        // A second activation must NOT re-notify the listener (strict mock
        // would fail on an unexpected serviceActivated call).
        support.fireServiceActivated();
        //// Deactivate
        support.fireServiceDeactivated();
        //// Deactivate more than once
        // Likewise only one serviceDeactivated notification is expected.
        support.fireServiceDeactivated();
        EasyMock.verify(listener);
    }

    @Test
    public void testSessionLifecycle() throws Exception {
        IoServiceListenerSupport support = new IoServiceListenerSupport(
                mockService);
        DummySession session = new DummySession();
        session.setService(mockService);
        session.setLocalAddress(ADDRESS);
        IoHandler handler = EasyMock.createStrictMock(IoHandler.class);
        session.setHandler(handler);
        IoServiceListener listener = EasyMock.createStrictMock(IoServiceListener.class);
        // Test creation
        listener.sessionCreated(session);
        handler.sessionCreated(session);
        handler.sessionOpened(session);
        EasyMock.replay(listener);
        EasyMock.replay(handler);
        support.add(listener);
        support.fireSessionCreated(session);
        EasyMock.verify(listener);
        EasyMock.verify(handler);
        // The created session must now be tracked under its id.
        assertEquals(1, support.getManagedSessions().size());
        assertSame(session, support.getManagedSessions().get(session.getId()));
        // Test destruction & other side effects
        EasyMock.reset(listener);
        EasyMock.reset(handler);
        handler.sessionClosed(session);
        listener.sessionDestroyed(session);
        // NOTE(review): only the listener is re-replayed here; the handler
        // stays in record mode and is never verified for this phase — confirm
        // whether handler.sessionClosed should also be replayed/verified.
        EasyMock.replay(listener);
        //// Activate more than once
        // Re-creating an already-managed session must not re-notify.
        support.fireSessionCreated(session);
        //// Deactivate
        support.fireSessionDestroyed(session);
        //// Deactivate more than once
        support.fireSessionDestroyed(session);
        EasyMock.verify(listener);
        assertTrue(session.isClosing());
        // The session must be untracked after destruction.
        assertEquals(0, support.getManagedSessions().size());
        assertNull(support.getManagedSessions().get(session.getId()));
    }

    @Test
    public void testDisconnectOnUnbind() throws Exception {
        IoAcceptor acceptor = EasyMock.createStrictMock(IoAcceptor.class);
        final IoServiceListenerSupport support = new IoServiceListenerSupport(
                acceptor);
        final DummySession session = new DummySession();
        session.setService(acceptor);
        session.setLocalAddress(ADDRESS);
        IoHandler handler = EasyMock.createStrictMock(IoHandler.class);
        session.setHandler(handler);
        final IoServiceListener listener = EasyMock.createStrictMock(IoServiceListener.class);
        // Activate a service and create a session.
        listener.serviceActivated(acceptor);
        listener.sessionCreated(session);
        handler.sessionCreated(session);
        handler.sessionOpened(session);
        EasyMock.replay(listener);
        EasyMock.replay(handler);
        support.add(listener);
        support.fireServiceActivated();
        support.fireSessionCreated(session);
        EasyMock.verify(listener);
        EasyMock.verify(handler);
        // Deactivate a service and make sure the session is closed & destroyed.
        EasyMock.reset(listener);
        EasyMock.reset(handler);
        listener.serviceDeactivated(acceptor);
        // closeOnDeactivation=true is what forces the session to be closed.
        EasyMock.expect(acceptor.isCloseOnDeactivation()).andReturn(true);
        listener.sessionDestroyed(session);
        handler.sessionClosed(session);
        EasyMock.replay(listener);
        EasyMock.replay(acceptor);
        EasyMock.replay(handler);
        // fireServiceDeactivated blocks until all sessions are destroyed, so a
        // second thread emulates the I/O service destroying the session.
        new Thread() {
            // Emulate I/O service
            @Override
            public void run() {
                try {
                    Thread.sleep(500);
                } catch (InterruptedException e) {
                    //e.printStackTrace();
                }
                // This synchronization block is a workaround for
                // the visibility problem of simultaneous EasyMock
                // state update. (not sure if it fixes the failing test yet.)
                synchronized (listener) {
                    support.fireSessionDestroyed(session);
                }
            }
        }.start();
        support.fireServiceDeactivated();
        synchronized (listener) {
            EasyMock.verify(listener);
        }
        EasyMock.verify(acceptor);
        EasyMock.verify(handler);
        assertTrue(session.isClosing());
        assertEquals(0, support.getManagedSessions().size());
        assertNull(support.getManagedSessions().get(session.getId()));
    }

    @Test
    public void testConnectorActivation() throws Exception {
        IoConnector connector = EasyMock.createStrictMock(IoConnector.class);
        IoServiceListenerSupport support = new IoServiceListenerSupport(
                connector);
        final DummySession session = new DummySession();
        session.setService(connector);
        session.setRemoteAddress(ADDRESS);
        IoHandler handler = EasyMock.createStrictMock(IoHandler.class);
        session.setHandler(handler);
        IoServiceListener listener = EasyMock.createStrictMock(IoServiceListener.class);
        // Creating a session should activate a service automatically.
        listener.serviceActivated(connector);
        listener.sessionCreated(session);
        handler.sessionCreated(session);
        handler.sessionOpened(session);
        EasyMock.replay(listener);
        EasyMock.replay(handler);
        support.add(listener);
        support.fireSessionCreated(session);
        EasyMock.verify(listener);
        EasyMock.verify(handler);
        // Destroying a session should deactivate a service automatically.
        EasyMock.reset(listener);
        EasyMock.reset(handler);
        listener.sessionDestroyed(session);
        handler.sessionClosed(session);
        listener.serviceDeactivated(connector);
        EasyMock.replay(listener);
        EasyMock.replay(handler);
        support.fireSessionDestroyed(session);
        EasyMock.verify(listener);
        EasyMock.verify(handler);
        assertEquals(0, support.getManagedSessions().size());
        assertNull(support.getManagedSessions().get(session.getId()));
    }
}
| |
/**
* Copyright (c) 2008 Andrew Rapp. All rights reserved.
*
* This file is part of XBee-API.
*
* XBee-API is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* XBee-API is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with XBee-API. If not, see <http://www.gnu.org/licenses/>.
*/
package com.rapplogic.xbee.api;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.log4j.Logger;
import com.rapplogic.xbee.RxTxSerialComm;
import com.rapplogic.xbee.XBeeConnection;
import com.rapplogic.xbee.api.HardwareVersion.RadioType;
import com.rapplogic.xbee.util.ByteUtils;
/**
* This is an API for communicating with Digi XBee 802.15.4 and ZigBee radios
* via the serial port
* <p/>
* @author Andrew Rapp <andrew.rapp at gmail>
*
*/
public class XBee implements IXBee {
private final static Logger log = Logger.getLogger(XBee.class);
// object to synchronize on to protect access to sendPacket
private Object sendPacketBlock = new Object();
private XBeeConnection xbeeConnection;
private InputStreamThread parser;
private XBeeConfiguration conf;
private RadioType type;
public XBee() {
this(new XBeeConfiguration().withMaxQueueSize(100).withStartupChecks(true));
}
public XBee(XBeeConfiguration conf) {
this.conf = conf;
if (this.conf.isShutdownHook()) {
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run() {
if (isConnected()) {
log.info("ShutdownHook is closing connection");
close();
}
}
});
}
}
private void doStartupChecks() throws XBeeException {
// Perform startup checks
try {
AtCommandResponse ap = this.sendAtCommand(new AtCommand("AP"));
if (!ap.isOk()) {
throw new XBeeException("Attempt to query AP parameter failed");
}
if (ap.getValue()[0] != 2) {
log.warn("XBee radio is in API mode without escape characters (AP=1). The radio must be configured in API mode with escape bytes (AP=2) for use with this library.");
log.info("Attempting to set AP to 2");
ap = this.sendAtCommand(new AtCommand("AP", 2));
if (ap.isOk()) {
log.info("Successfully set AP mode to 2. This setting will not persist a power cycle without the WR (write) command");
} else {
throw new XBeeException("Attempt to set AP=2 failed");
}
} else {
log.info("Radio is in correct AP mode (AP=2)");
}
ap = this.sendAtCommand(new AtCommand("HV"));
RadioType radioType = HardwareVersion.parse(ap);
log.info("XBee radio is " + radioType);
if (radioType == RadioType.UNKNOWN) {
log.warn("Unknown radio type (HV): " + ap.getValue()[0]);
}
AtCommandResponse vr = this.sendAtCommand(new AtCommand("VR"));
if (vr.isOk()) {
log.info("Firmware version is " + ByteUtils.toBase16(vr.getValue()));
}
this.clearResponseQueue();
} catch (XBeeTimeoutException ex) {
throw new XBeeException("AT command timed-out while attempt to set/read in API mode. Check that the XBee radio is in API mode (AP=2); it will not function propertly in AP=1");
}
}
/**
* If XBeeConnection.startUpChecks is set to true (default), this method will check if the AP parameter
* is set correctly and attempt to update if AP=1. If AP=0 (Transparent mode), an
* exception will be thrown.
*/
public void open(String port, int baudRate) throws XBeeException {
try {
if (this.isConnected()) {
throw new IllegalStateException("Cannot open new connection -- existing connection is still open. Please close first");
}
this.type = null;
RxTxSerialComm serial = new RxTxSerialComm();
serial.openSerialPort(port, baudRate);
this.initConnection(serial);
} catch (XBeeException e) {
throw e;
} catch (Exception e) {
throw new XBeeException(e);
}
}
public static void registerResponseHandler(int apiId, Class<? extends XBeeResponse> clazz) {
PacketParser.registerResponseHandler(apiId, clazz);
}
public static void unRegisterResponseHandler(int apiId) {
PacketParser.unRegisterResponseHandler(apiId);
}
/**
* Allows a protocol specific implementation of XBeeConnection to be used instead of the default RXTX connection.
* The connection must already be established as the interface has no means to do so.
*/
public void initProviderConnection(XBeeConnection connection) throws XBeeException {
if (this.isConnected()) {
throw new IllegalStateException("Cannot open new connection -- existing connection is still open. Please close first");
}
initConnection(connection);
}
private void initConnection(XBeeConnection conn) throws XBeeException {
try {
this.xbeeConnection = conn;
parser = new InputStreamThread(this.xbeeConnection, conf);
// startup heuristics
if (conf.isStartupChecks()) {
this.doStartupChecks();
}
} catch (XBeeException e) {
throw e;
} catch (Exception e) {
throw new XBeeException(e);
}
}
public void addPacketListener(PacketListener packetListener) {
if (parser == null) {
throw new IllegalStateException("No connection");
}
synchronized (parser.getPacketListenerList()) {
this.parser.getPacketListenerList().add(packetListener);
}
}
public void removePacketListener(PacketListener packetListener) {
if (parser == null) {
throw new IllegalStateException("No connection");
}
synchronized (parser.getPacketListenerList()) {
this.parser.getPacketListenerList().remove(packetListener);
}
}
public void sendRequest(XBeeRequest request) throws IOException {
if (this.type != null) {
// TODO use interface to mark series type
if (type == RadioType.SERIES1 && request.getClass().getPackage().getName().indexOf("api.zigbee") > -1) {
throw new IllegalArgumentException("You are connected to a Series 1 radio but attempting to send Series 2 requests");
} else if (type == RadioType.SERIES2 && request.getClass().getPackage().getName().indexOf("api.wpan") > -1) {
throw new IllegalArgumentException("You are connected to a Series 2 radio but attempting to send Series 1 requests");
}
}
log.info("Sending request to XBee: " + request);
this.sendPacket(request.getXBeePacket());
}
/**
* It's possible for packets to get interspersed if multiple threads send simultaneously.
* This method is not thread-safe because doing so would introduce a synchronized performance penalty
* for the vast majority of users that will not never need thread safety.
* That said, it is responsibility of the user to provide synchronization if multiple threads are sending.
*
* Not thread safe.
*
* @param packet
* @throws IOException
*/
public void sendPacket(XBeePacket packet) throws IOException {
this.sendPacket(packet.getByteArray());
}
/**
* This exists solely for the XMPP project. Use sendRequest instead
*
* Not Thread Safe
*
* @param packet
* @throws RuntimeException when serial device is disconnected
*/
public void sendPacket(int[] packet) throws IOException {
// TODO should we synchronize on read lock so we are sending/recv. simultaneously?
// TODO call request listener with byte array
if (!this.isConnected()) {
throw new XBeeNotConnectedException();
}
if (log.isInfoEnabled()) {
log.info("Sending packet to XBee " + ByteUtils.toBase16(packet));
}
for (int packetByte : packet) {
// if connection lost
//Caused by: com.rapplogic.xbee.api.XBeeException
//Caused by: java.io.IOException: Input/output error in writeArray
xbeeConnection.getOutputStream().write(packetByte);
}
xbeeConnection.getOutputStream().flush();
}
/**
* Sends an XBeeRequest though the XBee interface in an asynchronous manner, such that
* it will return immediately, without waiting for a response.
* Refer to the getResponse method for obtaining a response
*
* Not thread safe
*
* @param request
* @throws XBeeException
*/
public void sendAsynchronous(XBeeRequest request) throws XBeeException {
try {
this.sendRequest(request);
} catch (Exception e) {
throw new XBeeException(e);
}
}
/**
* Uses sendSynchronous to send an AtCommand and collect the response
* <p/>
* Timeout value is fixed at 5 seconds
*
* @deprecated Use this.sendSynchronous(command, timeout);
* @param command
* @return
* @throws XBeeException
*/
public AtCommandResponse sendAtCommand(AtCommand command) throws XBeeException {
return (AtCommandResponse) this.sendSynchronous(command, 5000);
}
/**
* Synchronous method for sending an XBeeRequest and obtaining the
* corresponding response (response that has same frame id).
* <p/>
* This method returns the first response object with a matching frame id, within the timeout
* period, so it is important to use a unique frame id (relative to previous subsequent requests).
* <p/>
* This method must only be called with requests that receive a response of
* type XBeeFrameIdResponse. All other request types will timeout.
* <p/>
* Keep in mind responses received here will also be available through the getResponse method
* and the packet listener. If you would prefer to not have these responses added to the response queue,
* you can add a ResponseQueueFilter via XBeeConfiguration to ignore packets that are sent in response to
* a request. Another alternative is to call clearResponseQueue prior to calling this method.
* <p/>
* It is recommended to use a timeout of at least 5 seconds, since some responses can take a few seconds or more
* (e.g. if remote radio is not powered on).
* <p/>
* This method is thread-safe
*
* @param xbeeRequest
*
* @return
* @throws XBeeException
* @throws XBeeTimeoutException thrown if no matching response is identified
*/
public XBeeResponse sendSynchronous(final XBeeRequest xbeeRequest, int timeout) throws XBeeTimeoutException, XBeeException {
if (xbeeRequest.getFrameId() == XBeeRequest.NO_RESPONSE_FRAME_ID) {
throw new XBeeException("Frame Id cannot be 0 for a synchronous call -- it will always timeout as there is no response!");
}
PacketListener pl = null;
try {
final List<XBeeResponse> container = new LinkedList<XBeeResponse>();
// this makes it thread safe -- prevents multiple threads from writing to output stream simultaneously
synchronized (sendPacketBlock) {
this.sendRequest(xbeeRequest);
}
pl = new PacketListener() {
// TODO handle error response as well
public void processResponse(XBeeResponse response) {
if (response instanceof XBeeFrameIdResponse && ((XBeeFrameIdResponse)response).getFrameId() == xbeeRequest.getFrameId()) {
// frame id matches -- yay we found it
container.add(response);
synchronized(container) {
container.notify();
}
}
}
};
this.addPacketListener(pl);
synchronized (container) {
try {
container.wait(timeout);
} catch (InterruptedException e) { }
}
if (container.size() == 0) {
// we didn't find a matching packet
throw new XBeeTimeoutException();
}
return (XBeeResponse) container.get(0);
} catch (IOException io) {
throw new XBeeException(io);
} finally {
if (pl != null) {
this.removePacketListener(pl);
}
}
}
/**
* Uses sendSynchronous timeout defined in XBeeConfiguration (default is 5000ms)
*/
public XBeeResponse sendSynchronous(final XBeeRequest request) throws XBeeTimeoutException, XBeeException {
return this.sendSynchronous(request, conf.getSendSynchronousTimeout());
}
/**
* Same as getResponse(int) but does not timeout.
* It's highly recommend that you always use a timeout because
* if the serial connection dies under certain conditions, you will end up waiting forever!
* <p/>
* Consider using the PacketListener for asynchronous (non-blocking) behavior
*
* @return
* @throws XBeeException
*/
public XBeeResponse getResponse() throws XBeeException {
return getResponseTimeout(null);
}
/**
* This method returns an XBeeResponse from the queue, if available, or
* waits up to "timeout" milliseconds for a response.
* <p/>
* There are three possible outcomes:
* <p/>
* 1. A packet is returned within "timeout" milliseconds <br/>
* 2. An XBeeTimeoutException is thrown (i.e. queue was empty for duration of timeout) <br/>
* 3. Null is returned if timeout is 0 and queue is empty. <br/>
* <p/>
* @param timeout milliseconds to wait for a response. A value of zero disables the timeout
* @return
* @throws XBeeException
* @throws XBeeTimeoutException if timeout occurs before a response is received
*/
public XBeeResponse getResponse(int timeout) throws XBeeException, XBeeTimeoutException {
return this.getResponseTimeout(timeout);
}
private XBeeResponse getResponseTimeout(Integer timeout) throws XBeeException, XBeeTimeoutException {
// seeing this with xmpp
if (!this.isConnected()) {
throw new XBeeNotConnectedException();
}
XBeeResponse response;
try {
if (timeout != null) {
response = parser.getResponseQueue().poll(timeout, TimeUnit.MILLISECONDS);
} else {
response = parser.getResponseQueue().take();
}
} catch (InterruptedException e) {
throw new XBeeException("Error while attempting to remove packet from queue", e);
}
if (response == null && timeout > 0) {
throw new XBeeTimeoutException();
}
return response;
}
// public List<? extends XBeeResponse> collectResponses(int wait, ResponseFilter filter, CollectTerminator terminator) throws XBeeException {
//
// }
/**
* Collects responses until the timeout is reached or the CollectTerminator returns true
*
* @param wait
* @param terminator
* @return
* @throws XBeeException
*/
public List<? extends XBeeResponse> collectResponses(int wait, CollectTerminator terminator) throws XBeeException {
// seeing this with xmpp
if (!this.isConnected()) {
throw new XBeeNotConnectedException();
}
long start = System.currentTimeMillis();
long callStart = 0;
int waitTime;
List<XBeeResponse> responseList = new ArrayList<XBeeResponse>();
XBeeResponse response = null;
try {
while (true) {
// compute the remaining wait time
waitTime = wait - (int)(System.currentTimeMillis() - start);
if (waitTime <= 0) {
break;
}
log.debug("calling getResponse with waitTime: " + waitTime);
if (log.isDebugEnabled()) {
callStart = System.currentTimeMillis();
}
response = this.getResponse(waitTime);
if (log.isDebugEnabled()) {
log.debug("Got response in " + (System.currentTimeMillis() - callStart));
}
responseList.add(response);
if (terminator != null && terminator.stop(response)) {
log.debug("Found terminating response.. exiting");
break;
}
}
} catch (XBeeTimeoutException e) {
// ok, we'll just return whatever is in the list
} catch (XBeeException e) {
throw e;
}
log.debug("Time is up.. returning list with " + responseList.size() + " packets");
return responseList;
}
/**
* Collects responses for wait milliseconds and returns responses as List
*
* @param wait
* @return
* @throws XBeeException
*/
public List<? extends XBeeResponse> collectResponses(int wait) throws XBeeException {
return this.collectResponses(wait, null);
}
/**
* Returns the number of packets available in the response queue for immediate consumption
*
* @return
*/
public int getResponseQueueSize() {
// seeing this with xmpp
if (!this.isConnected()) {
throw new XBeeNotConnectedException();
}
return parser.getResponseQueue().size();
}
/**
* Shuts down RXTX and packet parser thread
*/
public void close() {
if (!this.isConnected()) {
throw new IllegalStateException("XBee is not connected");
}
// shutdown parser thread
if (parser != null) {
parser.setDone(true);
// interrupts thread, if waiting. does not interrupt thread if blocking on read
// serial port close will be closed prior to thread exit
parser.interrupt();
}
try {
this.xbeeConnection.getOutputStream().close();
} catch (IOException e) {
log.warn("Failed to close output stream", e);
}
this.type = null;
parser = null;
xbeeConnection = null;
}
/**
* Indicates if serial port connection has been established.
* The open method may be called if this returns true
*
* @return
*/
public boolean isConnected() {
try {
if (parser.getXBeeConnection().getInputStream() != null && parser.getXBeeConnection().getOutputStream() != null) {
return true;
}
return false;
} catch (Exception e) {
return false;
}
}
// TODO move to its own class
private int sequentialFrameId = 0xff;
public int getCurrentFrameId() {
// TODO move to separate class (e.g. FrameIdCounter)
return sequentialFrameId;
}
/**
* This is useful for obtaining a frame id when composing your XBeeRequest.
* It will return frame ids in a sequential manner until the maximum is reached (0xff)
* and it flips to 1 and starts over.
*
* Not Thread-safe
*
* @return
*/
public int getNextFrameId() {
if (sequentialFrameId == 0xff) {
// flip
sequentialFrameId = 1;
} else {
sequentialFrameId++;
}
return sequentialFrameId;
}
/**
* Updates the frame id. Any value between 1 and ff is valid
*
* @param val
* Jan 24, 2009
*/
public void updateFrameId(int val) {
if (val <=0 || val > 0xff) {
throw new IllegalArgumentException("invalid frame id");
}
this.sequentialFrameId = val;
}
/**
* Removes all packets off of the response queue
*/
public void clearResponseQueue() {
// seeing this with xmpp
if (!this.isConnected()) {
throw new XBeeNotConnectedException();
}
parser.getResponseQueue().clear();
}
}
| |
/**
* Appcelerator Titanium Mobile
* Copyright (c) 2013 by Appcelerator, Inc. All Rights Reserved.
* Licensed under the terms of the Apache Public License
* Please see the LICENSE included with this distribution for details.
*/
package ti.modules.titanium.ui.widget.listview;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.KrollProxy;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.titanium.TiC;
import org.appcelerator.titanium.TiDimension;
import org.appcelerator.titanium.proxy.TiViewProxy;
import org.appcelerator.titanium.util.TiColorHelper;
import org.appcelerator.titanium.util.TiConvert;
import org.appcelerator.titanium.util.TiRHelper;
import org.appcelerator.titanium.util.TiRHelper.ResourceNotFoundException;
import org.appcelerator.titanium.view.TiCompositeLayout;
import org.appcelerator.titanium.view.TiCompositeLayout.LayoutArrangement;
import org.appcelerator.titanium.view.TiCompositeLayout.LayoutParams;
import org.appcelerator.titanium.view.TiUIView;
import ti.modules.titanium.ui.SearchBarProxy;
import ti.modules.titanium.ui.UIModule;
import ti.modules.titanium.ui.android.SearchViewProxy;
import ti.modules.titanium.ui.widget.searchbar.TiUISearchBar;
import ti.modules.titanium.ui.widget.searchbar.TiUISearchBar.OnSearchChangeListener;
import ti.modules.titanium.ui.widget.searchview.TiUISearchView;
import android.app.Activity;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Build;
import android.util.Pair;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.widget.AbsListView;
import android.widget.AbsListView.OnScrollListener;
import android.widget.BaseAdapter;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.TextView;
public class TiListView extends TiUIView implements OnSearchChangeListener {
private ListView listView;
private TiBaseAdapter adapter;
private ArrayList<ListSectionProxy> sections;
private AtomicInteger itemTypeCount;
private String defaultTemplateBinding;
private ListViewWrapper wrapper;
private HashMap<String, TiListViewTemplate> templatesByBinding;
private int listItemId;
public static int listContentId;
public static int isCheck;
public static int hasChild;
public static int disclosure;
public static int accessory;
private int headerFooterId;
public static LayoutInflater inflater;
private int titleId;
private int dividerHeight;
private ArrayList<Pair<Integer,Integer>> markers = new ArrayList<Pair<Integer,Integer>>();
private View headerView;
private View footerView;
private String searchText;
private boolean caseInsensitive;
private RelativeLayout searchLayout;
private static final String TAG = "TiListView";
/* We cache properties that already applied to the recycled list tiem in ViewItem.java
* However, since Android randomly selects a cached view to recycle, our cached properties
* will not be in sync with the native view's properties when user changes those values via
* User Interaction - i.e click. For this reason, we create a list that contains the properties
* that must be reset every time a view is recycled, to ensure synchronization. Currently, only
* "value" is in this list to correctly update the value of Ti.UI.Switch.
*/
public static List<String> MUST_SET_PROPERTIES = Arrays.asList(TiC.PROPERTY_VALUE, TiC.PROPERTY_AUTO_LINK, TiC.PROPERTY_TEXT, TiC.PROPERTY_HTML);
public static final String MIN_SEARCH_HEIGHT = "50dp";
public static final String MIN_ROW_HEIGHT = "30dp";
public static final int HEADER_FOOTER_WRAP_ID = 12345;
public static final int HEADER_FOOTER_VIEW_TYPE = 0;
public static final int HEADER_FOOTER_TITLE_TYPE = 1;
public static final int BUILT_IN_TEMPLATE_ITEM_TYPE = 2;
public static final int CUSTOM_TEMPLATE_ITEM_TYPE = 3;
class ListViewWrapper extends FrameLayout {
    // Set after we programmatically restore focus during layout; used to break the
    // requestFocus() -> layout -> requestFocus() loop on API 18+ (see onLayout).
    private boolean viewFocused = false;
    // Set after restoring an EditText cursor via setSelection(); used to break the
    // setSelection() -> layout loop on API 21+ (see onLayout).
    private boolean selectionSet = false;

    public ListViewWrapper(Context context) {
        super(context);
    }

    /**
     * Lays out the wrapped ListView while suppressing spurious focus/blur events.
     * ListView temporarily steals focus during layout, so this method detaches the
     * focused child's focus listener for the duration of the pass, blocks descendant
     * focusability, and restores both afterwards (re-firing blur only when the
     * layout actually changed, e.g. on configuration change).
     */
    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        // To prevent undesired "focus" and "blur" events during layout caused
        // by ListView temporarily taking focus, we will disable focus events until
        // layout has finished.
        // First check for a quick exit. listView can be null, such as if window closing.
        // Starting with API 18, calling requestFocus() will trigger another layout pass of the listview,
        // resulting in an infinite loop. Here we check if the view is already focused, and stop the loop.
        // NOTE(review): the second `listView != null` is redundant (short-circuit of the
        // first clause already guarantees it) but is left untouched here.
        if (listView == null || (Build.VERSION.SDK_INT >= 18 && listView != null && !changed && viewFocused)) {
            viewFocused = false;
            super.onLayout(changed, left, top, right, bottom);
            return;
        }
        // Starting with API 21, setSelection() triggers another layout pass, so we need to end it here to prevent
        // an infinite loop
        if (Build.VERSION.SDK_INT >= 21 && selectionSet) {
            selectionSet = false;
            return;
        }
        OnFocusChangeListener focusListener = null;
        View focusedView = listView.findFocus();
        int cursorPosition = -1;
        if (focusedView != null) {
            OnFocusChangeListener listener = focusedView.getOnFocusChangeListener();
            // Only Titanium-managed views (TiUIView implements OnFocusChangeListener)
            // get the detach/re-attach treatment.
            if (listener != null && listener instanceof TiUIView) {
                //Before unfocus the current editText, store cursor position so
                //we can restore it later
                if (focusedView instanceof EditText) {
                    cursorPosition = ((EditText)focusedView).getSelectionStart();
                }
                focusedView.setOnFocusChangeListener(null);
                focusListener = listener;
            }
        }

        //We are temporarily going to block focus to descendants
        //because LinearLayout on layout will try to find a focusable descendant
        if (focusedView != null) {
            listView.setDescendantFocusability(ViewGroup.FOCUS_BLOCK_DESCENDANTS);
        }
        super.onLayout(changed, left, top, right, bottom);
        //Now we reset the descendant focusability
        listView.setDescendantFocusability(ViewGroup.FOCUS_AFTER_DESCENDANTS);
        TiViewProxy viewProxy = proxy;
        if (viewProxy != null && viewProxy.hasListeners(TiC.EVENT_POST_LAYOUT)) {
            viewProxy.fireEvent(TiC.EVENT_POST_LAYOUT, null);
        }

        // Layout is finished, re-enable focus events.
        if (focusListener != null) {
            // If the configuration changed, we manually fire the blur event
            if (changed) {
                focusedView.setOnFocusChangeListener(focusListener);
                focusListener.onFocusChange(focusedView, false);
            } else {
                //Ok right now focus is with listView. So set it back to the focusedView
                viewFocused = true;
                focusedView.requestFocus();
                focusedView.setOnFocusChangeListener(focusListener);
                //Restore cursor position
                if (cursorPosition != -1) {
                    ((EditText)focusedView).setSelection(cursorPosition);
                    selectionSet = true;
                }
            }
        }
    }
}
public class TiBaseAdapter extends BaseAdapter {

    Activity context;

    public TiBaseAdapter(Activity activity) {
        context = activity;
    }

    /** Total entry count: sum of every section's item count (headers/footers included). */
    @Override
    public int getCount() {
        int total = 0;
        for (ListSectionProxy section : sections) {
            total += section.getItemCount();
        }
        return total;
    }

    @Override
    public Object getItem(int arg0) {
        //not using this method
        return arg0;
    }

    @Override
    public long getItemId(int position) {
        //not using this method
        return position;
    }

    //One type for header/footer title, one for header/footer view, one for built-in template, and one type per custom template.
    @Override
    public int getViewTypeCount() {
        return templatesByBinding.size() + 3;
    }

    /** Maps an adapter position to its recycling view type. */
    @Override
    public int getItemViewType(int position) {
        Pair<ListSectionProxy, Pair<Integer, Integer>> entryInfo = getSectionInfoByEntryIndex(position);
        ListSectionProxy section = entryInfo.first;
        int itemIndex = entryInfo.second.second;

        if (section.isHeaderTitle(itemIndex) || section.isFooterTitle(itemIndex)) {
            return HEADER_FOOTER_TITLE_TYPE;
        }
        if (section.isHeaderView(itemIndex) || section.isFooterView(itemIndex)) {
            return HEADER_FOOTER_VIEW_TYPE;
        }
        return section.getTemplateByIndex(itemIndex).getType();
    }

    /**
     * Builds or rebinds the row for the given position: header/footer views come
     * straight from the section, header/footer titles reuse the shared title layout,
     * and item rows are either recycled (populateViews) or inflated fresh
     * (generateCellContent).
     */
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        //Get section info from index
        Pair<ListSectionProxy, Pair<Integer, Integer>> entryInfo = getSectionInfoByEntryIndex(position);
        ListSectionProxy section = entryInfo.first;
        int itemIndex = entryInfo.second.second;
        int sectionIndex = entryInfo.second.first;

        //Handles header/footer views and titles.
        if (section.isHeaderView(itemIndex) || section.isFooterView(itemIndex)) {
            return section.getHeaderOrFooterView(itemIndex);
        }
        if (section.isHeaderTitle(itemIndex) || section.isFooterTitle(itemIndex)) {
            //No content to reuse, so we create a new view
            View titleRow = (convertView != null) ? convertView : inflater.inflate(headerFooterId, null);
            TextView title = (TextView) titleRow.findViewById(titleId);
            title.setText(section.getHeaderOrFooterTitle(itemIndex));
            return titleRow;
        }

        //check marker and fire appropriate events
        checkMarker(sectionIndex, itemIndex, section.hasHeader());

        //Handling templates
        KrollDict data = section.getListItemData(itemIndex);
        TiListViewTemplate template = section.getTemplateByIndex(itemIndex);

        if (convertView == null) {
            // No recycled row available: inflate a fresh item and generate its content.
            View row = inflater.inflate(listItemId, null);
            TiBaseListViewItem itemContent = (TiBaseListViewItem) row.findViewById(listContentId);
            LayoutParams params = new LayoutParams();
            params.autoFillsWidth = true;
            itemContent.setLayoutParams(params);
            section.generateCellContent(sectionIndex, data, template, itemContent, itemIndex, row);
            return row;
        }

        // Recycled row: rebind its child views to this item's data.
        TiBaseListViewItem itemContent = (TiBaseListViewItem) convertView.findViewById(listContentId);
        section.populateViews(data, itemContent, template, itemIndex, sectionIndex, convertView);
        return convertView;
    }
}
/**
 * Builds the native ListView, its wrapper, adapter and scroll listener, applies
 * any markers preloaded on the proxy, and resolves the Titanium layout/drawable
 * resource ids used by the adapter.
 */
public TiListView(TiViewProxy proxy, Activity activity) {
    super(proxy);

    //initializing variables
    sections = new ArrayList<ListSectionProxy>();
    // custom template type ids are handed out starting after the built-in types
    itemTypeCount = new AtomicInteger(CUSTOM_TEMPLATE_ITEM_TYPE);
    templatesByBinding = new HashMap<String, TiListViewTemplate>();
    defaultTemplateBinding = UIModule.LIST_ITEM_TEMPLATE_DEFAULT;
    caseInsensitive = true;

    //handling marker
    ArrayList<HashMap<String, Integer>> preloadMarkers = ((ListViewProxy)proxy).getPreloadMarkers();
    if (preloadMarkers != null) {
        setMarkers(preloadMarkers);
    }

    //initializing listView and adapter
    // wrapper itself must not take focus; focus handling is managed in onLayout
    ListViewWrapper wrapper = new ListViewWrapper(activity);
    wrapper.setFocusable(false);
    wrapper.setFocusableInTouchMode(false);
    listView = new ListView(activity);
    listView.setLayoutParams(new ViewGroup.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
    wrapper.addView(listView);
    adapter = new TiBaseAdapter(activity);

    //init inflater
    if (inflater == null) {
        inflater = (LayoutInflater)activity.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    }

    listView.setCacheColorHint(Color.TRANSPARENT);
    getLayoutParams().autoFillsHeight = true;
    getLayoutParams().autoFillsWidth = true;
    listView.setFocusable(true);
    listView.setFocusableInTouchMode(true);
    listView.setDescendantFocusability(ViewGroup.FOCUS_AFTER_DESCENDANTS);

    final TiViewProxy fProxy = proxy;
    // Fires Titanium scrollstart/scrollend events; the canFire* flags make the
    // pair alternate (one scrollend per scrollstart).
    listView.setOnScrollListener(new OnScrollListener()
    {
        private int _firstVisibleItem = 0;
        private int _visibleItemCount = 0;
        private boolean canFireScrollStart = true;
        private boolean canFireScrollEnd = false;

        @Override
        public void onScrollStateChanged(AbsListView view, int scrollState)
        {
            String eventName;
            if (scrollState == OnScrollListener.SCROLL_STATE_IDLE && canFireScrollEnd) {
                eventName = TiC.EVENT_SCROLLEND;
                canFireScrollEnd = false;
                canFireScrollStart = true;
            } else if (scrollState == OnScrollListener.SCROLL_STATE_TOUCH_SCROLL && canFireScrollStart) {
                eventName = TiC.EVENT_SCROLLSTART;
                canFireScrollEnd = true;
                canFireScrollStart = false;
            } else {
                return;
            }
            KrollDict eventArgs = new KrollDict();
            Pair<ListSectionProxy, Pair<Integer, Integer>> info = getSectionInfoByEntryIndex(_firstVisibleItem);
            if (info == null) {
                return;
            }
            int visibleItemCount = _visibleItemCount;
            int itemIndex = info.second.second;
            ListSectionProxy section = info.first;
            // NOTE(review): this adjusts indices when the section lacks a header title
            // OR lacks a header view — i.e. almost always. If the intent is "adjust
            // only when a header row exists", the condition may be inverted; confirm
            // against ListSectionProxy's entry layout before changing.
            if (section.getHeaderTitle() == null || section.getHeaderView() == null) {
                if (itemIndex > 0) {
                    itemIndex -= 1;
                }
                visibleItemCount -=1;
            }
            eventArgs.put("firstVisibleSection", section);
            eventArgs.put("firstVisibleSectionIndex", info.second.first);
            eventArgs.put("firstVisibleItem", section.getItemAt(itemIndex));
            eventArgs.put("firstVisibleItemIndex", itemIndex);
            eventArgs.put("visibleItemCount", visibleItemCount);
            fProxy.fireEvent(eventName, eventArgs, false);
        }

        // Only records the latest scroll position; events are emitted from
        // onScrollStateChanged using these cached values.
        public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount)
        {
            _firstVisibleItem = firstVisibleItem;
            _visibleItemCount = visibleItemCount;
        }
    });

    // Resolve layout/drawable/id resources generated into the host app's R file.
    try {
        headerFooterId = TiRHelper.getResource("layout.titanium_ui_list_header_or_footer");
        listItemId = TiRHelper.getResource("layout.titanium_ui_list_item");
        titleId = TiRHelper.getResource("id.titanium_ui_list_header_or_footer_title");
        listContentId = TiRHelper.getResource("id.titanium_ui_list_item_content");
        isCheck = TiRHelper.getResource("drawable.btn_check_buttonless_on_64");
        hasChild = TiRHelper.getResource("drawable.btn_more_64");
        disclosure = TiRHelper.getResource("drawable.disclosure_64");
        accessory = TiRHelper.getResource("id.titanium_ui_list_item_accessoryType");
    } catch (ResourceNotFoundException e) {
        Log.e(TAG, "XML resources could not be found!!!", Log.DEBUG_MODE);
    }
    this.wrapper = wrapper;
    setNativeView(wrapper);
}
/** Returns the search text currently applied to the list, or null when none is set. */
public String getSearchText() {
    return this.searchText;
}
/** Returns whether search matching ignores case (defaults to true). */
public boolean getCaseInsensitive() {
    return this.caseInsensitive;
}
/**
 * Sets the header title text and makes the title view visible if it was hidden.
 * Assumes headerView has already been created.
 */
public void setHeaderTitle(String title) {
    TextView headerTitle = (TextView) headerView.findViewById(titleId);
    headerTitle.setText(title);
    if (headerTitle.getVisibility() == View.GONE) {
        headerTitle.setVisibility(View.VISIBLE);
    }
}
/**
 * Sets the footer title text and makes the title view visible if it was hidden.
 * Assumes footerView has already been created.
 */
public void setFooterTitle(String title) {
    TextView footerTitle = (TextView) footerView.findViewById(titleId);
    footerTitle.setText(title);
    if (footerTitle.getVisibility() == View.GONE) {
        footerTitle.setVisibility(View.VISIBLE);
    }
}
@Override
public void registerForTouch()
{
    // Route touch registration to the inner ListView rather than the wrapper,
    // so Titanium touch events originate from the actual scrolling view.
    registerForTouch(listView);
}
/** Replaces all existing markers with the single supplied marker. */
public void setMarker(HashMap<String, Integer> markerItem)
{
    markers.clear();
    addMarker(markerItem);
}
/** Replaces all existing markers with the supplied list of markers. */
public void setMarkers(ArrayList<HashMap<String, Integer>> markerItems)
{
    markers.clear();
    for (HashMap<String, Integer> markerItem : markerItems) {
        addMarker(markerItem);
    }
}
/**
 * Fires a "marker" event for every registered marker matching the given
 * (section, item) position, removing each marker once fired. When the section
 * has a header row, the item index is shifted back by one so it refers to the
 * data index rather than the adapter entry index.
 */
public void checkMarker(int sectionIndex, int sectionItemIndex, boolean hasHeader)
{
    if (markers.isEmpty()) {
        return;
    }

    if (hasHeader) {
        sectionItemIndex--;
    }

    // Explicit iterator so matched markers can be removed while iterating.
    for (Iterator<Pair<Integer, Integer>> it = markers.iterator(); it.hasNext();) {
        Pair<Integer, Integer> marker = it.next();
        if (sectionIndex == marker.first && sectionItemIndex == marker.second) {
            KrollDict eventData = new KrollDict();
            eventData.put(TiC.PROPERTY_SECTION_INDEX, sectionIndex);
            eventData.put(TiC.PROPERTY_ITEM_INDEX, sectionItemIndex);
            if (proxy != null && proxy.hasListeners(TiC.EVENT_MARKER)) {
                proxy.fireEvent(TiC.EVENT_MARKER, eventData, false);
            }
            it.remove();
        }
    }
}
/**
 * Appends a (sectionIndex, itemIndex) marker taken from the supplied map.
 * The int locals preserve the original unboxing, so a map missing either key
 * still fails fast with a NullPointerException.
 */
public void addMarker(HashMap<String, Integer> markerItem)
{
    int sectionIndex = markerItem.get(TiC.PROPERTY_SECTION_INDEX);
    int itemIndex = markerItem.get(TiC.PROPERTY_ITEM_INDEX);
    markers.add(Pair.create(sectionIndex, itemIndex));
}
/**
 * Applies the creation-time property dictionary to this list view: templates,
 * search configuration, divider styling, sections, and header/footer views.
 *
 * NOTE(review): ordering in this method matters — the header and footer views
 * must be attached via addHeaderView/addFooterView BEFORE setAdapter is called
 * (an Android ListView requirement), so do not reorder the tail of this method.
 *
 * @param d the proxy's property dictionary
 */
public void processProperties(KrollDict d) {
    if (d.containsKey(TiC.PROPERTY_TEMPLATES)) {
        Object templates = d.get(TiC.PROPERTY_TEMPLATES);
        if (templates != null) {
            processTemplates(new KrollDict((HashMap)templates));
        }
    }
    if (d.containsKey(TiC.PROPERTY_SEARCH_TEXT)) {
        this.searchText = TiConvert.toString(d, TiC.PROPERTY_SEARCH_TEXT);
    }
    if (d.containsKey(TiC.PROPERTY_SEARCH_VIEW)) {
        TiViewProxy searchView = (TiViewProxy) d.get(TiC.PROPERTY_SEARCH_VIEW);
        if (isSearchViewValid(searchView)) {
            TiUIView search = searchView.getOrCreateView();
            setSearchListener(searchView, search);
            layoutSearchView(searchView);
        } else {
            Log.e(TAG, "Searchview type is invalid");
        }
    }
    if (d.containsKey(TiC.PROPERTY_CASE_INSENSITIVE_SEARCH)) {
        this.caseInsensitive = TiConvert.toBoolean(d, TiC.PROPERTY_CASE_INSENSITIVE_SEARCH, true);
    }
    if (d.containsKey(TiC.PROPERTY_DIVIDER_HEIGHT)) {
        TiDimension dHeight = TiConvert.toTiDimension(d.get(TiC.PROPERTY_DIVIDER_HEIGHT), -1);
        int height = dHeight.getAsPixels(listView);
        if (height > 0) {
            dividerHeight = height;
            listView.setDividerHeight(height);
        }
    }
    if (d.containsKey(TiC.PROPERTY_SEPARATOR_COLOR)) {
        String color = TiConvert.toString(d, TiC.PROPERTY_SEPARATOR_COLOR);
        setSeparatorColor(color);
    }
    // Footer/header dividers default to disabled when the property is absent.
    if (d.containsKey(TiC.PROPERTY_FOOTER_DIVIDERS_ENABLED)) {
        boolean enabled = TiConvert.toBoolean(d, TiC.PROPERTY_FOOTER_DIVIDERS_ENABLED, false);
        listView.setFooterDividersEnabled(enabled);
    } else {
        listView.setFooterDividersEnabled(false);
    }
    if (d.containsKey(TiC.PROPERTY_HEADER_DIVIDERS_ENABLED)) {
        boolean enabled = TiConvert.toBoolean(d, TiC.PROPERTY_HEADER_DIVIDERS_ENABLED, false);
        listView.setHeaderDividersEnabled(enabled);
    } else {
        listView.setHeaderDividersEnabled(false);
    }
    if (d.containsKey(TiC.PROPERTY_SHOW_VERTICAL_SCROLL_INDICATOR)) {
        listView.setVerticalScrollBarEnabled(TiConvert.toBoolean(d, TiC.PROPERTY_SHOW_VERTICAL_SCROLL_INDICATOR, true));
    }
    if (d.containsKey(TiC.PROPERTY_DEFAULT_ITEM_TEMPLATE)) {
        defaultTemplateBinding = TiConvert.toString(d, TiC.PROPERTY_DEFAULT_ITEM_TEMPLATE);
    }
    ListViewProxy listProxy = (ListViewProxy) proxy;
    if (d.containsKey(TiC.PROPERTY_SECTIONS)) {
        //if user didn't append/modify/delete sections before this is called, we process sections
        //as usual. Otherwise, we process the preloadSections, which should also contain the section(s)
        //from this dictionary as well as other sections that user append/insert/deleted prior to this.
        if (!listProxy.getPreload()) {
            processSections((Object[])d.get(TiC.PROPERTY_SECTIONS));
        } else {
            processSections(listProxy.getPreloadSections().toArray());
        }
    } else if (listProxy.getPreload()) {
        //if user didn't specify 'sections' property upon creation of listview but append/insert it afterwards
        //we process them instead.
        processSections(listProxy.getPreloadSections().toArray());
    }
    listProxy.clearPreloadSections();
    listProxy.setPreload(false);
    if (d.containsKey(TiC.PROPERTY_HEADER_VIEW)) {
        Object viewObj = d.get(TiC.PROPERTY_HEADER_VIEW);
        setHeaderOrFooterView(viewObj, true);
    } else if (d.containsKey(TiC.PROPERTY_HEADER_TITLE)) {
        headerView = inflater.inflate(headerFooterId, null);
        setHeaderTitle(TiConvert.toString(d, TiC.PROPERTY_HEADER_TITLE));
    }
    if (d.containsKey(TiC.PROPERTY_FOOTER_VIEW)) {
        Object viewObj = d.get(TiC.PROPERTY_FOOTER_VIEW);
        setHeaderOrFooterView(viewObj, false);
    } else if (d.containsKey(TiC.PROPERTY_FOOTER_TITLE)) {
        footerView = inflater.inflate(headerFooterId, null);
        setFooterTitle(TiConvert.toString(d, TiC.PROPERTY_FOOTER_TITLE));
    }
    //Check to see if headerView and footerView are specified. If not, we hide the views
    if (headerView == null) {
        headerView = inflater.inflate(headerFooterId, null);
        headerView.findViewById(titleId).setVisibility(View.GONE);
    }
    if (footerView == null) {
        footerView = inflater.inflate(headerFooterId, null);
        footerView.findViewById(titleId).setVisibility(View.GONE);
    }
    //Have to add header and footer before setting adapter
    listView.addHeaderView(headerView, null, false);
    listView.addFooterView(footerView, null, false);
    listView.setAdapter(adapter);
    super.processProperties(d);
}
/**
 * Wraps the list view and the supplied search widget inside a new
 * RelativeLayout and installs that layout as this component's native view.
 */
private void layoutSearchView(TiViewProxy searchView) {
    RelativeLayout container = new RelativeLayout(proxy.getActivity());
    container.setGravity(Gravity.NO_GRAVITY);
    container.setPadding(0, 0, 0, 0);
    addSearchLayout(container, searchView, searchView.getOrCreateView());
    setNativeView(container);
}
/**
 * Adds the search widget above the list content inside the given layout,
 * re-parenting the existing list wrapper underneath it.
 *
 * @param layout     the RelativeLayout that becomes the new root container
 * @param searchView proxy for the search widget (provides the height property)
 * @param search     the already-created native search view
 */
private void addSearchLayout(RelativeLayout layout, TiViewProxy searchView, TiUIView search) {
    RelativeLayout.LayoutParams p = createBasicSearchLayout();
    p.addRule(RelativeLayout.ALIGN_PARENT_TOP);
    TiDimension rawHeight;
    // Fall back to a minimum height when the search view has no explicit height.
    if (searchView.hasProperty(TiC.PROPERTY_HEIGHT)) {
        rawHeight = TiConvert.toTiDimension(searchView.getProperty(TiC.PROPERTY_HEIGHT), 0);
    } else {
        rawHeight = TiConvert.toTiDimension(MIN_SEARCH_HEIGHT, 0);
    }
    p.height = rawHeight.getAsPixels(layout);
    View nativeView = search.getNativeView();
    layout.addView(nativeView, p);
    // The list wrapper fills the remaining space below the search widget.
    p = createBasicSearchLayout();
    p.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM);
    p.addRule(RelativeLayout.BELOW, nativeView.getId());
    ViewParent parentWrapper = wrapper.getParent();
    if (parentWrapper != null && parentWrapper instanceof ViewGroup) {
        //get the previous layout params so we can reset with new layout
        ViewGroup.LayoutParams lp = wrapper.getLayoutParams();
        ViewGroup parentView = (ViewGroup) parentWrapper;
        //remove view from parent
        parentView.removeView(wrapper);
        //add new layout
        layout.addView(wrapper, p);
        parentView.addView(layout, lp);
    } else {
        layout.addView(wrapper, p);
    }
    this.searchLayout = layout;
}
/**
 * Creates layout params that stretch a child across the full parent width
 * (anchored to both the left and right edges).
 */
private RelativeLayout.LayoutParams createBasicSearchLayout() {
    RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
        RelativeLayout.LayoutParams.MATCH_PARENT,
        RelativeLayout.LayoutParams.MATCH_PARENT);
    params.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
    params.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
    return params;
}
/**
 * Installs the given proxy's view as either the header or the footer of the
 * list. Non-proxy objects and proxies that yield no view are ignored.
 */
private void setHeaderOrFooterView (Object viewObj, boolean isHeader) {
    if (!(viewObj instanceof TiViewProxy)) {
        return;
    }
    View view = layoutHeaderOrFooterView((TiViewProxy) viewObj);
    if (view == null) {
        return;
    }
    if (isHeader) {
        headerView = view;
    } else {
        footerView = view;
    }
}
/**
 * Re-applies the given filter text to every section, then refreshes the
 * adapter so the list reflects the filtered data.
 */
private void reFilter(String searchText) {
    if (searchText != null) {
        for (ListSectionProxy section : sections) {
            section.applyFilter(searchText);
        }
    }
    if (adapter != null) {
        adapter.notifyDataSetChanged();
    }
}
/**
 * Returns true when the supplied proxy is a supported search widget type
 * (SearchBar or SearchView); false otherwise, including for null.
 */
private boolean isSearchViewValid(TiViewProxy proxy) {
    // instanceof is null-safe, so no explicit null check is needed.
    return proxy instanceof SearchBarProxy || proxy instanceof SearchViewProxy;
}
/**
 * Handles a single runtime property change, updating only the piece of UI the
 * key refers to. Unrecognized keys are delegated to the superclass.
 *
 * @param key      the property name that changed
 * @param oldValue the previous value (unused here except by the superclass)
 * @param newValue the new value to apply
 * @param proxy    the proxy that fired the change
 */
public void propertyChanged(String key, Object oldValue, Object newValue, KrollProxy proxy) {
    if (key.equals(TiC.PROPERTY_HEADER_TITLE)) {
        setHeaderTitle(TiConvert.toString(newValue));
    } else if (key.equals(TiC.PROPERTY_FOOTER_TITLE)) {
        setFooterTitle(TiConvert.toString(newValue));
    } else if (key.equals(TiC.PROPERTY_SECTIONS) && newValue instanceof Object[] ) {
        processSectionsAndNotify((Object[])newValue);
    } else if (key.equals(TiC.PROPERTY_SEARCH_TEXT)) {
        this.searchText = TiConvert.toString(newValue);
        // Only re-filter for a concrete value; clearing to null leaves the list as-is.
        if (this.searchText != null) {
            reFilter(this.searchText);
        }
    } else if (key.equals(TiC.PROPERTY_CASE_INSENSITIVE_SEARCH)) {
        this.caseInsensitive = TiConvert.toBoolean(newValue, true);
        if (this.searchText != null) {
            reFilter(this.searchText);
        }
    } else if (key.equals(TiC.PROPERTY_SEARCH_VIEW)) {
        TiViewProxy searchView = (TiViewProxy) newValue;
        if (isSearchViewValid(searchView)) {
            TiUIView search = searchView.getOrCreateView();
            setSearchListener(searchView, search);
            // Reuse the existing search container when one was already built.
            if (searchLayout != null) {
                searchLayout.removeAllViews();
                addSearchLayout(searchLayout, searchView, search);
            } else {
                layoutSearchView(searchView);
            }
        } else {
            Log.e(TAG, "Searchview type is invalid");
        }
    } else if (key.equals(TiC.PROPERTY_SHOW_VERTICAL_SCROLL_INDICATOR) && newValue != null) {
        listView.setVerticalScrollBarEnabled(TiConvert.toBoolean(newValue));
    } else if (key.equals(TiC.PROPERTY_DEFAULT_ITEM_TEMPLATE) && newValue != null) {
        defaultTemplateBinding = TiConvert.toString(newValue);
        refreshItems();
    } else if (key.equals(TiC.PROPERTY_SEPARATOR_COLOR)) {
        String color = TiConvert.toString(newValue);
        setSeparatorColor(color);
    } else if (key.equals(TiC.PROPERTY_DIVIDER_HEIGHT)) {
        TiDimension dHeight = TiConvert.toTiDimension(newValue, -1);
        int height = dHeight.getAsPixels(listView);
        if (height > 0) {
            dividerHeight = height;
            listView.setDividerHeight(height);
        }
    } else {
        super.propertyChanged(key, oldValue, newValue, proxy);
    }
}
/**
 * Registers this list view as the search-change listener on whichever kind of
 * search widget (SearchBar or SearchView) was supplied.
 */
private void setSearchListener(TiViewProxy searchView, TiUIView search)
{
    if (searchView instanceof SearchBarProxy) {
        ((TiUISearchBar)search).setOnSearchChangeListener(this);
    } else if (searchView instanceof SearchViewProxy) {
        ((TiUISearchView)search).setOnSearchChangeListener(this);
    }
}
/**
 * Replaces the list divider with a solid color while preserving the current
 * divider height (setDivider resets it, so it must be re-applied afterwards).
 */
private void setSeparatorColor(String color) {
    int parsedColor = TiColorHelper.parseColor(color);
    // Read the effective height before setDivider clobbers it.
    int height = (dividerHeight == 0) ? listView.getDividerHeight() : dividerHeight;
    listView.setDivider(new ColorDrawable(parsedColor));
    listView.setDividerHeight(height);
}
/** Asks every section to rebuild its items (e.g. after the default template changes). */
private void refreshItems() {
    for (ListSectionProxy section : sections) {
        section.refreshItems();
    }
}
/**
 * Builds a TiListViewTemplate for every entry in the dictionary and registers
 * it under its binding name so list items can resolve templates later.
 */
protected void processTemplates(KrollDict templates) {
    for (String binding : templates.keySet()) {
        KrollDict templateProps = new KrollDict((HashMap) templates.get(binding));
        TiListViewTemplate template = new TiListViewTemplate(binding, templateProps);
        // Assign a unique adapter view type so row recycling stays per-template.
        template.setType(getItemType());
        // Parent the template's root item to this proxy.
        template.setRootParent(proxy);
        templatesByBinding.put(binding, template);
    }
}
/**
 * Prepares a proxy's view for use as a ListView header or footer, detaching it
 * from any previous parent and wrapping it so Titanium layout properties
 * (height, width) take effect inside the AbsListView.
 *
 * @param viewProxy the proxy supplying the header/footer content
 * @return the wrapper (or existing wrapper) view to hand to addHeaderView/addFooterView
 */
public View layoutHeaderOrFooterView (TiViewProxy viewProxy) {
    TiUIView tiView = viewProxy.peekView();
    if (tiView != null) {
        TiViewProxy parentProxy = viewProxy.getParent();
        //Remove parent view if possible
        if (parentProxy != null) {
            TiUIView parentView = parentProxy.peekView();
            if (parentView != null) {
                parentView.remove(tiView);
            }
        }
    } else {
        tiView = viewProxy.forceCreateView();
    }
    View outerView = tiView.getOuterView();
    ViewGroup parentView = (ViewGroup) outerView.getParent();
    // If the view was already wrapped by a previous call, reuse that wrapper.
    if (parentView != null && parentView.getId() == HEADER_FOOTER_WRAP_ID) {
        return parentView;
    } else {
        //add a wrapper so layout params such as height, width takes in effect.
        TiCompositeLayout wrapper = new TiCompositeLayout(viewProxy.getActivity(), LayoutArrangement.DEFAULT, null);
        AbsListView.LayoutParams params = new AbsListView.LayoutParams(AbsListView.LayoutParams.MATCH_PARENT, AbsListView.LayoutParams.WRAP_CONTENT);
        wrapper.setLayoutParams(params);
        outerView = tiView.getOuterView();
        wrapper.addView(outerView, tiView.getLayoutParams());
        wrapper.setId(HEADER_FOOTER_WRAP_ID);
        return wrapper;
    }
}
/** Clears the current sections and appends each entry of the given array in order. */
protected void processSections(Object[] sections) {
    this.sections.clear();
    for (Object section : sections) {
        processSection(section, -1);
    }
}
/** Rebuilds the sections and, when an adapter exists, refreshes the list UI. */
protected void processSectionsAndNotify(Object[] sections) {
    processSections(sections);
    if (adapter == null) {
        return;
    }
    adapter.notifyDataSetChanged();
}
/**
 * Inserts one section at the given index (-1 or out-of-range appends) and
 * wires it up to this list view. Non-section objects and duplicates are ignored.
 */
protected void processSection(Object sec, int index) {
    if (!(sec instanceof ListSectionProxy)) {
        return;
    }
    ListSectionProxy section = (ListSectionProxy) sec;
    if (this.sections.contains(section)) {
        return;
    }
    boolean append = (index == -1 || index >= sections.size());
    if (append) {
        this.sections.add(section);
    } else {
        this.sections.add(index, section);
    }
    section.setAdapter(adapter);
    section.setListView(this);
    //Attempts to set type for existing templates.
    section.setTemplateType();
    //Process preload data if any
    section.processPreloadData();
    //Apply filter if necessary
    if (searchText != null) {
        section.applyFilter(searchText);
    }
}
/**
 * Maps a flat entry index (across all sections) to the section that owns it,
 * paired with (section index, index within that section).
 *
 * @param index flat entry index into the concatenated sections
 * @return the owning section and its local coordinates, or null when out of range
 */
protected Pair<ListSectionProxy, Pair<Integer, Integer>> getSectionInfoByEntryIndex(int index) {
    if (index < 0) {
        return null;
    }
    int remaining = index;
    for (int sectionIdx = 0; sectionIdx < sections.size(); sectionIdx++) {
        ListSectionProxy section = sections.get(sectionIdx);
        int count = section.getItemCount();
        if (remaining < count) {
            return new Pair<ListSectionProxy, Pair<Integer, Integer>>(
                section, new Pair<Integer, Integer>(sectionIdx, remaining));
        }
        remaining -= count;
    }
    return null;
}
// Hands out a fresh adapter view-type id; each call returns the next value.
public int getItemType() {
    return itemTypeCount.getAndIncrement();
}
// Looks up a registered template by its binding name; null when unregistered.
public TiListViewTemplate getTemplateByBinding(String binding) {
    return templatesByBinding.get(binding);
}
// Returns the binding name used for items that specify no template.
public String getDefaultTemplateBinding() {
    return defaultTemplateBinding;
}
// Returns the number of sections currently held by this list view.
public int getSectionCount() {
    return sections.size();
}
/**
 * Appends one section, or each element of an array of sections, to the end of
 * the list and refreshes the adapter.
 *
 * @param section a section proxy or an Object[] of section proxies
 */
public void appendSection(Object section) {
    if (section instanceof Object[]) {
        for (Object sec : (Object[]) section) {
            processSection(sec, -1);
        }
    } else {
        processSection(section, -1);
    }
    // Guard against NPE when sections are appended before the adapter exists,
    // matching the null check used by processSectionsAndNotify().
    if (adapter != null) {
        adapter.notifyDataSetChanged();
    }
}
/**
 * Removes the section at the given index and refreshes the adapter; logs an
 * error for out-of-range indices.
 */
public void deleteSectionAt(int index) {
    if (index >= 0 && index < sections.size()) {
        sections.remove(index);
        // Null guard keeps this consistent with processSectionsAndNotify().
        if (adapter != null) {
            adapter.notifyDataSetChanged();
        }
    } else {
        Log.e(TAG, "Invalid index to delete section");
    }
}
/**
 * Inserts one section, or each element of an array of sections, at the given
 * index and refreshes the adapter. Indices outside [0, size] are rejected
 * with an error log.
 */
public void insertSectionAt(int index, Object section) {
    // Reject negative indices too: previously -1 silently appended (via
    // processSection's append convention) and other negatives would throw
    // IndexOutOfBoundsException from List.add.
    if (index < 0 || index > sections.size()) {
        Log.e(TAG, "Invalid index to insert/replace section");
        return;
    }
    if (section instanceof Object[]) {
        Object[] secs = (Object[]) section;
        for (int i = 0; i < secs.length; i++) {
            processSection(secs[i], index);
            index++;
        }
    } else {
        processSection(section, index);
    }
    if (adapter != null) {
        adapter.notifyDataSetChanged();
    }
}
/**
 * Replaces the section at the given index by deleting it and inserting the
 * new section in its place.
 *
 * NOTE(review): delete and insert validate independently, so an index equal to
 * the current size logs a delete error yet still inserts at the end, and the
 * adapter is notified twice — confirm this is intended.
 */
public void replaceSectionAt(int index, Object section) {
    deleteSectionAt(index);
    insertSectionAt(index, section);
}
/**
 * Converts (section, item) coordinates into a flat adapter position, counting
 * full item totals of preceding sections and any header row of the target
 * section.
 *
 * @return the flat position, or -1 when the item index is out of range
 */
private int findItemPosition(int sectionIndex, int sectionItemIndex) {
    int position = 0;
    for (int i = 0; i < sections.size(); i++) {
        ListSectionProxy section = sections.get(i);
        if (i != sectionIndex) {
            // Not the target yet: skip this whole section.
            position += section.getItemCount();
            continue;
        }
        if (sectionItemIndex >= section.getContentCount()) {
            Log.e(TAG, "Invalid item index");
            return -1;
        }
        position += sectionItemIndex;
        // A header row occupies one extra slot at the top of the section.
        if (section.getHeaderTitle() != null) {
            position += 1;
        }
        return position;
    }
    return position;
}
/**
 * Scrolls the list so the item at (sectionIndex, sectionItemIndex) is shown.
 *
 * @param sectionIndex     target section
 * @param sectionItemIndex target item within the section
 * @param animated         true for a smooth scroll, false to jump immediately
 */
protected void scrollToItem(int sectionIndex, int sectionItemIndex, boolean animated) {
    final int position = findItemPosition(sectionIndex, sectionItemIndex);
    if (position > -1) {
        // The +1 presumably offsets the header view this class always adds via
        // listView.addHeaderView(...) in processProperties — TODO confirm.
        if (animated) {
            listView.smoothScrollToPosition(position + 1);
        } else {
            // Post so the selection change runs after any pending layout pass.
            listView.post(new Runnable()
            {
                @Override
                public void run()
                {
                    listView.setSelection(position + 1);
                }
            });
        }
    }
}
/**
 * Releases native views held by every section and drops all view references
 * so this component can be garbage collected.
 */
public void release() {
    for (int i = 0; i < sections.size(); i++) {
        sections.get(i).releaseViews();
    }
    templatesByBinding.clear();
    sections.clear();
    // Assigning null is a no-op when already null, so the former
    // "if (x != null) x = null;" guards were redundant and are removed.
    wrapper = null;
    if (listView != null) {
        // Detach the adapter before dropping the list view reference.
        listView.setAdapter(null);
        listView = null;
    }
    headerView = null;
    footerView = null;
    super.release();
}
/** Applies the given text as the search filter and refreshes the list. */
@Override
public void filterBy(String text)
{
    searchText = text;
    reFilter(text);
}
/** Returns a snapshot of the current sections as an array. */
public ListSectionProxy[] getSections()
{
    // toArray with a zero-length array yields an identical result.
    return sections.toArray(new ListSectionProxy[0]);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Anton Avtamonov
* @version $Revision$
*/
package javax.swing.plaf.basic;
import java.awt.Insets;
import java.awt.Point;
import java.awt.event.MouseEvent;
import javax.swing.JComboBox;
import javax.swing.JFrame;
import javax.swing.JList;
import javax.swing.JScrollPane;
import javax.swing.ListSelectionModel;
import javax.swing.SwingTestCase;
import javax.swing.border.LineBorder;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
/**
 * Unit tests for javax.swing.plaf.basic.BasicComboPopup.
 *
 * Many assertions touch package-private fields (popup.list, popup.scroller,
 * popup.isAutoScrolling, ...) and exact listener counts, so they pin
 * implementation details rather than public API. Expectations specific to the
 * Harmony implementation are guarded by isHarmony().
 */
public class BasicComboPopupTest extends SwingTestCase {
    private BasicComboPopup popup;
    private JComboBox comboBox;

    public BasicComboPopupTest(final String name) {
        super(name);
    }

    @Override
    protected void setUp() throws Exception {
        comboBox = new JComboBox(new Object[] { "1", "2", "3" });
        popup = new BasicComboPopup(comboBox);
        // Wire the popup into the combo's UI so UI-driven code sees this popup.
        ((BasicComboBoxUI) comboBox.getUI()).popup = popup;
    }

    @Override
    protected void tearDown() throws Exception {
        comboBox = null;
        popup = null;
    }

    // Constructor should share the combo's model with the list and configure defaults.
    public void testBasicComboPopup() throws Exception {
        assertNotNull(popup.comboBox);
        assertNotNull(popup.list);
        assertNotNull(popup.scroller);
        assertEquals(3, popup.list.getModel().getSize());
        popup = new BasicComboPopup(new JComboBox());
        assertNotNull(popup.list);
        assertEquals(0, popup.list.getModel().getSize());
        assertTrue(popup.comboBox.getModel() == popup.list.getModel());
        assertEquals(popup.getComponent(), popup);
        assertEquals(1, popup.getComponentCount());
        assertEquals(popup.scroller, popup.getComponent(0));
        assertEquals(ListSelectionModel.SINGLE_SELECTION, popup.list.getSelectionMode());
        assertTrue(popup.getBorder().getClass() == LineBorder.class);
        assertEquals(new Insets(1, 1, 1, 1), popup.getInsets());
        assertFalse(popup.isAutoScrolling);
    }

    // show()/hide() must be idempotent when called repeatedly.
    public void testShowHide() throws Exception {
        createVisibleComboBox();
        popup.show();
        assertTrue(popup.isShowing());
        popup.show();
        assertTrue(popup.isShowing());
        popup.hide();
        assertFalse(popup.isShowing());
        popup.hide();
        assertFalse(popup.isShowing());
        popup.show();
        assertTrue(popup.isShowing());
    }

    public void testGetList() throws Exception {
        assertNotNull(popup.getList());
        assertEquals(popup.list, popup.getList());
        JList newList = new JList();
        popup.list = newList;
        assertEquals(newList, popup.getList());
    }

    // getMouseListener() is expected to lazily create the handler.
    public void testGetMouseListener() throws Exception {
        assertNull(popup.mouseListener);
        assertEquals(popup.getMouseListener(), popup.mouseListener);
        if (isHarmony()) {
            assertTrue(popup.getMouseListener().getClass() == BasicComboPopup.InvocationMouseHandler.class);
        }
    }

    public void testGetMouseMotionListener() throws Exception {
        assertNull(popup.mouseMotionListener);
        assertEquals(popup.getMouseMotionListener(), popup.mouseMotionListener);
        if (isHarmony()) {
            assertTrue(popup.getMouseMotionListener().getClass() == BasicComboPopup.InvocationMouseMotionHandler.class);
        }
    }

    public void testGetKeyListener() throws Exception {
        assertNull(popup.keyListener);
        assertNull(popup.getKeyListener());
    }

    // Construction adds exactly one item listener and one property-change
    // listener to the combo; uninstallingUI must remove both.
    public void testUninstallingUI() throws Exception {
        comboBox = new JComboBox();
        int mouseListenerCount = comboBox.getMouseListeners().length;
        int mouseMotionListenerCount = comboBox.getMouseMotionListeners().length;
        int itemListenerCount = comboBox.getItemListeners().length;
        int propertyChangeListenerCount = comboBox.getPropertyChangeListeners().length;
        int keyListenerCount = comboBox.getKeyListeners().length;
        popup = new BasicComboPopup(comboBox);
        assertEquals(mouseListenerCount, comboBox.getMouseListeners().length);
        assertEquals(mouseMotionListenerCount, comboBox.getMouseMotionListeners().length);
        assertEquals(keyListenerCount, comboBox.getKeyListeners().length);
        assertEquals(itemListenerCount + 1, comboBox.getItemListeners().length);
        assertEquals(propertyChangeListenerCount + 1,
                comboBox.getPropertyChangeListeners().length);
        popup.uninstallingUI();
        assertEquals(mouseListenerCount, comboBox.getMouseListeners().length);
        assertEquals(mouseMotionListenerCount, comboBox.getMouseMotionListeners().length);
        assertEquals(keyListenerCount, comboBox.getKeyListeners().length);
        assertEquals(itemListenerCount, comboBox.getItemListeners().length);
        assertEquals(propertyChangeListenerCount, comboBox.getPropertyChangeListeners().length);
    }

    // Each fire* method must notify both the popup's own listeners and the combo's.
    public void testFirePopupMenuWillBecomeVisibleInvisibleCanceled() throws Exception {
        PopupMenuController comboController = new PopupMenuController();
        PopupMenuController popupController = new PopupMenuController();
        popup.addPopupMenuListener(popupController);
        popup.comboBox.addPopupMenuListener(comboController);
        popup.firePopupMenuCanceled();
        assertNotNull(popupController.getEvent());
        assertNotNull(comboController.getEvent());
        assertEquals(PopupMenuController.MENU_CANCELED, popupController.getEventType());
        assertEquals(PopupMenuController.MENU_CANCELED, comboController.getEventType());
        popupController.reset();
        comboController.reset();
        popup.firePopupMenuWillBecomeInvisible();
        assertNotNull(popupController.getEvent());
        assertNotNull(comboController.getEvent());
        assertEquals(PopupMenuController.MENU_INVISIBLE, popupController.getEventType());
        assertEquals(PopupMenuController.MENU_INVISIBLE, comboController.getEventType());
        popupController.reset();
        comboController.reset();
        popup.firePopupMenuWillBecomeVisible();
        assertNotNull(popupController.getEvent());
        assertNotNull(comboController.getEvent());
        assertEquals(PopupMenuController.MENU_VISIBLE, popupController.getEventType());
        assertEquals(PopupMenuController.MENU_VISIBLE, comboController.getEventType());
    }

    // create*Listener factories must return a new instance each call.
    public void testCreateMouseListener() throws Exception {
        if (isHarmony()) {
            assertTrue(popup.createMouseListener().getClass() == BasicComboPopup.InvocationMouseHandler.class);
            assertFalse(popup.createMouseListener() == popup.createMouseListener());
        }
    }

    public void testCreateMouseMotionListener() throws Exception {
        if (isHarmony()) {
            assertTrue(popup.createMouseMotionListener().getClass() == BasicComboPopup.InvocationMouseMotionHandler.class);
            assertFalse(popup.createMouseMotionListener() == popup.createMouseMotionListener());
        }
    }

    public void testCreateKeyListener() throws Exception {
        assertNull(popup.createKeyListener());
    }

    public void testCreateListSelectionListener() throws Exception {
        assertNull(popup.createListSelectionListener());
        assertNull(popup.listSelectionListener);
    }

    public void testCreateListDataListener() throws Exception {
        assertNull(popup.createListDataListener());
        assertNull(popup.listDataListener);
    }

    public void testCreateListMouseListener() throws Exception {
        if (isHarmony()) {
            assertTrue(popup.listMouseListener.getClass() == BasicComboPopup.ListMouseHandler.class);
            assertTrue(popup.createListMouseListener().getClass() == BasicComboPopup.ListMouseHandler.class);
            assertFalse(popup.createListMouseListener() == popup.createListMouseListener());
        }
    }

    public void testCreateListMouseMotionListener() throws Exception {
        if (isHarmony()) {
            assertTrue(popup.listMouseMotionListener.getClass() == BasicComboPopup.ListMouseMotionHandler.class);
            assertTrue(popup.createListMouseMotionListener().getClass() == BasicComboPopup.ListMouseMotionHandler.class);
            assertFalse(popup.createListMouseMotionListener() == popup
                    .createListMouseMotionListener());
        }
    }

    public void testCreatePropertyChangeListener() throws Exception {
        if (isHarmony()) {
            assertTrue(popup.propertyChangeListener.getClass() == BasicComboPopup.PropertyChangeHandler.class);
            assertTrue(popup.createPropertyChangeListener().getClass() == BasicComboPopup.PropertyChangeHandler.class);
            assertFalse(popup.createPropertyChangeListener() == popup
                    .createPropertyChangeListener());
        }
    }

    public void testCreateItemListener() throws Exception {
        if (isHarmony()) {
            assertTrue(popup.itemListener.getClass() == BasicComboPopup.ItemHandler.class);
            assertTrue(popup.createItemListener().getClass() == BasicComboPopup.ItemHandler.class);
            assertFalse(popup.createItemListener() == popup.createItemListener());
        }
    }

    public void testCreateList() throws Exception {
        assertNotSame(popup.createList(), popup.createList());
        assertNotSame(popup.createList(), popup.list);
    }

    // configureList must install exactly one mouse and one motion listener.
    public void testConfigureList() throws Exception {
        popup.list = new JList();
        int mouseListenerCount = popup.list.getMouseListeners().length;
        int mouseMotionListenerCount = popup.list.getMouseMotionListeners().length;
        popup.configureList();
        assertEquals(mouseListenerCount + 1, popup.list.getMouseListeners().length);
        assertEquals(mouseMotionListenerCount + 1, popup.list.getMouseMotionListeners().length);
        assertEquals(ListSelectionModel.SINGLE_SELECTION, popup.list.getSelectionMode());
    }

    public void testCreateScroller() throws Exception {
        assertNotNull(popup.scroller);
        assertNotSame(popup.createScroller(), popup.createScroller());
        assertNotSame(popup.createScroller(), popup.scroller);
    }

    // Smoke test only: just verifies configureScroller accepts a fresh pane.
    public void testConfigureScroller() throws Exception {
        popup.scroller = new JScrollPane();
        popup.configureScroller();
    }

    // Both methods are expected to tolerate a null model argument.
    public void testInstallUninstallComboBoxModelListeners() throws Exception {
        popup.installComboBoxModelListeners(null);
        popup.uninstallComboBoxModelListeners(null);
    }

    // installComboBoxListeners adds one item listener and one property-change
    // listener on top of whatever the constructor already installed.
    public void testInstallUninstallComboBoxListeners() throws Exception {
        int mouseListenerCount = comboBox.getMouseListeners().length;
        int mouseMotionListenerCount = comboBox.getMouseMotionListeners().length;
        int itemListenerCount = comboBox.getItemListeners().length;
        int propertyChangeListenerCount = comboBox.getPropertyChangeListeners().length;
        int keyListenerCount = comboBox.getKeyListeners().length;
        popup.installComboBoxListeners();
        assertEquals(mouseListenerCount, comboBox.getMouseListeners().length);
        assertEquals(mouseMotionListenerCount, comboBox.getMouseMotionListeners().length);
        assertEquals(keyListenerCount, comboBox.getKeyListeners().length);
        assertEquals(itemListenerCount + 1, comboBox.getItemListeners().length);
        assertEquals(propertyChangeListenerCount + 1,
                comboBox.getPropertyChangeListeners().length);
    }

    // Keyboard actions are managed elsewhere, so the action map size must not change.
    public void testInstallUninstallKeyboardActions() throws Exception {
        int count = popup.comboBox.getActionMap().allKeys().length;
        popup.uninstallKeyboardActions();
        assertEquals(count, popup.comboBox.getActionMap().allKeys().length);
        popup.installKeyboardActions();
        assertEquals(count, popup.comboBox.getActionMap().allKeys().length);
    }

    public void testInstallListListeners() throws Exception {
        int mouseListenerCount = popup.list.getMouseListeners().length;
        int mouseMotionListenerCount = popup.list.getMouseMotionListeners().length;
        int selectionListenerCount = popup.list.getListSelectionListeners().length;
        popup.installListListeners();
        assertEquals(mouseListenerCount + 1, popup.list.getMouseListeners().length);
        assertEquals(mouseMotionListenerCount + 1, popup.list.getMouseMotionListeners().length);
        assertEquals(selectionListenerCount, popup.list.getListSelectionListeners().length);
    }

    public void testIsFocusTraversable() throws Exception {
        assertFalse(popup.isFocusTraversable());
    }

    // startAutoScrolling may be called again with a new direction before stop.
    public void testStartStopAutoscrolloing() throws Exception {
        assertNull(popup.autoscrollTimer);
        assertFalse(popup.isAutoScrolling);
        popup.startAutoScrolling(BasicComboPopup.SCROLL_UP);
        assertNotNull(popup.autoscrollTimer);
        assertTrue(popup.isAutoScrolling);
        assertEquals(BasicComboPopup.SCROLL_UP, popup.scrollDirection);
        popup.startAutoScrolling(BasicComboPopup.SCROLL_DOWN);
        assertNotNull(popup.autoscrollTimer);
        assertTrue(popup.isAutoScrolling);
        assertEquals(BasicComboPopup.SCROLL_DOWN, popup.scrollDirection);
        popup.stopAutoScrolling();
        assertFalse(popup.isAutoScrolling);
    }

    public void testAutoScrollUpDown() throws Exception {
        if (isHarmony()) {
            createVisibleComboBox();
            popup.show();
            popup.list.setSelectedIndex(2);
            popup.autoScrollUp();
            assertEquals(0, popup.list.getSelectedIndex());
            popup.autoScrollUp();
            assertEquals(0, popup.list.getSelectedIndex());
            popup.autoScrollDown();
            assertEquals(2, popup.list.getSelectedIndex());
            popup.autoScrollDown();
            assertEquals(2, popup.list.getSelectedIndex());
            popup.autoScrollUp();
            assertEquals(0, popup.list.getSelectedIndex());
        }
    }

    public void testGetAccessibleContext() throws Exception {
        assertNotNull(popup.getAccessibleContext());
        // Is not clear how it should be
        // assertEquals(popup.comboBox, popup.getAccessibleContext().getAccessibleParent());
    }

    public void testTogglePopup() throws Exception {
        createVisibleComboBox();
        assertFalse(popup.isShowing());
        popup.togglePopup();
        assertTrue(popup.isShowing());
        popup.togglePopup();
        assertFalse(popup.isShowing());
    }

    // convertMouseEvent should translate coordinates by the combo's screen location.
    public void testConvertMouseEvent() throws Exception {
        MouseEvent original = createMouseEvent(0, 0);
        assertNotSame(original, popup.convertMouseEvent(original));
        comboBox.setLocation(0, 0);
        assertEquals(new Point(10, 20), popup.convertMouseEvent(createMouseEvent(10, 20))
                .getPoint());
        assertEquals(new Point(-10, -20), popup.convertMouseEvent(createMouseEvent(-10, -20))
                .getPoint());
        comboBox.setLocation(100, 200);
        assertEquals(new Point(110, 220), popup.convertMouseEvent(createMouseEvent(10, 20))
                .getPoint());
        assertEquals(new Point(90, 180), popup.convertMouseEvent(createMouseEvent(-10, -20))
                .getPoint());
    }

    // An empty model yields the fixed fallback height of 100; otherwise the
    // height scales linearly with the (clamped) row count.
    public void testGetPopupHeightForRowCount() throws Exception {
        popup = new BasicComboPopup(new JComboBox());
        assertEquals(100, popup.getPopupHeightForRowCount(0));
        assertEquals(100, popup.getPopupHeightForRowCount(1));
        assertEquals(100, popup.getPopupHeightForRowCount(100));
        popup = new BasicComboPopup(new JComboBox(new Object[] { "1" }));
        popup.list.setFont(comboBox.getFont().deriveFont(40f));
        int oneElemHeight = popup.getPopupHeightForRowCount(1);
        assertTrue(oneElemHeight > 0 && oneElemHeight != 100);
        popup = new BasicComboPopup(new JComboBox(new Object[] { "1", "2", "3" }));
        popup.list.setFont(comboBox.getFont().deriveFont(40f));
        assertEquals(oneElemHeight, popup.getPopupHeightForRowCount(1));
        assertEquals(2 * oneElemHeight, popup.getPopupHeightForRowCount(2));
        assertEquals(3 * oneElemHeight, popup.getPopupHeightForRowCount(3));
        assertEquals(3 * oneElemHeight, popup.getPopupHeightForRowCount(100));
        assertEquals(100, popup.getPopupHeightForRowCount(0));
    }

    // Helper: a click event sourced at the combo box with the given coordinates.
    private MouseEvent createMouseEvent(final int x, final int y) {
        return new MouseEvent(comboBox, MouseEvent.MOUSE_CLICKED, 0, 0, x, y, 0, false);
    }

    // Helper: realizes the combo box on screen; frame.show() is deprecated,
    // hence the suppression.
    @SuppressWarnings("deprecation")
    private void createVisibleComboBox() {
        JFrame frame = new JFrame();
        frame.getContentPane().add(comboBox);
        frame.pack();
        frame.show();
    }

    // Records the last popup-menu event and its type for assertions.
    private class PopupMenuController implements PopupMenuListener {
        public static final int MENU_CANCELED = 1;
        public static final int MENU_VISIBLE = 2;
        public static final int MENU_INVISIBLE = 3;
        private PopupMenuEvent event;
        private int eventType;

        public void popupMenuCanceled(final PopupMenuEvent e) {
            event = e;
            eventType = MENU_CANCELED;
        }

        public void popupMenuWillBecomeInvisible(final PopupMenuEvent e) {
            event = e;
            eventType = MENU_INVISIBLE;
        }

        public void popupMenuWillBecomeVisible(final PopupMenuEvent e) {
            event = e;
            eventType = MENU_VISIBLE;
        }

        public PopupMenuEvent getEvent() {
            return event;
        }

        public int getEventType() {
            return eventType;
        }

        public void reset() {
            event = null;
            eventType = 0;
        }
    }
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ads.googleads.v10.services;
import com.google.ads.googleads.v10.resources.BatchJob;
import com.google.ads.googleads.v10.resources.BatchJobName;
import com.google.ads.googleads.v10.services.stub.BatchJobServiceStub;
import com.google.ads.googleads.v10.services.stub.BatchJobServiceStubSettings;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.longrunning.OperationFuture;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.longrunning.Operation;
import com.google.longrunning.OperationsClient;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Service to manage batch jobs.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* String customerId = "customerId-1581184615";
* BatchJobOperation operation = BatchJobOperation.newBuilder().build();
* MutateBatchJobResponse response = batchJobServiceClient.mutateBatchJob(customerId, operation);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the BatchJobServiceClient object to clean up resources
* such as threads. In the example above, try-with-resources is used, which automatically calls
* close().
*
* <p>The surface of this class includes several types of Java methods for each of the API's
* methods:
*
* <ol>
* <li> A "flattened" method. With this type of method, the fields of the request type have been
* converted into function parameters. It may be the case that not all fields are available as
* parameters, and not every API method will have a flattened method entry point.
* <li> A "request object" method. This type of method only takes one parameter, a request object,
* which must be constructed before the call. Not every API method will have a request object
* method.
* <li> A "callable" method. This type of method takes no parameters and returns an immutable API
* callable object, which can be used to initiate calls to the service.
* </ol>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of BatchJobServiceSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* BatchJobServiceSettings batchJobServiceSettings =
* BatchJobServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* BatchJobServiceClient batchJobServiceClient =
* BatchJobServiceClient.create(batchJobServiceSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* BatchJobServiceSettings batchJobServiceSettings =
* BatchJobServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* BatchJobServiceClient batchJobServiceClient =
* BatchJobServiceClient.create(batchJobServiceSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class BatchJobServiceClient implements BackgroundResource {
// Client configuration used to build this instance; null when the client was
// constructed directly from a pre-built stub (see the stub constructor below).
private final BatchJobServiceSettings settings;
// Transport-level stub that performs the actual RPC calls; every public
// method on this class ultimately delegates to it.
private final BatchJobServiceStub stub;
// Client for polling the long-running operations started by runBatchJobAsync.
private final OperationsClient operationsClient;
/** Constructs an instance of BatchJobServiceClient with default settings. */
public static final BatchJobServiceClient create() throws IOException {
return create(BatchJobServiceSettings.newBuilder().build());
}
/**
* Constructs an instance of BatchJobServiceClient, using the given settings. The channels are
* created based on the settings passed in, or defaults for any settings that are not set.
*/
public static final BatchJobServiceClient create(BatchJobServiceSettings settings)
throws IOException {
return new BatchJobServiceClient(settings);
}
/**
* Constructs an instance of BatchJobServiceClient, using the given stub for making calls. This is
* for advanced usage - prefer using create(BatchJobServiceSettings).
*/
@BetaApi("A restructuring of stub classes is planned, so this may break in the future")
public static final BatchJobServiceClient create(BatchJobServiceStub stub) {
return new BatchJobServiceClient(stub);
}
/**
* Constructs an instance of BatchJobServiceClient, using the given settings. This is protected so
* that it is easy to make a subclass, but otherwise, the static factory methods should be
* preferred.
*/
protected BatchJobServiceClient(BatchJobServiceSettings settings) throws IOException {
this.settings = settings;
// The stub is created from the stub-level view of the settings object.
this.stub = ((BatchJobServiceStubSettings) settings.getStubSettings()).createStub();
this.operationsClient = OperationsClient.create(this.stub.getOperationsStub());
}
@BetaApi("A restructuring of stub classes is planned, so this may break in the future")
protected BatchJobServiceClient(BatchJobServiceStub stub) {
// No settings are available when constructing directly from a stub.
this.settings = null;
this.stub = stub;
this.operationsClient = OperationsClient.create(this.stub.getOperationsStub());
}
/** Returns the settings this client was created with; null if it was built from a raw stub. */
public final BatchJobServiceSettings getSettings() {
return settings;
}
/** Returns the underlying transport stub used by this client. */
@BetaApi("A restructuring of stub classes is planned, so this may break in the future")
public BatchJobServiceStub getStub() {
return stub;
}
/**
* Returns the OperationsClient that can be used to query the status of a long-running operation
* returned by another API method call.
*/
public final OperationsClient getOperationsClient() {
return operationsClient;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Mutates a batch job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]()
* [InternalError]() [QuotaError]() [RequestError]() [ResourceCountLimitExceededError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* String customerId = "customerId-1581184615";
* BatchJobOperation operation = BatchJobOperation.newBuilder().build();
* MutateBatchJobResponse response = batchJobServiceClient.mutateBatchJob(customerId, operation);
* }
* }</pre>
*
* @param customerId Required. The ID of the customer for which to create a batch job.
* @param operation Required. The operation to perform on an individual batch job.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final MutateBatchJobResponse mutateBatchJob(
String customerId, BatchJobOperation operation) {
// Flattened overload: wrap the parameters in a request object and delegate.
MutateBatchJobRequest request =
MutateBatchJobRequest.newBuilder()
.setCustomerId(customerId)
.setOperation(operation)
.build();
return mutateBatchJob(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Mutates a batch job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]()
* [InternalError]() [QuotaError]() [RequestError]() [ResourceCountLimitExceededError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* MutateBatchJobRequest request =
* MutateBatchJobRequest.newBuilder()
* .setCustomerId("customerId-1581184615")
* .setOperation(BatchJobOperation.newBuilder().build())
* .build();
* MutateBatchJobResponse response = batchJobServiceClient.mutateBatchJob(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final MutateBatchJobResponse mutateBatchJob(MutateBatchJobRequest request) {
return mutateBatchJobCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Mutates a batch job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]()
* [InternalError]() [QuotaError]() [RequestError]() [ResourceCountLimitExceededError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* MutateBatchJobRequest request =
* MutateBatchJobRequest.newBuilder()
* .setCustomerId("customerId-1581184615")
* .setOperation(BatchJobOperation.newBuilder().build())
* .build();
* ApiFuture<MutateBatchJobResponse> future =
* batchJobServiceClient.mutateBatchJobCallable().futureCall(request);
* // Do something.
* MutateBatchJobResponse response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<MutateBatchJobRequest, MutateBatchJobResponse>
mutateBatchJobCallable() {
return stub.mutateBatchJobCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the results of the batch job. The job must be done. Supports standard list paging.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* BatchJobName resourceName = BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]");
* for (BatchJobResult element :
* batchJobServiceClient.listBatchJobResults(resourceName).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param resourceName Required. The resource name of the batch job whose results are being
* listed.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListBatchJobResultsPagedResponse listBatchJobResults(BatchJobName resourceName) {
ListBatchJobResultsRequest request =
ListBatchJobResultsRequest.newBuilder()
// Guard against a null typed name before converting it to its string form.
.setResourceName(resourceName == null ? null : resourceName.toString())
.build();
return listBatchJobResults(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the results of the batch job. The job must be done. Supports standard list paging.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* String resourceName = BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString();
* for (BatchJobResult element :
* batchJobServiceClient.listBatchJobResults(resourceName).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param resourceName Required. The resource name of the batch job whose results are being
* listed.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListBatchJobResultsPagedResponse listBatchJobResults(String resourceName) {
ListBatchJobResultsRequest request =
ListBatchJobResultsRequest.newBuilder().setResourceName(resourceName).build();
return listBatchJobResults(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the results of the batch job. The job must be done. Supports standard list paging.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* ListBatchJobResultsRequest request =
* ListBatchJobResultsRequest.newBuilder()
* .setResourceName(BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString())
* .setPageToken("pageToken873572522")
* .setPageSize(883849137)
* .build();
* for (BatchJobResult element :
* batchJobServiceClient.listBatchJobResults(request).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListBatchJobResultsPagedResponse listBatchJobResults(
ListBatchJobResultsRequest request) {
return listBatchJobResultsPagedCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the results of the batch job. The job must be done. Supports standard list paging.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* ListBatchJobResultsRequest request =
* ListBatchJobResultsRequest.newBuilder()
* .setResourceName(BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString())
* .setPageToken("pageToken873572522")
* .setPageSize(883849137)
* .build();
* ApiFuture<BatchJobResult> future =
* batchJobServiceClient.listBatchJobResultsPagedCallable().futureCall(request);
* // Do something.
* for (BatchJobResult element : future.get().iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*/
public final UnaryCallable<ListBatchJobResultsRequest, ListBatchJobResultsPagedResponse>
listBatchJobResultsPagedCallable() {
return stub.listBatchJobResultsPagedCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the results of the batch job. The job must be done. Supports standard list paging.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* ListBatchJobResultsRequest request =
* ListBatchJobResultsRequest.newBuilder()
* .setResourceName(BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString())
* .setPageToken("pageToken873572522")
* .setPageSize(883849137)
* .build();
* while (true) {
* ListBatchJobResultsResponse response =
* batchJobServiceClient.listBatchJobResultsCallable().call(request);
* for (BatchJobResult element : response.getResponsesList()) {
* // doThingsWith(element);
* }
* String nextPageToken = response.getNextPageToken();
* if (!Strings.isNullOrEmpty(nextPageToken)) {
* request = request.toBuilder().setPageToken(nextPageToken).build();
* } else {
* break;
* }
* }
* }
* }</pre>
*/
public final UnaryCallable<ListBatchJobResultsRequest, ListBatchJobResultsResponse>
listBatchJobResultsCallable() {
return stub.listBatchJobResultsCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Runs the batch job.
*
* <p>The Operation.metadata field type is BatchJobMetadata. When finished, the long running
* operation will not contain errors or a response. Instead, use ListBatchJobResults to get the
* results of the job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* BatchJobName resourceName = BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]");
* batchJobServiceClient.runBatchJobAsync(resourceName).get();
* }
* }</pre>
*
* @param resourceName Required. The resource name of the BatchJob to run.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<Empty, BatchJob.BatchJobMetadata> runBatchJobAsync(
BatchJobName resourceName) {
RunBatchJobRequest request =
RunBatchJobRequest.newBuilder()
// Guard against a null typed name before converting it to its string form.
.setResourceName(resourceName == null ? null : resourceName.toString())
.build();
return runBatchJobAsync(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Runs the batch job.
*
* <p>The Operation.metadata field type is BatchJobMetadata. When finished, the long running
* operation will not contain errors or a response. Instead, use ListBatchJobResults to get the
* results of the job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* String resourceName = BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString();
* batchJobServiceClient.runBatchJobAsync(resourceName).get();
* }
* }</pre>
*
* @param resourceName Required. The resource name of the BatchJob to run.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<Empty, BatchJob.BatchJobMetadata> runBatchJobAsync(
String resourceName) {
RunBatchJobRequest request =
RunBatchJobRequest.newBuilder().setResourceName(resourceName).build();
return runBatchJobAsync(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Runs the batch job.
*
* <p>The Operation.metadata field type is BatchJobMetadata. When finished, the long running
* operation will not contain errors or a response. Instead, use ListBatchJobResults to get the
* results of the job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* RunBatchJobRequest request =
* RunBatchJobRequest.newBuilder()
* .setResourceName(BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString())
* .build();
* batchJobServiceClient.runBatchJobAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<Empty, BatchJob.BatchJobMetadata> runBatchJobAsync(
RunBatchJobRequest request) {
return runBatchJobOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Runs the batch job.
*
* <p>The Operation.metadata field type is BatchJobMetadata. When finished, the long running
* operation will not contain errors or a response. Instead, use ListBatchJobResults to get the
* results of the job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* RunBatchJobRequest request =
* RunBatchJobRequest.newBuilder()
* .setResourceName(BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString())
* .build();
* OperationFuture<Empty, BatchJob.BatchJobMetadata> future =
* batchJobServiceClient.runBatchJobOperationCallable().futureCall(request);
* // Do something.
* future.get();
* }
* }</pre>
*/
public final OperationCallable<RunBatchJobRequest, Empty, BatchJob.BatchJobMetadata>
runBatchJobOperationCallable() {
return stub.runBatchJobOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Runs the batch job.
*
* <p>The Operation.metadata field type is BatchJobMetadata. When finished, the long running
* operation will not contain errors or a response. Instead, use ListBatchJobResults to get the
* results of the job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* RunBatchJobRequest request =
* RunBatchJobRequest.newBuilder()
* .setResourceName(BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString())
* .build();
* ApiFuture<Operation> future = batchJobServiceClient.runBatchJobCallable().futureCall(request);
* // Do something.
* future.get();
* }
* }</pre>
*/
public final UnaryCallable<RunBatchJobRequest, Operation> runBatchJobCallable() {
return stub.runBatchJobCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Add operations to the batch job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
* [ResourceCountLimitExceededError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* BatchJobName resourceName = BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]");
* List<MutateOperation> mutateOperations = new ArrayList<>();
* AddBatchJobOperationsResponse response =
* batchJobServiceClient.addBatchJobOperations(resourceName, mutateOperations);
* }
* }</pre>
*
* @param resourceName Required. The resource name of the batch job.
* @param mutateOperations Required. The list of mutates being added.
* <p>Operations can use negative integers as temp ids to signify dependencies between
* entities created in this batch job. For example, a customer with id = 1234 can create a
* campaign and an ad group in that same campaign by creating a campaign in the first
* operation with the resource name explicitly set to "customers/1234/campaigns/-1", and
* creating an ad group in the second operation with the campaign field also set to
* "customers/1234/campaigns/-1".
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AddBatchJobOperationsResponse addBatchJobOperations(
BatchJobName resourceName, List<MutateOperation> mutateOperations) {
AddBatchJobOperationsRequest request =
AddBatchJobOperationsRequest.newBuilder()
.setResourceName(resourceName == null ? null : resourceName.toString())
.addAllMutateOperations(mutateOperations)
.build();
return addBatchJobOperations(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Add operations to the batch job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
* [ResourceCountLimitExceededError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* String resourceName = BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString();
* List<MutateOperation> mutateOperations = new ArrayList<>();
* AddBatchJobOperationsResponse response =
* batchJobServiceClient.addBatchJobOperations(resourceName, mutateOperations);
* }
* }</pre>
*
* @param resourceName Required. The resource name of the batch job.
* @param mutateOperations Required. The list of mutates being added.
* <p>Operations can use negative integers as temp ids to signify dependencies between
* entities created in this batch job. For example, a customer with id = 1234 can create a
* campaign and an ad group in that same campaign by creating a campaign in the first
* operation with the resource name explicitly set to "customers/1234/campaigns/-1", and
* creating an ad group in the second operation with the campaign field also set to
* "customers/1234/campaigns/-1".
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AddBatchJobOperationsResponse addBatchJobOperations(
String resourceName, List<MutateOperation> mutateOperations) {
AddBatchJobOperationsRequest request =
AddBatchJobOperationsRequest.newBuilder()
.setResourceName(resourceName)
.addAllMutateOperations(mutateOperations)
.build();
return addBatchJobOperations(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Add operations to the batch job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
* [ResourceCountLimitExceededError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* BatchJobName resourceName = BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]");
* String sequenceToken = "sequenceToken-731053384";
* List<MutateOperation> mutateOperations = new ArrayList<>();
* AddBatchJobOperationsResponse response =
* batchJobServiceClient.addBatchJobOperations(
* resourceName, sequenceToken, mutateOperations);
* }
* }</pre>
*
* @param resourceName Required. The resource name of the batch job.
* @param sequenceToken A token used to enforce sequencing.
* <p>The first AddBatchJobOperations request for a batch job should not set sequence_token.
* Subsequent requests must set sequence_token to the value of next_sequence_token received in
* the previous AddBatchJobOperations response.
* @param mutateOperations Required. The list of mutates being added.
* <p>Operations can use negative integers as temp ids to signify dependencies between
* entities created in this batch job. For example, a customer with id = 1234 can create a
* campaign and an ad group in that same campaign by creating a campaign in the first
* operation with the resource name explicitly set to "customers/1234/campaigns/-1", and
* creating an ad group in the second operation with the campaign field also set to
* "customers/1234/campaigns/-1".
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AddBatchJobOperationsResponse addBatchJobOperations(
BatchJobName resourceName, String sequenceToken, List<MutateOperation> mutateOperations) {
AddBatchJobOperationsRequest request =
AddBatchJobOperationsRequest.newBuilder()
.setResourceName(resourceName == null ? null : resourceName.toString())
.setSequenceToken(sequenceToken)
.addAllMutateOperations(mutateOperations)
.build();
return addBatchJobOperations(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Add operations to the batch job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
* [ResourceCountLimitExceededError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* String resourceName = BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString();
* String sequenceToken = "sequenceToken-731053384";
* List<MutateOperation> mutateOperations = new ArrayList<>();
* AddBatchJobOperationsResponse response =
* batchJobServiceClient.addBatchJobOperations(
* resourceName, sequenceToken, mutateOperations);
* }
* }</pre>
*
* @param resourceName Required. The resource name of the batch job.
* @param sequenceToken A token used to enforce sequencing.
* <p>The first AddBatchJobOperations request for a batch job should not set sequence_token.
* Subsequent requests must set sequence_token to the value of next_sequence_token received in
* the previous AddBatchJobOperations response.
* @param mutateOperations Required. The list of mutates being added.
* <p>Operations can use negative integers as temp ids to signify dependencies between
* entities created in this batch job. For example, a customer with id = 1234 can create a
* campaign and an ad group in that same campaign by creating a campaign in the first
* operation with the resource name explicitly set to "customers/1234/campaigns/-1", and
* creating an ad group in the second operation with the campaign field also set to
* "customers/1234/campaigns/-1".
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AddBatchJobOperationsResponse addBatchJobOperations(
String resourceName, String sequenceToken, List<MutateOperation> mutateOperations) {
AddBatchJobOperationsRequest request =
AddBatchJobOperationsRequest.newBuilder()
.setResourceName(resourceName)
.setSequenceToken(sequenceToken)
.addAllMutateOperations(mutateOperations)
.build();
return addBatchJobOperations(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Add operations to the batch job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
* [ResourceCountLimitExceededError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* AddBatchJobOperationsRequest request =
* AddBatchJobOperationsRequest.newBuilder()
* .setResourceName(BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString())
* .setSequenceToken("sequenceToken-731053384")
* .addAllMutateOperations(new ArrayList<MutateOperation>())
* .build();
* AddBatchJobOperationsResponse response = batchJobServiceClient.addBatchJobOperations(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final AddBatchJobOperationsResponse addBatchJobOperations(
AddBatchJobOperationsRequest request) {
return addBatchJobOperationsCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Add operations to the batch job.
*
* <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [BatchJobError]()
* [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()
* [ResourceCountLimitExceededError]()
*
* <p>Sample code:
*
* <pre>{@code
* try (BatchJobServiceClient batchJobServiceClient = BatchJobServiceClient.create()) {
* AddBatchJobOperationsRequest request =
* AddBatchJobOperationsRequest.newBuilder()
* .setResourceName(BatchJobName.of("[CUSTOMER_ID]", "[BATCH_JOB_ID]").toString())
* .setSequenceToken("sequenceToken-731053384")
* .addAllMutateOperations(new ArrayList<MutateOperation>())
* .build();
* ApiFuture<AddBatchJobOperationsResponse> future =
* batchJobServiceClient.addBatchJobOperationsCallable().futureCall(request);
* // Do something.
* AddBatchJobOperationsResponse response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<AddBatchJobOperationsRequest, AddBatchJobOperationsResponse>
addBatchJobOperationsCallable() {
return stub.addBatchJobOperationsCallable();
}
// BackgroundResource lifecycle methods: all delegate directly to the stub.
@Override
public final void close() {
stub.close();
}
@Override
public void shutdown() {
stub.shutdown();
}
@Override
public boolean isShutdown() {
return stub.isShutdown();
}
@Override
public boolean isTerminated() {
return stub.isTerminated();
}
@Override
public void shutdownNow() {
stub.shutdownNow();
}
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
return stub.awaitTermination(duration, unit);
}
/**
* Paged response wrapper for listBatchJobResults; iterateAll() transparently fetches
* subsequent pages on demand.
*/
public static class ListBatchJobResultsPagedResponse
extends AbstractPagedListResponse<
ListBatchJobResultsRequest, ListBatchJobResultsResponse, BatchJobResult,
ListBatchJobResultsPage, ListBatchJobResultsFixedSizeCollection> {
public static ApiFuture<ListBatchJobResultsPagedResponse> createAsync(
PageContext<ListBatchJobResultsRequest, ListBatchJobResultsResponse, BatchJobResult>
context,
ApiFuture<ListBatchJobResultsResponse> futureResponse) {
ApiFuture<ListBatchJobResultsPage> futurePage =
ListBatchJobResultsPage.createEmptyPage().createPageAsync(context, futureResponse);
return ApiFutures.transform(
futurePage,
input -> new ListBatchJobResultsPagedResponse(input),
MoreExecutors.directExecutor());
}
private ListBatchJobResultsPagedResponse(ListBatchJobResultsPage page) {
super(page, ListBatchJobResultsFixedSizeCollection.createEmptyCollection());
}
}
/** A single page of listBatchJobResults results. */
public static class ListBatchJobResultsPage
extends AbstractPage<
ListBatchJobResultsRequest, ListBatchJobResultsResponse, BatchJobResult,
ListBatchJobResultsPage> {
private ListBatchJobResultsPage(
PageContext<ListBatchJobResultsRequest, ListBatchJobResultsResponse, BatchJobResult>
context,
ListBatchJobResultsResponse response) {
super(context, response);
}
private static ListBatchJobResultsPage createEmptyPage() {
return new ListBatchJobResultsPage(null, null);
}
@Override
protected ListBatchJobResultsPage createPage(
PageContext<ListBatchJobResultsRequest, ListBatchJobResultsResponse, BatchJobResult>
context,
ListBatchJobResultsResponse response) {
return new ListBatchJobResultsPage(context, response);
}
@Override
public ApiFuture<ListBatchJobResultsPage> createPageAsync(
PageContext<ListBatchJobResultsRequest, ListBatchJobResultsResponse, BatchJobResult>
context,
ApiFuture<ListBatchJobResultsResponse> futureResponse) {
return super.createPageAsync(context, futureResponse);
}
}
/** Groups listBatchJobResults pages into fixed-size collections of elements. */
public static class ListBatchJobResultsFixedSizeCollection
extends AbstractFixedSizeCollection<
ListBatchJobResultsRequest, ListBatchJobResultsResponse, BatchJobResult,
ListBatchJobResultsPage, ListBatchJobResultsFixedSizeCollection> {
private ListBatchJobResultsFixedSizeCollection(
List<ListBatchJobResultsPage> pages, int collectionSize) {
super(pages, collectionSize);
}
private static ListBatchJobResultsFixedSizeCollection createEmptyCollection() {
return new ListBatchJobResultsFixedSizeCollection(null, 0);
}
@Override
protected ListBatchJobResultsFixedSizeCollection createCollection(
List<ListBatchJobResultsPage> pages, int collectionSize) {
return new ListBatchJobResultsFixedSizeCollection(pages, collectionSize);
}
}
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import com.google.common.collect.ImmutableList;
import java.util.Collections;
import java.util.Objects;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFFlowAddVer14 implements OFFlowAdd {
private static final Logger logger = LoggerFactory.getLogger(OFFlowAddVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    // Fixed-size portion of the message (header + fixed fields, before match/instructions).
    final static int MINIMUM_LENGTH = 56;
    // maximum OF message length: 16 bit, unsigned
    final static int MAXIMUM_LENGTH = 0xFFFF;
    // Per-property defaults used by the plain Builder when a property is not set.
    private final static long DEFAULT_XID = 0x0L;
    private final static U64 DEFAULT_COOKIE = U64.ZERO;
    private final static U64 DEFAULT_COOKIE_MASK = U64.ZERO;
    private final static TableId DEFAULT_TABLE_ID = TableId.ZERO;
    private final static int DEFAULT_IDLE_TIMEOUT = 0x0;
    private final static int DEFAULT_HARD_TIMEOUT = 0x0;
    private final static int DEFAULT_PRIORITY = 0x0;
    private final static OFBufferId DEFAULT_BUFFER_ID = OFBufferId.NO_BUFFER;
    private final static OFPort DEFAULT_OUT_PORT = OFPort.ANY;
    private final static OFGroup DEFAULT_OUT_GROUP = OFGroup.ANY;
    private final static Set<OFFlowModFlags> DEFAULT_FLAGS = ImmutableSet.<OFFlowModFlags>of();
    private final static int DEFAULT_IMPORTANCE = 0x0;
    private final static Match DEFAULT_MATCH = OFFactoryVer14.MATCH_WILDCARD_ALL;
    private final static List<OFInstruction> DEFAULT_INSTRUCTIONS = ImmutableList.<OFInstruction>of();
    // OF message fields -- all final; instances are immutable once constructed.
    private final long xid;
    private final U64 cookie;
    private final U64 cookieMask;
    private final TableId tableId;
    private final int idleTimeout;
    private final int hardTimeout;
    private final int priority;
    private final OFBufferId bufferId;
    private final OFPort outPort;
    private final OFGroup outGroup;
    private final Set<OFFlowModFlags> flags;
    private final int importance;
    private final Match match;
    private final List<OFInstruction> instructions;
    //
    // Immutable default instance
    final static OFFlowAddVer14 DEFAULT = new OFFlowAddVer14(
        DEFAULT_XID, DEFAULT_COOKIE, DEFAULT_COOKIE_MASK, DEFAULT_TABLE_ID, DEFAULT_IDLE_TIMEOUT, DEFAULT_HARD_TIMEOUT, DEFAULT_PRIORITY, DEFAULT_BUFFER_ID, DEFAULT_OUT_PORT, DEFAULT_OUT_GROUP, DEFAULT_FLAGS, DEFAULT_IMPORTANCE, DEFAULT_MATCH, DEFAULT_INSTRUCTIONS
    );
// package private constructor - used by readers, builders, and factory
OFFlowAddVer14(long xid, U64 cookie, U64 cookieMask, TableId tableId, int idleTimeout, int hardTimeout, int priority, OFBufferId bufferId, OFPort outPort, OFGroup outGroup, Set<OFFlowModFlags> flags, int importance, Match match, List<OFInstruction> instructions) {
if(cookie == null) {
throw new NullPointerException("OFFlowAddVer14: property cookie cannot be null");
}
if(cookieMask == null) {
throw new NullPointerException("OFFlowAddVer14: property cookieMask cannot be null");
}
if(tableId == null) {
throw new NullPointerException("OFFlowAddVer14: property tableId cannot be null");
}
if(bufferId == null) {
throw new NullPointerException("OFFlowAddVer14: property bufferId cannot be null");
}
if(outPort == null) {
throw new NullPointerException("OFFlowAddVer14: property outPort cannot be null");
}
if(outGroup == null) {
throw new NullPointerException("OFFlowAddVer14: property outGroup cannot be null");
}
if(flags == null) {
throw new NullPointerException("OFFlowAddVer14: property flags cannot be null");
}
if(match == null) {
throw new NullPointerException("OFFlowAddVer14: property match cannot be null");
}
if(instructions == null) {
throw new NullPointerException("OFFlowAddVer14: property instructions cannot be null");
}
this.xid = U32.normalize(xid);
this.cookie = cookie;
this.cookieMask = cookieMask;
this.tableId = tableId;
this.idleTimeout = U16.normalize(idleTimeout);
this.hardTimeout = U16.normalize(hardTimeout);
this.priority = U16.normalize(priority);
this.bufferId = bufferId;
this.outPort = outPort;
this.outGroup = outGroup;
this.flags = flags;
this.importance = U16.normalize(importance);
this.match = match;
this.instructions = instructions;
}
    // Accessors for OF message fields.
    // Plain getters over the immutable fields; version/type/command are
    // fixed constants for this generated class (OF 1.4 FLOW_MOD, command ADD).
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }
    @Override
    public OFType getType() {
        return OFType.FLOW_MOD;
    }
    @Override
    public long getXid() {
        return xid;
    }
    @Override
    public Match getMatch() {
        return match;
    }
    @Override
    public U64 getCookie() {
        return cookie;
    }
    @Override
    public OFFlowModCommand getCommand() {
        // This class models only the ADD variant of FLOW_MOD.
        return OFFlowModCommand.ADD;
    }
    @Override
    public int getIdleTimeout() {
        return idleTimeout;
    }
    @Override
    public int getHardTimeout() {
        return hardTimeout;
    }
    @Override
    public int getPriority() {
        return priority;
    }
    @Override
    public OFBufferId getBufferId() {
        return bufferId;
    }
    @Override
    public OFPort getOutPort() {
        return outPort;
    }
    @Override
    public Set<OFFlowModFlags> getFlags() {
        return flags;
    }
@Override
public List<OFAction> getActions()throws UnsupportedOperationException {
for (OFInstruction inst : this.instructions) {
if (inst instanceof OFInstructionApplyActions) {
OFInstructionApplyActions iap = (OFInstructionApplyActions)inst;
return iap.getActions();
}
}
return Collections.emptyList();
}
    @Override
    public U64 getCookieMask() {
        return cookieMask;
    }
    @Override
    public TableId getTableId() {
        return tableId;
    }
    @Override
    public OFGroup getOutGroup() {
        return outGroup;
    }
    @Override
    public List<OFInstruction> getInstructions() {
        return instructions;
    }
    @Override
    public int getImportance() {
        return importance;
    }
    // Builder seeded with this message's values; unset properties fall back to them.
    public OFFlowAdd.Builder createBuilder() {
        return new BuilderWithParent(this);
    }
static class BuilderWithParent implements OFFlowAdd.Builder {
final OFFlowAddVer14 parentMessage;
// OF message fields
private boolean xidSet;
private long xid;
private boolean cookieSet;
private U64 cookie;
private boolean cookieMaskSet;
private U64 cookieMask;
private boolean tableIdSet;
private TableId tableId;
private boolean idleTimeoutSet;
private int idleTimeout;
private boolean hardTimeoutSet;
private int hardTimeout;
private boolean prioritySet;
private int priority;
private boolean bufferIdSet;
private OFBufferId bufferId;
private boolean outPortSet;
private OFPort outPort;
private boolean outGroupSet;
private OFGroup outGroup;
private boolean flagsSet;
private Set<OFFlowModFlags> flags;
private boolean importanceSet;
private int importance;
private boolean matchSet;
private Match match;
private boolean instructionsSet;
private List<OFInstruction> instructions;
BuilderWithParent(OFFlowAddVer14 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.FLOW_MOD;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFFlowAdd.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public Match getMatch() {
return match;
}
@Override
public OFFlowAdd.Builder setMatch(Match match) {
this.match = match;
this.matchSet = true;
return this;
}
@Override
public U64 getCookie() {
return cookie;
}
@Override
public OFFlowAdd.Builder setCookie(U64 cookie) {
this.cookie = cookie;
this.cookieSet = true;
return this;
}
@Override
public OFFlowModCommand getCommand() {
return OFFlowModCommand.ADD;
}
@Override
public int getIdleTimeout() {
return idleTimeout;
}
@Override
public OFFlowAdd.Builder setIdleTimeout(int idleTimeout) {
this.idleTimeout = idleTimeout;
this.idleTimeoutSet = true;
return this;
}
@Override
public int getHardTimeout() {
return hardTimeout;
}
@Override
public OFFlowAdd.Builder setHardTimeout(int hardTimeout) {
this.hardTimeout = hardTimeout;
this.hardTimeoutSet = true;
return this;
}
@Override
public int getPriority() {
return priority;
}
@Override
public OFFlowAdd.Builder setPriority(int priority) {
this.priority = priority;
this.prioritySet = true;
return this;
}
@Override
public OFBufferId getBufferId() {
return bufferId;
}
@Override
public OFFlowAdd.Builder setBufferId(OFBufferId bufferId) {
this.bufferId = bufferId;
this.bufferIdSet = true;
return this;
}
@Override
public OFPort getOutPort() {
return outPort;
}
@Override
public OFFlowAdd.Builder setOutPort(OFPort outPort) {
this.outPort = outPort;
this.outPortSet = true;
return this;
}
@Override
public Set<OFFlowModFlags> getFlags() {
return flags;
}
@Override
public OFFlowAdd.Builder setFlags(Set<OFFlowModFlags> flags) {
this.flags = flags;
this.flagsSet = true;
return this;
}
@Override
public List<OFAction> getActions()throws UnsupportedOperationException {
if (!this.instructionsSet)
return Collections.emptyList();
for (OFInstruction inst : this.instructions) {
if (inst instanceof OFInstructionApplyActions) {
OFInstructionApplyActions iap = (OFInstructionApplyActions)inst;
return iap.getActions();
}
}
return Collections.emptyList();
}
@Override
public OFFlowAdd.Builder setActions(List<OFAction> actions) throws UnsupportedOperationException {
OFInstructionApplyActionsVer14.Builder builder = new OFInstructionApplyActionsVer14.Builder();
builder.setActions(actions);
this.instructions = Collections.singletonList((OFInstruction)builder.build());
this.instructionsSet = true;
return this;
}
@Override
public U64 getCookieMask() {
return cookieMask;
}
@Override
public OFFlowAdd.Builder setCookieMask(U64 cookieMask) {
this.cookieMask = cookieMask;
this.cookieMaskSet = true;
return this;
}
@Override
public TableId getTableId() {
return tableId;
}
@Override
public OFFlowAdd.Builder setTableId(TableId tableId) {
this.tableId = tableId;
this.tableIdSet = true;
return this;
}
@Override
public OFGroup getOutGroup() {
return outGroup;
}
@Override
public OFFlowAdd.Builder setOutGroup(OFGroup outGroup) {
this.outGroup = outGroup;
this.outGroupSet = true;
return this;
}
@Override
public List<OFInstruction> getInstructions() {
return instructions;
}
@Override
public OFFlowAdd.Builder setInstructions(List<OFInstruction> instructions) {
this.instructions = instructions;
this.instructionsSet = true;
return this;
}
@Override
public int getImportance() {
return importance;
}
@Override
public OFFlowAdd.Builder setImportance(int importance) {
this.importance = importance;
this.importanceSet = true;
return this;
}
        @Override
        public OFFlowAdd build() {
                // Resolve each property: an explicitly-set builder value wins,
                // otherwise fall back to the parent message this builder was
                // created from. Reference properties are re-checked for null
                // because setters accept null without validating.
                long xid = this.xidSet ? this.xid : parentMessage.xid;
                U64 cookie = this.cookieSet ? this.cookie : parentMessage.cookie;
                if(cookie == null)
                    throw new NullPointerException("Property cookie must not be null");
                U64 cookieMask = this.cookieMaskSet ? this.cookieMask : parentMessage.cookieMask;
                if(cookieMask == null)
                    throw new NullPointerException("Property cookieMask must not be null");
                TableId tableId = this.tableIdSet ? this.tableId : parentMessage.tableId;
                if(tableId == null)
                    throw new NullPointerException("Property tableId must not be null");
                int idleTimeout = this.idleTimeoutSet ? this.idleTimeout : parentMessage.idleTimeout;
                int hardTimeout = this.hardTimeoutSet ? this.hardTimeout : parentMessage.hardTimeout;
                int priority = this.prioritySet ? this.priority : parentMessage.priority;
                OFBufferId bufferId = this.bufferIdSet ? this.bufferId : parentMessage.bufferId;
                if(bufferId == null)
                    throw new NullPointerException("Property bufferId must not be null");
                OFPort outPort = this.outPortSet ? this.outPort : parentMessage.outPort;
                if(outPort == null)
                    throw new NullPointerException("Property outPort must not be null");
                OFGroup outGroup = this.outGroupSet ? this.outGroup : parentMessage.outGroup;
                if(outGroup == null)
                    throw new NullPointerException("Property outGroup must not be null");
                Set<OFFlowModFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
                if(flags == null)
                    throw new NullPointerException("Property flags must not be null");
                int importance = this.importanceSet ? this.importance : parentMessage.importance;
                Match match = this.matchSet ? this.match : parentMessage.match;
                if(match == null)
                    throw new NullPointerException("Property match must not be null");
                List<OFInstruction> instructions = this.instructionsSet ? this.instructions : parentMessage.instructions;
                if(instructions == null)
                    throw new NullPointerException("Property instructions must not be null");
                //
                // The constructor re-validates and normalizes unsigned fields.
                return new OFFlowAddVer14(
                    xid,
                    cookie,
                    cookieMask,
                    tableId,
                    idleTimeout,
                    hardTimeout,
                    priority,
                    bufferId,
                    outPort,
                    outGroup,
                    flags,
                    importance,
                    match,
                    instructions
                );
        }
}
static class Builder implements OFFlowAdd.Builder {
// OF message fields
private boolean xidSet;
private long xid;
private boolean cookieSet;
private U64 cookie;
private boolean cookieMaskSet;
private U64 cookieMask;
private boolean tableIdSet;
private TableId tableId;
private boolean idleTimeoutSet;
private int idleTimeout;
private boolean hardTimeoutSet;
private int hardTimeout;
private boolean prioritySet;
private int priority;
private boolean bufferIdSet;
private OFBufferId bufferId;
private boolean outPortSet;
private OFPort outPort;
private boolean outGroupSet;
private OFGroup outGroup;
private boolean flagsSet;
private Set<OFFlowModFlags> flags;
private boolean importanceSet;
private int importance;
private boolean matchSet;
private Match match;
private boolean instructionsSet;
private List<OFInstruction> instructions;
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.FLOW_MOD;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFFlowAdd.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public Match getMatch() {
return match;
}
@Override
public OFFlowAdd.Builder setMatch(Match match) {
this.match = match;
this.matchSet = true;
return this;
}
@Override
public U64 getCookie() {
return cookie;
}
@Override
public OFFlowAdd.Builder setCookie(U64 cookie) {
this.cookie = cookie;
this.cookieSet = true;
return this;
}
@Override
public OFFlowModCommand getCommand() {
return OFFlowModCommand.ADD;
}
@Override
public int getIdleTimeout() {
return idleTimeout;
}
@Override
public OFFlowAdd.Builder setIdleTimeout(int idleTimeout) {
this.idleTimeout = idleTimeout;
this.idleTimeoutSet = true;
return this;
}
@Override
public int getHardTimeout() {
return hardTimeout;
}
@Override
public OFFlowAdd.Builder setHardTimeout(int hardTimeout) {
this.hardTimeout = hardTimeout;
this.hardTimeoutSet = true;
return this;
}
@Override
public int getPriority() {
return priority;
}
@Override
public OFFlowAdd.Builder setPriority(int priority) {
this.priority = priority;
this.prioritySet = true;
return this;
}
@Override
public OFBufferId getBufferId() {
return bufferId;
}
@Override
public OFFlowAdd.Builder setBufferId(OFBufferId bufferId) {
this.bufferId = bufferId;
this.bufferIdSet = true;
return this;
}
@Override
public OFPort getOutPort() {
return outPort;
}
@Override
public OFFlowAdd.Builder setOutPort(OFPort outPort) {
this.outPort = outPort;
this.outPortSet = true;
return this;
}
@Override
public Set<OFFlowModFlags> getFlags() {
return flags;
}
@Override
public OFFlowAdd.Builder setFlags(Set<OFFlowModFlags> flags) {
this.flags = flags;
this.flagsSet = true;
return this;
}
@Override
public List<OFAction> getActions()throws UnsupportedOperationException {
if (!this.instructionsSet)
return Collections.emptyList();
for (OFInstruction inst : this.instructions) {
if (inst instanceof OFInstructionApplyActions) {
OFInstructionApplyActions iap = (OFInstructionApplyActions)inst;
return iap.getActions();
}
}
return Collections.emptyList();
}
@Override
public OFFlowAdd.Builder setActions(List<OFAction> actions) throws UnsupportedOperationException {
OFInstructionApplyActionsVer14.Builder builder = new OFInstructionApplyActionsVer14.Builder();
builder.setActions(actions);
this.instructions = Collections.singletonList((OFInstruction)builder.build());
this.instructionsSet = true;
return this;
}
@Override
public U64 getCookieMask() {
return cookieMask;
}
@Override
public OFFlowAdd.Builder setCookieMask(U64 cookieMask) {
this.cookieMask = cookieMask;
this.cookieMaskSet = true;
return this;
}
@Override
public TableId getTableId() {
return tableId;
}
@Override
public OFFlowAdd.Builder setTableId(TableId tableId) {
this.tableId = tableId;
this.tableIdSet = true;
return this;
}
@Override
public OFGroup getOutGroup() {
return outGroup;
}
@Override
public OFFlowAdd.Builder setOutGroup(OFGroup outGroup) {
this.outGroup = outGroup;
this.outGroupSet = true;
return this;
}
@Override
public List<OFInstruction> getInstructions() {
return instructions;
}
@Override
public OFFlowAdd.Builder setInstructions(List<OFInstruction> instructions) {
this.instructions = instructions;
this.instructionsSet = true;
return this;
}
@Override
public int getImportance() {
return importance;
}
@Override
public OFFlowAdd.Builder setImportance(int importance) {
this.importance = importance;
this.importanceSet = true;
return this;
}
//
        @Override
        public OFFlowAdd build() {
            // Resolve each property: an explicitly-set builder value wins,
            // otherwise use the class-level DEFAULT_* constant. Reference
            // properties are re-checked for null because setters accept null.
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            U64 cookie = this.cookieSet ? this.cookie : DEFAULT_COOKIE;
            if(cookie == null)
                throw new NullPointerException("Property cookie must not be null");
            U64 cookieMask = this.cookieMaskSet ? this.cookieMask : DEFAULT_COOKIE_MASK;
            if(cookieMask == null)
                throw new NullPointerException("Property cookieMask must not be null");
            TableId tableId = this.tableIdSet ? this.tableId : DEFAULT_TABLE_ID;
            if(tableId == null)
                throw new NullPointerException("Property tableId must not be null");
            int idleTimeout = this.idleTimeoutSet ? this.idleTimeout : DEFAULT_IDLE_TIMEOUT;
            int hardTimeout = this.hardTimeoutSet ? this.hardTimeout : DEFAULT_HARD_TIMEOUT;
            int priority = this.prioritySet ? this.priority : DEFAULT_PRIORITY;
            OFBufferId bufferId = this.bufferIdSet ? this.bufferId : DEFAULT_BUFFER_ID;
            if(bufferId == null)
                throw new NullPointerException("Property bufferId must not be null");
            OFPort outPort = this.outPortSet ? this.outPort : DEFAULT_OUT_PORT;
            if(outPort == null)
                throw new NullPointerException("Property outPort must not be null");
            OFGroup outGroup = this.outGroupSet ? this.outGroup : DEFAULT_OUT_GROUP;
            if(outGroup == null)
                throw new NullPointerException("Property outGroup must not be null");
            Set<OFFlowModFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            int importance = this.importanceSet ? this.importance : DEFAULT_IMPORTANCE;
            Match match = this.matchSet ? this.match : DEFAULT_MATCH;
            if(match == null)
                throw new NullPointerException("Property match must not be null");
            List<OFInstruction> instructions = this.instructionsSet ? this.instructions : DEFAULT_INSTRUCTIONS;
            if(instructions == null)
                throw new NullPointerException("Property instructions must not be null");
            // The constructor re-validates and normalizes unsigned fields.
            return new OFFlowAddVer14(
                xid,
                cookie,
                cookieMask,
                tableId,
                idleTimeout,
                hardTimeout,
                priority,
                bufferId,
                outPort,
                outGroup,
                flags,
                importance,
                match,
                instructions
            );
        }
}
    final static Reader READER = new Reader();
    /**
     * Deserializes an OF 1.4 FLOW_MOD/ADD message from its wire format.
     * Field order below IS the wire layout; do not reorder reads.
     */
    static class Reader implements OFMessageReader<OFFlowAdd> {
        @Override
        public OFFlowAdd readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 5
            byte version = bb.readByte();
            if(version != (byte) 0x5)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version);
            // fixed value property type == 14
            byte type = bb.readByte();
            if(type != (byte) 0xe)
                throw new OFParseError("Wrong type: Expected=OFType.FLOW_MOD(14), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet: rewind and signal the
                // caller to retry once more bytes arrive (null = incomplete).
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            U64 cookie = U64.ofRaw(bb.readLong());
            U64 cookieMask = U64.ofRaw(bb.readLong());
            TableId tableId = TableId.readByte(bb);
            // fixed value property command == 0
            short command = bb.readByte();
            if(command != (short) 0x0)
                throw new OFParseError("Wrong command: Expected=OFFlowModCommand.ADD(0), got="+command);
            int idleTimeout = U16.f(bb.readShort());
            int hardTimeout = U16.f(bb.readShort());
            int priority = U16.f(bb.readShort());
            OFBufferId bufferId = OFBufferId.of(bb.readInt());
            OFPort outPort = OFPort.read4Bytes(bb);
            OFGroup outGroup = OFGroup.read4Bytes(bb);
            Set<OFFlowModFlags> flags = OFFlowModFlagsSerializerVer14.readFrom(bb);
            int importance = U16.f(bb.readShort());
            Match match = ChannelUtilsVer14.readOFMatch(bb);
            // Instructions consume whatever remains of the declared length.
            List<OFInstruction> instructions = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFInstructionVer14.READER);
            OFFlowAddVer14 flowAddVer14 = new OFFlowAddVer14(
                xid,
                cookie,
                cookieMask,
                tableId,
                idleTimeout,
                hardTimeout,
                priority,
                bufferId,
                outPort,
                outGroup,
                flags,
                importance,
                match,
                instructions
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", flowAddVer14);
            return flowAddVer14;
        }
    }
    // Feeds this message into a Guava PrimitiveSink (e.g. for hashing).
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }
    final static OFFlowAddVer14Funnel FUNNEL = new OFFlowAddVer14Funnel();
    /**
     * Funnel mirroring the wire-field order. The sink order defines the hash
     * stream, so it must stay stable across releases; do not reorder.
     */
    static class OFFlowAddVer14Funnel implements Funnel<OFFlowAddVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFFlowAddVer14 message, PrimitiveSink sink) {
            // fixed value property version = 5
            sink.putByte((byte) 0x5);
            // fixed value property type = 14
            sink.putByte((byte) 0xe);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            message.cookie.putTo(sink);
            message.cookieMask.putTo(sink);
            message.tableId.putTo(sink);
            // fixed value property command = 0
            sink.putShort((short) 0x0);
            sink.putInt(message.idleTimeout);
            sink.putInt(message.hardTimeout);
            sink.putInt(message.priority);
            message.bufferId.putTo(sink);
            message.outPort.putTo(sink);
            message.outGroup.putTo(sink);
            OFFlowModFlagsSerializerVer14.putTo(message.flags, sink);
            sink.putInt(message.importance);
            message.match.putTo(sink);
            FunnelUtils.putList(message.instructions, sink);
        }
    }
    // Serializes this message to its OF 1.4 wire format.
    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }
    final static Writer WRITER = new Writer();
    /**
     * Serializer for the OF 1.4 FLOW_MOD/ADD wire format. Write order IS the
     * wire layout; the 16-bit length field is back-patched at the end because
     * match/instructions are variable-length.
     */
    static class Writer implements OFMessageWriter<OFFlowAddVer14> {
        @Override
        public void write(ByteBuf bb, OFFlowAddVer14 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 5
            bb.writeByte((byte) 0x5);
            // fixed value property type = 14
            bb.writeByte((byte) 0xe);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            bb.writeLong(message.cookie.getValue());
            bb.writeLong(message.cookieMask.getValue());
            message.tableId.writeByte(bb);
            // fixed value property command = 0
            bb.writeByte((short) 0x0);
            bb.writeShort(U16.t(message.idleTimeout));
            bb.writeShort(U16.t(message.hardTimeout));
            bb.writeShort(U16.t(message.priority));
            bb.writeInt(message.bufferId.getInt());
            message.outPort.write4Bytes(bb);
            message.outGroup.write4Bytes(bb);
            OFFlowModFlagsSerializerVer14.writeTo(bb, message.flags);
            bb.writeShort(U16.t(message.importance));
            message.match.writeTo(bb);
            ChannelUtils.writeList(bb, message.instructions);
            // update length field (16-bit on the wire, hence the cap check)
            int length = bb.writerIndex() - startIndex;
            if (length > MAXIMUM_LENGTH) {
                throw new IllegalArgumentException("OFFlowAddVer14: message length (" + length + ") exceeds maximum (0xFFFF)");
            }
            bb.setShort(lengthIndex, length);
        }
    }
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFFlowAddVer14(");
b.append("xid=").append(xid);
b.append(", ");
b.append("cookie=").append(cookie);
b.append(", ");
b.append("cookieMask=").append(cookieMask);
b.append(", ");
b.append("tableId=").append(tableId);
b.append(", ");
b.append("idleTimeout=").append(idleTimeout);
b.append(", ");
b.append("hardTimeout=").append(hardTimeout);
b.append(", ");
b.append("priority=").append(priority);
b.append(", ");
b.append("bufferId=").append(bufferId);
b.append(", ");
b.append("outPort=").append(outPort);
b.append(", ");
b.append("outGroup=").append(outGroup);
b.append(", ");
b.append("flags=").append(flags);
b.append(", ");
b.append("importance=").append(importance);
b.append(", ");
b.append("match=").append(match);
b.append(", ");
b.append("instructions=").append(instructions);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFFlowAddVer14 other = (OFFlowAddVer14) obj;
if( xid != other.xid)
return false;
if (cookie == null) {
if (other.cookie != null)
return false;
} else if (!cookie.equals(other.cookie))
return false;
if (cookieMask == null) {
if (other.cookieMask != null)
return false;
} else if (!cookieMask.equals(other.cookieMask))
return false;
if (tableId == null) {
if (other.tableId != null)
return false;
} else if (!tableId.equals(other.tableId))
return false;
if( idleTimeout != other.idleTimeout)
return false;
if( hardTimeout != other.hardTimeout)
return false;
if( priority != other.priority)
return false;
if (bufferId == null) {
if (other.bufferId != null)
return false;
} else if (!bufferId.equals(other.bufferId))
return false;
if (outPort == null) {
if (other.outPort != null)
return false;
} else if (!outPort.equals(other.outPort))
return false;
if (outGroup == null) {
if (other.outGroup != null)
return false;
} else if (!outGroup.equals(other.outGroup))
return false;
if (flags == null) {
if (other.flags != null)
return false;
} else if (!flags.equals(other.flags))
return false;
if( importance != other.importance)
return false;
if (match == null) {
if (other.match != null)
return false;
} else if (!match.equals(other.match))
return false;
if (instructions == null) {
if (other.instructions != null)
return false;
} else if (!instructions.equals(other.instructions))
return false;
return true;
}
@Override
public boolean equalsIgnoreXid(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFFlowAddVer14 other = (OFFlowAddVer14) obj;
// ignore XID
if (cookie == null) {
if (other.cookie != null)
return false;
} else if (!cookie.equals(other.cookie))
return false;
if (cookieMask == null) {
if (other.cookieMask != null)
return false;
} else if (!cookieMask.equals(other.cookieMask))
return false;
if (tableId == null) {
if (other.tableId != null)
return false;
} else if (!tableId.equals(other.tableId))
return false;
if( idleTimeout != other.idleTimeout)
return false;
if( hardTimeout != other.hardTimeout)
return false;
if( priority != other.priority)
return false;
if (bufferId == null) {
if (other.bufferId != null)
return false;
} else if (!bufferId.equals(other.bufferId))
return false;
if (outPort == null) {
if (other.outPort != null)
return false;
} else if (!outPort.equals(other.outPort))
return false;
if (outGroup == null) {
if (other.outGroup != null)
return false;
} else if (!outGroup.equals(other.outGroup))
return false;
if (flags == null) {
if (other.flags != null)
return false;
} else if (!flags.equals(other.flags))
return false;
if( importance != other.importance)
return false;
if (match == null) {
if (other.match != null)
return false;
} else if (!match.equals(other.match))
return false;
if (instructions == null) {
if (other.instructions != null)
return false;
} else if (!instructions.equals(other.instructions))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * (int) (xid ^ (xid >>> 32));
result = prime * result + ((cookie == null) ? 0 : cookie.hashCode());
result = prime * result + ((cookieMask == null) ? 0 : cookieMask.hashCode());
result = prime * result + ((tableId == null) ? 0 : tableId.hashCode());
result = prime * result + idleTimeout;
result = prime * result + hardTimeout;
result = prime * result + priority;
result = prime * result + ((bufferId == null) ? 0 : bufferId.hashCode());
result = prime * result + ((outPort == null) ? 0 : outPort.hashCode());
result = prime * result + ((outGroup == null) ? 0 : outGroup.hashCode());
result = prime * result + ((flags == null) ? 0 : flags.hashCode());
result = prime * result + importance;
result = prime * result + ((match == null) ? 0 : match.hashCode());
result = prime * result + ((instructions == null) ? 0 : instructions.hashCode());
return result;
}
    /**
     * Hash over all fields EXCEPT xid; pairs with {@code equalsIgnoreXid}.
     * The exact fold (31*result+term per field, in field order) must not
     * change, as callers may rely on stable values within a run.
     */
    @Override
    public int hashCodeIgnoreXid() {
        final int prime = 31;
        int result = 1;
        // ignore XID
        result = prime * result + ((cookie == null) ? 0 : cookie.hashCode());
        result = prime * result + ((cookieMask == null) ? 0 : cookieMask.hashCode());
        result = prime * result + ((tableId == null) ? 0 : tableId.hashCode());
        result = prime * result + idleTimeout;
        result = prime * result + hardTimeout;
        result = prime * result + priority;
        result = prime * result + ((bufferId == null) ? 0 : bufferId.hashCode());
        result = prime * result + ((outPort == null) ? 0 : outPort.hashCode());
        result = prime * result + ((outGroup == null) ? 0 : outGroup.hashCode());
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + importance;
        result = prime * result + ((match == null) ? 0 : match.hashCode());
        result = prime * result + ((instructions == null) ? 0 : instructions.hashCode());
        return result;
    }
}
| |
/**
* Copyright (c) 2016 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.contrib.hdht;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Random;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.io.FileUtils;
import com.esotericsoftware.kryo.Kryo;
import com.google.common.util.concurrent.MoreExecutors;
import com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap;
import com.datatorrent.contrib.hdht.wal.FSWALReader;
import com.datatorrent.contrib.hdht.wal.FSWALWriter;
import com.datatorrent.lib.fileaccess.FileAccessFSImpl;
import com.datatorrent.lib.helper.OperatorContextTestHelper;
import com.datatorrent.lib.util.KryoCloneUtils;
import com.datatorrent.netlet.util.Slice;
/**
 * Tests for the HDHT write-ahead log (WAL): write/read round trips, seeking to
 * a saved offset, WAL file rolling, old-file cleanup, and operator recovery
 * from a checkpointed WAL state.
 */
public class WALTest
{
  static final Random rand = new Random();

  /** Working directory shared by all tests; deleted at the start of each test. */
  File file = new File("target/hds");

  /** Returns {@code len} random bytes. */
  static byte[] genRandomByteArray(int len)
  {
    byte[] val = new byte[len];
    rand.nextBytes(val);
    return val;
  }

  /** Returns a {@link Slice} wrapping {@code len} random bytes. */
  static Slice genRandomKey(int len)
  {
    byte[] val = new byte[len];
    rand.nextBytes(val);
    return new Slice(val);
  }

  /**
   * - Write some data to WAL
   * - Read the data back. The amount of data read should be
   *   same as amount of data written.
   * @throws IOException
   */
  @Test
  public void testWalWriteAndRead() throws IOException
  {
    FileUtils.deleteDirectory(file);
    FileAccessFSImpl bfs = new MockFileAccess();
    bfs.setBasePath(file.getAbsolutePath());
    bfs.init();

    int keySize = 100;
    int valSize = 110;
    int delKeySize = 50;
    int purgeKeySize = 60;
    int numTuples = 100;
    int numPuts = 0;
    int numDeletes = 0;
    int numPurges = 0;

    FSWALWriter wWriter = new FSWALWriter(bfs, new HDHTLogEntry.HDHTLogSerializer(), 1, "WAL-0");
    // Append a random mix of put/purge/delete entries, counting each kind.
    for (int i = 0; i < numTuples; i++) {
      int type = rand.nextInt(3);
      switch (type) {
        case 0:
          wWriter.append(new HDHTLogEntry.PutEntry(0, genRandomKey(keySize), genRandomByteArray(valSize)));
          numPuts++;
          break;
        case 1:
          wWriter.append(new HDHTLogEntry.PurgeEntry(0, genRandomKey(purgeKeySize), genRandomKey(purgeKeySize)));
          numPurges++;
          break;
        case 2:
          wWriter.append(new HDHTLogEntry.DeleteEntry(0, genRandomKey(delKeySize)));
          numDeletes++;
          break;
        default:
      }
    }
    wWriter.close();

    File wal0 = new File(file.getAbsoluteFile().toString() + "/1/WAL-0");
    Assert.assertEquals("WAL file created ", true, wal0.exists());

    FSWALReader wReader = new FSWALReader(bfs, new HDHTLogEntry.HDHTLogSerializer(), 1, "WAL-0");
    int tuples = 0;
    int puts = 0;
    int purges = 0;
    int deletes = 0;
    // Read everything back and verify per-entry sizes and per-kind counts.
    while (wReader.advance()) {
      HDHTLogEntry.HDHTWalEntry entry = (HDHTLogEntry.HDHTWalEntry)wReader.get();
      logger.debug("entry read {}", entry);
      if (entry instanceof HDHTLogEntry.PutEntry) {
        HDHTLogEntry.PutEntry keyVal = (HDHTLogEntry.PutEntry)entry;
        Assert.assertEquals("Key size ", keySize, keyVal.key.length);
        Assert.assertEquals("Value size ", valSize, keyVal.val.length);
        puts++;
      } else if (entry instanceof HDHTLogEntry.PurgeEntry) {
        HDHTLogEntry.PurgeEntry purge = (HDHTLogEntry.PurgeEntry)entry;
        Assert.assertEquals("Purge start key size", purgeKeySize, purge.startKey.length);
        Assert.assertEquals("Purge end key size", purgeKeySize, purge.endKey.length);
        purges++;
      } else if (entry instanceof HDHTLogEntry.DeleteEntry) {
        HDHTLogEntry.DeleteEntry del = (HDHTLogEntry.DeleteEntry)entry;
        Assert.assertEquals("Del key size ", delKeySize, del.key.length);
        deletes++;
      }
      tuples++;
    }
    wReader.close();

    Assert.assertEquals("Write and read same number of tuples ", numTuples, tuples);
    Assert.assertEquals("Number of puts ", numPuts, puts);
    Assert.assertEquals("Number of purges", numPurges, purges);
    Assert.assertEquals("Number of deletes ", numDeletes, deletes);
  }

  /**
   * Read WAL from middle of the file by seeking to known valid
   * offset and start reading from that point till the end.
   */
  @Test
  public void testWalSkip() throws IOException
  {
    FileUtils.deleteDirectory(file);
    FileAccessFSImpl bfs = new MockFileAccess();
    bfs.setBasePath(file.getAbsolutePath());
    bfs.init();

    long offset = 0;
    FSWALWriter wWriter = new FSWALWriter(bfs, new HDHTLogEntry.HDHTLogSerializer(), 1, "WAL-0");
    int totalTuples = 100;
    int recoveryTuples = 30;
    for (int i = 0; i < totalTuples; i++) {
      wWriter.append(new HDHTLogEntry.PutEntry(0, genRandomKey(100), genRandomByteArray(100)));
      // Remember the offset just after the (recoveryTuples+1)-th entry.
      if (i == recoveryTuples) {
        offset = wWriter.getSize();
      }
    }
    logger.info("total file size is " + wWriter.getSize() + " recovery offset is " + offset);
    wWriter.close();

    FSWALReader wReader = new FSWALReader(bfs, new HDHTLogEntry.HDHTLogSerializer(), 1, "WAL-0");
    wReader.seek(offset);
    int read = 0;
    while (wReader.advance()) {
      read++;
      wReader.get();
    }
    wReader.close();

    // Fixed: JUnit assertEquals takes (message, expected, actual) — the
    // expected count was previously passed as the actual argument.
    Assert.assertEquals("Number of tuples read after skipping", totalTuples - recoveryTuples - 1, read);
  }

  /**
   * Test WAL rolling functionality, set maximumWal size to 1024.
   * Write some data which will go over WAL size.
   * call endWindow
   * Write some more data.
   * Two files should be created.
   * @throws IOException
   */
  @Test
  public void testWalRolling() throws IOException
  {
    FileUtils.deleteDirectory(file);
    final long BUCKET1 = 1L;
    final int OPERATOR_ID = 1;
    FileAccessFSImpl bfs = new MockFileAccess();
    bfs.setBasePath(file.getAbsolutePath());
    bfs.init();

    HDHTWriter hds = new HDHTWriter();
    hds.setFileStore(bfs);
    hds.setKeyComparator(new HDHTWriterTest.SequenceComparator());
    hds.setFlushIntervalCount(5);
    hds.setFlushSize(1000);
    hds.setMaxWalFileSize(1024);
    hds.setup(new OperatorContextTestHelper.TestIdOperatorContext(OPERATOR_ID, new DefaultAttributeMap()));
    hds.writeExecutor = MoreExecutors.sameThreadExecutor();

    hds.beginWindow(0);
    hds.put(BUCKET1, genRandomKey(500), genRandomByteArray(500));
    hds.put(BUCKET1, genRandomKey(500), genRandomByteArray(500));
    hds.endWindow();

    hds.beginWindow(1);
    hds.put(BUCKET1, genRandomKey(500), genRandomByteArray(500));
    hds.put(BUCKET1, genRandomKey(500), genRandomByteArray(500));
    hds.endWindow();
    hds.forceWal();

    // assertEquals argument order fixed: expected value first.
    File wal0 = new File(file.getAbsoluteFile().toString() + "/WAL/1/_WAL-0");
    Assert.assertEquals("New Wal-0 created ", true, wal0.exists());
    File wal1 = new File(file.getAbsoluteFile().toString() + "/WAL/1/_WAL-1");
    Assert.assertEquals("New Wal-1 created ", true, wal1.exists());
  }

  /**
   * Test recovery of operator cache. Steps
   * - Add some tuples
   * - Flush data to disk.
   * - Add some more tuples, which are not flushed to data, but flushed to WAL.
   * - Save WAL state (operator checkpoint)
   * - Add a tuple to start recovery from tuples.
   * @throws IOException
   */
  @Test
  public void testWalRecovery() throws IOException
  {
    FileUtils.deleteDirectory(file);
    FileAccessFSImpl bfs = new MockFileAccess();
    bfs.setBasePath(file.getAbsolutePath());
    bfs.init();

    HDHTWriter hds = new HDHTWriter();
    hds.setFileStore(bfs);
    hds.setKeyComparator(new HDHTWriterTest.SequenceComparator());
    hds.setFlushSize(1);
    hds.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
    hds.writeExecutor = MoreExecutors.sameThreadExecutor();

    hds.beginWindow(1);
    hds.put(1, genRandomKey(500), genRandomByteArray(500));
    hds.put(1, genRandomKey(500), genRandomByteArray(500));
    hds.endWindow();
    hds.checkpointed(1);

    hds.beginWindow(2);
    hds.put(1, genRandomKey(500), genRandomByteArray(500));
    hds.put(1, genRandomKey(500), genRandomByteArray(500));
    hds.endWindow();
    hds.checkpointed(2);
    hds.committed(2);

    // Tuples added till this point is written to data files,
    // Tuples being added in this window, will not be written to data files
    // but will be saved in WAL. These should get recovered when bucket
    // is initialized for use next time.
    hds.beginWindow(3);
    hds.put(1, genRandomKey(500), genRandomByteArray(500));
    hds.put(1, genRandomKey(500), genRandomByteArray(500));
    hds.endWindow();
    hds.checkpointed(3);
    hds.forceWal();
    hds.teardown();

    /* Get a check-pointed state of the WAL */
    HDHTWriter newOperator = KryoCloneUtils.cloneObject(new Kryo(), hds);
    newOperator.setKeyComparator(new HDHTWriterTest.SequenceComparator());
    newOperator.setFlushIntervalCount(1);
    newOperator.setFlushSize(3);
    newOperator.writeExecutor = MoreExecutors.sameThreadExecutor();
    newOperator.setFileStore(bfs);
    newOperator.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));

    // This should run recovery, as first tuple is added in bucket
    newOperator.beginWindow(4);
    newOperator.put(1, genRandomKey(500), genRandomByteArray(500));
    // current tuple, being added is put into write cache.
    Assert.assertEquals("Number of tuples in write cache ", 1, newOperator.unflushedDataSize(1));
    // two tuples are put in to committed write cache.
    Assert.assertEquals("Number of tuples in committed cache ", 2, newOperator.committedDataSize(1));
    newOperator.put(1, genRandomKey(500), genRandomByteArray(500));
    newOperator.put(1, genRandomKey(500), genRandomByteArray(500));
    newOperator.put(1, genRandomKey(500), genRandomByteArray(500));
    newOperator.endWindow();
    newOperator.forceWal();

    File wal1 = new File(file.getAbsoluteFile().toString() + "/WAL/1/_WAL-1");
    Assert.assertEquals("New Wal-1 created ", true, wal1.exists());
  }

  /**
   * Test WAL cleanup functionality, WAL file is deleted, once data
   * from it is written to data files.
   * @throws IOException
   */
  @Test
  public void testOldWalCleanup() throws IOException
  {
    FileUtils.deleteDirectory(file);
    final long BUCKET1 = 1L;
    final int OPERATOR_ID = 1;
    FileAccessFSImpl bfs = new MockFileAccess();
    bfs.setBasePath(file.getAbsolutePath());
    bfs.init();

    HDHTWriter hds = new HDHTWriter();
    hds.setFileStore(bfs);
    hds.setKeyComparator(new HDHTWriterTest.SequenceComparator());
    // Flush at every window.
    hds.setFlushIntervalCount(2);
    hds.setFlushSize(1000);
    hds.setMaxWalFileSize(4000);
    hds.setup(new OperatorContextTestHelper.TestIdOperatorContext(OPERATOR_ID, new DefaultAttributeMap()));
    hds.writeExecutor = MoreExecutors.sameThreadExecutor();

    hds.beginWindow(1);
    hds.put(BUCKET1, genRandomKey(500), genRandomByteArray(500));
    hds.put(BUCKET1, genRandomKey(500), genRandomByteArray(500));
    hds.endWindow();

    hds.beginWindow(2);
    hds.put(BUCKET1, genRandomKey(500), genRandomByteArray(500));
    hds.put(BUCKET1, genRandomKey(500), genRandomByteArray(500));
    // log file will roll at this point because of limit on WAL file size,
    hds.endWindow();

    File wal0 = new File(file.getAbsoluteFile().toString() + "/WAL/1/_WAL-0");
    Assert.assertEquals("New Wal-0 created ", true, wal0.exists());

    hds.beginWindow(3);
    hds.put(BUCKET1, genRandomKey(500), genRandomByteArray(500));
    hds.put(BUCKET1, genRandomKey(500), genRandomByteArray(500));
    hds.endWindow();
    hds.checkpointed(3);
    hds.committed(3);
    // Data till this point is committed to disk, and old WAL file WAL-0
    // is deleted, as all data from that file is committed.
    hds.forceWal();

    wal0 = new File(file.getAbsoluteFile().toString() + "/WAL/1/_WAL-0");
    Assert.assertEquals("New Wal-0 deleted ", false, wal0.exists());
    File wal1 = new File(file.getAbsoluteFile().toString() + "/WAL/1/_WAL-1");
    Assert.assertEquals("New Wal-1 created ", true, wal1.exists());
  }

  /** Returns an 8-byte big-endian Slice encoding of {@code key}. */
  static Slice getLongByteArray(long key)
  {
    ByteBuffer bb = ByteBuffer.allocate(8);
    bb.putLong(key);
    return new Slice(bb.array());
  }

  /**
   * checkpointed(1) 1 -> 10
   * checkpointed(2) 1 -> 20
   * checkpointed(3) 1 -> 30
   * checkpointed(4) 1 -> 40
   * committed(2)
   * checkpointed(5)
   *
   * restore from 3rd checkpoint.
   * do a get and value should be 30.
   */
  @Test
  public void testWalRecoveryValues() throws IOException
  {
    FileUtils.deleteDirectory(file);
    FileAccessFSImpl bfs = new MockFileAccess();
    bfs.setBasePath(file.getAbsolutePath());
    bfs.init();
    ((MockFileAccess)bfs).disableChecksum();

    FileAccessFSImpl walfs = new MockFileAccess();
    walfs.setBasePath(file.getAbsolutePath());
    walfs.init();
    ((MockFileAccess)walfs).disableChecksum();

    HDHTWriter hds = new HDHTWriter();
    hds.setFileStore(bfs);
    hds.setWalStore(walfs);
    hds.setFlushSize(1);
    hds.setFlushIntervalCount(1);
    hds.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
    hds.writeExecutor = MoreExecutors.sameThreadExecutor();

    hds.beginWindow(1);
    hds.put(1, getLongByteArray(1), getLongByteArray(10).toByteArray());
    hds.endWindow();
    hds.checkpointed(1);

    hds.beginWindow(2);
    hds.put(1, getLongByteArray(1), getLongByteArray(20).toByteArray());
    hds.endWindow();
    hds.checkpointed(2);

    hds.beginWindow(3);
    hds.put(1, getLongByteArray(1), getLongByteArray(30).toByteArray());
    hds.endWindow();
    hds.checkpointed(3);

    // Commit window id 2
    hds.committed(2);

    // use checkpoint after window 3 for recovery.
    HDHTWriter newOperator = KryoCloneUtils.cloneObject(new Kryo(), hds);

    hds.beginWindow(4);
    hds.put(1, getLongByteArray(1), getLongByteArray(40).toByteArray());
    hds.put(1, getLongByteArray(2), getLongByteArray(200).toByteArray());
    hds.endWindow();
    hds.checkpointed(4);

    hds.beginWindow(5);
    hds.put(1, getLongByteArray(1), getLongByteArray(50).toByteArray());
    hds.put(1, getLongByteArray(2), getLongByteArray(210).toByteArray());
    hds.endWindow();
    hds.checkpointed(5);
    hds.forceWal();

    /* Simulate recovery after failure, checkpoint is restored to after
       processing of window 3.
     */
    newOperator.setFlushIntervalCount(1);
    newOperator.setFileStore(bfs);
    // Use the WAL file store here (was bfs); both share the same base path so
    // behavior is unchanged, but this matches the original operator's setup.
    newOperator.setWalStore(walfs);
    newOperator.setFlushSize(1);
    newOperator.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
    newOperator.writeExecutor = MoreExecutors.sameThreadExecutor();

    // This should run recovery, as first tuple is added in bucket
    newOperator.beginWindow(4);
    newOperator.put(1, getLongByteArray(1), getLongByteArray(40).toByteArray());
    newOperator.put(1, getLongByteArray(2), getLongByteArray(200).toByteArray());
    // current tuple, being added is put into write cache.
    Assert.assertEquals("Number of tuples in write cache ", 2, newOperator.unflushedDataSize(1));
    // one tuples are put in to committed write cache.
    Assert.assertEquals("Number of tuples in committed cache ", 1, newOperator.committedDataSize(1));
    newOperator.endWindow();
    newOperator.checkpointed(4);
    newOperator.forceWal();

    /* The latest value is recovered from WAL */
    ByteBuffer bb = ByteBuffer.wrap(newOperator.getUncommitted(1, getLongByteArray(1)));
    long l = bb.getLong();
    Assert.assertEquals("Value of 1 is recovered from WAL", 40, l);

    newOperator.committed(3);
    bb = ByteBuffer.wrap(newOperator.get(1, getLongByteArray(1)));
    l = bb.getLong();
    Assert.assertEquals("Value is persisted ", 30, l);

    newOperator.committed(4);
    bb = ByteBuffer.wrap(newOperator.get(1, getLongByteArray(1)));
    l = bb.getLong();
    Assert.assertEquals("Value is persisted ", 40, l);
  }

  /**
   * checkpointed(1) 1 -> 10
   * checkpointed(2) 1 -> 20
   * committed(2)
   * checkpointed(3) 1 -> 30
   * committed(3)
   * checkpointed(4)
   * committed(4)
   *
   * no null pointer exception should occur.
   */
  @Test
  public void testIssue4008() throws IOException
  {
    FileUtils.deleteDirectory(file);
    FileAccessFSImpl bfs = new MockFileAccess();
    bfs.setBasePath(file.getAbsolutePath());
    bfs.init();
    ((MockFileAccess)bfs).disableChecksum();

    HDHTWriter hds = new HDHTWriter();
    hds.setFileStore(bfs);
    hds.setFlushSize(2);
    hds.setFlushIntervalCount(1);
    hds.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
    hds.writeExecutor = MoreExecutors.sameThreadExecutor();

    hds.beginWindow(1);
    hds.put(1, getLongByteArray(1), getLongByteArray(10).toByteArray());
    hds.endWindow();
    hds.checkpointed(1);

    hds.beginWindow(2);
    hds.put(1, getLongByteArray(1), getLongByteArray(20).toByteArray());
    hds.endWindow();
    hds.checkpointed(2);
    hds.committed(2);

    hds.beginWindow(3);
    hds.put(1, getLongByteArray(1), getLongByteArray(30).toByteArray());
    hds.endWindow();
    hds.checkpointed(3);
    hds.committed(3);

    // Empty window: committing it must not throw.
    hds.beginWindow(4);
    hds.endWindow();
    hds.checkpointed(4);
    hds.committed(4);

    /* The latest value is recovered from WAL */
    ByteBuffer bb = ByteBuffer.wrap(hds.get(1, getLongByteArray(1)));
    long l = bb.getLong();
    Assert.assertEquals("Value of 1 is recovered from WAL", 30, l);
  }

  /** Verifies that several buckets share a single per-operator WAL file. */
  @Test
  public void testMultipleBucketsPerWal() throws IOException
  {
    FileUtils.deleteDirectory(file);
    FileAccessFSImpl bfs = new MockFileAccess();
    bfs.setBasePath(file.getAbsolutePath());
    bfs.init();
    ((MockFileAccess)bfs).disableChecksum();

    HDHTWriter hds = new HDHTWriter();
    hds.setFileStore(bfs);
    hds.setFlushSize(1);
    hds.setFlushIntervalCount(1);
    hds.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
    hds.writeExecutor = MoreExecutors.sameThreadExecutor();

    hds.beginWindow(1);
    hds.put(1, getLongByteArray(1), getLongByteArray(10).toByteArray());
    hds.put(2, getLongByteArray(2), getLongByteArray(100).toByteArray());
    hds.endWindow();
    hds.checkpointed(1);

    hds.beginWindow(2);
    hds.put(1, getLongByteArray(1), getLongByteArray(20).toByteArray());
    hds.put(2, getLongByteArray(2), getLongByteArray(200).toByteArray());
    hds.endWindow();
    hds.checkpointed(2);

    // Commit window id 3
    hds.committed(3);

    // Check Buckets 1 and 2 are created
    File meta1 = new File(file.getAbsoluteFile().toString() + "/1/_META");
    Assert.assertEquals("New _META created for bucket 1", true, meta1.exists());
    File meta2 = new File(file.getAbsoluteFile().toString() + "/2/_META");
    Assert.assertEquals("New _META created for bucket 2", true, meta2.exists());
    // Assert messages fixed: these check data files, not _META files.
    File file1 = new File(file.getAbsoluteFile().toString() + "/1/1-0");
    Assert.assertEquals("New data file created for bucket 1", true, file1.exists());
    File file2 = new File(file.getAbsoluteFile().toString() + "/2/2-0");
    Assert.assertEquals("New data file created for bucket 2", true, file2.exists());

    // Check WAL file is created under /WAL/
    File walFile = new File(file.getAbsoluteFile().toString() + "/WAL/1/_WAL-0");
    Assert.assertEquals("Single WAL file created for buckets 1 & 2", true, walFile.exists());
    File secondWalFile = new File(file.getAbsoluteFile().toString() + "/WAL/2/_WAL-0");
    Assert.assertEquals("No separate WAL file created for bucket 2", false, secondWalFile.exists());
  }

  /**
   * checkpointed(1) bucket 1, key 1 -> 10 bucket 2, key 1 -> 100
   * checkpointed(2) bucket 1, key 1 -> 20 bucket 2, key 1 -> 200
   * checkpointed(3) bucket 1, key 1 -> 30 bucket 2, key 1 -> 300 committed(2)
   * restore from 3rd checkpoint. do a get for bucket 1, key 1 and value should
   * be 20. do a get for bucket 2, key 1 and value should be 200.
   */
  @Test
  public void testMultipleBucketsPerWalRecovery() throws IOException
  {
    FileUtils.deleteDirectory(file);
    FileAccessFSImpl bfs = new MockFileAccess();
    bfs.setBasePath(file.getAbsolutePath());
    bfs.init();
    ((MockFileAccess)bfs).disableChecksum();

    FileAccessFSImpl walFs = new MockFileAccess();
    walFs.setBasePath(file.getAbsolutePath() + "/WAL/");
    walFs.init();
    ((MockFileAccess)walFs).disableChecksum();

    HDHTWriter hds = new HDHTWriter();
    hds.setFileStore(bfs);
    hds.setFlushSize(1);
    hds.setFlushIntervalCount(1);
    hds.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
    hds.writeExecutor = MoreExecutors.sameThreadExecutor();

    hds.beginWindow(1);
    hds.put(1, getLongByteArray(1), getLongByteArray(10).toByteArray());
    hds.put(2, getLongByteArray(1), getLongByteArray(100).toByteArray());
    hds.endWindow();
    hds.checkpointed(1);

    hds.beginWindow(2);
    hds.put(1, getLongByteArray(1), getLongByteArray(20).toByteArray());
    hds.put(2, getLongByteArray(1), getLongByteArray(200).toByteArray());
    hds.endWindow();
    hds.checkpointed(2);

    hds.beginWindow(3);
    hds.put(1, getLongByteArray(1), getLongByteArray(30).toByteArray());
    hds.put(2, getLongByteArray(1), getLongByteArray(300).toByteArray());
    hds.endWindow();
    hds.checkpointed(3);

    // Commit window id 2
    hds.committed(2);

    HDHTWriter newOperator = KryoCloneUtils.cloneObject(new Kryo(), hds);

    hds.beginWindow(4);
    hds.put(1, getLongByteArray(1), getLongByteArray(40).toByteArray());
    hds.put(1, getLongByteArray(2), getLongByteArray(200).toByteArray());
    hds.put(2, getLongByteArray(1), getLongByteArray(400).toByteArray());
    hds.endWindow();
    hds.checkpointed(4);

    hds.beginWindow(5);
    hds.put(1, getLongByteArray(1), getLongByteArray(50).toByteArray());
    hds.put(1, getLongByteArray(2), getLongByteArray(210).toByteArray());
    hds.put(2, getLongByteArray(1), getLongByteArray(500).toByteArray());
    hds.put(2, getLongByteArray(2), getLongByteArray(100).toByteArray());
    hds.endWindow();
    hds.checkpointed(5);
    hds.forceWal();

    /* Simulate recovery after failure, checkpoint is restored to after
       processing of window 3.
     */
    newOperator.setFlushIntervalCount(1);
    newOperator.setFileStore(bfs);
    newOperator.setWalStore(walFs);
    newOperator.setFlushSize(1);
    newOperator.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new DefaultAttributeMap()));
    newOperator.writeExecutor = MoreExecutors.sameThreadExecutor();

    // This should run recovery, as first tuple is added in bucket
    newOperator.beginWindow(4);
    newOperator.put(1, getLongByteArray(0), getLongByteArray(60).toByteArray());
    newOperator.endWindow();
    newOperator.checkpointed(4);

    newOperator.beginWindow(5);
    newOperator.put(1, getLongByteArray(2), getLongByteArray(700).toByteArray());
    newOperator.put(2, getLongByteArray(1), getLongByteArray(800).toByteArray());
    newOperator.put(2, getLongByteArray(2), getLongByteArray(1000).toByteArray());
    // current tuple, being added is put into write cache.
    Assert.assertEquals("Number of tuples in write cache ", 1, newOperator.unflushedDataSize(1));
    // one tuples are put in to committed write cache.
    Assert.assertEquals("Number of tuples in committed cache ", 1, newOperator.committedDataSize(1));
    newOperator.endWindow();
    newOperator.checkpointed(5);

    newOperator.beginWindow(6);
    newOperator.put(1, getLongByteArray(3), getLongByteArray(300).toByteArray());
    newOperator.put(1, getLongByteArray(4), getLongByteArray(100).toByteArray());
    newOperator.endWindow();
    Assert.assertEquals("Number of tuples in write cache ", 2, newOperator.unflushedDataSize(1));
    newOperator.checkpointed(6);

    /* The latest value is recovered from WAL */
    Assert.assertEquals("Value of key=1, bucket=1 is recovered from WAL", 30, getLong(newOperator.getUncommitted(1, getLongByteArray(1))));
    Assert.assertEquals("Value of key=1, bucket=1 is recovered from WAL", 60, getLong(newOperator.getUncommitted(1, getLongByteArray(0))));
    Assert.assertEquals("Value of key=2, bucket=1 is recovered from WAL", 700, getLong(newOperator.getUncommitted(1, getLongByteArray(2))));
    Assert.assertEquals("Value of key=1, bucket=2 is recovered from WAL", 800, getLong(newOperator.getUncommitted(2, getLongByteArray(1))));
    Assert.assertEquals("Value of key=2, bucket=2 is recovered from WAL", 1000, getLong(newOperator.getUncommitted(2, getLongByteArray(2))));

    /* Committed value check */
    Assert.assertEquals("Bucket 1, Key 1 value should be 20", 20, getLong(newOperator.get(1, getLongByteArray(1))));
    Assert.assertNull("Key 2 should not be present in bucket 1", newOperator.get(1, getLongByteArray(2)));
    Assert.assertEquals("Bucket 2, Key 1 value should be 200", 200, getLong(newOperator.get(2, getLongByteArray(1))));
    Assert.assertNull("Key 2 should not be present in bucket 2", newOperator.get(2, getLongByteArray(2)));

    newOperator.committed(3);
    Assert.assertEquals("Value is persisted ", 30, getLong(newOperator.get(1, getLongByteArray(1))));
    Assert.assertEquals("Value is persisted ", 300, getLong(newOperator.get(2, getLongByteArray(1))));

    newOperator.committed(4);
    Assert.assertEquals("Value is persisted ", 60, getLong(newOperator.get(1, getLongByteArray(0))));

    newOperator.committed(5);
    Assert.assertEquals("Value is persisted ", 700, getLong(newOperator.get(1, getLongByteArray(2))));
    Assert.assertEquals("Value is persisted ", 800, getLong(newOperator.get(2, getLongByteArray(1))));
    Assert.assertEquals("Value is persisted ", 1000, getLong(newOperator.get(2, getLongByteArray(2))));

    newOperator.checkpointed(6);
    newOperator.forceWal();
    newOperator.committed(6);
    Assert.assertEquals("Value is persisted ", 300, getLong(newOperator.get(1, getLongByteArray(3))));
    Assert.assertEquals("Value is persisted ", 100, getLong(newOperator.get(1, getLongByteArray(4))));
  }

  /** Decodes the first 8 bytes of {@code value} as a big-endian long. */
  public long getLong(byte[] value) throws IOException
  {
    ByteBuffer bb = ByteBuffer.wrap(value);
    return bb.getLong();
  }

  private static final Logger logger = LoggerFactory.getLogger(WALTest.class);
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
import org.apache.hadoop.hbase.metrics.Interns;
import org.apache.hadoop.metrics2.MetricHistogram;
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.MutableFastCounter;
import org.apache.yetus.audience.InterfaceAudience;
/**
* Hadoop2 implementation of MetricsRegionServerSource.
*
* Implements BaseSource through BaseSourceImpl, following the pattern
*/
@InterfaceAudience.Private
public class MetricsRegionServerSourceImpl
extends BaseSourceImpl implements MetricsRegionServerSource {
// Wrapper supplying point-in-time region-server stats to getMetrics().
final MetricsRegionServerWrapper rsWrap;
// per-operation latency histograms
private final MetricHistogram putHisto;
private final MetricHistogram putBatchHisto;
private final MetricHistogram deleteHisto;
private final MetricHistogram deleteBatchHisto;
private final MetricHistogram checkAndDeleteHisto;
private final MetricHistogram checkAndPutHisto;
private final MetricHistogram checkAndMutateHisto;
private final MetricHistogram getHisto;
private final MetricHistogram incrementHisto;
private final MetricHistogram appendHisto;
private final MetricHistogram replayHisto;
private final MetricHistogram scanSizeHisto;
private final MetricHistogram scanTimeHisto;
// counters of operations that exceeded the "slow" threshold
private final MutableFastCounter slowPut;
private final MutableFastCounter slowDelete;
private final MutableFastCounter slowGet;
private final MutableFastCounter slowIncrement;
private final MutableFastCounter slowAppend;
// split related metrics
private final MutableFastCounter splitRequest;
private final MutableFastCounter splitSuccess;
private final MetricHistogram splitTimeHisto;
// flush related metrics
private final MetricHistogram flushTimeHisto;
private final MetricHistogram flushMemstoreSizeHisto;
private final MetricHistogram flushOutputSizeHisto;
private final MutableFastCounter flushedMemstoreBytes;
private final MutableFastCounter flushedOutputBytes;
// compaction related metrics
private final MetricHistogram compactionTimeHisto;
private final MetricHistogram compactionInputFileCountHisto;
private final MetricHistogram compactionInputSizeHisto;
private final MetricHistogram compactionOutputFileCountHisto;
private final MetricHistogram compactionOutputSizeHisto;
private final MutableFastCounter compactedInputBytes;
private final MutableFastCounter compactedOutputBytes;
private final MetricHistogram majorCompactionTimeHisto;
private final MetricHistogram majorCompactionInputFileCountHisto;
private final MetricHistogram majorCompactionInputSizeHisto;
private final MetricHistogram majorCompactionOutputFileCountHisto;
private final MetricHistogram majorCompactionOutputSizeHisto;
private final MutableFastCounter majorCompactedInputBytes;
private final MutableFastCounter majorCompactedOutputBytes;
// pause monitor metrics
private final MutableFastCounter infoPauseThresholdExceeded;
private final MutableFastCounter warnPauseThresholdExceeded;
private final MetricHistogram pausesWithGc;
private final MetricHistogram pausesWithoutGc;
private final MutableFastCounter scannerLeaseExpiredCount;
/**
 * Creates the source under the default metrics name, description and
 * JMX context (the METRICS_* constants from MetricsRegionServerSource).
 */
public MetricsRegionServerSourceImpl(MetricsRegionServerWrapper rsWrap) {
this(METRICS_NAME, METRICS_DESCRIPTION, METRICS_CONTEXT, METRICS_JMX_CONTEXT, rsWrap);
}
/**
 * Creates the source and registers every histogram/counter with the
 * metrics registry owned by BaseSourceImpl.
 *
 * @param metricsName        name this source is registered under
 * @param metricsDescription human-readable description of the source
 * @param metricsContext     metrics2 context
 * @param metricsJmxContext  JMX context string
 * @param rsWrap             provider of region-server statistics
 */
public MetricsRegionServerSourceImpl(String metricsName,
String metricsDescription,
String metricsContext,
String metricsJmxContext,
MetricsRegionServerWrapper rsWrap) {
super(metricsName, metricsDescription, metricsContext, metricsJmxContext);
this.rsWrap = rsWrap;
// per-operation latency histograms and slow-operation counters
putHisto = getMetricsRegistry().newTimeHistogram(PUT_KEY);
putBatchHisto = getMetricsRegistry().newTimeHistogram(PUT_BATCH_KEY);
slowPut = getMetricsRegistry().newCounter(SLOW_PUT_KEY, SLOW_PUT_DESC, 0L);
deleteHisto = getMetricsRegistry().newTimeHistogram(DELETE_KEY);
slowDelete = getMetricsRegistry().newCounter(SLOW_DELETE_KEY, SLOW_DELETE_DESC, 0L);
deleteBatchHisto = getMetricsRegistry().newTimeHistogram(DELETE_BATCH_KEY);
checkAndDeleteHisto = getMetricsRegistry().newTimeHistogram(CHECK_AND_DELETE_KEY);
checkAndPutHisto = getMetricsRegistry().newTimeHistogram(CHECK_AND_PUT_KEY);
checkAndMutateHisto = getMetricsRegistry().newTimeHistogram(CHECK_AND_MUTATE_KEY);
getHisto = getMetricsRegistry().newTimeHistogram(GET_KEY);
slowGet = getMetricsRegistry().newCounter(SLOW_GET_KEY, SLOW_GET_DESC, 0L);
incrementHisto = getMetricsRegistry().newTimeHistogram(INCREMENT_KEY);
slowIncrement = getMetricsRegistry().newCounter(SLOW_INCREMENT_KEY, SLOW_INCREMENT_DESC, 0L);
appendHisto = getMetricsRegistry().newTimeHistogram(APPEND_KEY);
slowAppend = getMetricsRegistry().newCounter(SLOW_APPEND_KEY, SLOW_APPEND_DESC, 0L);
replayHisto = getMetricsRegistry().newTimeHistogram(REPLAY_KEY);
scanSizeHisto = getMetricsRegistry().newSizeHistogram(SCAN_SIZE_KEY);
scanTimeHisto = getMetricsRegistry().newTimeHistogram(SCAN_TIME_KEY);
// flush metrics
flushTimeHisto = getMetricsRegistry().newTimeHistogram(FLUSH_TIME, FLUSH_TIME_DESC);
flushMemstoreSizeHisto = getMetricsRegistry()
.newSizeHistogram(FLUSH_MEMSTORE_SIZE, FLUSH_MEMSTORE_SIZE_DESC);
flushOutputSizeHisto = getMetricsRegistry().newSizeHistogram(FLUSH_OUTPUT_SIZE,
FLUSH_OUTPUT_SIZE_DESC);
flushedOutputBytes = getMetricsRegistry().newCounter(FLUSHED_OUTPUT_BYTES,
FLUSHED_OUTPUT_BYTES_DESC, 0L);
flushedMemstoreBytes = getMetricsRegistry().newCounter(FLUSHED_MEMSTORE_BYTES,
FLUSHED_MEMSTORE_BYTES_DESC, 0L);
// compaction metrics (all compactions, then major-only variants)
compactionTimeHisto = getMetricsRegistry()
.newTimeHistogram(COMPACTION_TIME, COMPACTION_TIME_DESC);
compactionInputFileCountHisto = getMetricsRegistry()
.newHistogram(COMPACTION_INPUT_FILE_COUNT, COMPACTION_INPUT_FILE_COUNT_DESC);
compactionInputSizeHisto = getMetricsRegistry()
.newSizeHistogram(COMPACTION_INPUT_SIZE, COMPACTION_INPUT_SIZE_DESC);
compactionOutputFileCountHisto = getMetricsRegistry()
.newHistogram(COMPACTION_OUTPUT_FILE_COUNT, COMPACTION_OUTPUT_FILE_COUNT_DESC);
compactionOutputSizeHisto = getMetricsRegistry()
.newSizeHistogram(COMPACTION_OUTPUT_SIZE, COMPACTION_OUTPUT_SIZE_DESC);
compactedInputBytes = getMetricsRegistry()
.newCounter(COMPACTED_INPUT_BYTES, COMPACTED_INPUT_BYTES_DESC, 0L);
compactedOutputBytes = getMetricsRegistry()
.newCounter(COMPACTED_OUTPUT_BYTES, COMPACTED_OUTPUT_BYTES_DESC, 0L);
majorCompactionTimeHisto = getMetricsRegistry()
.newTimeHistogram(MAJOR_COMPACTION_TIME, MAJOR_COMPACTION_TIME_DESC);
majorCompactionInputFileCountHisto = getMetricsRegistry()
.newHistogram(MAJOR_COMPACTION_INPUT_FILE_COUNT, MAJOR_COMPACTION_INPUT_FILE_COUNT_DESC);
majorCompactionInputSizeHisto = getMetricsRegistry()
.newSizeHistogram(MAJOR_COMPACTION_INPUT_SIZE, MAJOR_COMPACTION_INPUT_SIZE_DESC);
majorCompactionOutputFileCountHisto = getMetricsRegistry()
.newHistogram(MAJOR_COMPACTION_OUTPUT_FILE_COUNT, MAJOR_COMPACTION_OUTPUT_FILE_COUNT_DESC);
majorCompactionOutputSizeHisto = getMetricsRegistry()
.newSizeHistogram(MAJOR_COMPACTION_OUTPUT_SIZE, MAJOR_COMPACTION_OUTPUT_SIZE_DESC);
majorCompactedInputBytes = getMetricsRegistry()
.newCounter(MAJOR_COMPACTED_INPUT_BYTES, MAJOR_COMPACTED_INPUT_BYTES_DESC, 0L);
majorCompactedOutputBytes = getMetricsRegistry()
.newCounter(MAJOR_COMPACTED_OUTPUT_BYTES, MAJOR_COMPACTED_OUTPUT_BYTES_DESC, 0L);
// split metrics
splitTimeHisto = getMetricsRegistry().newTimeHistogram(SPLIT_KEY);
splitRequest = getMetricsRegistry().newCounter(SPLIT_REQUEST_KEY, SPLIT_REQUEST_DESC, 0L);
splitSuccess = getMetricsRegistry().newCounter(SPLIT_SUCCESS_KEY, SPLIT_SUCCESS_DESC, 0L);
// pause monitor metrics
infoPauseThresholdExceeded = getMetricsRegistry().newCounter(INFO_THRESHOLD_COUNT_KEY,
INFO_THRESHOLD_COUNT_DESC, 0L);
warnPauseThresholdExceeded = getMetricsRegistry().newCounter(WARN_THRESHOLD_COUNT_KEY,
WARN_THRESHOLD_COUNT_DESC, 0L);
pausesWithGc = getMetricsRegistry().newTimeHistogram(PAUSE_TIME_WITH_GC_KEY);
pausesWithoutGc = getMetricsRegistry().newTimeHistogram(PAUSE_TIME_WITHOUT_GC_KEY);
scannerLeaseExpiredCount = getMetricsRegistry().newCounter(SCANNER_LEASE_EXPIRED_COUNT, SCANNER_LEASE_EXPIRED_COUNT_DESC, 0L);
}
/** Adds the given put timing sample to the put histogram. */
@Override
public void updatePut(long t) {
  putHisto.add(t);
}
/** Adds the given delete timing sample to the delete histogram. */
@Override
public void updateDelete(long t) {
  deleteHisto.add(t);
}
/** Adds the given get timing sample to the get histogram. */
@Override
public void updateGet(long t) {
  getHisto.add(t);
}
/** Adds the given increment timing sample to the increment histogram. */
@Override
public void updateIncrement(long t) {
  incrementHisto.add(t);
}
/** Adds the given append timing sample to the append histogram. */
@Override
public void updateAppend(long t) {
  appendHisto.add(t);
}
/** Adds the given replay timing sample to the replay histogram. */
@Override
public void updateReplay(long t) {
  replayHisto.add(t);
}
/** Adds the given scan result size sample to the scan-size histogram. */
@Override
public void updateScanSize(long scanSize) {
  scanSizeHisto.add(scanSize);
}
/** Adds the given scan timing sample to the scan-time histogram. */
@Override
public void updateScanTime(long t) {
  scanTimeHisto.add(t);
}
/** Increments the counter of puts that were flagged as slow. */
@Override
public void incrSlowPut() {
  slowPut.incr();
}
/** Increments the counter of deletes that were flagged as slow. */
@Override
public void incrSlowDelete() {
  slowDelete.incr();
}
/** Increments the counter of gets that were flagged as slow. */
@Override
public void incrSlowGet() {
  slowGet.incr();
}
/** Increments the counter of increments that were flagged as slow. */
@Override
public void incrSlowIncrement() {
  slowIncrement.incr();
}
/** Increments the counter of appends that were flagged as slow. */
@Override
public void incrSlowAppend() {
  slowAppend.incr();
}
/** Increments the counter of region split requests. */
@Override
public void incrSplitRequest() {
  splitRequest.incr();
}
/** Increments the counter of successful region splits. */
@Override
public void incrSplitSuccess() {
  splitSuccess.incr();
}
/** Adds the given split timing sample to the split-time histogram. */
@Override
public void updateSplitTime(long t) {
  splitTimeHisto.add(t);
}
/** Adds the given flush timing sample to the flush-time histogram. */
@Override
public void updateFlushTime(long t) {
  flushTimeHisto.add(t);
}
/**
 * Records the memstore size of a flush: adds {@code bytes} to the flush
 * memstore-size histogram and to the cumulative flushed-memstore-bytes counter.
 */
@Override
public void updateFlushMemStoreSize(long bytes) {
  flushMemstoreSizeHisto.add(bytes);
  flushedMemstoreBytes.incr(bytes);
}
/**
 * Records the output size of a flush: adds {@code bytes} to the flush
 * output-size histogram and to the cumulative flushed-output-bytes counter.
 */
@Override
public void updateFlushOutputSize(long bytes) {
  flushOutputSizeHisto.add(bytes);
  flushedOutputBytes.incr(bytes);
}
/**
 * Records a compaction timing sample. The value always goes into the overall
 * compaction-time histogram; for major compactions it is additionally recorded
 * in the major-compaction-time histogram.
 */
@Override
public void updateCompactionTime(boolean isMajor, long t) {
  compactionTimeHisto.add(t);
  if (isMajor) {
    majorCompactionTimeHisto.add(t);
  }
}
/**
 * Records the number of input files of a compaction in the overall histogram,
 * and additionally in the major-compaction histogram when {@code isMajor}.
 */
@Override
public void updateCompactionInputFileCount(boolean isMajor, long c) {
  compactionInputFileCountHisto.add(c);
  if (isMajor) {
    majorCompactionInputFileCountHisto.add(c);
  }
}
/**
 * Records the total input size of a compaction: histogram sample plus
 * cumulative byte counter, with the major-compaction variants also updated
 * when {@code isMajor}.
 */
@Override
public void updateCompactionInputSize(boolean isMajor, long bytes) {
  compactionInputSizeHisto.add(bytes);
  compactedInputBytes.incr(bytes);
  if (isMajor) {
    majorCompactionInputSizeHisto.add(bytes);
    majorCompactedInputBytes.incr(bytes);
  }
}
/**
 * Records the number of output files of a compaction in the overall histogram,
 * and additionally in the major-compaction histogram when {@code isMajor}.
 */
@Override
public void updateCompactionOutputFileCount(boolean isMajor, long c) {
  compactionOutputFileCountHisto.add(c);
  if (isMajor) {
    majorCompactionOutputFileCountHisto.add(c);
  }
}
/**
 * Records the total output size of a compaction: histogram sample plus
 * cumulative byte counter, with the major-compaction variants also updated
 * when {@code isMajor}.
 */
@Override
public void updateCompactionOutputSize(boolean isMajor, long bytes) {
  compactionOutputSizeHisto.add(bytes);
  compactedOutputBytes.incr(bytes);
  if (isMajor) {
    majorCompactionOutputSizeHisto.add(bytes);
    majorCompactedOutputBytes.incr(bytes);
  }
}
/** Increments the counter of scanner leases that expired. */
@Override
public void incrScannerLeaseExpired() {
  scannerLeaseExpiredCount.incr();
}
/**
 * Yes this is a get function that doesn't return anything. Thanks Hadoop for breaking all
 * expectations of java programmers. Instead of returning anything Hadoop metrics expects
 * getMetrics to push the metrics into the collector.
 *
 * @param metricsCollector Collector to accept metrics
 * @param all push all or only changed?
 */
@Override
public void getMetrics(MetricsCollector metricsCollector, boolean all) {
  MetricsRecordBuilder mrb = metricsCollector.addRecord(metricsName);
  // rsWrap can be null because this function is called inside of init.
  if (rsWrap != null) {
    // Gauges first (delegated to the helper), then every server-wide counter
    // read from the wrapper, then the identifying tags. Block-cache per-type
    // counters deliberately use an empty description string.
    addGaugesToMetricsRecordBuilder(mrb)
        .addCounter(Interns.info(TOTAL_REQUEST_COUNT, TOTAL_REQUEST_COUNT_DESC),
            rsWrap.getTotalRequestCount())
        .addCounter(Interns.info(TOTAL_ROW_ACTION_REQUEST_COUNT,
            TOTAL_ROW_ACTION_REQUEST_COUNT_DESC), rsWrap.getTotalRowActionRequestCount())
        .addCounter(Interns.info(READ_REQUEST_COUNT, READ_REQUEST_COUNT_DESC),
            rsWrap.getReadRequestsCount())
        .addCounter(Interns.info(FILTERED_READ_REQUEST_COUNT,
            FILTERED_READ_REQUEST_COUNT_DESC), rsWrap.getFilteredReadRequestsCount())
        .addCounter(Interns.info(WRITE_REQUEST_COUNT, WRITE_REQUEST_COUNT_DESC),
            rsWrap.getWriteRequestsCount())
        .addCounter(Interns.info(RPC_GET_REQUEST_COUNT, RPC_GET_REQUEST_COUNT_DESC),
            rsWrap.getRpcGetRequestsCount())
        .addCounter(Interns.info(RPC_FULL_SCAN_REQUEST_COUNT, RPC_FULL_SCAN_REQUEST_COUNT_DESC),
            rsWrap.getRpcFullScanRequestsCount())
        .addCounter(Interns.info(RPC_SCAN_REQUEST_COUNT, RPC_SCAN_REQUEST_COUNT_DESC),
            rsWrap.getRpcScanRequestsCount())
        .addCounter(Interns.info(RPC_MULTI_REQUEST_COUNT, RPC_MULTI_REQUEST_COUNT_DESC),
            rsWrap.getRpcMultiRequestsCount())
        .addCounter(Interns.info(RPC_MUTATE_REQUEST_COUNT, RPC_MUTATE_REQUEST_COUNT_DESC),
            rsWrap.getRpcMutateRequestsCount())
        .addCounter(Interns.info(CHECK_MUTATE_FAILED_COUNT, CHECK_MUTATE_FAILED_COUNT_DESC),
            rsWrap.getCheckAndMutateChecksFailed())
        .addCounter(Interns.info(CHECK_MUTATE_PASSED_COUNT, CHECK_MUTATE_PASSED_COUNT_DESC),
            rsWrap.getCheckAndMutateChecksPassed())
        .addCounter(Interns.info(BLOCK_CACHE_HIT_COUNT, BLOCK_CACHE_HIT_COUNT_DESC),
            rsWrap.getBlockCacheHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_PRIMARY_HIT_COUNT,
            BLOCK_CACHE_PRIMARY_HIT_COUNT_DESC), rsWrap.getBlockCachePrimaryHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_MISS_COUNT, BLOCK_COUNT_MISS_COUNT_DESC),
            rsWrap.getBlockCacheMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_PRIMARY_MISS_COUNT,
            BLOCK_COUNT_PRIMARY_MISS_COUNT_DESC), rsWrap.getBlockCachePrimaryMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_EVICTION_COUNT, BLOCK_CACHE_EVICTION_COUNT_DESC),
            rsWrap.getBlockCacheEvictedCount())
        .addCounter(Interns.info(BLOCK_CACHE_PRIMARY_EVICTION_COUNT,
            BLOCK_CACHE_PRIMARY_EVICTION_COUNT_DESC),
            rsWrap.getBlockCachePrimaryEvictedCount())
        .addCounter(Interns.info(BLOCK_CACHE_FAILED_INSERTION_COUNT,
            BLOCK_CACHE_FAILED_INSERTION_COUNT_DESC),
            rsWrap.getBlockCacheFailedInsertions())
        // Per-block-type cache miss counters (no description strings upstream).
        .addCounter(Interns.info(BLOCK_CACHE_DATA_MISS_COUNT, ""),
            rsWrap.getDataMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_LEAF_INDEX_MISS_COUNT, ""),
            rsWrap.getLeafIndexMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_BLOOM_CHUNK_MISS_COUNT, ""),
            rsWrap.getBloomChunkMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_META_MISS_COUNT, ""),
            rsWrap.getMetaMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_ROOT_INDEX_MISS_COUNT, ""),
            rsWrap.getRootIndexMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_INTERMEDIATE_INDEX_MISS_COUNT, ""),
            rsWrap.getIntermediateIndexMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_FILE_INFO_MISS_COUNT, ""),
            rsWrap.getFileInfoMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_GENERAL_BLOOM_META_MISS_COUNT, ""),
            rsWrap.getGeneralBloomMetaMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_DELETE_FAMILY_BLOOM_MISS_COUNT, ""),
            rsWrap.getDeleteFamilyBloomMissCount())
        .addCounter(Interns.info(BLOCK_CACHE_TRAILER_MISS_COUNT, ""),
            rsWrap.getTrailerMissCount())
        // Per-block-type cache hit counters, mirroring the miss counters above.
        .addCounter(Interns.info(BLOCK_CACHE_DATA_HIT_COUNT, ""),
            rsWrap.getDataHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_LEAF_INDEX_HIT_COUNT, ""),
            rsWrap.getLeafIndexHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_BLOOM_CHUNK_HIT_COUNT, ""),
            rsWrap.getBloomChunkHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_META_HIT_COUNT, ""),
            rsWrap.getMetaHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_ROOT_INDEX_HIT_COUNT, ""),
            rsWrap.getRootIndexHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_INTERMEDIATE_INDEX_HIT_COUNT, ""),
            rsWrap.getIntermediateIndexHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_FILE_INFO_HIT_COUNT, ""),
            rsWrap.getFileInfoHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_GENERAL_BLOOM_META_HIT_COUNT, ""),
            rsWrap.getGeneralBloomMetaHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_DELETE_FAMILY_BLOOM_HIT_COUNT, ""),
            rsWrap.getDeleteFamilyBloomHitCount())
        .addCounter(Interns.info(BLOCK_CACHE_TRAILER_HIT_COUNT, ""),
            rsWrap.getTrailerHitCount())
        .addCounter(Interns.info(UPDATES_BLOCKED_TIME, UPDATES_BLOCKED_DESC),
            rsWrap.getUpdatesBlockedTime())
        .addCounter(Interns.info(FLUSHED_CELLS, FLUSHED_CELLS_DESC),
            rsWrap.getFlushedCellsCount())
        .addCounter(Interns.info(COMPACTED_CELLS, COMPACTED_CELLS_DESC),
            rsWrap.getCompactedCellsCount())
        .addCounter(Interns.info(MAJOR_COMPACTED_CELLS, MAJOR_COMPACTED_CELLS_DESC),
            rsWrap.getMajorCompactedCellsCount())
        .addCounter(Interns.info(FLUSHED_CELLS_SIZE, FLUSHED_CELLS_SIZE_DESC),
            rsWrap.getFlushedCellsSize())
        .addCounter(Interns.info(COMPACTED_CELLS_SIZE, COMPACTED_CELLS_SIZE_DESC),
            rsWrap.getCompactedCellsSize())
        .addCounter(Interns.info(MAJOR_COMPACTED_CELLS_SIZE, MAJOR_COMPACTED_CELLS_SIZE_DESC),
            rsWrap.getMajorCompactedCellsSize())
        // MOB (medium object) related counters.
        .addCounter(Interns.info(CELLS_COUNT_COMPACTED_FROM_MOB,
            CELLS_COUNT_COMPACTED_FROM_MOB_DESC), rsWrap.getCellsCountCompactedFromMob())
        .addCounter(Interns.info(CELLS_COUNT_COMPACTED_TO_MOB,
            CELLS_COUNT_COMPACTED_TO_MOB_DESC), rsWrap.getCellsCountCompactedToMob())
        .addCounter(Interns.info(CELLS_SIZE_COMPACTED_FROM_MOB,
            CELLS_SIZE_COMPACTED_FROM_MOB_DESC), rsWrap.getCellsSizeCompactedFromMob())
        .addCounter(Interns.info(CELLS_SIZE_COMPACTED_TO_MOB,
            CELLS_SIZE_COMPACTED_TO_MOB_DESC), rsWrap.getCellsSizeCompactedToMob())
        .addCounter(Interns.info(MOB_FLUSH_COUNT, MOB_FLUSH_COUNT_DESC),
            rsWrap.getMobFlushCount())
        .addCounter(Interns.info(MOB_FLUSHED_CELLS_COUNT, MOB_FLUSHED_CELLS_COUNT_DESC),
            rsWrap.getMobFlushedCellsCount())
        .addCounter(Interns.info(MOB_FLUSHED_CELLS_SIZE, MOB_FLUSHED_CELLS_SIZE_DESC),
            rsWrap.getMobFlushedCellsSize())
        .addCounter(Interns.info(MOB_SCAN_CELLS_COUNT, MOB_SCAN_CELLS_COUNT_DESC),
            rsWrap.getMobScanCellsCount())
        .addCounter(Interns.info(MOB_SCAN_CELLS_SIZE, MOB_SCAN_CELLS_SIZE_DESC),
            rsWrap.getMobScanCellsSize())
        .addCounter(Interns.info(MOB_FILE_CACHE_ACCESS_COUNT,
            MOB_FILE_CACHE_ACCESS_COUNT_DESC), rsWrap.getMobFileCacheAccessCount())
        .addCounter(Interns.info(MOB_FILE_CACHE_MISS_COUNT, MOB_FILE_CACHE_MISS_COUNT_DESC),
            rsWrap.getMobFileCacheMissCount())
        .addCounter(Interns.info(MOB_FILE_CACHE_EVICTED_COUNT,
            MOB_FILE_CACHE_EVICTED_COUNT_DESC), rsWrap.getMobFileCacheEvictedCount())
        .addCounter(Interns.info(HEDGED_READS, HEDGED_READS_DESC), rsWrap.getHedgedReadOps())
        .addCounter(Interns.info(HEDGED_READ_WINS, HEDGED_READ_WINS_DESC),
            rsWrap.getHedgedReadWins())
        .addCounter(Interns.info(HEDGED_READ_IN_CUR_THREAD, HEDGED_READ_IN_CUR_THREAD_DESC),
            rsWrap.getHedgedReadOpsInCurThread())
        .addCounter(Interns.info(BLOCKED_REQUESTS_COUNT, BLOCKED_REQUESTS_COUNT_DESC),
            rsWrap.getBlockedRequestsCount())
        // Identifying tags for this region server.
        .tag(Interns.info(ZOOKEEPER_QUORUM_NAME, ZOOKEEPER_QUORUM_DESC),
            rsWrap.getZookeeperQuorum())
        .tag(Interns.info(SERVER_NAME_NAME, SERVER_NAME_DESC), rsWrap.getServerName())
        .tag(Interns.info(CLUSTER_ID_NAME, CLUSTER_ID_DESC), rsWrap.getClusterId());
  }
  metricsRegistry.snapshot(mrb, all);
  // source is registered in super's constructor, sometimes called before the whole
  // initialization, so metricsAdapter may still be null here.
  if (metricsAdapter != null) {
    // snapshot MetricRegistry as well
    metricsAdapter.snapshotAllMetrics(registry, mrb);
  }
}
/**
 * Adds all point-in-time (gauge) server metrics read from {@code rsWrap} to the
 * given record builder, returning the builder so the caller can keep chaining.
 * Callers must ensure {@code rsWrap} is non-null before invoking this.
 *
 * @param mrb builder for the record being assembled
 * @return the same builder, for chaining
 */
private MetricsRecordBuilder addGaugesToMetricsRecordBuilder(MetricsRecordBuilder mrb) {
  return mrb.addGauge(Interns.info(REGION_COUNT, REGION_COUNT_DESC), rsWrap.getNumOnlineRegions())
      .addGauge(Interns.info(STORE_COUNT, STORE_COUNT_DESC), rsWrap.getNumStores())
      .addGauge(Interns.info(WALFILE_COUNT, WALFILE_COUNT_DESC), rsWrap.getNumWALFiles())
      .addGauge(Interns.info(WALFILE_SIZE, WALFILE_SIZE_DESC), rsWrap.getWALFileSize())
      .addGauge(Interns.info(STOREFILE_COUNT, STOREFILE_COUNT_DESC),
          rsWrap.getNumStoreFiles())
      .addGauge(Interns.info(MEMSTORE_SIZE, MEMSTORE_SIZE_DESC), rsWrap.getMemStoreSize())
      .addGauge(Interns.info(STOREFILE_SIZE, STOREFILE_SIZE_DESC), rsWrap.getStoreFileSize())
      .addGauge(Interns.info(MAX_STORE_FILE_AGE, MAX_STORE_FILE_AGE_DESC),
          rsWrap.getMaxStoreFileAge())
      .addGauge(Interns.info(MIN_STORE_FILE_AGE, MIN_STORE_FILE_AGE_DESC),
          rsWrap.getMinStoreFileAge())
      .addGauge(Interns.info(AVG_STORE_FILE_AGE, AVG_STORE_FILE_AGE_DESC),
          rsWrap.getAvgStoreFileAge())
      .addGauge(Interns.info(NUM_REFERENCE_FILES, NUM_REFERENCE_FILES_DESC),
          rsWrap.getNumReferenceFiles())
      .addGauge(Interns.info(RS_START_TIME_NAME, RS_START_TIME_DESC), rsWrap.getStartCode())
      .addGauge(Interns.info(AVERAGE_REGION_SIZE, AVERAGE_REGION_SIZE_DESC),
          rsWrap.getAverageRegionSize())
      .addGauge(Interns.info(STOREFILE_INDEX_SIZE, STOREFILE_INDEX_SIZE_DESC),
          rsWrap.getStoreFileIndexSize())
      .addGauge(Interns.info(STATIC_INDEX_SIZE, STATIC_INDEX_SIZE_DESC),
          rsWrap.getTotalStaticIndexSize())
      .addGauge(Interns.info(STATIC_BLOOM_SIZE, STATIC_BLOOM_SIZE_DESC),
          rsWrap.getTotalStaticBloomSize())
      .addGauge(Interns.info(NUMBER_OF_MUTATIONS_WITHOUT_WAL,
          NUMBER_OF_MUTATIONS_WITHOUT_WAL_DESC), rsWrap.getNumMutationsWithoutWAL())
      .addGauge(Interns.info(DATA_SIZE_WITHOUT_WAL, DATA_SIZE_WITHOUT_WAL_DESC),
          rsWrap.getDataInMemoryWithoutWAL())
      // HDFS locality related gauges.
      .addGauge(Interns.info(PERCENT_FILES_LOCAL, PERCENT_FILES_LOCAL_DESC),
          rsWrap.getPercentFileLocal())
      .addGauge(Interns.info(PERCENT_FILES_LOCAL_SECONDARY_REGIONS,
          PERCENT_FILES_LOCAL_SECONDARY_REGIONS_DESC),
          rsWrap.getPercentFileLocalSecondaryRegions())
      .addGauge(Interns.info(TOTAL_BYTES_READ,
          TOTAL_BYTES_READ_DESC),
          rsWrap.getTotalBytesRead())
      .addGauge(Interns.info(LOCAL_BYTES_READ,
          LOCAL_BYTES_READ_DESC),
          rsWrap.getLocalBytesRead())
      .addGauge(Interns.info(SHORTCIRCUIT_BYTES_READ,
          SHORTCIRCUIT_BYTES_READ_DESC),
          rsWrap.getShortCircuitBytesRead())
      .addGauge(Interns.info(ZEROCOPY_BYTES_READ,
          ZEROCOPY_BYTES_READ_DESC),
          rsWrap.getZeroCopyBytesRead())
      // Background work queue depths.
      .addGauge(Interns.info(SPLIT_QUEUE_LENGTH, SPLIT_QUEUE_LENGTH_DESC),
          rsWrap.getSplitQueueSize())
      .addGauge(Interns.info(COMPACTION_QUEUE_LENGTH, COMPACTION_QUEUE_LENGTH_DESC),
          rsWrap.getCompactionQueueSize())
      .addGauge(Interns.info(SMALL_COMPACTION_QUEUE_LENGTH,
          SMALL_COMPACTION_QUEUE_LENGTH_DESC), rsWrap.getSmallCompactionQueueSize())
      .addGauge(Interns.info(LARGE_COMPACTION_QUEUE_LENGTH,
          LARGE_COMPACTION_QUEUE_LENGTH_DESC), rsWrap.getLargeCompactionQueueSize())
      .addGauge(Interns.info(FLUSH_QUEUE_LENGTH, FLUSH_QUEUE_LENGTH_DESC),
          rsWrap.getFlushQueueSize())
      // Block cache state.
      .addGauge(Interns.info(BLOCK_CACHE_FREE_SIZE, BLOCK_CACHE_FREE_DESC),
          rsWrap.getBlockCacheFreeSize())
      .addGauge(Interns.info(BLOCK_CACHE_COUNT, BLOCK_CACHE_COUNT_DESC),
          rsWrap.getBlockCacheCount())
      .addGauge(Interns.info(BLOCK_CACHE_SIZE, BLOCK_CACHE_SIZE_DESC),
          rsWrap.getBlockCacheSize())
      .addGauge(Interns.info(BLOCK_CACHE_HIT_PERCENT, BLOCK_CACHE_HIT_PERCENT_DESC),
          rsWrap.getBlockCacheHitPercent())
      .addGauge(Interns.info(BLOCK_CACHE_EXPRESS_HIT_PERCENT,
          BLOCK_CACHE_EXPRESS_HIT_PERCENT_DESC), rsWrap.getBlockCacheHitCachingPercent())
      .addGauge(Interns.info(L1_CACHE_HIT_COUNT, L1_CACHE_HIT_COUNT_DESC),
          rsWrap.getL1CacheHitCount())
      .addGauge(Interns.info(L1_CACHE_MISS_COUNT, L1_CACHE_MISS_COUNT_DESC),
          rsWrap.getL1CacheMissCount())
      .addGauge(Interns.info(L1_CACHE_HIT_RATIO, L1_CACHE_HIT_RATIO_DESC),
          rsWrap.getL1CacheHitRatio())
      .addGauge(Interns.info(L1_CACHE_MISS_RATIO, L1_CACHE_MISS_RATIO_DESC),
          rsWrap.getL1CacheMissRatio())
      .addGauge(Interns.info(L2_CACHE_HIT_COUNT, L2_CACHE_HIT_COUNT_DESC),
          rsWrap.getL2CacheHitCount())
      .addGauge(Interns.info(L2_CACHE_MISS_COUNT, L2_CACHE_MISS_COUNT_DESC),
          rsWrap.getL2CacheMissCount())
      .addGauge(Interns.info(L2_CACHE_HIT_RATIO, L2_CACHE_HIT_RATIO_DESC),
          rsWrap.getL2CacheHitRatio())
      .addGauge(Interns.info(L2_CACHE_MISS_RATIO, L2_CACHE_MISS_RATIO_DESC),
          rsWrap.getL2CacheMissRatio())
      .addGauge(Interns.info(MOB_FILE_CACHE_COUNT, MOB_FILE_CACHE_COUNT_DESC),
          rsWrap.getMobFileCacheCount())
      .addGauge(Interns.info(MOB_FILE_CACHE_HIT_PERCENT, MOB_FILE_CACHE_HIT_PERCENT_DESC),
          rsWrap.getMobFileCacheHitPercent())
      .addGauge(Interns.info(READ_REQUEST_RATE_PER_SECOND, READ_REQUEST_RATE_DESC),
          rsWrap.getReadRequestsRatePerSecond())
      .addGauge(Interns.info(WRITE_REQUEST_RATE_PER_SECOND, WRITE_REQUEST_RATE_DESC),
          rsWrap.getWriteRequestsRatePerSecond())
      // ByteBuffAllocator accounting.
      .addGauge(Interns.info(BYTE_BUFF_ALLOCATOR_HEAP_ALLOCATION_BYTES,
          BYTE_BUFF_ALLOCATOR_HEAP_ALLOCATION_BYTES_DESC),
          rsWrap.getByteBuffAllocatorHeapAllocationBytes())
      .addGauge(Interns.info(BYTE_BUFF_ALLOCATOR_POOL_ALLOCATION_BYTES,
          BYTE_BUFF_ALLOCATOR_POOL_ALLOCATION_BYTES_DESC),
          rsWrap.getByteBuffAllocatorPoolAllocationBytes())
      .addGauge(Interns.info(BYTE_BUFF_ALLOCATOR_HEAP_ALLOCATION_RATIO,
          BYTE_BUFF_ALLOCATOR_HEAP_ALLOCATION_RATIO_DESC),
          rsWrap.getByteBuffAllocatorHeapAllocRatio())
      .addGauge(Interns.info(BYTE_BUFF_ALLOCATOR_TOTAL_BUFFER_COUNT,
          BYTE_BUFF_ALLOCATOR_TOTAL_BUFFER_COUNT_DESC),
          rsWrap.getByteBuffAllocatorTotalBufferCount())
      .addGauge(Interns.info(BYTE_BUFF_ALLOCATOR_USED_BUFFER_COUNT,
          BYTE_BUFF_ALLOCATOR_USED_BUFFER_COUNT_DESC),
          rsWrap.getByteBuffAllocatorUsedBufferCount())
      .addGauge(Interns.info(ACTIVE_SCANNERS, ACTIVE_SCANNERS_DESC),
          rsWrap.getActiveScanners());
}
/** Adds {@code count} to the counter of pauses exceeding the info threshold. */
@Override
public void incInfoThresholdExceeded(int count) {
  infoPauseThresholdExceeded.incr(count);
}
/** Adds {@code count} to the counter of pauses exceeding the warn threshold. */
@Override
public void incWarnThresholdExceeded(int count) {
  warnPauseThresholdExceeded.incr(count);
}
/** Adds a pause-time sample attributed to GC activity to its histogram. */
@Override
public void updatePauseTimeWithGc(long t) {
  pausesWithGc.add(t);
}
/** Adds a pause-time sample not attributed to GC activity to its histogram. */
@Override
public void updatePauseTimeWithoutGc(long t) {
  pausesWithoutGc.add(t);
}
/** Adds the given delete-batch timing sample to the delete-batch histogram. */
@Override
public void updateDeleteBatch(long t) {
  deleteBatchHisto.add(t);
}
/** Adds the given checkAndDelete timing sample to its histogram. */
@Override
public void updateCheckAndDelete(long t) {
  checkAndDeleteHisto.add(t);
}
/** Adds the given checkAndPut timing sample to its histogram. */
@Override
public void updateCheckAndPut(long t) {
  checkAndPutHisto.add(t);
}
/** Adds the given checkAndMutate timing sample to its histogram. */
@Override
public void updateCheckAndMutate(long t) {
  checkAndMutateHisto.add(t);
}
/** Adds the given put-batch timing sample to the put-batch histogram. */
@Override
public void updatePutBatch(long t) {
  putBatchHisto.add(t);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.conf;
import org.apache.giraph.aggregators.AggregatorWriter;
import org.apache.giraph.aggregators.TextAggregatorWriter;
import org.apache.giraph.combiner.Combiner;
import org.apache.giraph.edge.ByteArrayEdges;
import org.apache.giraph.edge.OutEdges;
import org.apache.giraph.graph.DefaultVertexResolver;
import org.apache.giraph.graph.DefaultVertexValueFactory;
import org.apache.giraph.graph.Vertex;
import org.apache.giraph.graph.VertexResolver;
import org.apache.giraph.graph.VertexValueFactory;
import org.apache.giraph.io.EdgeInputFormat;
import org.apache.giraph.io.VertexInputFormat;
import org.apache.giraph.io.VertexOutputFormat;
import org.apache.giraph.job.DefaultJobObserver;
import org.apache.giraph.job.GiraphJobObserver;
import org.apache.giraph.master.DefaultMasterCompute;
import org.apache.giraph.master.MasterCompute;
import org.apache.giraph.master.MasterObserver;
import org.apache.giraph.partition.DefaultPartitionContext;
import org.apache.giraph.partition.GraphPartitionerFactory;
import org.apache.giraph.partition.HashPartitionerFactory;
import org.apache.giraph.partition.Partition;
import org.apache.giraph.partition.PartitionContext;
import org.apache.giraph.partition.SimplePartition;
import org.apache.giraph.worker.DefaultWorkerContext;
import org.apache.giraph.worker.WorkerContext;
import org.apache.giraph.worker.WorkerObserver;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* Constants used all over Giraph for configuration.
*/
// CHECKSTYLE: stop InterfaceIsTypeCheck
public interface GiraphConstants {
/** 1KB in bytes */
int ONE_KB = 1024;
/** Vertex class - required */
ClassConfOption<Vertex> VERTEX_CLASS =
ClassConfOption.create("giraph.vertexClass", null, Vertex.class);
/** Vertex value factory class - optional */
ClassConfOption<VertexValueFactory> VERTEX_VALUE_FACTORY_CLASS =
ClassConfOption.create("giraph.vertexValueFactoryClass",
DefaultVertexValueFactory.class, VertexValueFactory.class);
/** Vertex edges class - optional */
ClassConfOption<OutEdges> VERTEX_EDGES_CLASS =
ClassConfOption.create("giraph.outEdgesClass", ByteArrayEdges.class,
OutEdges.class);
/** Vertex edges class to be used during edge input only - optional */
ClassConfOption<OutEdges> INPUT_VERTEX_EDGES_CLASS =
ClassConfOption.create("giraph.inputOutEdgesClass",
ByteArrayEdges.class, OutEdges.class);
/** Class for Master - optional */
ClassConfOption<MasterCompute> MASTER_COMPUTE_CLASS =
ClassConfOption.create("giraph.masterComputeClass",
DefaultMasterCompute.class, MasterCompute.class);
/** Classes for Master Observer - optional */
ClassConfOption<MasterObserver> MASTER_OBSERVER_CLASSES =
ClassConfOption.create("giraph.master.observers",
null, MasterObserver.class);
/** Classes for Worker Observer - optional */
ClassConfOption<WorkerObserver> WORKER_OBSERVER_CLASSES =
ClassConfOption.create("giraph.worker.observers", null,
WorkerObserver.class);
/** Vertex combiner class - optional */
ClassConfOption<Combiner> VERTEX_COMBINER_CLASS =
ClassConfOption.create("giraph.combinerClass", null, Combiner.class);
/** Vertex resolver class - optional */
ClassConfOption<VertexResolver> VERTEX_RESOLVER_CLASS =
ClassConfOption.create("giraph.vertexResolverClass",
DefaultVertexResolver.class, VertexResolver.class);
/**
 * Option for whether to create vertices that did not previously exist but
 * received messages
 */
BooleanConfOption RESOLVER_CREATE_VERTEX_ON_MSGS =
new BooleanConfOption("giraph.vertex.resolver.create.on.msgs", true);
/** Graph partitioner factory class - optional */
ClassConfOption<GraphPartitionerFactory> GRAPH_PARTITIONER_FACTORY_CLASS =
ClassConfOption.create("giraph.graphPartitionerFactoryClass",
HashPartitionerFactory.class, GraphPartitionerFactory.class);
/** Observer class to watch over job status - optional */
ClassConfOption<GiraphJobObserver> JOB_OBSERVER_CLASS =
ClassConfOption.create("giraph.jobObserverClass",
DefaultJobObserver.class, GiraphJobObserver.class);
// At least one of the input format classes is required.
/** VertexInputFormat class */
ClassConfOption<VertexInputFormat> VERTEX_INPUT_FORMAT_CLASS =
ClassConfOption.create("giraph.vertexInputFormatClass", null,
VertexInputFormat.class);
/** EdgeInputFormat class */
ClassConfOption<EdgeInputFormat> EDGE_INPUT_FORMAT_CLASS =
ClassConfOption.create("giraph.edgeInputFormatClass", null,
EdgeInputFormat.class);
/** VertexOutputFormat class */
ClassConfOption<VertexOutputFormat> VERTEX_OUTPUT_FORMAT_CLASS =
ClassConfOption.create("giraph.vertexOutputFormatClass", null,
VertexOutputFormat.class);
/**
 * If you use this option, then instead of saving vertices at the end of the
 * application, saveVertex will be called right after each vertex.compute()
 * is called.
 * NOTE: This feature doesn't work well with checkpointing - if you restart
 * from a checkpoint you won't have any output from previous supersteps.
 */
BooleanConfOption DO_OUTPUT_DURING_COMPUTATION =
new BooleanConfOption("giraph.doOutputDuringComputation", false);
/**
* Vertex output format thread-safe - if your VertexOutputFormat allows
* several vertexWriters to be created and written to in parallel,
* you should set this to true.
*/
BooleanConfOption VERTEX_OUTPUT_FORMAT_THREAD_SAFE =
new BooleanConfOption("giraph.vertexOutputFormatThreadSafe", false);
/** Number of threads for writing output in the end of the application */
IntConfOption NUM_OUTPUT_THREADS =
new IntConfOption("giraph.numOutputThreads", 1);
/** conf key for comma-separated list of jars to export to YARN workers */
StrConfOption GIRAPH_YARN_LIBJARS =
new StrConfOption("giraph.yarn.libjars", "");
/** Name of the XML file that will export our Configuration to YARN workers */
String GIRAPH_YARN_CONF_FILE = "giraph-conf.xml";
/** Giraph default heap size for all tasks when running on YARN profile */
int GIRAPH_YARN_TASK_HEAP_MB_DEFAULT = 1024;
/** Name of Giraph property for user-configurable heap memory per worker */
IntConfOption GIRAPH_YARN_TASK_HEAP_MB = new IntConfOption(
"giraph.yarn.task.heap.mb", GIRAPH_YARN_TASK_HEAP_MB_DEFAULT);
/** Default priority level in YARN for our task containers */
int GIRAPH_YARN_PRIORITY = 10;
/** Is this a pure YARN job (i.e. no MapReduce layer managing Giraph tasks) */
BooleanConfOption IS_PURE_YARN_JOB =
new BooleanConfOption("giraph.pure.yarn.job", false);
/** Vertex index class */
ClassConfOption<WritableComparable> VERTEX_ID_CLASS =
ClassConfOption.create("giraph.vertexIdClass", null,
WritableComparable.class);
/** Vertex value class */
ClassConfOption<Writable> VERTEX_VALUE_CLASS =
ClassConfOption.create("giraph.vertexValueClass", null, Writable.class);
/** Edge value class */
ClassConfOption<Writable> EDGE_VALUE_CLASS =
ClassConfOption.create("giraph.edgeValueClass", null, Writable.class);
/** Message value class */
ClassConfOption<Writable> MESSAGE_VALUE_CLASS =
ClassConfOption.create("giraph.messageValueClass", null, Writable.class);
/** Partition context class */
ClassConfOption<PartitionContext> PARTITION_CONTEXT_CLASS =
ClassConfOption.create("giraph.partitionContextClass",
DefaultPartitionContext.class, PartitionContext.class);
/** Worker context class */
ClassConfOption<WorkerContext> WORKER_CONTEXT_CLASS =
ClassConfOption.create("giraph.workerContextClass",
DefaultWorkerContext.class, WorkerContext.class);
/** AggregatorWriter class - optional */
ClassConfOption<AggregatorWriter> AGGREGATOR_WRITER_CLASS =
ClassConfOption.create("giraph.aggregatorWriterClass",
TextAggregatorWriter.class, AggregatorWriter.class);
/** Partition class - optional */
ClassConfOption<Partition> PARTITION_CLASS =
ClassConfOption.create("giraph.partitionClass", SimplePartition.class,
Partition.class);
/**
* Minimum number of simultaneous workers before this job can run (int)
*/
String MIN_WORKERS = "giraph.minWorkers";
/**
* Maximum number of simultaneous worker tasks started by this job (int).
*/
String MAX_WORKERS = "giraph.maxWorkers";
/**
* Separate the workers and the master tasks. This is required
* to support dynamic recovery. (boolean)
*/
BooleanConfOption SPLIT_MASTER_WORKER =
new BooleanConfOption("giraph.SplitMasterWorker", true);
/** Indicates whether this job is run in an internal unit test */
BooleanConfOption LOCAL_TEST_MODE =
new BooleanConfOption("giraph.localTestMode", false);
/** Override the Hadoop log level and set the desired log level. */
StrConfOption LOG_LEVEL = new StrConfOption("giraph.logLevel", "info");
/** Use thread level debugging? */
BooleanConfOption LOG_THREAD_LAYOUT =
new BooleanConfOption("giraph.logThreadLayout", false);
/** Configuration key to enable jmap printing */
BooleanConfOption JMAP_ENABLE =
new BooleanConfOption("giraph.jmap.histo.enable", false);
/** Configuration key for msec to sleep between calls */
IntConfOption JMAP_SLEEP_MILLIS =
new IntConfOption("giraph.jmap.histo.msec", SECONDS.toMillis(30));
/** Configuration key for how many lines to print */
IntConfOption JMAP_PRINT_LINES =
new IntConfOption("giraph.jmap.histo.print_lines", 30);
/**
* Minimum percent of the maximum number of workers that have responded
* in order to continue progressing. (float)
*/
FloatConfOption MIN_PERCENT_RESPONDED =
new FloatConfOption("giraph.minPercentResponded", 100.0f);
/** Enable the Metrics system **/
BooleanConfOption METRICS_ENABLE =
new BooleanConfOption("giraph.metrics.enable", false);
/**
* ZooKeeper comma-separated list (if not set,
* will start up ZooKeeper locally)
*/
String ZOOKEEPER_LIST = "giraph.zkList";
/** ZooKeeper session millisecond timeout */
IntConfOption ZOOKEEPER_SESSION_TIMEOUT =
new IntConfOption("giraph.zkSessionMsecTimeout", MINUTES.toMillis(1));
/** Polling interval to check for the ZooKeeper server data */
IntConfOption ZOOKEEPER_SERVERLIST_POLL_MSECS =
new IntConfOption("giraph.zkServerlistPollMsecs", SECONDS.toMillis(3));
/** Number of nodes (not tasks) to run Zookeeper on */
IntConfOption ZOOKEEPER_SERVER_COUNT =
new IntConfOption("giraph.zkServerCount", 1);
/** ZooKeeper port to use */
IntConfOption ZOOKEEPER_SERVER_PORT =
new IntConfOption("giraph.zkServerPort", 22181);
/** Location of the ZooKeeper jar - Used internally, not meant for users */
String ZOOKEEPER_JAR = "giraph.zkJar";
/** Local ZooKeeper directory to use */
String ZOOKEEPER_DIR = "giraph.zkDir";
/** Max attempts for handling ZooKeeper connection loss */
IntConfOption ZOOKEEPER_OPS_MAX_ATTEMPTS =
new IntConfOption("giraph.zkOpsMaxAttempts", 3);
/**
* Msecs to wait before retrying a failed ZooKeeper op due to connection loss.
*/
IntConfOption ZOOKEEPER_OPS_RETRY_WAIT_MSECS =
new IntConfOption("giraph.zkOpsRetryWaitMsecs", SECONDS.toMillis(5));
/** TCP backlog (defaults to number of workers) */
IntConfOption TCP_BACKLOG = new IntConfOption("giraph.tcpBacklog", 1);
/** How big to make the encoder buffer? */
IntConfOption NETTY_REQUEST_ENCODER_BUFFER_SIZE =
new IntConfOption("giraph.nettyRequestEncoderBufferSize", 32 * ONE_KB);
/** Whether or not netty request encoder should use direct byte buffers */
BooleanConfOption NETTY_REQUEST_ENCODER_USE_DIRECT_BUFFERS =
new BooleanConfOption("giraph.nettyRequestEncoderUseDirectBuffers",
false);
/** Netty client threads */
IntConfOption NETTY_CLIENT_THREADS =
new IntConfOption("giraph.nettyClientThreads", 4);
/** Netty server threads */
IntConfOption NETTY_SERVER_THREADS =
new IntConfOption("giraph.nettyServerThreads", 16);
/** Use the execution handler in netty on the client? */
BooleanConfOption NETTY_CLIENT_USE_EXECUTION_HANDLER =
new BooleanConfOption("giraph.nettyClientUseExecutionHandler", true);
/** Netty client execution threads (execution handler) */
IntConfOption NETTY_CLIENT_EXECUTION_THREADS =
new IntConfOption("giraph.nettyClientExecutionThreads", 8);
/** Where to place the netty client execution handle? */
StrConfOption NETTY_CLIENT_EXECUTION_AFTER_HANDLER =
new StrConfOption("giraph.nettyClientExecutionAfterHandler",
"requestEncoder");
/** Use the execution handler in netty on the server? */
BooleanConfOption NETTY_SERVER_USE_EXECUTION_HANDLER =
new BooleanConfOption("giraph.nettyServerUseExecutionHandler", true);
/** Netty server execution threads (execution handler) */
IntConfOption NETTY_SERVER_EXECUTION_THREADS =
new IntConfOption("giraph.nettyServerExecutionThreads", 8);
/** Where to place the netty server execution handle? */
StrConfOption NETTY_SERVER_EXECUTION_AFTER_HANDLER =
new StrConfOption("giraph.nettyServerExecutionAfterHandler",
"requestFrameDecoder");
/** Netty simulate a first request closed */
BooleanConfOption NETTY_SIMULATE_FIRST_REQUEST_CLOSED =
new BooleanConfOption("giraph.nettySimulateFirstRequestClosed", false);
/** Netty simulate a first response failed */
BooleanConfOption NETTY_SIMULATE_FIRST_RESPONSE_FAILED =
new BooleanConfOption("giraph.nettySimulateFirstResponseFailed", false);
/** Max resolve address attempts */
IntConfOption MAX_RESOLVE_ADDRESS_ATTEMPTS =
new IntConfOption("giraph.maxResolveAddressAttempts", 5);
/** Msecs to wait between waiting for all requests to finish */
IntConfOption WAITING_REQUEST_MSECS =
new IntConfOption("giraph.waitingRequestMsecs", SECONDS.toMillis(15));
  /** Milliseconds to wait for an event before continuing */
IntConfOption EVENT_WAIT_MSECS =
new IntConfOption("giraph.eventWaitMsecs", SECONDS.toMillis(30));
/**
* Maximum milliseconds to wait before giving up trying to get the minimum
* number of workers before a superstep (int).
*/
IntConfOption MAX_MASTER_SUPERSTEP_WAIT_MSECS =
new IntConfOption("giraph.maxMasterSuperstepWaitMsecs",
MINUTES.toMillis(10));
/** Milliseconds for a request to complete (or else resend) */
IntConfOption MAX_REQUEST_MILLISECONDS =
new IntConfOption("giraph.maxRequestMilliseconds", MINUTES.toMillis(10));
/** Netty max connection failures */
IntConfOption NETTY_MAX_CONNECTION_FAILURES =
new IntConfOption("giraph.nettyMaxConnectionFailures", 1000);
/** Initial port to start using for the IPC communication */
IntConfOption IPC_INITIAL_PORT =
new IntConfOption("giraph.ipcInitialPort", 30000);
/** Maximum bind attempts for different IPC ports */
IntConfOption MAX_IPC_PORT_BIND_ATTEMPTS =
new IntConfOption("giraph.maxIpcPortBindAttempts", 20);
/**
* Fail first IPC port binding attempt, simulate binding failure
* on real grid testing
*/
BooleanConfOption FAIL_FIRST_IPC_PORT_BIND_ATTEMPT =
new BooleanConfOption("giraph.failFirstIpcPortBindAttempt", false);
/** Client send buffer size */
IntConfOption CLIENT_SEND_BUFFER_SIZE =
new IntConfOption("giraph.clientSendBufferSize", 512 * ONE_KB);
/** Client receive buffer size */
IntConfOption CLIENT_RECEIVE_BUFFER_SIZE =
new IntConfOption("giraph.clientReceiveBufferSize", 32 * ONE_KB);
/** Server send buffer size */
IntConfOption SERVER_SEND_BUFFER_SIZE =
new IntConfOption("giraph.serverSendBufferSize", 32 * ONE_KB);
/** Server receive buffer size */
IntConfOption SERVER_RECEIVE_BUFFER_SIZE =
new IntConfOption("giraph.serverReceiveBufferSize", 512 * ONE_KB);
/** Maximum size of messages (in bytes) per peer before flush */
IntConfOption MAX_MSG_REQUEST_SIZE =
new IntConfOption("giraph.msgRequestSize", 512 * ONE_KB);
/**
* How much bigger than the average per partition size to make initial per
* partition buffers.
* If this value is A, message request size is M,
 * and a worker has P partitions, then its initial partition buffer size
* will be (M / P) * (1 + A).
*/
FloatConfOption ADDITIONAL_MSG_REQUEST_SIZE =
new FloatConfOption("giraph.additionalMsgRequestSize", 0.2f);
/** Maximum size of edges (in bytes) per peer before flush */
IntConfOption MAX_EDGE_REQUEST_SIZE =
new IntConfOption("giraph.edgeRequestSize", 512 * ONE_KB);
/**
* Additional size (expressed as a ratio) of each per-partition buffer on
* top of the average size.
*/
FloatConfOption ADDITIONAL_EDGE_REQUEST_SIZE =
new FloatConfOption("giraph.additionalEdgeRequestSize", 0.2f);
/** Maximum number of mutations per partition before flush */
IntConfOption MAX_MUTATIONS_PER_REQUEST =
new IntConfOption("giraph.maxMutationsPerRequest", 100);
/**
* Whether we should reuse the same Edge object when adding edges from
* requests.
* This works with edge storage implementations that don't keep references
* to the input Edge objects (e.g., ByteArrayVertex).
*/
BooleanConfOption REUSE_INCOMING_EDGE_OBJECTS =
new BooleanConfOption("giraph.reuseIncomingEdgeObjects", false);
/**
* Use message size encoding (typically better for complex objects,
* not meant for primitive wrapped messages)
*/
BooleanConfOption USE_MESSAGE_SIZE_ENCODING =
new BooleanConfOption("giraph.useMessageSizeEncoding", false);
/** Number of channels used per server */
IntConfOption CHANNELS_PER_SERVER =
new IntConfOption("giraph.channelsPerServer", 1);
/** Number of flush threads per peer */
String MSG_NUM_FLUSH_THREADS = "giraph.msgNumFlushThreads";
/** Number of threads for vertex computation */
IntConfOption NUM_COMPUTE_THREADS =
new IntConfOption("giraph.numComputeThreads", 1);
/** Number of threads for input split loading */
IntConfOption NUM_INPUT_THREADS =
new IntConfOption("giraph.numInputThreads", 1);
/** Minimum stragglers of the superstep before printing them out */
IntConfOption PARTITION_LONG_TAIL_MIN_PRINT =
new IntConfOption("giraph.partitionLongTailMinPrint", 1);
/** Use superstep counters? (boolean) */
BooleanConfOption USE_SUPERSTEP_COUNTERS =
new BooleanConfOption("giraph.useSuperstepCounters", true);
/**
* Input split sample percent - Used only for sampling and testing, rather
* than an actual job. The idea is that to test, you might only want a
* fraction of the actual input splits from your VertexInputFormat to
* load (values should be [0, 100]).
*/
FloatConfOption INPUT_SPLIT_SAMPLE_PERCENT =
new FloatConfOption("giraph.inputSplitSamplePercent", 100f);
/**
* To limit outlier vertex input splits from producing too many vertices or
* to help with testing, the number of vertices loaded from an input split
* can be limited. By default, everything is loaded.
*/
LongConfOption INPUT_SPLIT_MAX_VERTICES =
new LongConfOption("giraph.InputSplitMaxVertices", -1);
/**
* To limit outlier vertex input splits from producing too many vertices or
* to help with testing, the number of edges loaded from an input split
* can be limited. By default, everything is loaded.
*/
LongConfOption INPUT_SPLIT_MAX_EDGES =
new LongConfOption("giraph.InputSplitMaxEdges", -1);
/**
* To minimize network usage when reading input splits,
* each worker can prioritize splits that reside on its host.
* This, however, comes at the cost of increased load on ZooKeeper.
* Hence, users with a lot of splits and input threads (or with
* configurations that can't exploit locality) may want to disable it.
*/
BooleanConfOption USE_INPUT_SPLIT_LOCALITY =
new BooleanConfOption("giraph.useInputSplitLocality", true);
/** Multiplier for the current workers squared */
FloatConfOption PARTITION_COUNT_MULTIPLIER =
new FloatConfOption("giraph.masterPartitionCountMultiplier", 1.0f);
/** Overrides default partition count calculation if not -1 */
IntConfOption USER_PARTITION_COUNT =
new IntConfOption("giraph.userPartitionCount", -1);
/** Vertex key space size for
* {@link org.apache.giraph.partition.SimpleRangeWorkerPartitioner}
*/
String PARTITION_VERTEX_KEY_SPACE_SIZE = "giraph.vertexKeySpaceSize";
/** Java opts passed to ZooKeeper startup */
StrConfOption ZOOKEEPER_JAVA_OPTS =
new StrConfOption("giraph.zkJavaOpts",
"-Xmx512m -XX:ParallelGCThreads=4 -XX:+UseConcMarkSweepGC " +
"-XX:CMSInitiatingOccupancyFraction=70 -XX:MaxGCPauseMillis=100");
/**
* How often to checkpoint (i.e. 0, means no checkpoint,
* 1 means every superstep, 2 is every two supersteps, etc.).
*/
IntConfOption CHECKPOINT_FREQUENCY =
new IntConfOption("giraph.checkpointFrequency", 0);
/**
* Delete checkpoints after a successful job run?
*/
BooleanConfOption CLEANUP_CHECKPOINTS_AFTER_SUCCESS =
new BooleanConfOption("giraph.cleanupCheckpointsAfterSuccess", true);
/**
* An application can be restarted manually by selecting a superstep. The
* corresponding checkpoint must exist for this to work. The user should
* set a long value. Default is start from scratch.
*/
String RESTART_SUPERSTEP = "giraph.restartSuperstep";
/**
* Base ZNode for Giraph's state in the ZooKeeper cluster. Must be a root
* znode on the cluster beginning with "/"
*/
String BASE_ZNODE_KEY = "giraph.zkBaseZNode";
/**
* If ZOOKEEPER_LIST is not set, then use this directory to manage
* ZooKeeper
*/
StrConfOption ZOOKEEPER_MANAGER_DIRECTORY =
new StrConfOption("giraph.zkManagerDirectory",
"_bsp/_defaultZkManagerDir");
/** Number of ZooKeeper client connection attempts before giving up. */
IntConfOption ZOOKEEPER_CONNECTION_ATTEMPTS =
new IntConfOption("giraph.zkConnectionAttempts", 10);
/** This directory has/stores the available checkpoint files in HDFS. */
StrConfOption CHECKPOINT_DIRECTORY =
new StrConfOption("giraph.checkpointDirectory", "_bsp/_checkpoints/");
/**
* Comma-separated list of directories in the local file system for
* out-of-core messages.
*/
StrConfOption MESSAGES_DIRECTORY =
new StrConfOption("giraph.messagesDirectory", "_bsp/_messages/");
/** Whether or not to use out-of-core messages */
BooleanConfOption USE_OUT_OF_CORE_MESSAGES =
new BooleanConfOption("giraph.useOutOfCoreMessages", false);
/**
* If using out-of-core messaging, it tells how much messages do we keep
* in memory.
*/
IntConfOption MAX_MESSAGES_IN_MEMORY =
new IntConfOption("giraph.maxMessagesInMemory", 1000000);
/** Size of buffer when reading and writing messages out-of-core. */
IntConfOption MESSAGES_BUFFER_SIZE =
new IntConfOption("giraph.messagesBufferSize", 8 * ONE_KB);
/**
* Comma-separated list of directories in the local filesystem for
* out-of-core partitions.
*/
StrConfOption PARTITIONS_DIRECTORY =
new StrConfOption("giraph.partitionsDirectory", "_bsp/_partitions");
/** Enable out-of-core graph. */
BooleanConfOption USE_OUT_OF_CORE_GRAPH =
new BooleanConfOption("giraph.useOutOfCoreGraph", false);
/** Maximum number of partitions to hold in memory for each worker. */
IntConfOption MAX_PARTITIONS_IN_MEMORY =
new IntConfOption("giraph.maxPartitionsInMemory", 10);
/** Keep the zookeeper output for debugging? Default is to remove it. */
BooleanConfOption KEEP_ZOOKEEPER_DATA =
new BooleanConfOption("giraph.keepZooKeeperData", false);
/** Default ZooKeeper tick time. */
int DEFAULT_ZOOKEEPER_TICK_TIME = 6000;
/** Default ZooKeeper init limit (in ticks). */
int DEFAULT_ZOOKEEPER_INIT_LIMIT = 10;
/** Default ZooKeeper sync limit (in ticks). */
int DEFAULT_ZOOKEEPER_SYNC_LIMIT = 5;
/** Default ZooKeeper snap count. */
int DEFAULT_ZOOKEEPER_SNAP_COUNT = 50000;
/** Default ZooKeeper maximum client connections. */
int DEFAULT_ZOOKEEPER_MAX_CLIENT_CNXNS = 10000;
/** ZooKeeper minimum session timeout */
IntConfOption ZOOKEEPER_MIN_SESSION_TIMEOUT =
new IntConfOption("giraph.zKMinSessionTimeout", MINUTES.toMillis(10));
/** ZooKeeper maximum session timeout */
IntConfOption ZOOKEEPER_MAX_SESSION_TIMEOUT =
new IntConfOption("giraph.zkMaxSessionTimeout", MINUTES.toMillis(15));
/** ZooKeeper force sync */
BooleanConfOption ZOOKEEPER_FORCE_SYNC =
new BooleanConfOption("giraph.zKForceSync", false);
/** ZooKeeper skip ACLs */
BooleanConfOption ZOOKEEPER_SKIP_ACL =
new BooleanConfOption("giraph.ZkSkipAcl", true);
/**
* Whether to use SASL with DIGEST and Hadoop Job Tokens to authenticate
* and authorize Netty BSP Clients to Servers.
*/
BooleanConfOption AUTHENTICATE =
new BooleanConfOption("giraph.authenticate", false);
/** Use unsafe serialization? */
BooleanConfOption USE_UNSAFE_SERIALIZATION =
new BooleanConfOption("giraph.useUnsafeSerialization", true);
/**
* Maximum number of attempts a master/worker will retry before killing
* the job. This directly maps to the number of map task attempts in
* Hadoop.
*/
IntConfOption MAX_TASK_ATTEMPTS =
new IntConfOption("mapred.map.max.attempts", -1);
/** Interface to use for hostname resolution */
StrConfOption DNS_INTERFACE =
new StrConfOption("giraph.dns.interface", "default");
/** Server for hostname resolution */
StrConfOption DNS_NAMESERVER =
new StrConfOption("giraph.dns.nameserver", "default");
/**
* The application will halt after this many supersteps is completed. For
* instance, if it is set to 3, the application will run at most 0, 1,
* and 2 supersteps and then go into the shutdown superstep.
*/
IntConfOption MAX_NUMBER_OF_SUPERSTEPS =
new IntConfOption("giraph.maxNumberOfSupersteps", 1);
/**
* The application will not mutate the graph topology (the edges). It is used
* to optimise out-of-core graph, by not writing back edges every time.
*/
BooleanConfOption STATIC_GRAPH =
new BooleanConfOption("giraph.isStaticGraph", false);
/**
* The following configurations are used for graph extraction.
*/
StrConfOption INPUT_RAW_QUERY_PATH =
new StrConfOption("grapholap.input.raw.query.path", "/user/simon/test/rawquery/test-4-query");
BooleanConfOption GRAPHOLAP_PATH_EVALUATION =
new BooleanConfOption("grapholap.path.evaluation", false);
BooleanConfOption GRAPHOLAP_PATH_EVALUATION_IMPR =
new BooleanConfOption("grapholap.path.evaluation.impr", false);
BooleanConfOption GRAPHOLAP_PATH_EVALUATION_IMPR_ARRAY =
new BooleanConfOption("grapholap.path.evaluation.impr.array", false);
BooleanConfOption GRAPHOLAP_PATH_EVALUATION_IMPR_NORMAL =
new BooleanConfOption("grapholap.path.evaluation.impr.normal", false);
BooleanConfOption GRAPHOLAP_PATH_EVALUATION_QUERY_SHARING =
new BooleanConfOption("grapholap.path.evaluation.query.sharing", false);
StrConfOption GRAPHOLAP_PATH_EVALUATION_PLAN =
new StrConfOption("grapholap.path.evaluation.plan", "tree");
}
// CHECKSTYLE: resume InterfaceIsTypeCheck
| |
package com.bison.transition.compat;
import android.annotation.TargetApi;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.os.Build;
import android.util.Property;
import android.view.View;
import android.view.ViewGroup;
import com.bison.transition.R;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
/**
* Created by oeager on 2015/11/11.
* email:oeager@foxmail.com
*/
public final class ViewCompat {
private static final ViewFace IMPL;
static {
final int version = Build.VERSION.SDK_INT;
if (version >= Build.VERSION_CODES.LOLLIPOP) {
IMPL = new LollipopViewImp();
} else if (version >= Build.VERSION_CODES.KITKAT) {
IMPL = new KitkatViewImp();
} else if (version >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
IMPL = new JellyBeanMR2ViewImp();
} else if (version >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
IMPL = new JellyBeanMR1ViewImp();
} else if (version >= Build.VERSION_CODES.JELLY_BEAN) {
IMPL = new JellyBeanViewImp();
} else {
IMPL = new DefaultViewImp();
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public static class JellyBeanMR2ViewImp extends JellyBeanMR1ViewImp{
@Override
public void setClipBounds(View v, Rect clipBounds) {
v.setClipBounds(clipBounds);
}
@Override
public Rect getClipBounds(View v) {
return v.getClipBounds();
}
@Override
public Object getWindowId(View view) {
return view.getWindowId();
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
static class JellyBeanMR1ViewImp extends JellyBeanViewImp{
@Override
public boolean isRtl(View view) {
return view != null && view.getLayoutDirection() == View.LAYOUT_DIRECTION_RTL;
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
static class JellyBeanViewImp extends DefaultViewImp{
@Override
public void setHasTransientState(View view, boolean hasTransientState) {
view.setHasTransientState(hasTransientState);
}
@Override
public boolean hasTransientState(View view) {
return view.hasTransientState();
}
}
static class DefaultViewImp implements ViewFace{
private static final Method METHOD_SET_FRAME =
ReflectionTool.getPrivateMethod(View.class, "setFrame", int.class, int.class,
int.class, int.class);
private static final Field FIELD_VIEW_FLAGS =
ReflectionTool.getPrivateField(View.class, "mViewFlags");
private static final int VIEW_VISIBILITY_MASK = 0x0000000C;
@Override
public void setLeftTopRightBottom(View v, int left, int top, int right, int bottom) {
ReflectionTool.invoke(v, null, METHOD_SET_FRAME, left, top, right, bottom);
}
@Override
public void setTransitionAlpha(View v, float alpha) {
v.setAlpha(alpha);
}
@Override
public float getTransitionAlpha(View v) {
return v.getAlpha();
}
@Override
public boolean isLaidOut(View v, boolean defaultValue) {
return defaultValue;
}
public void setClipBounds(View v, Rect clipBounds) {
// TODO: Implement support behavior
}
public Rect getClipBounds(View v) {
// TODO: Implement support behavior
return null;
}
public void setTransitionName(View v, String name) {
v.setTag(R.id.transitionName, name);
}
public String getTransitionName(View v) {
return (String) v.getTag(R.id.transitionName);
}
public boolean isTransitionAlphaCompatMode() {
return true;
}
public Property<View, Float> getAlphaProperty() {
return View.ALPHA;
}
public void setTranslationZ(View view, float z) {
// do nothing
}
public float getTranslationZ(View view) {
return 0;
}
public View addGhostView(View view, ViewGroup viewGroup, Matrix matrix) {
return null;
}
public void removeGhostView(View view) {
// do nothing
}
public void transformMatrixToGlobal(View view, Matrix matrix) {
// TODO: Implement support behavior
}
public void transformMatrixToLocal(View v, Matrix matrix) {
// TODO: Implement support behavior
}
public void setAnimationMatrix(View view, Matrix matrix) {
// TODO: Implement support behavior
}
public Object getWindowId(View view) {
return null;
}
public boolean isRtl(View view) {
return false;
}
public void setHasTransientState(View view, boolean hasTransientState) {
// do nothing; API doesn't exist
}
public boolean hasTransientState(View view) {
return false;
}
public void setTransitionVisibility(View v, int visibility) {
int value = (Integer) ReflectionTool.getFieldValue(v, 0, FIELD_VIEW_FLAGS);
value = (value & ~VIEW_VISIBILITY_MASK) | visibility;
ReflectionTool.setFieldValue(v, FIELD_VIEW_FLAGS, value);
}
}
public static void setLeftTopRightBottom(View view, int left, int top, int right, int bottom) {
IMPL.setLeftTopRightBottom(view, left, top, right, bottom);
}
public static void setTransitionAlpha(View view,float alpha){
IMPL.setTransitionAlpha(view, alpha);
}
public static float getTransitionAlpha(View view){
return IMPL.getTransitionAlpha(view);
}
public static boolean isLaidOut(View v, boolean defaultValue) {
return IMPL.isLaidOut(v, defaultValue);
}
public static Rect getClipBounds(View v) {
return IMPL.getClipBounds(v);
}
public static boolean isTransitionAlphaCompatMode() {
return IMPL.isTransitionAlphaCompatMode();
}
public static Property<View, Float> getAlphaProperty() {
return IMPL.getAlphaProperty();
}
public static void setTransitionName(View v, String name) {
IMPL.setTransitionName(v, name);
}
public static String getTransitionName(View v) {
return IMPL.getTransitionName(v);
}
public static float getTranslationZ(View view) {
return IMPL.getTranslationZ(view);
}
public static void setTranslationZ(View view, float z) {
IMPL.setTranslationZ(view, z);
}
public static void transformMatrixToGlobal(View view, Matrix matrix) {
IMPL.transformMatrixToGlobal(view, matrix);
}
public static void transformMatrixToLocal(View view, Matrix matrix) {
IMPL.transformMatrixToLocal(view, matrix);
}
public static void setAnimationMatrix(View view, Matrix matrix) {
IMPL.setAnimationMatrix(view, matrix);
}
public static View addGhostView(View view, ViewGroup viewGroup, Matrix matrix) {
return IMPL.addGhostView(view, viewGroup, matrix);
}
public static void removeGhostView(View view) {
IMPL.removeGhostView(view);
}
public static Object getWindowId(View view) {
return IMPL.getWindowId(view);
}
public static boolean isRtl(View view) {
return IMPL.isRtl(view);
}
public static boolean hasTransientState(View view) {
return IMPL.hasTransientState(view);
}
public static void setHasTransientState(View view, boolean hasTransientState) {
IMPL.setHasTransientState(view, hasTransientState);
}
/**
* Change the visibility of the View without triggering any other changes. This is
* important for transitions, where visibility changes should not adjust focus or
* trigger a new layout. This is only used when the visibility has already been changed
* and we need a transient value during an animation. When the animation completes,
* the original visibility value is always restored.
*
* @param visibility One of View.VISIBLE, View.INVISIBLE, or View.GONE.
*/
public static void setTransitionVisibility(View v, int visibility) {
IMPL.setTransitionVisibility(v, visibility);
}
public static void setClipBounds(View v, Rect clipBounds) {
IMPL.setClipBounds(v, clipBounds);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.coyote.http2;
import java.io.File;
import java.io.IOException;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import jakarta.servlet.ServletConnection;
import jakarta.servlet.http.HttpServletResponse;
import org.apache.coyote.AbstractProcessor;
import org.apache.coyote.ActionCode;
import org.apache.coyote.Adapter;
import org.apache.coyote.ContinueResponseTiming;
import org.apache.coyote.ErrorState;
import org.apache.coyote.Request;
import org.apache.coyote.RequestGroupInfo;
import org.apache.coyote.Response;
import org.apache.coyote.http11.filters.GzipOutputFilter;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.buf.ByteChunk;
import org.apache.tomcat.util.http.FastHttpDateFormat;
import org.apache.tomcat.util.http.MimeHeaders;
import org.apache.tomcat.util.http.parser.HttpParser;
import org.apache.tomcat.util.net.AbstractEndpoint.Handler.SocketState;
import org.apache.tomcat.util.net.DispatchType;
import org.apache.tomcat.util.net.SendfileState;
import org.apache.tomcat.util.net.SocketEvent;
import org.apache.tomcat.util.net.SocketWrapperBase;
import org.apache.tomcat.util.res.StringManager;
class StreamProcessor extends AbstractProcessor {
private static final Log log = LogFactory.getLog(StreamProcessor.class);
private static final StringManager sm = StringManager.getManager(StreamProcessor.class);
private static final Set<String> H2_PSEUDO_HEADERS_REQUEST = new HashSet<>();
private final Http2UpgradeHandler handler;
private final Stream stream;
private SendfileData sendfileData = null;
private SendfileState sendfileState = null;
static {
H2_PSEUDO_HEADERS_REQUEST.add(":method");
H2_PSEUDO_HEADERS_REQUEST.add(":scheme");
H2_PSEUDO_HEADERS_REQUEST.add(":authority");
H2_PSEUDO_HEADERS_REQUEST.add(":path");
}
StreamProcessor(Http2UpgradeHandler handler, Stream stream, Adapter adapter,
SocketWrapperBase<?> socketWrapper) {
super(adapter, stream.getCoyoteRequest(), stream.getCoyoteResponse());
this.handler = handler;
this.stream = stream;
setSocketWrapper(socketWrapper);
}
final void process(SocketEvent event) {
try {
// FIXME: the regular processor syncs on socketWrapper, but here this deadlocks
synchronized (this) {
// HTTP/2 equivalent of AbstractConnectionHandler#process() without the
// socket <-> processor mapping
SocketState state = SocketState.CLOSED;
try {
state = process(socketWrapper, event);
if (state == SocketState.LONG) {
handler.getProtocol().getHttp11Protocol().addWaitingProcessor(this);
} else if (state == SocketState.CLOSED) {
handler.getProtocol().getHttp11Protocol().removeWaitingProcessor(this);
if (!stream.isInputFinished() && getErrorState().isIoAllowed()) {
// The request has been processed but the request body has not been
// fully read. This typically occurs when Tomcat rejects an upload
// of some form (e.g. PUT or POST). Need to tell the client not to
// send any more data on this stream (reset).
StreamException se = new StreamException(
sm.getString("streamProcessor.cancel", stream.getConnectionId(),
stream.getIdAsString()), Http2Error.CANCEL, stream.getIdAsInt());
stream.close(se);
} else if (!getErrorState().isConnectionIoAllowed()) {
ConnectionException ce = new ConnectionException(sm.getString(
"streamProcessor.error.connection", stream.getConnectionId(),
stream.getIdAsString()), Http2Error.INTERNAL_ERROR);
stream.close(ce);
} else if (!getErrorState().isIoAllowed()) {
StreamException se = stream.getResetException();
if (se == null) {
se = new StreamException(sm.getString(
"streamProcessor.error.stream", stream.getConnectionId(),
stream.getIdAsString()), Http2Error.INTERNAL_ERROR,
stream.getIdAsInt());
}
stream.close(se);
} else {
if (!stream.isActive()) {
// stream.close() will call recycle so only need it here
stream.recycle();
}
}
}
} catch (Exception e) {
String msg = sm.getString("streamProcessor.error.connection",
stream.getConnectionId(), stream.getIdAsString());
if (log.isDebugEnabled()) {
log.debug(msg, e);
}
ConnectionException ce = new ConnectionException(msg, Http2Error.INTERNAL_ERROR, e);
stream.close(ce);
state = SocketState.CLOSED;
} finally {
if (state == SocketState.CLOSED) {
recycle();
}
}
}
} finally {
handler.executeQueuedStream();
}
}
@Override
protected final void prepareResponse() throws IOException {
    // The response is committed as soon as header generation starts.
    response.setCommitted(true);
    final Http2Protocol protocol = handler.getProtocol();
    if (handler.hasAsyncIO() && protocol.getUseSendfile()) {
        prepareSendfile();
    }
    // prepareHeaders() must know whether sendfile is in use (sendfileData != null)
    // because compression cannot be combined with sendfile.
    prepareHeaders(request, response, sendfileData == null, protocol, stream);
    stream.writeHeaders();
}
// Populates sendfileData from the coyote request's sendfile attributes.
// If the filename attribute is absent, sendfileData remains null and the
// normal write path is used instead of sendfile.
private void prepareSendfile() {
String fileName = (String) stream.getCoyoteRequest().getAttribute(
org.apache.coyote.Constants.SENDFILE_FILENAME_ATTR);
if (fileName != null) {
sendfileData = new SendfileData();
sendfileData.path = new File(fileName).toPath();
// NOTE(review): assumes the start/end attributes are always set
// together with the filename attribute — a missing one would NPE
// on unboxing. TODO confirm against the code that sets them.
sendfileData.pos = ((Long) stream.getCoyoteRequest().getAttribute(
org.apache.coyote.Constants.SENDFILE_FILE_START_ATTR)).longValue();
sendfileData.end = ((Long) stream.getCoyoteRequest().getAttribute(
org.apache.coyote.Constants.SENDFILE_FILE_END_ATTR)).longValue();
// Bytes still to be sent for this sendfile operation.
sendfileData.left = sendfileData.end - sendfileData.pos;
sendfileData.stream = stream;
}
}
// Static so it can be used by Stream to build the MimeHeaders required for
// an ACK. For that use case coyoteRequest, protocol and stream will be null.
//
// Builds the HTTP/2 response headers: the :status pseudo-header, optional
// compression setup, entity headers (content-type/-language/-length) when a
// body is permitted, and a date header for final (>=200) responses.
static void prepareHeaders(Request coyoteRequest, Response coyoteResponse, boolean noSendfile,
Http2Protocol protocol, Stream stream) {
MimeHeaders headers = coyoteResponse.getMimeHeaders();
int statusCode = coyoteResponse.getStatus();
// Add the pseudo header for status
headers.addValue(":status").setString(Integer.toString(statusCode));
// Compression can't be used with sendfile
// Need to check for compression (and set headers appropriately) before
// adding headers below
if (noSendfile && protocol != null &&
protocol.useCompression(coyoteRequest, coyoteResponse)) {
// Enable compression. Headers will have been set. Need to configure
// output filter at this point.
stream.addOutputFilter(new GzipOutputFilter());
}
// Check to see if a response body is present
// (1xx, 204, 205 and 304 responses must not carry a body)
if (!(statusCode < 200 || statusCode == 204 || statusCode == 205 || statusCode == 304)) {
String contentType = coyoteResponse.getContentType();
if (contentType != null) {
headers.setValue("content-type").setString(contentType);
}
String contentLanguage = coyoteResponse.getContentLanguage();
if (contentLanguage != null) {
headers.setValue("content-language").setString(contentLanguage);
}
// Add a content-length header if a content length has been set unless
// the application has already added one
long contentLength = coyoteResponse.getContentLengthLong();
if (contentLength != -1 && headers.getValue("content-length") == null) {
headers.addValue("content-length").setLong(contentLength);
}
} else {
if (statusCode == 205) {
// RFC 7231 requires the server to explicitly signal an empty
// response in this case
coyoteResponse.setContentLength(0);
} else {
coyoteResponse.setContentLength(-1);
}
}
// Add date header unless it is an informational response or the
// application has already set one
if (statusCode >= 200 && headers.getValue("date") == null) {
headers.addValue("date").setString(FastHttpDateFormat.getCurrentDate());
}
}
@Override
protected final void finishResponse() throws IOException {
    // Give the handler a chance to start (or continue) a sendfile
    // operation for this stream.
    sendfileState = handler.processSendfile(sendfileData);
    if (sendfileState != SendfileState.PENDING) {
        // No sendfile in flight, so the response body can be closed out
        // immediately.
        stream.getOutputBuffer().end();
    }
}
@Override
protected final void ack(ContinueResponseTiming continueResponseTiming) {
    // Only send the 100-continue ACK when the requested timing is ALWAYS
    // or matches the timing configured for this protocol.
    boolean timingMatches = continueResponseTiming == ContinueResponseTiming.ALWAYS ||
            continueResponseTiming == handler.getProtocol().getContinueResponseTimingInternal();
    if (!timingMatches) {
        return;
    }
    // An ACK is only valid if the client sent an expectation and nothing
    // has been committed yet.
    if (response.isCommitted() || !request.hasExpectation()) {
        return;
    }
    try {
        stream.writeAck();
    } catch (IOException ioe) {
        setErrorState(ErrorState.CLOSE_CONNECTION_NOW, ioe);
    }
}
// Flush any buffered response data through the stream's output buffer.
@Override
protected final void flush() throws IOException {
stream.getOutputBuffer().flush();
}
// Bytes of request body currently buffered. Note: the doRead flag is
// ignored here; availability is answered from the existing buffer only.
@Override
protected final int available(boolean doRead) {
return stream.getInputBuffer().available();
}
// Replace the request body with a previously saved copy (e.g. for FORM
// authentication replay) and mark the stream's input as complete.
@Override
protected final void setRequestBody(ByteChunk body) {
stream.getInputBuffer().insertReplayedBody(body);
try {
stream.receivedEndOfStream();
} catch (ConnectionException e) {
// Exception will not be thrown in this case
}
}
@Override
protected final void setSwallowResponse() {
// NO-OP
}
@Override
protected final void disableSwallowRequest() {
// NO-OP
// HTTP/2 has to swallow any input received to ensure that the flow
// control windows are correctly tracked.
}
// Either dispatch the event to a container thread or process it inline
// on the calling thread.
@Override
protected void processSocketEvent(SocketEvent event, boolean dispatch) {
if (dispatch) {
handler.processStreamOnContainerThread(this, event);
} else {
this.process(event);
}
}
@Override
protected final boolean isReadyForRead() {
return stream.getInputBuffer().isReadyForRead();
}
@Override
protected final boolean isRequestBodyFullyRead() {
return stream.getInputBuffer().isRequestBodyFullyRead();
}
@Override
protected final void registerReadInterest() {
// Should never be called for StreamProcessor as isReadyForRead() is
// overridden
throw new UnsupportedOperationException();
}
// Delegates write readiness to the stream (flow-control aware).
@Override
protected final boolean isReadyForWrite() {
return stream.isReadyForWrite();
}
// Executes (and clears) any queued read/write dispatches for this stream,
// handing each one to a container thread.
@Override
protected final void executeDispatches() {
Iterator<DispatchType> dispatches = getIteratorAndClearDispatches();
/*
* Compare with superclass that uses SocketWrapper
* A sync is not necessary here as the window sizes are updated with
* syncs before the dispatches are executed and it is the window size
* updates that need to be complete before the dispatch executes.
*/
while (dispatches != null && dispatches.hasNext()) {
DispatchType dispatchType = dispatches.next();
/*
* Dispatch on new thread.
* Firstly, this avoids a deadlock on the SocketWrapper as Streams
* being processed by container threads lock the SocketProcessor
* before they lock the SocketWrapper which is the opposite order to
* container threads processing via Http2UpgrageHandler.
* Secondly, this code executes after a Window update has released
* one or more Streams. By dispatching each Stream to a dedicated
* thread, those Streams may progress concurrently.
*/
processSocketEvent(dispatchType.getSocketStatus(), true);
}
}
// Server push availability is decided by the stream (client settings).
@Override
protected final boolean isPushSupported() {
return stream.isPushSupported();
}
// Initiates a server push for the given synthetic request. Any I/O
// failure is fatal for the whole connection.
@Override
protected final void doPush(Request pushTarget) {
try {
stream.push(pushTarget);
} catch (IOException ioe) {
setErrorState(ErrorState.CLOSE_CONNECTION_NOW, ioe);
response.setErrorException(ioe);
}
}
@Override
protected boolean isTrailerFieldsReady() {
return stream.isTrailerFieldsReady();
}
@Override
protected boolean isTrailerFieldsSupported() {
return stream.isTrailerFieldsSupported();
}
// The HTTP/2 stream ID doubles as the protocol-level request ID.
@Override
protected String getProtocolRequestId() {
return stream.getIdAsString();
}
@Override
public final void recycle() {
// StreamProcessor instances are not re-used.
// Calling removeRequestProcessor even though the RequestProcesser was
// never added will add the values from the RequestProcessor to the
// running total for the GlobalRequestProcessor
RequestGroupInfo global = handler.getProtocol().getGlobal();
if (global != null) {
global.removeRequestProcessor(request.getRequestProcessor());
}
// Clear fields that can be cleared to aid GC and trigger NPEs if this
// is reused
setSocketWrapper(null);
}
@Override
protected final Log getLog() {
return log;
}
@Override
protected ServletConnection getServletConnection() {
return handler.getServletConnection();
}
@Override
public final void pause() {
// NO-OP. Handled by the Http2UpgradeHandler
}
// Services a single HTTP/2 request/response exchange: validates the
// request, hands it to the adapter, and maps the outcome to a SocketState
// (SENDFILE while a sendfile is pending, LONG for async, CLOSED otherwise).
@Override
public final SocketState service(SocketWrapperBase<?> socket) throws IOException {
try {
if (validateRequest()) {
adapter.service(request, response);
} else {
// Request failed the HTTP/2-specific validation checks; reject
// with a 400 rather than processing it.
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
adapter.log(request, response, 0);
setErrorState(ErrorState.CLOSE_CLEAN, null);
}
} catch (Exception e) {
if (log.isDebugEnabled()) {
log.debug(sm.getString("streamProcessor.service.error"), e);
}
response.setStatus(500);
setErrorState(ErrorState.CLOSE_NOW, e);
}
// Order matters: a pending sendfile takes precedence, then error
// handling, then async continuation.
if (sendfileState == SendfileState.PENDING) {
return SocketState.SENDFILE;
} else if (getErrorState().isError()) {
action(ActionCode.CLOSE, null);
request.updateCounters();
return SocketState.CLOSED;
} else if (isAsync()) {
return SocketState.LONG;
} else {
action(ActionCode.CLOSE, null);
request.updateCounters();
return SocketState.CLOSED;
}
}
/*
* In HTTP/1.1 some aspects of the request are validated as the request is
* parsed and the request rejected immediately with a 400 response. These
* checks are performed in Http11InputBuffer. Because, in Tomcat's HTTP/2
* implementation, incoming frames are processed on one thread while the
* corresponding request/response is processed on a separate thread,
* rejecting invalid requests is more involved.
*
* One approach would be to validate the request during parsing, note any
* validation errors and then generate a 400 response once processing moves
* to the separate request/response thread. This would require refactoring
* to track the validation errors.
*
* A second approach, and the one currently adopted, is to perform the
* validation shortly after processing of the received request passes to the
* separate thread and to generate a 400 response if validation fails.
*
* The checks performed below are based on the checks in Http11InputBuffer.
*/
/*
 * Validates the request along the same lines as Http11InputBuffer: the
 * method must be a token, the request target and query string must not
 * contain invalid characters, header names must be tokens and HTTP/2
 * pseudo-headers must all appear before any regular header.
 *
 * Returns true if the request is valid, false if it should be rejected
 * with a 400 response.
 */
private boolean validateRequest() {
    HttpParser httpParser = new HttpParser(handler.getProtocol().getHttp11Protocol().getRelaxedPathChars(),
            handler.getProtocol().getHttp11Protocol().getRelaxedQueryChars());
    // Method name must be a token
    String method = request.method().toString();
    if (!HttpParser.isToken(method)) {
        return false;
    }
    // Invalid character in request target
    // (other checks such as valid %nn happen later)
    ByteChunk bc = request.requestURI().getByteChunk();
    for (int i = bc.getStart(); i < bc.getEnd(); i++) {
        if (httpParser.isNotRequestTargetRelaxed(bc.getBuffer()[i])) {
            return false;
        }
    }
    // Ensure the query string doesn't contain invalid characters.
    // (other checks such as valid %nn happen later)
    String qs = request.queryString().toString();
    if (qs != null) {
        for (char c : qs.toCharArray()) {
            if (!httpParser.isQueryRelaxed(c)) {
                return false;
            }
        }
    }
    // HTTP header names must be tokens and pseudo-headers must precede
    // all regular headers.
    MimeHeaders headers = request.getMimeHeaders();
    boolean previousHeaderWasPseudoHeader = true;
    Enumeration<String> names = headers.names();
    while (names.hasMoreElements()) {
        String name = names.nextElement();
        if (H2_PSEUDO_HEADERS_REQUEST.contains(name)) {
            if (!previousHeaderWasPseudoHeader) {
                // Pseudo-header after a regular header - invalid.
                return false;
            }
        } else if (!HttpParser.isToken(name)) {
            // Regular header name is not a valid token.
            return false;
        } else {
            // Bug fix: the flag must be cleared for every valid regular
            // header, otherwise a pseudo-header appearing after regular
            // headers would incorrectly pass validation. Previously the
            // flag was only (pointlessly) cleared just before returning
            // false on an invalid token.
            previousHeaderWasPseudoHeader = false;
        }
    }
    return true;
}
// Attempts a non-blocking flush of buffered output. Returns true if data
// remains buffered (the stream is re-registered for write), false if the
// buffer was fully emptied.
@Override
protected final boolean flushBufferedWrite() throws IOException {
if (log.isDebugEnabled()) {
log.debug(sm.getString("streamProcessor.flushBufferedWrite.entry",
stream.getConnectionId(), stream.getIdAsString()));
}
if (stream.flush(false)) {
// The buffer wasn't fully flushed so re-register the
// stream for write. Note this does not go via the
// Response since the write registration state at
// that level should remain unchanged. Once the buffer
// has been emptied then the code below will call
// dispatch() which will enable the
// Response to respond to this event.
if (stream.isReadyForWrite()) {
// Unexpected
throw new IllegalStateException();
}
return true;
}
return false;
}
// A dispatched end-of-request simply closes the stream.
@Override
protected final SocketState dispatchEndRequest() throws IOException {
return SocketState.CLOSED;
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.machinelearning.model;
import java.io.Serializable;
/**
* <p>
* Represents the query results from a <a>DescribeEvaluations</a> operation. The
* content is essentially a list of <code>Evaluation</code>.
* </p>
*/
/**
 * <p>
 * Represents the query results from a <a>DescribeEvaluations</a> operation. The
 * content is essentially a list of <code>Evaluation</code>.
 * </p>
 */
public class DescribeEvaluationsResult implements Serializable, Cloneable {

    /**
     * <p>
     * A list of <a>Evaluation</a> that meet the search criteria.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<Evaluation> results;

    /**
     * <p>
     * The ID of the next page in the paginated results that indicates at least
     * one more page follows.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * A list of <a>Evaluation</a> that meet the search criteria.
     * </p>
     *
     * @return A list of <a>Evaluation</a> that meet the search criteria.
     */
    public java.util.List<Evaluation> getResults() {
        // Lazily create the backing list so callers never see null.
        if (results != null) {
            return results;
        }
        results = new com.amazonaws.internal.SdkInternalList<Evaluation>();
        return results;
    }

    /**
     * <p>
     * A list of <a>Evaluation</a> that meet the search criteria.
     * </p>
     *
     * @param results
     *        A list of <a>Evaluation</a> that meet the search criteria.
     */
    public void setResults(java.util.Collection<Evaluation> results) {
        // A null collection clears the field; otherwise copy defensively.
        this.results = (results == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<Evaluation>(results);
    }

    /**
     * <p>
     * A list of <a>Evaluation</a> that meet the search criteria.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setResults(java.util.Collection)} or
     * {@link #withResults(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param results
     *        A list of <a>Evaluation</a> that meet the search criteria.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeEvaluationsResult withResults(Evaluation... results) {
        if (this.results == null) {
            setResults(new com.amazonaws.internal.SdkInternalList<Evaluation>(results.length));
        }
        java.util.Collections.addAll(this.results, results);
        return this;
    }

    /**
     * <p>
     * A list of <a>Evaluation</a> that meet the search criteria.
     * </p>
     *
     * @param results
     *        A list of <a>Evaluation</a> that meet the search criteria.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeEvaluationsResult withResults(java.util.Collection<Evaluation> results) {
        setResults(results);
        return this;
    }

    /**
     * <p>
     * The ID of the next page in the paginated results that indicates at least
     * one more page follows.
     * </p>
     *
     * @param nextToken
     *        The ID of the next page in the paginated results that indicates at
     *        least one more page follows.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The ID of the next page in the paginated results that indicates at least
     * one more page follows.
     * </p>
     *
     * @return The ID of the next page in the paginated results that indicates
     *         at least one more page follows.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The ID of the next page in the paginated results that indicates at least
     * one more page follows.
     * </p>
     *
     * @param nextToken
     *        The ID of the next page in the paginated results that indicates at
     *        least one more page follows.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeEvaluationsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        // getResults() lazily initialises, so this branch always appends.
        if (getResults() != null) {
            sb.append("Results: ").append(getResults()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeEvaluationsResult)) {
            return false;
        }
        DescribeEvaluationsResult other = (DescribeEvaluationsResult) obj;
        // Objects.equals covers the null/null and null/non-null cases the
        // original expressed with XOR checks.
        return java.util.Objects.equals(other.getResults(), this.getResults())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds with the same prime-31 formula as the original
        // manual implementation, so hash values are unchanged.
        return java.util.Objects.hash(getResults(), getNextToken());
    }

    @Override
    public DescribeEvaluationsResult clone() {
        try {
            return (DescribeEvaluationsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.rest;
import org.jboss.resteasy.annotations.cache.NoCache;
import org.jboss.resteasy.spi.BadRequestException;
import org.keycloak.common.util.Time;
import org.keycloak.component.ComponentModel;
import org.keycloak.events.Event;
import org.keycloak.events.EventQuery;
import org.keycloak.events.EventStoreProvider;
import org.keycloak.events.EventType;
import org.keycloak.events.admin.AdminEvent;
import org.keycloak.events.admin.AdminEventQuery;
import org.keycloak.events.admin.AuthDetails;
import org.keycloak.events.admin.OperationType;
import org.keycloak.events.admin.ResourceType;
import org.keycloak.models.AuthenticationFlowModel;
import org.keycloak.models.ClientModel;
import org.keycloak.models.FederatedIdentityModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RealmProvider;
import org.keycloak.models.UserCredentialModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.UserProvider;
import org.keycloak.models.UserSessionModel;
import org.keycloak.models.utils.ModelToRepresentation;
import org.keycloak.provider.ProviderFactory;
import org.keycloak.representations.idm.AdminEventRepresentation;
import org.keycloak.representations.idm.AuthDetailsRepresentation;
import org.keycloak.representations.idm.AuthenticationFlowRepresentation;
import org.keycloak.representations.idm.EventRepresentation;
import org.keycloak.representations.idm.UserRepresentation;
import org.keycloak.services.managers.RealmManager;
import org.keycloak.services.resource.RealmResourceProvider;
import org.keycloak.storage.UserStorageProvider;
import org.keycloak.testsuite.components.TestProvider;
import org.keycloak.testsuite.components.TestProviderFactory;
import org.keycloak.testsuite.events.EventsListenerProvider;
import org.keycloak.testsuite.federation.DummyUserFederationProviderFactory;
import org.keycloak.testsuite.forms.PassThroughAuthenticator;
import org.keycloak.testsuite.forms.PassThroughClientAuthenticator;
import org.keycloak.testsuite.rest.representation.AuthenticatorState;
import org.keycloak.testsuite.rest.resource.TestCacheResource;
import org.keycloak.testsuite.rest.resource.TestingExportImportResource;
import org.keycloak.testsuite.runonserver.ModuleUtil;
import org.keycloak.testsuite.runonserver.FetchOnServer;
import org.keycloak.testsuite.runonserver.RunOnServer;
import org.keycloak.testsuite.runonserver.SerializationUtil;
import org.keycloak.util.JsonSerialization;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class TestingResourceProvider implements RealmResourceProvider {
private KeycloakSession session;
// This provider exposes itself directly as the JAX-RS resource.
@Override
public Object getResource() {
return this;
}
// Each instance is bound to the KeycloakSession of the current request.
public TestingResourceProvider(KeycloakSession session) {
this.session = session;
}
@POST
@Path("/remove-user-session")
@Produces(MediaType.APPLICATION_JSON)
public Response removeUserSession(@QueryParam("realm") final String name, @QueryParam("session") final String sessionId) {
    // Resolve the realm; an unknown realm or session is reported as 404.
    RealmModel realm = new RealmManager(session).getRealmByName(name);
    if (realm == null) {
        throw new NotFoundException("Realm not found");
    }
    UserSessionModel userSession = session.sessions().getUserSession(realm, sessionId);
    if (userSession == null) {
        throw new NotFoundException("Session not found");
    }
    // Remove the session and report success.
    session.sessions().removeUserSession(realm, userSession);
    return Response.ok().build();
}
// Removes every user session in the named realm.
@POST
@Path("/remove-user-sessions")
@Produces(MediaType.APPLICATION_JSON)
public Response removeUserSessions(@QueryParam("realm") final String realmName) {
RealmManager realmManager = new RealmManager(session);
RealmModel realm = realmManager.getRealmByName(realmName);
if (realm == null) {
throw new NotFoundException("Realm not found");
}
session.sessions().removeUserSessions(realm);
return Response.ok().build();
}
// Returns the last-refresh timestamp of a single user session.
@GET
@Path("/get-user-session")
@Produces(MediaType.APPLICATION_JSON)
public Integer getLastSessionRefresh(@QueryParam("realm") final String name, @QueryParam("session") final String sessionId) {
RealmManager realmManager = new RealmManager(session);
RealmModel realm = realmManager.getRealmByName(name);
if (realm == null) {
throw new NotFoundException("Realm not found");
}
UserSessionModel sessionModel = session.sessions().getUserSession(realm, sessionId);
if (sessionModel == null) {
throw new NotFoundException("Session not found");
}
return sessionModel.getLastSessionRefresh();
}
// Expires sessions, authentication sessions and client initial access
// tokens for the named realm in one call.
@POST
@Path("/remove-expired")
@Produces(MediaType.APPLICATION_JSON)
public Response removeExpired(@QueryParam("realm") final String name) {
RealmManager realmManager = new RealmManager(session);
RealmModel realm = realmManager.getRealmByName(name);
if (realm == null) {
throw new NotFoundException("Realm not found");
}
session.sessions().removeExpired(realm);
session.authenticationSessions().removeExpired(realm);
session.realms().removeExpiredClientInitialAccess();
return Response.ok().build();
}
// Reports the server's current (possibly offset) time and the offset.
@GET
@Path("/time-offset")
@Produces(MediaType.APPLICATION_JSON)
public Map<String, String> getTimeOffset() {
Map<String, String> response = new HashMap<>();
response.put("currentTime", String.valueOf(Time.currentTime()));
response.put("offset", String.valueOf(Time.getOffset()));
return response;
}
// Applies a test-only global time offset and echoes the resulting state.
// NOTE(review): the offset is JVM-global, so concurrent tests will
// observe each other's changes.
@PUT
@Path("/time-offset")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Map<String, String> setTimeOffset(Map<String, String> time) {
int offset = Integer.parseInt(time.get("offset"));
Time.setOffset(offset);
return getTimeOffset();
}
// Pops the next login event from the in-memory listener queue; returns
// null (empty response) when the queue is empty.
@POST
@Path("/poll-event-queue")
@Produces(MediaType.APPLICATION_JSON)
public EventRepresentation getEvent() {
Event event = EventsListenerProvider.poll();
if (event != null) {
return ModelToRepresentation.toRepresentation(event);
} else {
return null;
}
}
// Pops the next admin event from the in-memory listener queue.
@POST
@Path("/poll-admin-event-queue")
@Produces(MediaType.APPLICATION_JSON)
public AdminEventRepresentation getAdminEvent() {
AdminEvent adminEvent = EventsListenerProvider.pollAdminEvent();
if (adminEvent != null) {
return ModelToRepresentation.toRepresentation(adminEvent);
} else {
return null;
}
}
// Discards all queued login events.
@POST
@Path("/clear-event-queue")
@Produces(MediaType.APPLICATION_JSON)
public Response clearEventQueue() {
EventsListenerProvider.clear();
return Response.ok().build();
}
// Discards all queued admin events.
@POST
@Path("/clear-admin-event-queue")
@Produces(MediaType.APPLICATION_JSON)
public Response clearAdminEventQueue() {
EventsListenerProvider.clearAdminEvents();
return Response.ok().build();
}
// Clears the persistent event store across all realms.
@GET
@Path("/clear-event-store")
@Produces(MediaType.APPLICATION_JSON)
public Response clearEventStore() {
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
eventStore.clear();
return Response.ok().build();
}
// Clears the persistent event store for one realm.
@GET
@Path("/clear-event-store-for-realm")
@Produces(MediaType.APPLICATION_JSON)
public Response clearEventStore(@QueryParam("realmId") String realmId) {
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
eventStore.clear(realmId);
return Response.ok().build();
}
// Clears events for one realm that are older than the given timestamp.
@GET
@Path("/clear-event-store-older-than")
@Produces(MediaType.APPLICATION_JSON)
public Response clearEventStore(@QueryParam("realmId") String realmId, @QueryParam("olderThan") long olderThan) {
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
eventStore.clear(realmId, olderThan);
return Response.ok().build();
}
/**
* Query events
*
* Returns all events, or filters them based on URL query parameters listed here
*
* @param realmId The realm
* @param types The types of events to return
* @param client App or oauth client name
* @param user User id
* @param dateFrom From date
* @param dateTo To date
* @param ipAddress IP address
* @param firstResult Paging offset
* @param maxResults Paging size
* @return
*/
@Path("query-events")
@GET
@NoCache
@Produces(MediaType.APPLICATION_JSON)
public List<EventRepresentation> queryEvents(@QueryParam("realmId") String realmId, @QueryParam("type") List<String> types, @QueryParam("client") String client,
        @QueryParam("user") String user, @QueryParam("dateFrom") String dateFrom, @QueryParam("dateTo") String dateTo,
        @QueryParam("ipAddress") String ipAddress, @QueryParam("first") Integer firstResult,
        @QueryParam("max") Integer maxResults) {
    // Build the query incrementally; each non-null parameter adds a filter.
    EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
    EventQuery query = eventStore.createQuery();
    if (realmId != null) {
        query.realm(realmId);
    }
    if (client != null) {
        query.client(client);
    }
    // Bug fix: use short-circuit && rather than non-short-circuit &, which
    // evaluated !types.isEmpty() even when types was null and could throw
    // a NullPointerException. This now matches getAdminEvents().
    if (types != null && !types.isEmpty()) {
        EventType[] t = new EventType[types.size()];
        for (int i = 0; i < t.length; i++) {
            t[i] = EventType.valueOf(types.get(i));
        }
        query.type(t);
    }
    if (user != null) {
        query.user(user);
    }
    // Date parameters are yyyy-MM-dd strings; formatDate() rejects bad
    // input with a 400 response.
    if (dateFrom != null) {
        Date from = formatDate(dateFrom, "Date(From)");
        query.fromDate(from);
    }
    if (dateTo != null) {
        Date to = formatDate(dateTo, "Date(To)");
        query.toDate(to);
    }
    if (ipAddress != null) {
        query.ipAddress(ipAddress);
    }
    if (firstResult != null) {
        query.firstResult(firstResult);
    }
    if (maxResults != null) {
        query.maxResults(maxResults);
    }
    return toEventListRep(query.getResultList());
}
// Converts a list of model events to REST representations, preserving
// the original ordering.
private List<EventRepresentation> toEventListRep(List<Event> events) {
    List<EventRepresentation> representations = new ArrayList<>(events.size());
    for (int i = 0; i < events.size(); i++) {
        representations.add(ModelToRepresentation.toRepresentation(events.get(i)));
    }
    return representations;
}
// Persists a login event (supplied as its REST representation) directly
// into the event store.
@PUT
@Path("/on-event")
@Consumes(MediaType.APPLICATION_JSON)
public void onEvent(final EventRepresentation rep) {
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
eventStore.onEvent(repToModel(rep));
}
// Maps an EventRepresentation back to the Event model, field by field.
private Event repToModel(EventRepresentation rep) {
Event event = new Event();
event.setClientId(rep.getClientId());
event.setDetails(rep.getDetails());
event.setError(rep.getError());
event.setIpAddress(rep.getIpAddress());
event.setRealmId(rep.getRealmId());
event.setSessionId(rep.getSessionId());
event.setTime(rep.getTime());
event.setType(EventType.valueOf(rep.getType()));
event.setUserId(rep.getUserId());
return event;
}
// Clears the persistent admin event store across all realms.
@GET
@Path("/clear-admin-event-store")
@Produces(MediaType.APPLICATION_JSON)
public Response clearAdminEventStore() {
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
eventStore.clearAdmin();
return Response.ok().build();
}
// Clears the persistent admin event store for one realm.
@GET
@Path("/clear-admin-event-store-for-realm")
@Produces(MediaType.APPLICATION_JSON)
public Response clearAdminEventStore(@QueryParam("realmId") String realmId) {
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
eventStore.clearAdmin(realmId);
return Response.ok().build();
}
// Clears admin events for one realm older than the given timestamp.
@GET
@Path("/clear-admin-event-store-older-than")
@Produces(MediaType.APPLICATION_JSON)
public Response clearAdminEventStore(@QueryParam("realmId") String realmId, @QueryParam("olderThan") long olderThan) {
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
eventStore.clearAdmin(realmId, olderThan);
return Response.ok().build();
}
/**
* Get admin events
*
* Returns all admin events, or filters events based on URL query parameters listed here
*
* @param realmId
* @param operationTypes
* @param authRealm
* @param authClient
* @param authUser user id
* @param authIpAddress
* @param resourcePath
* @param dateFrom
* @param dateTo
* @param firstResult
* @param maxResults
* @return
*/
@Path("query-admin-events")
@GET
@NoCache
@Produces(MediaType.APPLICATION_JSON)
public List<AdminEventRepresentation> getAdminEvents(@QueryParam("realmId") String realmId, @QueryParam("operationTypes") List<String> operationTypes, @QueryParam("authRealm") String authRealm, @QueryParam("authClient") String authClient,
        @QueryParam("authUser") String authUser, @QueryParam("authIpAddress") String authIpAddress,
        @QueryParam("resourcePath") String resourcePath, @QueryParam("dateFrom") String dateFrom,
        @QueryParam("dateTo") String dateTo, @QueryParam("first") Integer firstResult,
        @QueryParam("max") Integer maxResults) {
    // Build the admin event query incrementally; each non-null parameter
    // adds a filter.
    EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
    AdminEventQuery query = eventStore.createAdminQuery();
    if (realmId != null) {
        query.realm(realmId);
    } // fixed: removed stray ';' that followed this brace
    if (authRealm != null) {
        query.authRealm(authRealm);
    }
    if (authClient != null) {
        query.authClient(authClient);
    }
    if (authUser != null) {
        query.authUser(authUser);
    }
    if (authIpAddress != null) {
        query.authIpAddress(authIpAddress);
    }
    if (resourcePath != null) {
        query.resourcePath(resourcePath);
    }
    if (operationTypes != null && !operationTypes.isEmpty()) {
        OperationType[] t = new OperationType[operationTypes.size()];
        for (int i = 0; i < t.length; i++) {
            t[i] = OperationType.valueOf(operationTypes.get(i));
        }
        query.operation(t);
    }
    // Date parameters are yyyy-MM-dd strings; formatDate() rejects bad
    // input with a 400 response.
    if (dateFrom != null) {
        Date from = formatDate(dateFrom, "Date(From)");
        query.fromTime(from);
    }
    if (dateTo != null) {
        Date to = formatDate(dateTo, "Date(To)");
        query.toTime(to);
    }
    // Paging: if either bound is supplied, default the other one
    // (offset 0, page size 100).
    if (firstResult != null || maxResults != null) {
        if (firstResult == null) {
            firstResult = 0;
        }
        if (maxResults == null) {
            maxResults = 100;
        }
        query.firstResult(firstResult);
        query.maxResults(maxResults);
    }
    return toAdminEventRep(query.getResultList());
}
/**
 * Parses a {@code yyyy-MM-dd} date query parameter.
 *
 * @param date      the raw parameter value
 * @param paramName the parameter name used in the error message
 * @return the parsed date
 * @throws BadRequestException if the value is not a valid yyyy-MM-dd date
 */
private Date formatDate(String date, String paramName) {
    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd");
    // Robustness fix: disable lenient parsing so out-of-range values such
    // as "2016-13-40" are rejected with a 400 instead of being silently
    // rolled over into a different date.
    df.setLenient(false);
    try {
        return df.parse(date);
    } catch (ParseException e) {
        throw new BadRequestException("Invalid value for '" + paramName + "', expected format is yyyy-MM-dd");
    }
}
private List<AdminEventRepresentation> toAdminEventRep(List<AdminEvent> events) {
List<AdminEventRepresentation> reps = new ArrayList<>();
for (AdminEvent event : events) {
reps.add(ModelToRepresentation.toRepresentation(event));
}
return reps;
}
@POST
@Path("/on-admin-event")
@Consumes(MediaType.APPLICATION_JSON)
public void onAdminEvent(final AdminEventRepresentation rep, @QueryParam("includeRepresentation") boolean includeRepresentation) {
EventStoreProvider eventStore = session.getProvider(EventStoreProvider.class);
eventStore.onEvent(repToModel(rep), includeRepresentation);
}
/**
 * Converts an {@link AdminEventRepresentation} back into the {@link AdminEvent} model.
 * Optional fields ({@code authDetails}, {@code resourceType}) are only converted
 * when present.
 */
private AdminEvent repToModel(AdminEventRepresentation rep) {
    AdminEvent event = new AdminEvent();
    // Auth details are optional on the wire; converting a null representation
    // would previously throw an NPE inside the AuthDetails converter.
    if (rep.getAuthDetails() != null) {
        event.setAuthDetails(repToModel(rep.getAuthDetails()));
    }
    event.setError(rep.getError());
    event.setOperationType(OperationType.valueOf(rep.getOperationType()));
    if (rep.getResourceType() != null) {
        event.setResourceType(ResourceType.valueOf(rep.getResourceType()));
    }
    event.setRealmId(rep.getRealmId());
    event.setRepresentation(rep.getRepresentation());
    event.setResourcePath(rep.getResourcePath());
    event.setTime(rep.getTime());
    return event;
}
/**
 * Converts an {@link AuthDetailsRepresentation} into the {@link AuthDetails} model.
 */
private AuthDetails repToModel(AuthDetailsRepresentation rep) {
    AuthDetails authDetails = new AuthDetails();
    authDetails.setRealmId(rep.getRealmId());
    authDetails.setClientId(rep.getClientId());
    authDetails.setUserId(rep.getUserId());
    authDetails.setIpAddress(rep.getIpAddress());
    return authDetails;
}
@Path("/cache/{cache}")
public TestCacheResource getCacheResource(@PathParam("cache") String cacheName) {
    // JAX-RS sub-resource locator: the remainder of the request path is
    // dispatched to the returned resource for the named cache.
    TestCacheResource cacheResource = new TestCacheResource(session, cacheName);
    return cacheResource;
}
@Override
public void close() {
    // Intentionally a no-op: this resource holds no resources of its own;
    // the KeycloakSession lifecycle is managed by the container.
}
@POST
@Path("/update-pass-through-auth-state")
@Produces(MediaType.APPLICATION_JSON)
public AuthenticatorState updateAuthenticator(AuthenticatorState state) {
    // Apply only the fields that were supplied; a null field means "leave unchanged".
    String newClientId = state.getClientId();
    if (newClientId != null) {
        PassThroughClientAuthenticator.clientId = newClientId;
    }
    String newUsername = state.getUsername();
    if (newUsername != null) {
        PassThroughAuthenticator.username = newUsername;
    }
    // Echo back the effective state after the update.
    AuthenticatorState effective = new AuthenticatorState();
    effective.setClientId(PassThroughClientAuthenticator.clientId);
    effective.setUsername(PassThroughAuthenticator.username);
    return effective;
}
/**
 * Checks whether the given username/password pair is valid in the named realm.
 *
 * @return {@code false} when the realm or user does not exist, or the password
 *         is invalid; {@code true} otherwise
 */
@GET
@Path("/valid-credentials")
@Produces(MediaType.APPLICATION_JSON)
public boolean validCredentials(@QueryParam("realmName") String realmName, @QueryParam("userName") String userName, @QueryParam("password") String password) {
    RealmModel realm = session.realms().getRealm(realmName);
    if (realm == null) return false;
    UserProvider userProvider = session.getProvider(UserProvider.class);
    UserModel user = userProvider.getUserByUsername(userName, realm);
    // An unknown user cannot have valid credentials; also avoids handing a
    // null user to the credential manager.
    if (user == null) return false;
    return session.userCredentialManager().isValid(realm, user, UserCredentialModel.password(password));
}
/**
 * Looks up the user linked to the given federated identity, or {@code null}
 * if no such link exists.
 */
@GET
@Path("/user-by-federated-identity")
@Produces(MediaType.APPLICATION_JSON)
public UserRepresentation getUserByFederatedIdentity(@QueryParam("realmName") String realmName,
                                                     @QueryParam("identityProvider") String identityProvider,
                                                     @QueryParam("userId") String userId,
                                                     @QueryParam("userName") String userName) {
    RealmModel realm = getRealmByName(realmName);
    FederatedIdentityModel identity = new FederatedIdentityModel(identityProvider, userId, userName);
    UserModel linkedUser = session.users().getUserByFederatedIdentity(identity, realm);
    return linkedUser == null ? null : ModelToRepresentation.toRepresentation(session, realm, linkedUser);
}
/**
 * Resolves a user by username through the "dummy" user-federation provider
 * factory directly (bypassing the regular user cache/storage chain).
 */
@GET
@Path("/user-by-username-from-fed-factory")
@Produces(MediaType.APPLICATION_JSON)
public UserRepresentation getUserByUsernameFromFedProviderFactory(@QueryParam("realmName") String realmName,
                                                                  @QueryParam("userName") String userName) {
    RealmModel realm = getRealmByName(realmName);
    DummyUserFederationProviderFactory factory =
            (DummyUserFederationProviderFactory) session.getKeycloakSessionFactory().getProviderFactory(UserStorageProvider.class, "dummy");
    UserModel resolved = factory.create(session, null).getUserByUsername(userName, realm);
    return resolved == null ? null : ModelToRepresentation.toRepresentation(session, realm, resolved);
}
/**
 * Returns the realm's client-authentication flow, or {@code null} when none
 * is configured.
 */
@GET
@Path("/get-client-auth-flow")
@Produces(MediaType.APPLICATION_JSON)
public AuthenticationFlowRepresentation getClientAuthFlow(@QueryParam("realmName") String realmName) {
    RealmModel realm = getRealmByName(realmName);
    AuthenticationFlowModel clientAuthFlow = realm.getClientAuthenticationFlow();
    return clientAuthFlow == null ? null : ModelToRepresentation.toRepresentation(realm, clientAuthFlow);
}
/**
 * Returns the realm's reset-credentials flow, or {@code null} when none is
 * configured.
 */
@GET
@Path("/get-reset-cred-flow")
@Produces(MediaType.APPLICATION_JSON)
public AuthenticationFlowRepresentation getResetCredFlow(@QueryParam("realmName") String realmName) {
    RealmModel realm = getRealmByName(realmName);
    AuthenticationFlowModel resetFlow = realm.getResetCredentialsFlow();
    return resetFlow == null ? null : ModelToRepresentation.toRepresentation(realm, resetFlow);
}
/**
 * Returns the service-account user of the given client, or {@code null} when
 * the client does not exist or has no service account.
 */
@GET
@Path("/get-user-by-service-account-client")
@Produces(MediaType.APPLICATION_JSON)
public UserRepresentation getUserByServiceAccountClient(@QueryParam("realmName") String realmName, @QueryParam("clientId") String clientId) {
    RealmModel realm = getRealmByName(realmName);
    ClientModel client = realm.getClientByClientId(clientId);
    // Unknown client id: report "no user" rather than failing with an NPE downstream.
    if (client == null) return null;
    UserModel user = session.users().getServiceAccount(client);
    if (user == null) return null;
    return ModelToRepresentation.toRepresentation(session, realm, user);
}
@Path("/export-import")
public TestingExportImportResource getExportImportResource() {
    // JAX-RS sub-resource locator for export/import test endpoints.
    TestingExportImportResource exportImport = new TestingExportImportResource(session);
    return exportImport;
}
/**
 * Collects the details of every {@link TestProvider} component configured on
 * the current realm, keyed by component name.
 */
@GET
@Path("/test-component")
@Produces(MediaType.APPLICATION_JSON)
public Map<String, TestProvider.DetailsRepresentation> getTestComponentDetails() {
    RealmModel realm = session.getContext().getRealm();
    Map<String, TestProvider.DetailsRepresentation> detailsByName = new HashMap<>();
    for (ComponentModel component : realm.getComponents(realm.getId(), TestProvider.class.getName())) {
        // The generic ProviderFactory is known to be a TestProviderFactory here.
        TestProviderFactory providerFactory =
                (TestProviderFactory) session.getKeycloakSessionFactory().getProviderFactory(TestProvider.class, component.getProviderId());
        TestProvider provider = (TestProvider) providerFactory.create(session, component);
        detailsByName.put(component.getName(), provider.getDetails());
    }
    return detailsByName;
}
/**
 * Returns the raw configuration map of the identity provider with the given
 * alias in the current realm.
 */
@GET
@Path("/identity-config")
@Produces(MediaType.APPLICATION_JSON)
public Map<String, String> getIdentityProviderConfig(@QueryParam("alias") String alias) {
    RealmModel realm = session.getContext().getRealm();
    return realm.getIdentityProviderByAlias(alias).getConfig();
}
@PUT
@Path("/set-krb5-conf-file")
@Consumes(MediaType.APPLICATION_JSON)
public void setKrb5ConfFile(@QueryParam("krb5-conf-file") String krb5ConfFile) {
    // Points the JVM-wide Kerberos configuration at the given file; this
    // affects every subsequent Kerberos operation in this server process.
    System.setProperty("java.security.krb5.conf", krb5ConfFile);
}
/**
 * Deserializes a {@code FetchOnServer}/{@code RunOnServer} task posted by a
 * test client and executes it inside the server JVM. A {@code FetchOnServer}
 * result is returned as JSON; any throwable is encoded back to the caller
 * instead of propagating.
 */
@POST
@Path("/run-on-server")
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.TEXT_PLAIN)
public String runOnServer(String runOnServer) throws Exception {
    try {
        // Use the modules class loader when running inside a modular container,
        // otherwise fall back to this class's own loader.
        ClassLoader loader;
        if (ModuleUtil.isModules()) {
            loader = ModuleUtil.getClassLoader();
        } else {
            loader = getClass().getClassLoader();
        }
        Object task = SerializationUtil.decode(runOnServer, loader);
        // Check FetchOnServer first to preserve dispatch order for tasks
        // implementing both interfaces.
        if (task instanceof FetchOnServer) {
            Object result = ((FetchOnServer) task).run(session);
            if (result == null) {
                return null;
            }
            return JsonSerialization.writeValueAsString(result);
        }
        if (task instanceof RunOnServer) {
            ((RunOnServer) task).run(session);
            return null;
        }
        throw new IllegalArgumentException();
    } catch (Throwable t) {
        // Ship the failure back to the test client for re-throwing there.
        return SerializationUtil.encodeException(t);
    }
}
/**
 * Resolves a realm by name via the {@link RealmProvider}.
 */
private RealmModel getRealmByName(String realmName) {
    return session.getProvider(RealmProvider.class).getRealmByName(realmName);
}
}
| |
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hystrix.contrib.javanica.test.common.fallback;
import com.netflix.hystrix.HystrixEventType;
import com.netflix.hystrix.HystrixInvokableInfo;
import com.netflix.hystrix.HystrixRequestLog;
import com.netflix.hystrix.contrib.javanica.annotation.HystrixCommand;
import com.netflix.hystrix.contrib.javanica.annotation.HystrixProperty;
import com.netflix.hystrix.contrib.javanica.command.AsyncResult;
import com.netflix.hystrix.contrib.javanica.exception.FallbackDefinitionException;
import com.netflix.hystrix.contrib.javanica.test.common.BasicHystrixTest;
import com.netflix.hystrix.contrib.javanica.test.common.domain.Domain;
import com.netflix.hystrix.contrib.javanica.test.common.domain.User;
import org.apache.commons.lang3.Validate;
import org.junit.Before;
import org.junit.Test;
import rx.Observable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import static com.netflix.hystrix.contrib.javanica.test.common.CommonUtils.getHystrixCommandByKey;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Base test suite for Hystrix command fallbacks declared with
 * {@code @HystrixCommand(fallbackMethod = ...)}. Concrete subclasses only
 * supply the (possibly AOP-proxied) {@link UserService} instance under test.
 */
public abstract class BasicCommandFallbackTest extends BasicHystrixTest {
    private UserService userService;
    protected abstract UserService createUserService();
    @Before
    public void setUp() throws Exception {
        super.setUp();
        userService = createUserService();
    }
    @Test
    public void testGetUserAsyncWithFallback() throws ExecutionException, InterruptedException {
        // blank id fails validation inside the command, triggering the fallback
        Future<User> f1 = userService.getUserAsync(" ", "name: ");
        assertEquals("def", f1.get().getName());
        assertEquals(1, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        HystrixInvokableInfo<?> command = HystrixRequestLog.getCurrentRequest()
                .getAllExecutedCommands().iterator().next();
        assertEquals("getUserAsync", command.getCommandKey().name());
        // confirm that 'getUserAsync' command has failed
        assertTrue(command.getExecutionEvents().contains(HystrixEventType.FAILURE));
        // and that fallback was successful
        assertTrue(command.getExecutionEvents().contains(HystrixEventType.FALLBACK_SUCCESS));
    }
    @Test
    public void testGetUserSyncWithFallback() {
        // blank id fails validation inside the command, triggering the fallback
        User u1 = userService.getUserSync(" ", "name: ");
        assertEquals("def", u1.getName());
        assertEquals(1, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        HystrixInvokableInfo<?> command = HystrixRequestLog.getCurrentRequest()
                .getAllExecutedCommands().iterator().next();
        assertEquals("getUserSync", command.getCommandKey().name());
        // confirm that command has failed
        assertTrue(command.getExecutionEvents().contains(HystrixEventType.FAILURE));
        // and that fallback was successful
        assertTrue(command.getExecutionEvents().contains(HystrixEventType.FALLBACK_SUCCESS));
    }
    /**
     * * **************************** *
     * * * TEST FALLBACK COMMANDS * *
     * * **************************** *
     */
    @Test
    public void testGetUserAsyncWithFallbackCommand() throws ExecutionException, InterruptedException {
        Future<User> f1 = userService.getUserAsyncFallbackCommand(" ", "name: ");
        assertEquals("def", f1.get().getName());
        // 3 commands executed: the primary plus its two chained fallback commands
        assertEquals(3, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        HystrixInvokableInfo<?> getUserAsyncFallbackCommand = getHystrixCommandByKey(
                "getUserAsyncFallbackCommand");
        com.netflix.hystrix.HystrixInvokableInfo firstFallbackCommand = getHystrixCommandByKey("firstFallbackCommand");
        com.netflix.hystrix.HystrixInvokableInfo secondFallbackCommand = getHystrixCommandByKey("secondFallbackCommand");
        assertEquals("getUserAsyncFallbackCommand", getUserAsyncFallbackCommand.getCommandKey().name());
        // confirm that command has failed
        assertTrue(getUserAsyncFallbackCommand.getExecutionEvents().contains(HystrixEventType.FAILURE));
        // confirm that first fallback has failed
        assertTrue(firstFallbackCommand.getExecutionEvents().contains(HystrixEventType.FAILURE));
        // and that second fallback was successful
        assertTrue(secondFallbackCommand.getExecutionEvents().contains(HystrixEventType.FALLBACK_SUCCESS));
    }
    @Test
    public void testGetUserAsyncFallbackAsyncCommand() throws ExecutionException, InterruptedException {
        Future<User> f1 = userService.getUserAsyncFallbackAsyncCommand(" ", "name: ");
        assertEquals("def", f1.get().getName());
        // 4 commands executed: the primary plus its three chained async fallback commands
        assertEquals(4, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        HystrixInvokableInfo<?> getUserAsyncFallbackAsyncCommand = getHystrixCommandByKey(
                "getUserAsyncFallbackAsyncCommand");
        com.netflix.hystrix.HystrixInvokableInfo firstAsyncFallbackCommand = getHystrixCommandByKey("firstAsyncFallbackCommand");
        com.netflix.hystrix.HystrixInvokableInfo secondAsyncFallbackCommand = getHystrixCommandByKey("secondAsyncFallbackCommand");
        com.netflix.hystrix.HystrixInvokableInfo thirdAsyncFallbackCommand = getHystrixCommandByKey("thirdAsyncFallbackCommand");
        assertEquals("getUserAsyncFallbackAsyncCommand", getUserAsyncFallbackAsyncCommand.getCommandKey().name());
        // confirm that command has failed
        assertTrue(getUserAsyncFallbackAsyncCommand.getExecutionEvents().contains(HystrixEventType.FAILURE));
        // confirm that first fallback has failed
        assertTrue(firstAsyncFallbackCommand.getExecutionEvents().contains(HystrixEventType.FAILURE));
        // confirm that second fallback has failed as well
        assertTrue(secondAsyncFallbackCommand.getExecutionEvents().contains(HystrixEventType.FAILURE));
        // the third fallback command also fails, but its own (plain-method)
        // fallback finally succeeds
        assertTrue(thirdAsyncFallbackCommand.getExecutionEvents().contains(HystrixEventType.FAILURE));
        assertTrue(thirdAsyncFallbackCommand.getExecutionEvents().contains(HystrixEventType.FALLBACK_SUCCESS));
    }
    @Test
    public void testGetUserSyncWithFallbackCommand() {
        User u1 = userService.getUserSyncFallbackCommand(" ", "name: ");
        assertEquals("def", u1.getName());
        // 3 commands executed: the primary plus its two chained fallback commands
        assertEquals(3, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        HystrixInvokableInfo<?> getUserSyncFallbackCommand = getHystrixCommandByKey(
                "getUserSyncFallbackCommand");
        com.netflix.hystrix.HystrixInvokableInfo firstFallbackCommand = getHystrixCommandByKey("firstFallbackCommand");
        com.netflix.hystrix.HystrixInvokableInfo secondFallbackCommand = getHystrixCommandByKey("secondFallbackCommand");
        assertEquals("getUserSyncFallbackCommand", getUserSyncFallbackCommand.getCommandKey().name());
        // confirm that command has failed
        assertTrue(getUserSyncFallbackCommand.getExecutionEvents().contains(HystrixEventType.FAILURE));
        // confirm that first fallback has failed
        assertTrue(firstFallbackCommand.getExecutionEvents().contains(HystrixEventType.FAILURE));
        // and that second fallback was successful
        assertTrue(secondFallbackCommand.getExecutionEvents().contains(HystrixEventType.FALLBACK_SUCCESS));
    }
    @Test
    public void testAsyncCommandWithAsyncFallbackCommand() throws ExecutionException, InterruptedException {
        Future<User> userFuture = userService.asyncCommandWithAsyncFallbackCommand("", "");
        User user = userFuture.get();
        assertEquals("def", user.getId());
        assertEquals(2, HystrixRequestLog.getCurrentRequest().getAllExecutedCommands().size());
        HystrixInvokableInfo<?> asyncCommandWithAsyncFallbackCommand = getHystrixCommandByKey("asyncCommandWithAsyncFallbackCommand");
        com.netflix.hystrix.HystrixInvokableInfo asyncFallbackCommand = getHystrixCommandByKey("asyncFallbackCommand");
        // confirm that command has failed
        assertTrue(asyncCommandWithAsyncFallbackCommand.getExecutionEvents().contains(HystrixEventType.FAILURE));
        assertTrue(asyncCommandWithAsyncFallbackCommand.getExecutionEvents().contains(HystrixEventType.FALLBACK_SUCCESS));
        // and that the fallback command itself completed successfully
        assertTrue(asyncFallbackCommand.getExecutionEvents().contains(HystrixEventType.SUCCESS));
    }
    // An async (Future-returning) fallback is only legal when it is itself a
    // Hystrix command; a plain async fallback method must be rejected.
    @Test(expected = FallbackDefinitionException.class)
    public void testAsyncCommandWithAsyncFallback() {
        userService.asyncCommandWithAsyncFallback("", "");
    }
    @Test(expected = FallbackDefinitionException.class)
    public void testCommandWithWrongFallbackReturnType() {
        userService.commandWithWrongFallbackReturnType("", "");
    }
    @Test(expected = FallbackDefinitionException.class)
    public void testAsyncCommandWithWrongFallbackReturnType() {
        userService.asyncCommandWithWrongFallbackReturnType("", "");
    }
    @Test(expected = FallbackDefinitionException.class)
    public void testCommandWithWrongFallbackParams() {
        userService.commandWithWrongFallbackParams("1", "2");
    }
    // A fallback may narrow the declared return type, but never widen it.
    @Test(expected = FallbackDefinitionException.class)
    public void testCommandWithFallbackReturnSuperType() {
        userService.commandWithFallbackReturnSuperType("", "");
    }
    @Test
    public void testCommandWithFallbackReturnSubType() {
        User user = (User) userService.commandWithFallbackReturnSubType("", "");
        assertEquals("def", user.getName());
    }
    // A fallback may declare one extra trailing Throwable parameter to receive
    // the command's failure cause.
    @Test
    public void testCommandWithFallbackWithAdditionalParameter() {
        User user = userService.commandWithFallbackWithAdditionalParameter("", "");
        assertEquals("def", user.getName());
    }
    /**
     * Service whose methods exercise every supported (and several unsupported)
     * fallback configuration. Blank arguments make the primary command fail.
     */
    public static class UserService {
        @HystrixCommand(fallbackMethod = "fallback")
        public Future<User> getUserAsync(final String id, final String name) {
            validate(id, name); // validate logic can be inside and outside of AsyncResult#invoke method
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    // validate(id, name); possible put validation logic here, in case of any exception a fallback method will be invoked
                    return new User(id, name + id); // it should be network call
                }
            };
        }
        @HystrixCommand(fallbackMethod = "fallback")
        public User getUserSync(String id, String name) {
            validate(id, name);
            return new User(id, name + id); // it should be network call
        }
        private User fallback(String id, String name) {
            return new User("def", "def");
        }
        @HystrixCommand(fallbackMethod = "firstFallbackCommand")
        public Future<User> getUserAsyncFallbackCommand(final String id, final String name) {
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    validate(id, name);
                    return new User(id, name + id); // it should be network call
                }
            };
        }
        @HystrixCommand(fallbackMethod = "firstFallbackCommand")
        public User getUserSyncFallbackCommand(String id, String name) {
            validate(id, name);
            return new User(id, name + id); // it should be network call
        }
        // FALLBACK COMMANDS METHODS:
        // These fallback methods will be processed as hystrix commands
        @HystrixCommand(fallbackMethod = "secondFallbackCommand")
        private User firstFallbackCommand(String id, String name) {
            validate(id, name);
            return new User(id, name + id); // it should be network call
        }
        @HystrixCommand(fallbackMethod = "staticFallback")
        private User secondFallbackCommand(String id, String name) {
            validate(id, name);
            return new User(id, name + id); // it should be network call
        }
        @HystrixCommand(fallbackMethod = "firstAsyncFallbackCommand")
        public Future<User> getUserAsyncFallbackAsyncCommand(final String id, final String name) {
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    throw new RuntimeException("getUserAsyncFallbackAsyncCommand failed");
                }
            };
        }
        @HystrixCommand(fallbackMethod = "secondAsyncFallbackCommand")
        private Future<User> firstAsyncFallbackCommand(final String id, final String name) {
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    throw new RuntimeException("firstAsyncFallbackCommand failed");
                }
            };
        }
        @HystrixCommand(fallbackMethod = "thirdAsyncFallbackCommand")
        private Future<User> secondAsyncFallbackCommand(final String id, final String name, final Throwable e) {
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    // verifies the previous command's exception is passed through
                    if ("firstAsyncFallbackCommand failed".equals(e.getMessage())) {
                        throw new RuntimeException("secondAsyncFallbackCommand failed");
                    }
                    return new User(id, name + id);
                }
            };
        }
        @HystrixCommand(fallbackMethod = "fallbackWithAdditionalParam")
        private Future<User> thirdAsyncFallbackCommand(final String id, final String name) {
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    throw new RuntimeException("thirdAsyncFallbackCommand failed");
                }
            };
        }
        private User fallbackWithAdditionalParam(final String id, final String name, final Throwable e) {
            // verifies the previous command's exception is passed through
            if (!"thirdAsyncFallbackCommand failed".equals(e.getMessage())) {
                throw new RuntimeException("fallbackWithAdditionalParam failed");
            }
            return new User("def", "def");
        }
        @HystrixCommand(fallbackMethod = "asyncFallbackCommand", commandProperties = {
                @HystrixProperty(name = "execution.isolation.thread.timeoutInMilliseconds", value = "100000")
        })
        public Future<User> asyncCommandWithAsyncFallbackCommand(final String id, final String name) {
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    validate(id, name);
                    return new User(id, name + id); // it should be network call
                }
            };
        }
        @HystrixCommand(fallbackMethod = "asyncFallback", commandProperties = {
                @HystrixProperty(name = "execution.isolation.thread.timeoutInMilliseconds", value = "100000")
        })
        public Future<User> asyncCommandWithAsyncFallback(final String id, final String name) {
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    validate(id, name);
                    return new User(id, name + id); // it should be network call
                }
            };
        }
        // Not annotated with @HystrixCommand, so an async (Future) fallback is
        // illegal here: triggers FallbackDefinitionException.
        public Future<User> asyncFallback(final String id, final String name) {
            return Observable.just(new User("def", "def")).toBlocking().toFuture();
        }
        @HystrixCommand
        public Future<User> asyncFallbackCommand(final String id, final String name) {
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    return new User("def", "def"); // it should be network call
                }
            };
        }
        @HystrixCommand(fallbackMethod = "fallbackWithAdditionalParameter")
        public User commandWithFallbackWithAdditionalParameter(final String id, final String name) {
            validate(id, name);
            return new User(id, name + id);
        }
        public User fallbackWithAdditionalParameter(final String id, final String name, Throwable e) {
            if (e == null) {
                throw new RuntimeException("exception should be not null");
            }
            return new User("def", "def");
        }
        @HystrixCommand(fallbackMethod = "fallbackWithStringReturnType")
        public User commandWithWrongFallbackReturnType(final String id, final String name) {
            validate(id, name);
            return new User(id, name);
        }
        @HystrixCommand(fallbackMethod = "fallbackWithStringReturnType")
        public Future<User> asyncCommandWithWrongFallbackReturnType(final String id, final String name) {
            return new AsyncResult<User>() {
                @Override
                public User invoke() {
                    return new User("def", "def"); // it should be network call
                }
            };
        }
        @HystrixCommand(fallbackMethod = "fallbackWithoutParameters")
        public User commandWithWrongFallbackParams(final String id, final String name) {
            return new User(id, name);
        }
        @HystrixCommand(fallbackMethod = "fallbackReturnSubTypeOfDomain")
        public Domain commandWithFallbackReturnSubType(final String id, final String name) {
            validate(id, name);
            return new User(id, name);
        }
        @HystrixCommand(fallbackMethod = "fallbackReturnSuperTypeOfDomain")
        public User commandWithFallbackReturnSuperType(final String id, final String name) {
            validate(id, name);
            return new User(id, name);
        }
        private User fallbackReturnSubTypeOfDomain(final String id, final String name) {
            return new User("def", "def");
        }
        private Domain fallbackReturnSuperTypeOfDomain(final String id, final String name) {
            return new User("def", "def");
        }
        private String fallbackWithStringReturnType(final String id, final String name) {
            return null;
        }
        private User fallbackWithoutParameters() {
            return null;
        }
        private User staticFallback(String id, String name) {
            return new User("def", "def");
        }
        // Throws IllegalArgumentException (via commons-lang Validate) on blank
        // input, which is what makes the primary commands fail in these tests.
        private void validate(String id, String name) {
            Validate.notBlank(id);
            Validate.notBlank(name);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.maven.slingstart;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.lang.reflect.Field;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.Set;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.maven.artifact.DefaultArtifact;
import org.apache.maven.artifact.handler.ArtifactHandler;
import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Build;
import org.apache.maven.project.MavenProject;
import org.apache.sling.provisioning.model.Model;
import org.apache.sling.provisioning.model.io.ModelReader;
import org.codehaus.plexus.archiver.UnArchiver;
import org.codehaus.plexus.archiver.manager.ArchiverManager;
import org.junit.Test;
import org.mockito.Mockito;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
public class PreparePackageMojoTest {
/**
 * Verifies that an artifact declared with a {@code bundle:rename-bsn}
 * directive is repackaged under the new Bundle-SymbolicName while every
 * other jar entry and manifest header stays identical, and that the
 * original symbolic name is recorded in X-Original-Bundle-SymbolicName.
 */
@Test
public void testBSNRenaming() throws Exception {
    // Provide the system with some artifacts that are known to be in the local .m2 repo
    // These are explicitly included in the test section of the pom.xml
    PreparePackageMojo ppm = getMojoUnderTest(
            "org.apache.sling/org.apache.sling.commons.classloader/1.3.2",
            "org.apache.sling/org.apache.sling.commons.classloader/1.3.2/app",
            "org.apache.sling/org.apache.sling.commons.json/2.0.12");
    try {
        // :launchpad is a prerequisite feature; rename_test carries the rename directive
        String modelTxt = "[feature name=:launchpad]\n" +
                "[artifacts]\n" +
                "  org.apache.sling/org.apache.sling.commons.classloader/1.3.2\n" +
                "" +
                "[feature name=rename_test]\n" +
                "  org.apache.sling/org.apache.sling.commons.json/2.0.12 [bundle:rename-bsn=r-foo.bar.renamed.sling.commons.json]\n";
        Model model = ModelReader.read(new StringReader(modelTxt), null);
        ppm.execute(model);
        File orgJar = getMavenArtifactFile(getMavenRepoRoot(), "org.apache.sling", "org.apache.sling.commons.json", "2.0.12");
        File generatedJar = new File(ppm.getTmpDir() + "/r-foo.bar.renamed.sling.commons.json-2.0.12.jar");
        // Entry-by-entry content comparison first, then manifest headers.
        compareJarContents(orgJar, generatedJar);
        try (JarFile jfOrg = new JarFile(orgJar);
                JarFile jfNew = new JarFile(generatedJar)) {
            Manifest mfOrg = jfOrg.getManifest();
            Manifest mfNew = jfNew.getManifest();
            Attributes orgAttrs = mfOrg.getMainAttributes();
            Attributes newAttrs = mfNew.getMainAttributes();
            for (Object key : orgAttrs.keySet()) {
                String orgVal = orgAttrs.getValue(key.toString());
                String newVal = newAttrs.getValue(key.toString());
                if ("Bundle-SymbolicName".equals(key.toString())) {
                    assertEquals("Should have recorded the original Bundle-SymbolicName",
                            orgVal, newAttrs.getValue("X-Original-Bundle-SymbolicName"));
                    assertEquals("r-foo.bar.renamed.sling.commons.json", newVal);
                } else {
                    assertEquals("Different keys: " + key, orgVal, newVal);
                }
            }
        }
    } finally {
        // Clean up the temporary build directory created by getMojoUnderTest.
        FileUtils.deleteDirectory(new File(ppm.project.getBuild().getDirectory()));
    }
}
/**
 * Asserts that two jars contain the same non-directory entries, in the same
 * order, with identical names, sizes and byte content.
 *
 * @param orgJar    the original jar
 * @param actualJar the jar produced by the mojo
 * @throws IOException on read failure
 */
private static void compareJarContents(File orgJar, File actualJar) throws IOException {
    try (JarInputStream jis1 = new JarInputStream(new FileInputStream(orgJar));
            JarInputStream jis2 = new JarInputStream(new FileInputStream(actualJar))) {
        JarEntry je1 = null;
        while ((je1 = jis1.getNextJarEntry()) != null) {
            if (je1.isDirectory())
                continue;
            // Advance the second stream to its next non-directory entry.
            JarEntry je2 = null;
            while ((je2 = jis2.getNextJarEntry()) != null) {
                if (!je2.isDirectory())
                    break;
            }
            // Fail with a clear message (rather than an NPE) when the actual
            // jar runs out of entries before the original does.
            assertEquals("Missing entry in " + actualJar + " for " + je1.getName(),
                    je1.getName(), je2 == null ? null : je2.getName());
            assertEquals(je1.getSize(), je2.getSize());
            try {
                byte[] buf1 = IOUtils.toByteArray(jis1);
                byte[] buf2 = IOUtils.toByteArray(jis2);
                assertArrayEquals("Contents not equal: " + je1.getName(), buf1, buf2);
            } finally {
                jis1.closeEntry();
                jis2.closeEntry();
            }
        }
    }
}
/**
 * Verifies generation of a {@code .subsystem-base} archive from a feature of
 * type {@code osgi.subsystem.composite}: manifest headers mapping run modes
 * to bundle paths, the embedded SUBSYSTEM-MANIFEST-BASE.MF, and the bundles
 * stored under Potential_Bundles/&lt;startLevel&gt;/.
 */
@Test
public void testSubsystemBaseGeneration() throws Exception {
    // Provide the system with some artifacts that are known to be in the local .m2 repo
    // These are explicitly included in the test section of the pom.xml
    PreparePackageMojo ppm = getMojoUnderTest(
            "org.apache.sling/org.apache.sling.commons.classloader/1.3.2",
            "org.apache.sling/org.apache.sling.commons.classloader/1.3.2/app",
            "org.apache.sling/org.apache.sling.commons.contentdetection/1.0.2",
            "org.apache.sling/org.apache.sling.commons.json/2.0.12",
            "org.apache.sling/org.apache.sling.commons.mime/2.1.8",
            "org.apache.sling/org.apache.sling.commons.osgi/2.3.0",
            "org.apache.sling/org.apache.sling.commons.threads/3.2.0");
    try {
        // The launchpad feature is a prerequisite for the model
        String modelTxt =
                "[feature name=:launchpad]\n" +
                "[artifacts]\n" +
                "  org.apache.sling/org.apache.sling.commons.classloader/1.3.2\n" +
                "" +
                "[feature name=test1 type=osgi.subsystem.composite]\n" +
                "" +
                "[:subsystem-manifest startLevel=123]\n" +
                "  Subsystem-Description: Extra subsystem headers can go here including very long ones that would span multiple lines in a manifest\n" +
                "  Subsystem-Copyright: (c) 2015 yeah!\n" +
                "" +
                "[artifacts]\n" +
                "  org.apache.sling/org.apache.sling.commons.osgi/2.3.0\n" +
                "" +
                "[artifacts startLevel=10]\n" +
                "  org.apache.sling/org.apache.sling.commons.json/2.0.12\n" +
                "  org.apache.sling/org.apache.sling.commons.mime/2.1.8\n" +
                "" +
                "[artifacts startLevel=20 runModes=foo,bar,:blah]\n" +
                "  org.apache.sling/org.apache.sling.commons.threads/3.2.0\n" +
                "" +
                "[artifacts startLevel=100 runModes=bar]\n" +
                "  org.apache.sling/org.apache.sling.commons.contentdetection/1.0.2\n";
        Model model = ModelReader.read(new StringReader(modelTxt), null);
        ppm.execute(model);
        File generatedFile = new File(ppm.getTmpDir() + "/test1.subsystem-base");
        try (JarFile jf = new JarFile(generatedFile)) {
            // Test META-INF/MANIFEST.MF
            // Each header maps a run mode to the '|'-separated bundle paths it activates;
            // "_all_" covers run-mode-less artifacts.
            Manifest mf = jf.getManifest();
            Attributes attrs = mf.getMainAttributes();
            String expected = "Potential_Bundles/0/org.apache.sling.commons.osgi-2.3.0.jar|"
                    + "Potential_Bundles/10/org.apache.sling.commons.json-2.0.12.jar|"
                    + "Potential_Bundles/10/org.apache.sling.commons.mime-2.1.8.jar";
            assertEquals(expected, attrs.getValue("_all_"));
            assertEquals("Potential_Bundles/20/org.apache.sling.commons.threads-3.2.0.jar", attrs.getValue("foo"));
            assertEquals("Potential_Bundles/20/org.apache.sling.commons.threads-3.2.0.jar|"
                    + "Potential_Bundles/100/org.apache.sling.commons.contentdetection-1.0.2.jar", attrs.getValue("bar"));
            // Test SUBSYSTEM-MANIFEST-BASE.MF
            ZipEntry smbZE = jf.getEntry("SUBSYSTEM-MANIFEST-BASE.MF");
            try (InputStream smbIS = jf.getInputStream(smbZE)) {
                Manifest smbMF = new Manifest(smbIS);
                Attributes smbAttrs = smbMF.getMainAttributes();
                assertEquals("test1", smbAttrs.getValue("Subsystem-SymbolicName"));
                assertEquals("osgi.subsystem.composite", smbAttrs.getValue("Subsystem-Type"));
                assertEquals("(c) 2015 yeah!", smbAttrs.getValue("Subsystem-Copyright"));
                assertEquals("Extra subsystem headers can go here including very long ones "
                        + "that would span multiple lines in a manifest",
                        smbAttrs.getValue("Subsystem-Description"));
            }
            // Test embedded bundles
            // Each stored bundle must be byte-identical to its local-repo artifact.
            File mrr = getMavenRepoRoot();
            File soj = getMavenArtifactFile(mrr, "org.apache.sling", "org.apache.sling.commons.osgi", "2.3.0");
            ZipEntry sojZE = jf.getEntry("Potential_Bundles/0/org.apache.sling.commons.osgi-2.3.0.jar");
            try (InputStream is = jf.getInputStream(sojZE)) {
                assertArtifactsEqual(soj, is);
            }
            File sjj = getMavenArtifactFile(mrr, "org.apache.sling", "org.apache.sling.commons.json", "2.0.12");
            ZipEntry sjZE = jf.getEntry("Potential_Bundles/10/org.apache.sling.commons.json-2.0.12.jar");
            try (InputStream is = jf.getInputStream(sjZE)) {
                assertArtifactsEqual(sjj, is);
            }
            File smj = getMavenArtifactFile(mrr, "org.apache.sling", "org.apache.sling.commons.mime", "2.1.8");
            ZipEntry smjZE = jf.getEntry("Potential_Bundles/10/org.apache.sling.commons.mime-2.1.8.jar");
            try (InputStream is = jf.getInputStream(smjZE)) {
                assertArtifactsEqual(smj, is);
            }
            File stj = getMavenArtifactFile(mrr, "org.apache.sling", "org.apache.sling.commons.threads", "3.2.0");
            ZipEntry stjZE = jf.getEntry("Potential_Bundles/20/org.apache.sling.commons.threads-3.2.0.jar");
            try (InputStream is = jf.getInputStream(stjZE)) {
                assertArtifactsEqual(stj, is);
            }
            File ctj = getMavenArtifactFile(mrr, "org.apache.sling", "org.apache.sling.commons.contentdetection", "1.0.2");
            ZipEntry ctjZE = jf.getEntry("Potential_Bundles/100/org.apache.sling.commons.contentdetection-1.0.2.jar");
            try (InputStream is = jf.getInputStream(ctjZE)) {
                assertArtifactsEqual(ctj, is);
            }
        }
    } finally {
        // Clean up the temporary build directory created by getMojoUnderTest.
        FileUtils.deleteDirectory(new File(ppm.project.getBuild().getDirectory()));
    }
}
/**
 * Asserts that the stream content is byte-for-byte identical to the given file.
 *
 * @param f  the expected artifact on disk
 * @param is the stream whose content is compared against {@code f}
 * @throws IOException if either side cannot be read
 */
private void assertArtifactsEqual(File f, InputStream is) throws IOException {
    byte[] expected = Files.readAllBytes(f.toPath());
    byte[] actual = IOUtils.toByteArray(is);
    assertArrayEquals("Bytes not equal on file " + f.getName(), expected, actual);
}
/**
 * Builds a {@link PreparePackageMojo} wired against mocked Maven collaborators.
 *
 * @param knownArtifacts artifact specs of the form
 *                       {@code groupId/artifactId/version[/classifier]}; each
 *                       becomes a dependency artifact of the mocked project
 * @return the mojo under test, with a throw-away temp dir as build directory
 *         (callers are expected to delete it afterwards)
 * @throws Exception on reflection or temp-dir failures
 */
private PreparePackageMojo getMojoUnderTest(String ... knownArtifacts) throws Exception {
    File mrr = getMavenRepoRoot();
    // Every artifact type resolves to the same mocked handler.
    ArtifactHandler ah = Mockito.mock(ArtifactHandler.class);
    ArtifactHandlerManager ahm = Mockito.mock(ArtifactHandlerManager.class);
    Mockito.when(ahm.getArtifactHandler(Mockito.anyString())).thenReturn(ah);
    Set<org.apache.maven.artifact.Artifact> artifacts = new HashSet<>();
    for (String s : knownArtifacts) {
        String[] parts = s.split("[/]");
        switch (parts.length) {
        case 3:
            // gid/aid/version
            artifacts.add(getMavenArtifact(mrr, ah, parts[0], parts[1], parts[2]));
            break;
        case 4:
            // gid/aid/version/classifier
            artifacts.add(getMavenArtifact(mrr, ah, parts[0], parts[1], parts[2], parts[3]));
            break;
        default: throw new IllegalStateException(s);
        }
    }
    MavenProject mavenPrj = new MavenProject();
    Build build = new Build();
    // Build output goes to a temp dir so tests do not touch the real target dir.
    Path tempDir = Files.createTempDirectory(getClass().getSimpleName());
    build.setOutputDirectory(tempDir.toString());
    build.setDirectory(tempDir.toString());
    mavenPrj.setBuild(build);
    mavenPrj.setDependencyArtifacts(artifacts);
    PreparePackageMojo ppm = new PreparePackageMojo();
    ppm.mavenSession = Mockito.mock(MavenSession.class);
    ppm.project = mavenPrj;
    ArchiverManager am = Mockito.mock(ArchiverManager.class);
    UnArchiver ua = Mockito.mock(UnArchiver.class);
    Mockito.when(am.getUnArchiver(Mockito.isA(File.class))).thenReturn(ua);
    // Plexus/Maven components are normally injected; set them reflectively here.
    setPrivateField(ppm, "archiverManager", am);
    setPrivateField(ppm, "artifactHandlerManager", ahm);
    setPrivateField(ppm, "resolver", Mockito.mock(ArtifactResolver.class));
    return ppm;
}
/**
 * Convenience overload of
 * {@link #getMavenArtifact(File, ArtifactHandler, String, String, String, String)}
 * for artifacts without a classifier.
 */
private org.apache.maven.artifact.Artifact getMavenArtifact(File repoRoot, ArtifactHandler ah, String gid, String aid, String ver) {
    return getMavenArtifact(repoRoot, ah, gid, aid, ver, null);
}
/**
 * Creates a compile-scope jar {@code DefaultArtifact} whose backing file points
 * into the local Maven repository under {@code repoRoot}.
 * <p>
 * NOTE(review): the classifier is recorded on the artifact but ignored when the
 * file path is resolved — assumes classified artifacts map to the main jar in
 * these tests; confirm if a classifier-specific file is ever needed.
 */
private org.apache.maven.artifact.Artifact getMavenArtifact(File repoRoot, ArtifactHandler ah, String gid, String aid, String ver, String classifier) {
    DefaultArtifact art = new DefaultArtifact(gid, aid, ver, "compile", "jar", classifier, ah);
    art.setFile(getMavenArtifactFile(repoRoot, gid, aid, ver));
    return art;
}
/**
 * Resolves the conventional local-repository location of an artifact's jar:
 * {@code <repoRoot>/<gid as path>/<aid>/<ver>/<aid>-<ver>.jar}.
 */
private File getMavenArtifactFile(File repoRoot, String gid, String aid, String ver) {
    String groupPath = gid.replace('.', '/');
    String jarName = aid + '-' + ver + ".jar";
    String relativePath = groupPath + '/' + aid + '/' + ver + '/' + jarName;
    return new File(repoRoot, relativePath);
}
/**
 * Infers the local Maven repository root from the classpath location of the
 * JUnit {@code Test} class (the path segment before "junit" is taken as the
 * repository root).
 *
 * @return the repository root directory
 * @throws IllegalStateException if JUnit is not on the classpath or the root
 *                               cannot be inferred from its URL
 * @throws IOException declared for URL handling
 */
private File getMavenRepoRoot() throws IOException {
    URL res = getClass().getClassLoader().getResource(
        Test.class.getName().replace('.', '/') + ".class");
    // Guard: getResource() returns null when the class is absent, which would
    // otherwise surface as an uninformative NullPointerException below.
    if (res == null)
        throw new IllegalStateException("Cannot locate org.junit.Test on the classpath");
    String u = res.toExternalForm();
    // Strip the "jar:" scheme prefix so the embedded file URL is parseable.
    if (u.startsWith("jar:"))
        u = u.substring(4);
    int idx = u.indexOf("junit");
    if (idx < 0)
        throw new IllegalStateException("Cannot infer maven repo root: " + res);
    return new File(new URL(u.substring(0, idx)).getFile());
}
/**
 * Reflectively sets a private field on {@code obj}.
 * <p>
 * Walks the class hierarchy because {@link Class#getDeclaredField(String)} only
 * sees fields declared directly on the class — a field inherited from a mojo
 * superclass would otherwise throw {@code NoSuchFieldException}.
 *
 * @param obj  target object
 * @param name field name
 * @param val  value to assign
 * @throws NoSuchFieldException if no class in the hierarchy declares the field
 * @throws Exception on other reflection failures
 */
private void setPrivateField(Object obj, String name, Object val) throws Exception {
    Class<?> cls = obj.getClass();
    while (cls != null) {
        try {
            Field f = cls.getDeclaredField(name);
            f.setAccessible(true);
            f.set(obj, val);
            return;
        } catch (NoSuchFieldException e) {
            // Not declared here; try the superclass.
            cls = cls.getSuperclass();
        }
    }
    throw new NoSuchFieldException("No field '" + name + "' in hierarchy of " + obj.getClass());
}
}
| |
package org.jboss.summit2015.beacon.publishers.paho;
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
import org.eclipse.paho.client.mqttv3.MqttCallback;
import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.eclipse.paho.client.mqttv3.persist.MqttDefaultFilePersistence;
import org.jboss.logging.Logger;
import org.jboss.summit2015.beacon.Beacon;
import org.jboss.summit2015.beacon.bluez.BeaconInfo;
import org.jboss.summit2015.beacon.common.MsgPublisher;
import org.jboss.summit2015.beacon.scanner.MqttQOS;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.sql.Timestamp;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * MQTT-based {@link MsgPublisher} that sends beacon payloads to a broker using
 * the Eclipse Paho blocking client. Call {@link #start(boolean)} before
 * {@link #queueForPublish(String, MqttQOS, byte[])} or
 * {@link #publish(String, MqttQOS, byte[])}.
 *
 * @author Scott Stark (sstark@redhat.com) (C) 2014 Red Hat Inc.
 */
public class MqttPublisher implements MqttCallback, MsgPublisher {
    private static Logger log = Logger.getLogger(MqttPublisher.class);
    // Single-thread executor backing queueForPublish(); created in start(), shut down in stop().
    private ExecutorService publishService;
    private MqttClient client;
    private String brokerURL;
    private boolean quietMode;
    private MqttConnectOptions conOpt;
    private boolean clean;
    private String password;
    private String userName;
    private String clientID;
    private File dataDir;

    public MqttPublisher(String brokerUrl, String userName, String password) {
        this(brokerUrl, userName, password, null);
    }

    /**
     * @param clientID MQTT client id; when null an id of the form
     *                 {@code Parser-<local-ip>} is generated in {@link #start(boolean)}
     */
    public MqttPublisher(String brokerUrl, String userName, String password, String clientID) {
        this.brokerURL = brokerUrl;
        this.password = password;
        this.userName = userName;
        this.clientID = clientID;
    }

    public boolean isQuietMode() {
        return quietMode;
    }
    public void setQuietMode(boolean quietMode) {
        this.quietMode = quietMode;
    }
    public boolean isClean() {
        return clean;
    }
    public void setClean(boolean clean) {
        this.clean = clean;
    }
    public String getPassword() {
        return password;
    }
    public void setPassword(String password) {
        this.password = password;
    }
    public String getUserName() {
        return userName;
    }
    public void setUserName(String userName) {
        this.userName = userName;
    }
    public String getClientID() {
        return clientID;
    }
    public void setClientID(String clientID) {
        this.clientID = clientID;
    }
    public File getDataDir() {
        return dataDir;
    }
    public void setDataDir(File dataDir) {
        this.dataDir = dataDir;
    }

    // MqttCallback methods
    @Override
    public void connectionLost(Throwable cause) {
        log.warn("connectionLost", cause);
    }
    @Override
    public void messageArrived(String topic, MqttMessage message) throws Exception {
        // No-op: this publisher does not consume inbound messages.
    }
    @Override
    public void deliveryComplete(IMqttDeliveryToken token) {
        // No-op: publish(String, MqttQOS, byte[]) blocks until delivery completes.
    }

    // MsgPublisher methods — the following overrides are intentionally no-op
    // stubs in this implementation; callers use the MQTT-specific publish
    // methods below instead.
    @Override
    public void publish(String destinationName, Beacon beacon) {
    }
    @Override
    public void publishStatus(Beacon beacon) {
    }
    @Override
    public void publishProperties(String destinationName, Properties properties) {
    }
    @Override
    public void setDestinationName(String name) {
    }
    @Override
    public String getDestinationName() {
        return null;
    }
    @Override
    public int getReconnectInterval() {
        return 0;
    }
    @Override
    public void setReconnectInterval(int reconnectInterval) {
    }
    @Override
    public boolean isReconnectOnFailure() {
        return false;
    }
    @Override
    public void setReconnectOnFailure(boolean reconnectOnFailure) {
    }
    @Override
    public boolean isConnected() {
        return false;
    }
    @Override
    public void setConnected(boolean connected) {
    }
    @Override
    public void setUseTopics(boolean flag) {
    }
    @Override
    public boolean isUseTopics() {
        return false;
    }
    @Override
    public boolean isUseTransactions() {
        return false;
    }
    @Override
    public void setUseTransactions(boolean useTransactions) {
    }
    @Override
    public void publish(String destinationName, BeaconInfo beaconInfo) {
    }
    @Override
    public void publishStatus(BeaconInfo beaconInfo) {
    }

    /**
     * Shuts down the publish executor and disconnects the MQTT client.
     * Safe to call when start() was never invoked.
     */
    @Override
    public void stop() {
        if(publishService != null) {
            publishService.shutdown();
            publishService = null;
        }
        if(client != null) {
            try {
                client.disconnect();
            } catch (MqttException e) {
                log.warn("Failure during client disconnect", e);
            }
            client = null;
        }
    }

    /**
     * Creates the publish executor and the MQTT client with file-based message
     * persistence under {@code java.io.tmpdir}.
     *
     * @param asyncMode currently unused by this implementation — TODO confirm intent
     * @throws IOException   if the local host address cannot be determined
     * @throws MqttException on client construction failures
     */
    @Override
    public void start(boolean asyncMode) throws IOException, MqttException {
        publishService = Executors.newSingleThreadExecutor();
        //This sample stores in a temporary directory... where messages temporarily
        // stored until the message has been delivered to the server.
        //..a real application ought to store them somewhere
        // where they are not likely to get deleted or tampered with
        String tmpDir = System.getProperty("java.io.tmpdir");
        MqttDefaultFilePersistence dataStore = new MqttDefaultFilePersistence(tmpDir);
        // Construct the connection options object that contains connection parameters
        // such as cleanSession and LWT
        conOpt = new MqttConnectOptions();
        conOpt.setCleanSession(clean);
        if (password != null) {
            conOpt.setPassword(this.password.toCharArray());
        }
        if (userName != null) {
            conOpt.setUserName(this.userName);
        }
        // Construct an MQTT blocking mode client
        if(clientID == null) {
            InetAddress addr = InetAddress.getLocalHost();
            clientID = "Parser-" + addr.getHostAddress();
        }
        client = new MqttClient(this.brokerURL, clientID, dataStore);
        // Set this wrapper as the callback handler
        client.setCallback(this);
    }

    /**
     * Queue a message for publish / send a message to an MQTT server using a background executor service
     *
     * @param topicName the name of the topic to publish to
     * @param qos the quality of service to delivery the message at
     * @param payload the set of bytes to send to the MQTT server
     * @throws IllegalStateException if {@link #start(boolean)} has not been called
     */
    public void queueForPublish(String topicName, MqttQOS qos, byte[] payload) {
        // Fail fast with a clear message rather than an NPE from a null executor.
        if (publishService == null) {
            throw new IllegalStateException("start() must be called before queueForPublish()");
        }
        publishService.submit(() -> {
            try {
                publish(topicName, qos, payload);
            } catch (MqttException e) {
                log.warn("Failed to publish beacon", e);
            }
        }
        );
    }

    /**
     * Directly publish / send a message to an MQTT server
     *
     * @param topicName the name of the topic to publish to
     * @param qos the quality of service to delivery the message at
     * @param payload the set of bytes to send to the MQTT server
     * @throws MqttException
     */
    public void publish(String topicName, MqttQOS qos, byte[] payload) throws MqttException {
        // Connect to the MQTT server
        log.debugf("Connecting to %s with client ID %s", brokerURL, client.getClientId());
        client.connect(conOpt);
        log.debug("Connected");
        String time = new Timestamp(System.currentTimeMillis()).toString();
        // Fixed format string: the original lacked a %s for qos, so the third
        // argument was never rendered in the log output.
        log.debugf("Publishing at: %s to topic '%s' qos: %s", time, topicName, qos);
        // Create and configure a message
        MqttMessage message = new MqttMessage(payload);
        message.setQos(qos.ordinal());
        // Send the message to the server, control is not returned until
        // it has been delivered to the server meeting the specified
        // quality of service.
        client.publish(topicName, message);
        // Disconnect the client
        client.disconnect();
        log.debug("Disconnected");
    }
}
| |
/*
* Copyright 2005-2007 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.ui;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.framework.BundleContext;
import org.wso2.carbon.CarbonConstants;
import org.wso2.carbon.base.api.ServerConfigurationService;
import org.wso2.carbon.ui.deployment.beans.CarbonUIDefinitions;
import org.wso2.carbon.ui.deployment.beans.Menu;
import org.wso2.carbon.ui.internal.CarbonUIServiceComponent;
import org.wso2.carbon.utils.CarbonUtils;
import org.wso2.carbon.utils.ConfigurationContextService;
import org.wso2.carbon.utils.NetworkUtils;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.base.ServerConfiguration;
/**
 * Utility class for Carbon UI: URL construction for the admin console, locale
 * handling, i18n lookups, menu/session helpers and product.xml parameters.
 */
public class CarbonUIUtil {

    public static final String QUERY_PARAM_LOCALE = "locale";
    public static final String SESSION_PARAM_LOCALE = "custom_locale";

    private static Log log = LogFactory.getLog(CarbonUIUtil.class);
    private static BundleContext bundleContext = null;

    // To store the product specific params (parameterized; was a raw HashMap).
    private static HashMap<String, Object> productParams = new HashMap<String, Object>();

    private static final String PROXY_CONTEXT_PATH = "ProxyContextPath";
    private static final String WORKER_PROXY_CONTEXT_PATH = "WorkerProxyContextPath";

    /**
     * Get a proxy object to the business logic implementation class.
     * <p/>
     * This proxy could be a handle to an OSGi service or a Web services client
     *
     * @param clientClassObject Web services client
     * @param osgiObjectClass OSGi service class
     * @param session The HTTP Session
     * @return Proxy object
     * @deprecated Do not use this method. Simply use the relevant client.
     */
    public static Object getServerProxy(Object clientClassObject,
                                        Class osgiObjectClass,
                                        HttpSession session) {
        return clientClassObject;
    }

    public static void setBundleContext(BundleContext context) {
        bundleContext = context;
    }

    public static BundleContext getBundleContext() {
        return bundleContext;
    }

    /** Returns the index page URL from the server configuration. */
    public static String getIndexPageURL(ServerConfigurationService serverConfig) {
        return serverConfig.getFirstProperty(CarbonConstants.INDEX_PAGE_URL);
    }

    /**
     * Returns the index page URL, preferring the value stored in the HTTP
     * session over the servlet-context attribute.
     */
    public static String getIndexPageURL(ServletContext servletContext, HttpSession httpSession) {
        String url;
        Object obj = httpSession.getAttribute(CarbonConstants.INDEX_PAGE_URL);
        if (obj instanceof String) {
            // Index Page URL is present in the servlet session
            // (instanceof is false for null, so no separate null check is needed).
            url = (String) obj;
        } else {
            url = (String) servletContext.getAttribute(CarbonConstants.INDEX_PAGE_URL);
        }
        return url;
    }

    public static String getServerURL(ServerConfigurationService serverConfig) {
        ConfigurationContext serverCfgCtx =
                CarbonUIServiceComponent.getConfigurationContextService().getServerConfigContext();
        return CarbonUtils.getServerURL(serverConfig, serverCfgCtx);
    }

    public static String getServerURL(ServletContext servletContext, HttpSession httpSession) {
        return CarbonUtils.getServerURL(servletContext, httpSession,
                CarbonUIServiceComponent.
                        getConfigurationContextService().getServerConfigContext());
    }

    /**
     * True when the session marks the current tenant as the super tenant.
     * The attribute holds the boolean as its String form ("true").
     */
    public static boolean isSuperTenant(HttpServletRequest request) {
        Object isSuperTenant = request.getSession().getAttribute(MultitenantConstants.IS_SUPER_TENANT);
        return isSuperTenant != null && isSuperTenant.equals(Boolean.toString(true));
    }

    /**
     * Rewrites an https URL (or https-port placeholder) to its http equivalent.
     */
    public static String https2httpURL(String url) {
        if (url.indexOf("${carbon.https.port}") != -1) {
            String httpPort = CarbonUtils.getTransportPort(CarbonUIServiceComponent
                    .getConfigurationContextService(), "http")
                    + "";
            url = url.replace("${carbon.https.port}", httpPort);
        } else {
            // TODO: This is a hack to gaurd against the above if condition failing.
            // Need to dig into the root of the problem
            url = url.replace("https", "http");
            String httpsPort = CarbonUtils.getTransportPort(CarbonUIServiceComponent
                    .getConfigurationContextService(), "https")
                    + "";
            String httpPort = CarbonUtils.getTransportPort(CarbonUIServiceComponent
                    .getConfigurationContextService(), "http")
                    + "";
            url = url.replace(httpsPort, httpPort);
        }
        return url;
    }

    /**
     * Returns url to admin console. eg: https://192.168.1.201:9443/wso2/carbon
     *
     * @param request The HTTPServletRequest
     * @return The URL of the Admin Console, or null when no https port is configured
     */
    public static String getAdminConsoleURL(HttpServletRequest request) {
        // Hostname
        String hostName = "localhost";
        try {
            hostName = NetworkUtils.getMgtHostName();
        } catch (Exception ignored) {
            // Fall back to "localhost" when the management host cannot be resolved.
        }
        // HTTPS port
        String mgtConsoleTransport = CarbonUtils.getManagementTransport();
        ConfigurationContextService configContextService = CarbonUIServiceComponent
                .getConfigurationContextService();
        int httpsPort = CarbonUtils.getTransportPort(configContextService, mgtConsoleTransport);
        int httpsProxyPort =
                CarbonUtils.getTransportProxyPort(configContextService.getServerConfigContext(),
                        mgtConsoleTransport);
        // Context
        String context = request.getContextPath();
        if ("/".equals(context)) {
            context = "";
        }
        String proxyContextPath = CarbonUIUtil.getProxyContextPath(false);
        if (httpsPort == -1) {
            return null;
        }
        String adminConsoleURL = null;
        String enableHTTPAdminConsole = CarbonUIServiceComponent.getServerConfiguration()
                .getFirstProperty(CarbonConstants.ENABLE_HTTP_ADMIN_CONSOLE);
        if (enableHTTPAdminConsole != null
                && "true".equalsIgnoreCase(enableHTTPAdminConsole.trim())) {
            int httpPort = CarbonUtils.getTransportPort(
                    CarbonUIServiceComponent.getConfigurationContextService(), "http");
            adminConsoleURL = "http://" + hostName + ":" + httpPort + proxyContextPath + context + "/carbon/";
        } else {
            // Prefer the proxy port when one is configured.
            adminConsoleURL = "https://" + hostName + ":"
                    + (httpsProxyPort != -1 ? httpsProxyPort : httpsPort) + proxyContextPath + context + "/carbon/";
        }
        if(log.isDebugEnabled()){
            log.debug("Generated admin console URL: " + adminConsoleURL);
        }
        return adminConsoleURL;
    }

    /**
     * Returns url to admin console.
     *
     * @param context Webapp context root of the Carbon webapp
     * @return The URL of the Admin Console
     */
    public static String getAdminConsoleURL(String context) {
        // Hostname
        String hostName = "localhost";
        try {
            hostName = NetworkUtils.getMgtHostName();
        } catch (Exception ignored) {
            // Fall back to "localhost" when the management host cannot be resolved.
        }
        // HTTPS port
        String mgtConsoleTransport = CarbonUtils.getManagementTransport();
        ConfigurationContextService configContextService = CarbonUIServiceComponent
                .getConfigurationContextService();
        int httpsPort = CarbonUtils.getTransportPort(configContextService, mgtConsoleTransport);
        int httpsProxyPort =
                CarbonUtils.getTransportProxyPort(configContextService.getServerConfigContext(),
                        mgtConsoleTransport);
        // Context
        if ("/".equals(context)) {
            context = "";
        }
        String proxyContextPath = CarbonUIUtil.getProxyContextPath(false);
        String adminConsoleURL = "https://" + hostName + ":" + (httpsProxyPort != -1 ? httpsProxyPort : httpsPort) +
                proxyContextPath + context + "/carbon/";
        if(log.isDebugEnabled()){
            log.debug("Generated admin console URL: " + adminConsoleURL);
        }
        return adminConsoleURL;
    }

    /**
     * Get a ServerConfiguration Property
     *
     * @param propertyName Name of the property
     * @return the property, or null when the service is unavailable
     */
    public static String getServerConfigurationProperty(String propertyName) {
        try {
            ServerConfigurationService serverConfig = CarbonUIServiceComponent.getServerConfiguration();
            return serverConfig.getFirstProperty(propertyName);
        } catch (Exception e) {
            String msg = "ServerConfiguration Service not available";
            log.error(msg, e);
        }
        return null;
    }

    /**
     * True when the servlet context can resolve {@code context} to a resource
     * whose URL contains the context string.
     */
    public static boolean isContextRegistered(ServletConfig config, String context) {
        URL url;
        try {
            url = config.getServletContext().getResource(context);
        } catch (MalformedURLException e) {
            return false;
        }
        return url != null && url.toString().contains(context);
    }

    /**
     * Parses a locale query value of the form {@code lang[_country[_variant]]}.
     */
    public static Locale toLocale(String localeQuery){
        String localeInfo[] = localeQuery.split("_");
        int size = localeInfo.length;
        Locale locale;
        switch (size){
            case 2:
                locale = new Locale(localeInfo[0],localeInfo[1]);
                break;
            case 3:
                locale = new Locale(localeInfo[0],localeInfo[1],localeInfo[2]);
                break;
            default:
                locale = new Locale(localeInfo[0]);
                break;
        }
        return locale;
    }

    /** Stores the "locale" request parameter (when present) in the session. */
    public static void setLocaleToSession (HttpServletRequest request)
    {
        if (request.getParameter(QUERY_PARAM_LOCALE) != null) {
            request.getSession().setAttribute(CarbonUIUtil.SESSION_PARAM_LOCALE, request.getParameter(QUERY_PARAM_LOCALE));
        }
    }

    /** Returns the custom session locale, falling back to the request locale. */
    public static Locale getLocaleFromSession (HttpServletRequest request)
    {
        if (request.getSession().getAttribute(SESSION_PARAM_LOCALE) != null) {
            String custom_locale = request.getSession().getAttribute(SESSION_PARAM_LOCALE).toString();
            return toLocale(custom_locale);
        } else {
            return request.getLocale();
        }
    }

    /**
     * Returns internationalized string for supplied key.
     *
     * @param key - key to look for
     * @param i18nBundle - resource bundle
     * @param language - language
     * @return internationalized key value of key, if no value can be derived
     */
    public static String geti18nString(String key, String i18nBundle, String language) {
        Locale locale = new Locale(language);
        String text = geti18nString(key, i18nBundle, locale);
        return text;
    }

    /**
     * Returns internationalized string for supplied key.
     *
     * @param key - key to look for
     * @param i18nBundle - resource bundle
     * @param locale - locale
     * @return internationalized key value of key, if no value can be derived
     */
    public static String geti18nString(String key, String i18nBundle, Locale locale) {
        java.util.ResourceBundle resourceBundle = null;
        if (i18nBundle != null) {
            try {
                resourceBundle = java.util.ResourceBundle.getBundle(i18nBundle, locale);
            } catch (java.util.MissingResourceException e) {
                if (log.isDebugEnabled()) {
                    log.debug("Cannot find resource bundle : " + i18nBundle + " for locale : "
                            + locale);
                }
            }
        }
        // The key itself is the fallback value when no translation is found.
        String text = key;
        if (resourceBundle != null) {
            String tmp = null;
            try {
                tmp = resourceBundle.getString(key);
            } catch (java.util.MissingResourceException e) {
                // Missing key should not be a blocking factor for UI rendering
                if (log.isDebugEnabled()) {
                    log.debug("Cannot find resource for key :" + key);
                }
            }
            if (tmp != null) {
                text = tmp;
            }
        }
        return text;
    }

    /**
     * Removed menu item from current user's session. Only current user's menu
     * items are effected.
     *
     * @param menuId
     * @param request
     * @see CarbonUIDefinitions#removeMenuDefinition(String)
     */
    public static void removeMenuDefinition(String menuId, HttpServletRequest request) {
        // TODO : consider removing child menu items as well
        ArrayList<Menu> modifiedMenuDefs = new ArrayList<Menu>();
        Menu[] currentMenus = (Menu[]) request.getSession().getAttribute(
                MenuAdminClient.USER_MENU_ITEMS);
        boolean modified = false;
        if (currentMenus != null) {
            if (menuId != null && menuId.trim().length() > 0) {
                for (int a = 0; a < currentMenus.length; a++) {
                    Menu menu = currentMenus[a];
                    if (menu != null) {
                        if (!menuId.equals(menu.getId())) {
                            modifiedMenuDefs.add(menu);
                            modified = true;
                        } else {
                            if (log.isDebugEnabled()) {
                                log.debug("Removing menu item : " + menuId);
                            }
                        }
                    }
                }
                if (modified) {
                    Menu[] newMenuDefs = new Menu[modifiedMenuDefs.size()];
                    newMenuDefs = modifiedMenuDefs.toArray(newMenuDefs);
                    request.getSession().setAttribute(MenuAdminClient.USER_MENU_ITEMS, newMenuDefs);
                }
            }
        }
    }

    /**
     * Returns the first path segment of a bundle resource name (leading '/'
     * stripped), or the whole name when it has no '/'.
     */
    public static String getBundleResourcePath(String resourceName) {
        if (resourceName == null || resourceName.length() == 0) {
            return null;
        }
        String resourcePath = resourceName.startsWith("/") ? resourceName.substring(1) : resourceName;
        // The original checked lastIndexOf but cut at indexOf; both are -1
        // together, so using indexOf for both keeps behavior and is consistent.
        int slash = resourcePath.indexOf('/');
        return (slash != -1) ? resourcePath.substring(0, slash) : resourcePath;
    }

    /**
     * This method is a helper method for checking UI permissions.
     */
    @SuppressWarnings("unchecked")
    public static boolean isUserAuthorized(HttpServletRequest request, String resource) {
        List<String> permissions = (List<String>) request.getSession().getAttribute(
                CarbonConstants.UI_USER_PERMISSIONS);
        if (permissions == null) {
            return false;
        }
        // Authorized when the resource falls under any granted permission prefix.
        for (String permission : permissions) {
            if (resource.startsWith(permission)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Method is used to retrive product xml params
     *
     * @param key = product xml key
     * @return product xml value
     */
    public static Object getProductParam(String key) {
        return productParams.get(key);
    }

    public static void setProductParam(String key, Object value) {
        productParams.put(key, value);
    }

    /**
     * Returns home page location for "Home" link in Carbon UI menu.
     * If defaultHomePage property is available in product.xml this method will return it and if not it'll return
     * default ../admin/index.jsp
     *
     * @return home page location
     */
    public static String getHomePage() {
        Object homePage;
        if ((homePage = getDefaultHomePageProductParam())
                != null) {
            String homePageLocation = (String) homePage;
            if (!homePageLocation.startsWith("/")) {
                // it is assumed that homepage location is provided as a relative path starting
                // from carbon context. This is to support the re-direction url at the login.
                // Therefore here we fix the location to suit the homepage link of the product.
                homePageLocation = "../../" + homePageLocation;
            }
            return homePageLocation;
        }
        return CarbonConstants.CARBON_UI_DEFAULT_HOME_PAGE;
    }

    /**
     * Strips the "/t/<tenantDomain>" prefix from a tenant-aware request URL.
     */
    public static String removeTenantSpecificStringsFromURL(String requestURL) {
        if (requestURL.contains("/" + MultitenantConstants.TENANT_AWARE_URL_PREFIX + "/")) {
            int tenantPrefixIndex = requestURL.lastIndexOf("/" +
                    MultitenantConstants.TENANT_AWARE_URL_PREFIX +
                    "/");
            requestURL = requestURL.substring(tenantPrefixIndex +
                    MultitenantConstants.TENANT_AWARE_URL_PREFIX.length() +
                    2);
            // bypassing tenantDomain part
            int pageUrlIndex = requestURL.indexOf('/');
            requestURL = requestURL.substring(pageUrlIndex);
        }
        return requestURL;
    }

    private static Object getDefaultHomePageProductParam() {
        return getProductParam(CarbonConstants.PRODUCT_XML_WSO2CARBON + CarbonConstants.DEFAULT_HOME_PAGE);
    }

    /**
     * Returns the proxy context path value specified in the carbon.xml.(Duplicated Util Method)
     *
     * @param isWorkerNode If isWorkerNode is true then this method returns the proxy context path of the
     *                     corresponding worker node. If the worker proxy context path is not specified, this method
     *                     returns the value specified for the proxy context path.
     * @return the proxy context path value.
     */
    private static String getProxyContextPath(boolean isWorkerNode) {
        String proxyContextPath = "";
        if (isWorkerNode) {
            proxyContextPath = getProxyContextPathValue(WORKER_PROXY_CONTEXT_PATH);
            if ("".equals(proxyContextPath)) {
                proxyContextPath = getProxyContextPathValue(PROXY_CONTEXT_PATH);
            }
        } else {
            proxyContextPath = getProxyContextPathValue(PROXY_CONTEXT_PATH);
        }
        if(log.isDebugEnabled()){
            log.debug("Proxy context path : " + proxyContextPath);
        }
        return proxyContextPath;
    }

    /**
     * Retrieves the proxy context path from the ServerConfiguration and process it before returning. (Duplicated Util Method)
     *
     * @param key Property key
     * @return the processed proxy context path.
     */
    private static String getProxyContextPathValue(String key) {
        String proxyContextPath = ServerConfiguration.getInstance().getFirstProperty(key);
        // Fixed: the original used bitwise '|' instead of logical '||' here.
        if (proxyContextPath == null || proxyContextPath.length() == 0 || "/".equals(proxyContextPath)) {
            proxyContextPath = "";
        } else {
            proxyContextPath = proxyContextPath.trim();
            if (!proxyContextPath.startsWith("/")) {
                proxyContextPath = "/" + proxyContextPath;
            }
        }
        return proxyContextPath;
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2beta1/dlp.proto
package com.google.privacy.dlp.v2beta1;
/**
* <pre>
* High level summary of deidentification.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2beta1.DeidentificationSummary}
*/
public final class DeidentificationSummary extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2beta1.DeidentificationSummary)
DeidentificationSummaryOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeidentificationSummary.newBuilder() to construct.
private DeidentificationSummary(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeidentificationSummary() {
transformedBytes_ = 0L;
transformationSummaries_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private DeidentificationSummary(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 16: {
transformedBytes_ = input.readInt64();
break;
}
case 26: {
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
transformationSummaries_ = new java.util.ArrayList<com.google.privacy.dlp.v2beta1.TransformationSummary>();
mutable_bitField0_ |= 0x00000002;
}
transformationSummaries_.add(
input.readMessage(com.google.privacy.dlp.v2beta1.TransformationSummary.parser(), extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
transformationSummaries_ = java.util.Collections.unmodifiableList(transformationSummaries_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.privacy.dlp.v2beta1.DlpProto.internal_static_google_privacy_dlp_v2beta1_DeidentificationSummary_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2beta1.DlpProto.internal_static_google_privacy_dlp_v2beta1_DeidentificationSummary_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2beta1.DeidentificationSummary.class, com.google.privacy.dlp.v2beta1.DeidentificationSummary.Builder.class);
}
private int bitField0_;
public static final int TRANSFORMED_BYTES_FIELD_NUMBER = 2;
private long transformedBytes_;
/**
* <pre>
* Total size in bytes that were transformed in some way.
* </pre>
*
* <code>int64 transformed_bytes = 2;</code>
*/
public long getTransformedBytes() {
return transformedBytes_;
}
public static final int TRANSFORMATION_SUMMARIES_FIELD_NUMBER = 3;
private java.util.List<com.google.privacy.dlp.v2beta1.TransformationSummary> transformationSummaries_;
/**
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
*/
public java.util.List<com.google.privacy.dlp.v2beta1.TransformationSummary> getTransformationSummariesList() {
return transformationSummaries_;
}
/**
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
*/
public java.util.List<? extends com.google.privacy.dlp.v2beta1.TransformationSummaryOrBuilder>
getTransformationSummariesOrBuilderList() {
return transformationSummaries_;
}
/**
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
*/
public int getTransformationSummariesCount() {
return transformationSummaries_.size();
}
/**
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
*/
public com.google.privacy.dlp.v2beta1.TransformationSummary getTransformationSummaries(int index) {
return transformationSummaries_.get(index);
}
/**
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
*/
public com.google.privacy.dlp.v2beta1.TransformationSummaryOrBuilder getTransformationSummariesOrBuilder(
int index) {
return transformationSummaries_.get(index);
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes this message to the wire; fields at their default value (0) are skipped,
// per proto3 semantics. Unknown fields preserved from parsing are written last.
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  if (transformedBytes_ != 0L) {
    output.writeInt64(2, transformedBytes_);
  }
  for (int i = 0; i < transformationSummaries_.size(); i++) {
    output.writeMessage(3, transformationSummaries_.get(i));
  }
  unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize, inherited from the generated base) the exact
// number of bytes writeTo() will emit. Must mirror writeTo()'s field-skipping logic.
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (transformedBytes_ != 0L) {
    size += com.google.protobuf.CodedOutputStream
      .computeInt64Size(2, transformedBytes_);
  }
  for (int i = 0; i < transformationSummaries_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(3, transformationSummaries_.get(i));
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.privacy.dlp.v2beta1.DeidentificationSummary)) {
    // Delegate to the generated base class for non-message comparisons.
    return super.equals(obj);
  }
  com.google.privacy.dlp.v2beta1.DeidentificationSummary other = (com.google.privacy.dlp.v2beta1.DeidentificationSummary) obj;
  // Field-by-field comparison; unknown fields participate so that two messages
  // parsed from different bytes are not considered equal.
  boolean result = true;
  result = result && (getTransformedBytes()
      == other.getTransformedBytes());
  result = result && getTransformationSummariesList()
      .equals(other.getTransformationSummariesList());
  result = result && unknownFields.equals(other.unknownFields);
  return result;
}
@java.lang.Override
public int hashCode() {
  // memoizedHashCode is inherited from the generated base; 0 means "not computed".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Standard generated scheme: fold each set field's number and value hash into the
  // accumulator with distinct multipliers. Must stay consistent with equals().
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + TRANSFORMED_BYTES_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
      getTransformedBytes());
  if (getTransformationSummariesCount() > 0) {
    hash = (37 * hash) + TRANSFORMATION_SUMMARIES_FIELD_NUMBER;
    hash = (53 * hash) + getTransformationSummariesList().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points. The byte-oriented overloads delegate to
// PARSER directly; the stream-oriented ones go through GeneratedMessageV3 helpers,
// which translate protobuf parse failures into IOException as appropriate.
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants first read a varint length prefix, then that many bytes.
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factories. newBuilder() starts from the default instance; the prototype
// overload pre-populates the builder by merging an existing message into it.
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.privacy.dlp.v2beta1.DeidentificationSummary prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
  // Avoid a redundant mergeFrom when converting the default instance itself.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * <pre>
 * High level summary of deidentification.
 * </pre>
 *
 * Protobuf type {@code google.privacy.dlp.v2beta1.DeidentificationSummary}
 */
// Generated mutable builder for DeidentificationSummary. Bit 0x00000002 of bitField0_
// tracks whether transformationSummaries_ is a private mutable copy; once the field
// builder (transformationSummariesBuilder_) is initialized it takes over list storage.
public static final class Builder extends
    com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2beta1.DeidentificationSummary)
    com.google.privacy.dlp.v2beta1.DeidentificationSummaryOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.privacy.dlp.v2beta1.DlpProto.internal_static_google_privacy_dlp_v2beta1_DeidentificationSummary_descriptor;
  }
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2beta1.DlpProto.internal_static_google_privacy_dlp_v2beta1_DeidentificationSummary_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2beta1.DeidentificationSummary.class, com.google.privacy.dlp.v2beta1.DeidentificationSummary.Builder.class);
  }
  // Construct using com.google.privacy.dlp.v2beta1.DeidentificationSummary.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // Eagerly create nested field builders only when the runtime requests it
    // (alwaysUseFieldBuilders is true for builders with a parent).
    if (com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
      getTransformationSummariesFieldBuilder();
    }
  }
  public Builder clear() {
    super.clear();
    transformedBytes_ = 0L;
    if (transformationSummariesBuilder_ == null) {
      transformationSummaries_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
    } else {
      transformationSummariesBuilder_.clear();
    }
    return this;
  }
  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return com.google.privacy.dlp.v2beta1.DlpProto.internal_static_google_privacy_dlp_v2beta1_DeidentificationSummary_descriptor;
  }
  public com.google.privacy.dlp.v2beta1.DeidentificationSummary getDefaultInstanceForType() {
    return com.google.privacy.dlp.v2beta1.DeidentificationSummary.getDefaultInstance();
  }
  public com.google.privacy.dlp.v2beta1.DeidentificationSummary build() {
    com.google.privacy.dlp.v2beta1.DeidentificationSummary result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  public com.google.privacy.dlp.v2beta1.DeidentificationSummary buildPartial() {
    com.google.privacy.dlp.v2beta1.DeidentificationSummary result = new com.google.privacy.dlp.v2beta1.DeidentificationSummary(this);
    // from_bitField0_/to_bitField0_ are generated boilerplate; this message has no
    // optional-with-presence fields, so to_bitField0_ stays 0.
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    result.transformedBytes_ = transformedBytes_;
    if (transformationSummariesBuilder_ == null) {
      // Freeze the locally owned list before handing it to the immutable message.
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        transformationSummaries_ = java.util.Collections.unmodifiableList(transformationSummaries_);
        bitField0_ = (bitField0_ & ~0x00000002);
      }
      result.transformationSummaries_ = transformationSummaries_;
    } else {
      result.transformationSummaries_ = transformationSummariesBuilder_.build();
    }
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }
  public Builder clone() {
    return (Builder) super.clone();
  }
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return (Builder) super.setField(field, value);
  }
  public Builder clearField(
      com.google.protobuf.Descriptors.FieldDescriptor field) {
    return (Builder) super.clearField(field);
  }
  public Builder clearOneof(
      com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return (Builder) super.clearOneof(oneof);
  }
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return (Builder) super.setRepeatedField(field, index, value);
  }
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return (Builder) super.addRepeatedField(field, value);
  }
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.privacy.dlp.v2beta1.DeidentificationSummary) {
      return mergeFrom((com.google.privacy.dlp.v2beta1.DeidentificationSummary)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  public Builder mergeFrom(com.google.privacy.dlp.v2beta1.DeidentificationSummary other) {
    if (other == com.google.privacy.dlp.v2beta1.DeidentificationSummary.getDefaultInstance()) return this;
    if (other.getTransformedBytes() != 0L) {
      setTransformedBytes(other.getTransformedBytes());
    }
    if (transformationSummariesBuilder_ == null) {
      if (!other.transformationSummaries_.isEmpty()) {
        if (transformationSummaries_.isEmpty()) {
          // Share the other message's (immutable) list until a mutation forces a copy.
          transformationSummaries_ = other.transformationSummaries_;
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          ensureTransformationSummariesIsMutable();
          transformationSummaries_.addAll(other.transformationSummaries_);
        }
        onChanged();
      }
    } else {
      if (!other.transformationSummaries_.isEmpty()) {
        if (transformationSummariesBuilder_.isEmpty()) {
          // Replace an empty field builder with the shared list, re-creating the
          // builder lazily only if the runtime requires field builders.
          transformationSummariesBuilder_.dispose();
          transformationSummariesBuilder_ = null;
          transformationSummaries_ = other.transformationSummaries_;
          bitField0_ = (bitField0_ & ~0x00000002);
          transformationSummariesBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
               getTransformationSummariesFieldBuilder() : null;
        } else {
          transformationSummariesBuilder_.addAllMessages(other.transformationSummaries_);
        }
      }
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }
  public final boolean isInitialized() {
    return true;
  }
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.privacy.dlp.v2beta1.DeidentificationSummary parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was successfully parsed so the finally block can merge it
      // before the exception propagates.
      parsedMessage = (com.google.privacy.dlp.v2beta1.DeidentificationSummary) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  private int bitField0_;
  private long transformedBytes_ ;
  /**
   * <pre>
   * Total size in bytes that were transformed in some way.
   * </pre>
   *
   * <code>int64 transformed_bytes = 2;</code>
   */
  public long getTransformedBytes() {
    return transformedBytes_;
  }
  /**
   * <pre>
   * Total size in bytes that were transformed in some way.
   * </pre>
   *
   * <code>int64 transformed_bytes = 2;</code>
   */
  public Builder setTransformedBytes(long value) {
    transformedBytes_ = value;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Total size in bytes that were transformed in some way.
   * </pre>
   *
   * <code>int64 transformed_bytes = 2;</code>
   */
  public Builder clearTransformedBytes() {
    transformedBytes_ = 0L;
    onChanged();
    return this;
  }
  private java.util.List<com.google.privacy.dlp.v2beta1.TransformationSummary> transformationSummaries_ =
    java.util.Collections.emptyList();
  private void ensureTransformationSummariesIsMutable() {
    // Copy-on-write: take a private ArrayList copy the first time a mutation occurs.
    if (!((bitField0_ & 0x00000002) == 0x00000002)) {
      transformationSummaries_ = new java.util.ArrayList<com.google.privacy.dlp.v2beta1.TransformationSummary>(transformationSummaries_);
      bitField0_ |= 0x00000002;
    }
  }
  private com.google.protobuf.RepeatedFieldBuilderV3<
      com.google.privacy.dlp.v2beta1.TransformationSummary, com.google.privacy.dlp.v2beta1.TransformationSummary.Builder, com.google.privacy.dlp.v2beta1.TransformationSummaryOrBuilder> transformationSummariesBuilder_;
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public java.util.List<com.google.privacy.dlp.v2beta1.TransformationSummary> getTransformationSummariesList() {
    if (transformationSummariesBuilder_ == null) {
      return java.util.Collections.unmodifiableList(transformationSummaries_);
    } else {
      return transformationSummariesBuilder_.getMessageList();
    }
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public int getTransformationSummariesCount() {
    if (transformationSummariesBuilder_ == null) {
      return transformationSummaries_.size();
    } else {
      return transformationSummariesBuilder_.getCount();
    }
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public com.google.privacy.dlp.v2beta1.TransformationSummary getTransformationSummaries(int index) {
    if (transformationSummariesBuilder_ == null) {
      return transformationSummaries_.get(index);
    } else {
      return transformationSummariesBuilder_.getMessage(index);
    }
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public Builder setTransformationSummaries(
      int index, com.google.privacy.dlp.v2beta1.TransformationSummary value) {
    if (transformationSummariesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureTransformationSummariesIsMutable();
      transformationSummaries_.set(index, value);
      onChanged();
    } else {
      transformationSummariesBuilder_.setMessage(index, value);
    }
    return this;
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public Builder setTransformationSummaries(
      int index, com.google.privacy.dlp.v2beta1.TransformationSummary.Builder builderForValue) {
    if (transformationSummariesBuilder_ == null) {
      ensureTransformationSummariesIsMutable();
      transformationSummaries_.set(index, builderForValue.build());
      onChanged();
    } else {
      transformationSummariesBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public Builder addTransformationSummaries(com.google.privacy.dlp.v2beta1.TransformationSummary value) {
    if (transformationSummariesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureTransformationSummariesIsMutable();
      transformationSummaries_.add(value);
      onChanged();
    } else {
      transformationSummariesBuilder_.addMessage(value);
    }
    return this;
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public Builder addTransformationSummaries(
      int index, com.google.privacy.dlp.v2beta1.TransformationSummary value) {
    if (transformationSummariesBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureTransformationSummariesIsMutable();
      transformationSummaries_.add(index, value);
      onChanged();
    } else {
      transformationSummariesBuilder_.addMessage(index, value);
    }
    return this;
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public Builder addTransformationSummaries(
      com.google.privacy.dlp.v2beta1.TransformationSummary.Builder builderForValue) {
    if (transformationSummariesBuilder_ == null) {
      ensureTransformationSummariesIsMutable();
      transformationSummaries_.add(builderForValue.build());
      onChanged();
    } else {
      transformationSummariesBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public Builder addTransformationSummaries(
      int index, com.google.privacy.dlp.v2beta1.TransformationSummary.Builder builderForValue) {
    if (transformationSummariesBuilder_ == null) {
      ensureTransformationSummariesIsMutable();
      transformationSummaries_.add(index, builderForValue.build());
      onChanged();
    } else {
      transformationSummariesBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public Builder addAllTransformationSummaries(
      java.lang.Iterable<? extends com.google.privacy.dlp.v2beta1.TransformationSummary> values) {
    if (transformationSummariesBuilder_ == null) {
      ensureTransformationSummariesIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(
          values, transformationSummaries_);
      onChanged();
    } else {
      transformationSummariesBuilder_.addAllMessages(values);
    }
    return this;
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public Builder clearTransformationSummaries() {
    if (transformationSummariesBuilder_ == null) {
      transformationSummaries_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
    } else {
      transformationSummariesBuilder_.clear();
    }
    return this;
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public Builder removeTransformationSummaries(int index) {
    if (transformationSummariesBuilder_ == null) {
      ensureTransformationSummariesIsMutable();
      transformationSummaries_.remove(index);
      onChanged();
    } else {
      transformationSummariesBuilder_.remove(index);
    }
    return this;
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public com.google.privacy.dlp.v2beta1.TransformationSummary.Builder getTransformationSummariesBuilder(
      int index) {
    return getTransformationSummariesFieldBuilder().getBuilder(index);
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public com.google.privacy.dlp.v2beta1.TransformationSummaryOrBuilder getTransformationSummariesOrBuilder(
      int index) {
    if (transformationSummariesBuilder_ == null) {
      return transformationSummaries_.get(index); } else {
      return transformationSummariesBuilder_.getMessageOrBuilder(index);
    }
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public java.util.List<? extends com.google.privacy.dlp.v2beta1.TransformationSummaryOrBuilder>
       getTransformationSummariesOrBuilderList() {
    if (transformationSummariesBuilder_ != null) {
      return transformationSummariesBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(transformationSummaries_);
    }
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public com.google.privacy.dlp.v2beta1.TransformationSummary.Builder addTransformationSummariesBuilder() {
    return getTransformationSummariesFieldBuilder().addBuilder(
        com.google.privacy.dlp.v2beta1.TransformationSummary.getDefaultInstance());
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public com.google.privacy.dlp.v2beta1.TransformationSummary.Builder addTransformationSummariesBuilder(
      int index) {
    return getTransformationSummariesFieldBuilder().addBuilder(
        index, com.google.privacy.dlp.v2beta1.TransformationSummary.getDefaultInstance());
  }
  /**
   * <pre>
   * Transformations applied to the dataset.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2beta1.TransformationSummary transformation_summaries = 3;</code>
   */
  public java.util.List<com.google.privacy.dlp.v2beta1.TransformationSummary.Builder>
       getTransformationSummariesBuilderList() {
    return getTransformationSummariesFieldBuilder().getBuilderList();
  }
  // Lazily creates the nested field builder; once created it owns the list and
  // transformationSummaries_ is nulled out so there is a single source of truth.
  private com.google.protobuf.RepeatedFieldBuilderV3<
      com.google.privacy.dlp.v2beta1.TransformationSummary, com.google.privacy.dlp.v2beta1.TransformationSummary.Builder, com.google.privacy.dlp.v2beta1.TransformationSummaryOrBuilder>
      getTransformationSummariesFieldBuilder() {
    if (transformationSummariesBuilder_ == null) {
      transformationSummariesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.privacy.dlp.v2beta1.TransformationSummary, com.google.privacy.dlp.v2beta1.TransformationSummary.Builder, com.google.privacy.dlp.v2beta1.TransformationSummaryOrBuilder>(
              transformationSummaries_,
              ((bitField0_ & 0x00000002) == 0x00000002),
              getParentForChildren(),
              isClean());
      transformationSummaries_ = null;
    }
    return transformationSummariesBuilder_;
  }
  public final Builder setUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFieldsProto3(unknownFields);
  }
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2beta1.DeidentificationSummary)
}
// @@protoc_insertion_point(class_scope:google.privacy.dlp.v2beta1.DeidentificationSummary)
// Canonical empty instance shared by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.privacy.dlp.v2beta1.DeidentificationSummary DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.privacy.dlp.v2beta1.DeidentificationSummary();
}
public static com.google.privacy.dlp.v2beta1.DeidentificationSummary getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser delegating to the parsing constructor; used by all parseFrom overloads.
private static final com.google.protobuf.Parser<DeidentificationSummary>
    PARSER = new com.google.protobuf.AbstractParser<DeidentificationSummary>() {
  public DeidentificationSummary parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
      return new DeidentificationSummary(input, extensionRegistry);
  }
};
public static com.google.protobuf.Parser<DeidentificationSummary> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeidentificationSummary> getParserForType() {
  return PARSER;
}
public com.google.privacy.dlp.v2beta1.DeidentificationSummary getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.models.glove;
import lombok.NonNull;
import org.deeplearning4j.models.embeddings.WeightLookupTable;
import org.deeplearning4j.models.embeddings.learning.impl.elements.GloVe;
import org.deeplearning4j.models.embeddings.loader.VectorsConfiguration;
import org.deeplearning4j.models.embeddings.reader.ModelUtils;
import org.deeplearning4j.models.embeddings.wordvectors.WordVectors;
import org.deeplearning4j.models.sequencevectors.SequenceVectors;
import org.deeplearning4j.models.sequencevectors.interfaces.SequenceIterator;
import org.deeplearning4j.models.sequencevectors.interfaces.VectorsListener;
import org.deeplearning4j.models.sequencevectors.iterators.AbstractSequenceIterator;
import org.deeplearning4j.models.sequencevectors.transformers.impl.SentenceTransformer;
import org.deeplearning4j.models.word2vec.VocabWord;
import org.deeplearning4j.models.word2vec.wordstore.VocabCache;
import org.deeplearning4j.text.documentiterator.DocumentIterator;
import org.deeplearning4j.text.sentenceiterator.SentenceIterator;
import org.deeplearning4j.text.sentenceiterator.StreamLineIterator;
import org.deeplearning4j.text.tokenization.tokenizerfactory.TokenizerFactory;
import java.util.Collection;
import java.util.List;
/**
* GlobalVectors standalone implementation for DL4j.
* Based on original Stanford GloVe <a href="http://www-nlp.stanford.edu/pubs/glove.pdf">http://www-nlp.stanford.edu/pubs/glove.pdf</a>
*
* @author raver119@gmail.com
*/
public class Glove extends SequenceVectors<VocabWord> {
// Not for direct use: instances are configured and created via Glove.Builder#build().
protected Glove() {
}
public static class Builder extends SequenceVectors.Builder<VocabWord> {
        // GloVe weighting-function cutoff (see xMax(double); Stanford default is 100.0).
        private double xMax;
        // Whether the co-occurrence list is shuffled between training epochs.
        private boolean shuffle;
        // Whether co-occurrences are counted in both directions around each word.
        private boolean symmetric;
        // Exponent of the weighting function f(x) = (x/xMax)^alpha.
        protected double alpha = 0.75d;
        // Memory cap (GB) for the co-occurrence map builder.
        // NOTE(review): default uses Runtime.totalMemory() (current heap size), not
        // Runtime.maxMemory() (-Xmx cap) — confirm this is intentional; on a freshly
        // started JVM this can be far smaller than the available maximum.
        private int maxmemory = (int) (Runtime.getRuntime().totalMemory() / 1024 / 1024 / 1024);
        protected TokenizerFactory tokenFactory;
        protected SentenceIterator sentenceIterator;
        protected DocumentIterator documentIterator;
        public Builder() {
            super();
        }
        public Builder(@NonNull VectorsConfiguration configuration) {
            super(configuration);
        }
        /**
         * This method has no effect for GloVe: existing word vectors cannot be reused,
         * since GloVe trains from a co-occurrence matrix built from scratch.
         *
         * @param vec existing WordVectors model
         * @return this builder (unchanged)
         */
        @Override
        public Builder useExistingWordVectors(@NonNull WordVectors vec) {
            return this;
        }
        @Override
        public Builder iterate(@NonNull SequenceIterator<VocabWord> iterator) {
            super.iterate(iterator);
            return this;
        }
        /**
         * Specifies minibatch size for training process.
         *
         * @param batchSize
         * @return
         */
        @Override
        public Builder batchSize(int batchSize) {
            super.batchSize(batchSize);
            return this;
        }
        /**
         * Iterations and epochs are the same in GloVe implementation.
         *
         * @param iterations
         * @return
         */
        @Override
        public Builder iterations(int iterations) {
            // Deliberately routed to epochs(): each GloVe "iteration" is a full pass
            // over the co-occurrence list.
            super.epochs(iterations);
            return this;
        }
        /**
         * Sets the number of iteration over training corpus during training
         *
         * @param numEpochs
         * @return
         */
        @Override
        public Builder epochs(int numEpochs) {
            super.epochs(numEpochs);
            return this;
        }
        @Override
        public Builder useAdaGrad(boolean reallyUse) {
            // The argument is intentionally ignored: GloVe training always requires
            // AdaGrad, so this is forced to true regardless of the caller's value.
            super.useAdaGrad(true);
            return this;
        }
        @Override
        public Builder layerSize(int layerSize) {
            super.layerSize(layerSize);
            return this;
        }
        @Override
        public Builder learningRate(double learningRate) {
            super.learningRate(learningRate);
            return this;
        }
        /**
         * Sets minimum word frequency during vocabulary mastering.
         * Please note: this option is ignored, if vocabulary is built outside of GloVe
         *
         * @param minWordFrequency
         * @return
         */
        @Override
        public Builder minWordFrequency(int minWordFrequency) {
            super.minWordFrequency(minWordFrequency);
            return this;
        }
        @Override
        public Builder minLearningRate(double minLearningRate) {
            super.minLearningRate(minLearningRate);
            return this;
        }
        @Override
        public Builder resetModel(boolean reallyReset) {
            super.resetModel(reallyReset);
            return this;
        }
        @Override
        public Builder vocabCache(@NonNull VocabCache<VocabWord> vocabCache) {
            super.vocabCache(vocabCache);
            return this;
        }
        @Override
        public Builder lookupTable(@NonNull WeightLookupTable<VocabWord> lookupTable) {
            super.lookupTable(lookupTable);
            return this;
        }
        // Sampling and negative sampling are word2vec concepts with no effect on GloVe;
        // both setters are kept only for builder compatibility.
        @Override
        @Deprecated
        public Builder sampling(double sampling) {
            super.sampling(sampling);
            return this;
        }
        @Override
        @Deprecated
        public Builder negativeSample(double negative) {
            super.negativeSample(negative);
            return this;
        }
        @Override
        public Builder stopWords(@NonNull List<String> stopList) {
            super.stopWords(stopList);
            return this;
        }
        @Override
        public Builder trainElementsRepresentation(boolean trainElements) {
            // Argument intentionally ignored: GloVe always trains element (word)
            // representations, so this is forced to true.
            super.trainElementsRepresentation(true);
            return this;
        }
        @Override
        @Deprecated
        public Builder trainSequencesRepresentation(boolean trainSequences) {
            // Argument intentionally ignored: GloVe has no sequence-level vectors,
            // so this is forced to false.
            super.trainSequencesRepresentation(false);
            return this;
        }
        @Override
        public Builder stopWords(@NonNull Collection<VocabWord> stopList) {
            super.stopWords(stopList);
            return this;
        }
        @Override
        public Builder windowSize(int windowSize) {
            super.windowSize(windowSize);
            return this;
        }
        @Override
        public Builder seed(long randomSeed) {
            super.seed(randomSeed);
            return this;
        }
        @Override
        public Builder workers(int numWorkers) {
            super.workers(numWorkers);
            return this;
        }
        /**
         * Sets TokenizerFactory to be used for training
         *
         * @param tokenizerFactory
         * @return
         */
        public Builder tokenizerFactory(@NonNull TokenizerFactory tokenizerFactory) {
            this.tokenFactory = tokenizerFactory;
            return this;
        }
        /**
         * Parameter specifying cutoff in weighting function; default 100.0
         *
         * @param xMax
         * @return
         */
        public Builder xMax(double xMax) {
            this.xMax = xMax;
            return this;
        }
        /**
         * Parameter specifying whether the co-occurrence list should be built in both
         * directions from any current word.
         *
         * @param reallySymmetric
         * @return
         */
        public Builder symmetric(boolean reallySymmetric) {
            this.symmetric = reallySymmetric;
            return this;
        }
        /**
         * Parameter specifying whether the co-occurrence list should be shuffled between training epochs
         *
         * @param reallyShuffle
         * @return
         */
        public Builder shuffle(boolean reallyShuffle) {
            this.shuffle = reallyShuffle;
            return this;
        }
        /**
         * This method has no effect for GloVe: variable windows are not supported.
         *
         * @param windows
         * @return this builder (unchanged)
         */
        @Override
        public Builder useVariableWindow(int... windows) {
            // no-op
            return this;
        }
        /**
         * Parameter in exponent of weighting function; default 0.75
         *
         * @param alpha
         * @return
         */
        public Builder alpha(double alpha) {
            this.alpha = alpha;
            return this;
        }
        // Stores the sentence source; it is wrapped into a SequenceIterator in build().
        public Builder iterate(@NonNull SentenceIterator iterator) {
            this.sentenceIterator = iterator;
            return this;
        }
public Builder iterate(@NonNull DocumentIterator iterator) {
this.sentenceIterator = new StreamLineIterator.Builder(iterator).setFetchSize(100).build();
return this;
}
/**
* Sets ModelUtils that gonna be used as provider for utility methods: similarity(), wordsNearest(), accuracy(), etc
*
* @param modelUtils model utils to be used
* @return
*/
@Override
public Builder modelUtils(@NonNull ModelUtils<VocabWord> modelUtils) {
super.modelUtils(modelUtils);
return this;
}
/**
 * Registers the VectorsListeners to be notified during training of this model.
 *
 * @param vectorsListeners listeners to attach; must not be null
 * @return this builder, for chaining
 */
@Override
public Builder setVectorsListeners(@NonNull Collection<VectorsListener<VocabWord>> vectorsListeners) {
super.setVectorsListeners(vectorsListeners);
return this;
}
/**
 * Specifies the maximum memory available to the co-occurrence map builder.
 *
 * Please note: this option can be considered a debugging aid. In most cases setting a proper -Xmx JVM argument is enough to limit this algorithm.
 * Please note: this option won't override the -Xmx JVM value.
 *
 * @param gbytes memory limit, in gigabytes
 * @return this builder, for chaining
 */
public Builder maxMemory(int gbytes) {
this.maxmemory = gbytes;
return this;
}
/**
 * Sets the element used as the UNK (unknown-word) token, when UNK handling is enabled.
 *
 * @param element the element representing unknown words
 * @return this builder, for chaining
 */
@Override
public Builder unknownElement(VocabWord element) {
super.unknownElement(element);
return this;
}
/**
 * Specifies whether an UNK (unknown) token should be used internally.
 * As a side effect, if no unknown element has been set yet, a default one
 * (Glove.DEFAULT_UNK) is installed.
 *
 * @param reallyUse true to enable the UNK token
 * @return this builder, for chaining
 */
@Override
public Builder useUnknown(boolean reallyUse) {
super.useUnknown(reallyUse);
// NOTE(review): the default UNK element is installed even when reallyUse is
// false — confirm this is intended.
if (this.unknownElement == null) {
this.unknownElement(new VocabWord(1.0, Glove.DEFAULT_UNK));
}
return this;
}
/**
 * Assembles and returns a configured {@link Glove} model from this builder's
 * settings, mirroring the relevant values into the shared configuration object
 * attached to the returned instance.
 *
 * @return a fully configured Glove instance
 */
public Glove build() {
presetTables();
Glove ret = new Glove();
// If a sentence/document source was configured, wrap it into the sequence
// iterator the trainer consumes (tokenizing each sentence on the way).
if (sentenceIterator != null) {
SentenceTransformer transformer = new SentenceTransformer.Builder().iterator(sentenceIterator)
.tokenizerFactory(tokenFactory).build();
this.iterator = new AbstractSequenceIterator.Builder<>(transformer).build();
}
// GloVe trains element (word) vectors only; sequence vectors are disabled.
ret.trainElementsVectors = true;
ret.trainSequenceVectors = false;
// AdaGrad is hardcoded on for GloVe. NOTE: 'useAdeGrad' is the actual
// (historically misspelled) field name on the model class.
ret.useAdeGrad = true;
this.useAdaGrad = true;
ret.learningRate.set(this.learningRate);
ret.resetModel = this.resetModel;
ret.batchSize = this.batchSize;
ret.iterator = this.iterator;
ret.numEpochs = this.numEpochs;
ret.numIterations = this.iterations;
ret.layerSize = this.layerSize;
ret.useUnknown = this.useUnknown;
ret.unknownElement = this.unknownElement;
// Mirror the builder settings into the shared configuration object as well.
this.configuration.setLearningRate(this.learningRate);
this.configuration.setLayersSize(layerSize);
this.configuration.setHugeModelExpected(hugeModelExpected);
this.configuration.setWindow(window);
this.configuration.setMinWordFrequency(minWordFrequency);
this.configuration.setIterations(iterations);
this.configuration.setSeed(seed);
this.configuration.setBatchSize(batchSize);
this.configuration.setLearningRateDecayWords(learningRateDecayWords);
this.configuration.setMinLearningRate(minLearningRate);
this.configuration.setSampling(this.sampling);
this.configuration.setUseAdaGrad(useAdaGrad);
this.configuration.setNegative(negative);
this.configuration.setEpochs(this.numEpochs);
ret.configuration = this.configuration;
ret.lookupTable = this.lookupTable;
ret.vocab = this.vocabCache;
ret.modelUtils = this.modelUtils;
ret.eventListeners = this.vectorsListeners;
// The GloVe learning algorithm receives the weighting-function parameters
// (xMax, alpha), the shuffle/symmetry flags and the memory cap configured above.
ret.elementsLearningAlgorithm = new GloVe.Builder<VocabWord>().learningRate(this.learningRate)
.shuffle(this.shuffle).symmetric(this.symmetric).xMax(this.xMax).alpha(this.alpha)
.maxMemory(maxmemory).build();
return ret;
}
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Contains the result of a successful invocation of the <code>DescribeReservedDBInstances</code> action.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/DescribeReservedDBInstances" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeReservedDBInstancesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * Optional pagination token from a previous request; when present, the response
     * holds only records beyond it, up to the value of <code>MaxRecords</code>.
     */
    private String marker;

    /** The reserved DB instances returned by the call. */
    private com.amazonaws.internal.SdkInternalList<ReservedDBInstance> reservedDBInstances;

    /**
     * Sets the optional pagination token provided by a previous request.
     *
     * @param marker the pagination token
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the optional pagination token provided by a previous request.
     *
     * @return the pagination token, or null
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * Fluent variant of {@link #setMarker(String)}.
     *
     * @param marker the pagination token
     * @return this object, so that method calls can be chained together
     */
    public DescribeReservedDBInstancesResult withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * Returns the list of reserved DB instances, lazily creating an empty list on
     * first access.
     *
     * @return the list of reserved DB instances; never null
     */
    public java.util.List<ReservedDBInstance> getReservedDBInstances() {
        if (reservedDBInstances == null) {
            reservedDBInstances = new com.amazonaws.internal.SdkInternalList<ReservedDBInstance>();
        }
        return reservedDBInstances;
    }

    /**
     * Replaces the list of reserved DB instances with a copy of the given
     * collection, or clears it when null.
     *
     * @param reservedDBInstances the reserved DB instances, or null
     */
    public void setReservedDBInstances(java.util.Collection<ReservedDBInstance> reservedDBInstances) {
        this.reservedDBInstances = (reservedDBInstances == null) ? null
                : new com.amazonaws.internal.SdkInternalList<ReservedDBInstance>(reservedDBInstances);
    }

    /**
     * Appends the given values to the existing list (if any). Use
     * {@link #setReservedDBInstances(java.util.Collection)} or
     * {@link #withReservedDBInstances(java.util.Collection)} to replace the
     * existing values instead.
     *
     * @param reservedDBInstances the reserved DB instances to append
     * @return this object, so that method calls can be chained together
     */
    public DescribeReservedDBInstancesResult withReservedDBInstances(ReservedDBInstance... reservedDBInstances) {
        if (this.reservedDBInstances == null) {
            setReservedDBInstances(new com.amazonaws.internal.SdkInternalList<ReservedDBInstance>(reservedDBInstances.length));
        }
        java.util.Collections.addAll(this.reservedDBInstances, reservedDBInstances);
        return this;
    }

    /**
     * Fluent variant of {@link #setReservedDBInstances(java.util.Collection)}.
     *
     * @param reservedDBInstances the reserved DB instances, or null
     * @return this object, so that method calls can be chained together
     */
    public DescribeReservedDBInstancesResult withReservedDBInstances(java.util.Collection<ReservedDBInstance> reservedDBInstances) {
        setReservedDBInstances(reservedDBInstances);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and
     * debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getMarker() != null) {
            sb.append("Marker: ").append(getMarker()).append(",");
        }
        if (getReservedDBInstances() != null) {
            sb.append("ReservedDBInstances: ").append(getReservedDBInstances());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so a separate null check is unnecessary.
        if (!(obj instanceof DescribeReservedDBInstancesResult)) {
            return false;
        }
        DescribeReservedDBInstancesResult other = (DescribeReservedDBInstancesResult) obj;
        return java.util.Objects.equals(getMarker(), other.getMarker())
                && java.util.Objects.equals(getReservedDBInstances(), other.getReservedDBInstances());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based accumulation over both fields
        // as the conventional hand-written loop.
        return java.util.Objects.hash(getMarker(), getReservedDBInstances());
    }

    @Override
    public DescribeReservedDBInstancesResult clone() {
        try {
            return (DescribeReservedDBInstancesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
package psidev.psi.mi.jami.model.impl;
import psidev.psi.mi.jami.listener.EntityInteractorChangeListener;
import psidev.psi.mi.jami.model.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
/**
* Abstract class for Entity
*
* @author Marine Dumousseau (marine@ebi.ac.uk)
* @version $Id$
* @since <pre>09/07/13</pre>
*/
public abstract class AbstractEntity<F extends Feature> implements Entity<F> {

    private Interactor interactor;
    private Stoichiometry stoichiometry;
    // Both collections are initialised lazily by the getters / initialise* hooks.
    private Collection<CausalRelationship> causalRelationships;
    private Collection<F> features;
    private EntityInteractorChangeListener changeListener;

    /**
     * Creates an entity wrapping the given interactor.
     *
     * @param interactor the interactor; must not be null
     * @throws IllegalArgumentException if interactor is null
     */
    public AbstractEntity(Interactor interactor){
        if (interactor == null){
            throw new IllegalArgumentException("The interactor cannot be null.");
        }
        this.interactor = interactor;
    }

    /**
     * Creates an entity wrapping the given interactor with a stoichiometry.
     *
     * @param interactor the interactor; must not be null
     * @param stoichiometry the stoichiometry; may be null
     * @throws IllegalArgumentException if interactor is null
     */
    public AbstractEntity(Interactor interactor, Stoichiometry stoichiometry){
        this(interactor);
        this.stoichiometry = stoichiometry;
    }

    /** Lazily initialises the features collection as an empty, modifiable list. */
    protected void initialiseFeatures(){
        this.features = new ArrayList<F>();
    }

    /** Lazily initialises the causal relationships collection as an empty, modifiable list. */
    protected void initialiseCausalRelationships(){
        this.causalRelationships = new ArrayList<CausalRelationship>();
    }

    /**
     * Initialises the causal relationships with the provided collection, or an
     * immutable empty list when null.
     *
     * @param relationships the backing collection, or null for an empty immutable list
     */
    protected void initialiseCausalRelationshipsWith(Collection<CausalRelationship> relationships) {
        if (relationships == null){
            // Typed emptyList() instead of the raw Collections.EMPTY_LIST to
            // avoid an unchecked-assignment warning.
            this.causalRelationships = Collections.<CausalRelationship>emptyList();
        }
        else {
            this.causalRelationships = relationships;
        }
    }

    /**
     * Initialises the features with the provided collection, or an immutable
     * empty list when null.
     *
     * @param features the backing collection, or null for an empty immutable list
     */
    protected void initialiseFeaturesWith(Collection<F> features) {
        if (features == null){
            this.features = Collections.<F>emptyList();
        }
        else {
            this.features = features;
        }
    }

    /**
     * Returns the wrapped interactor.
     *
     * @return the interactor; never null
     */
    public Interactor getInteractor() {
        return this.interactor;
    }

    /** {@inheritDoc} */
    public void setInteractor(Interactor interactor) {
        if (interactor == null){
            throw new IllegalArgumentException("The interactor cannot be null.");
        }
        Interactor oldInteractor = this.interactor;
        this.interactor = interactor;
        // Notify the registered listener (if any) of the interactor change.
        if (this.changeListener != null){
            this.changeListener.onInteractorUpdate(this, oldInteractor);
        }
    }

    /**
     * Returns the causal relationships, lazily initialised.
     *
     * @return the causal relationships collection; never null
     */
    public Collection<CausalRelationship> getCausalRelationships() {
        if (this.causalRelationships == null){
            initialiseCausalRelationships();
        }
        return this.causalRelationships;
    }

    /**
     * Returns the stoichiometry.
     *
     * @return the stoichiometry, or null when not set
     */
    public Stoichiometry getStoichiometry() {
        return this.stoichiometry;
    }

    /** {@inheritDoc} */
    public void setStoichiometry(Integer stoichiometry) {
        if (stoichiometry == null){
            this.stoichiometry = null;
        }
        else {
            // A single integer is interpreted as an exact value (min == max).
            this.stoichiometry = new DefaultStoichiometry(stoichiometry, stoichiometry);
        }
    }

    /**
     * Sets the stoichiometry.
     *
     * @param stoichiometry the stoichiometry, or null to clear it
     */
    public void setStoichiometry(Stoichiometry stoichiometry) {
        this.stoichiometry = stoichiometry;
    }

    /**
     * Returns the features, lazily initialised.
     *
     * @return the features collection; never null
     */
    public Collection<F> getFeatures() {
        if (features == null){
            initialiseFeatures();
        }
        return this.features;
    }

    /**
     * Returns the registered interactor change listener.
     *
     * @return the listener, or null when none is registered
     */
    public EntityInteractorChangeListener getChangeListener() {
        return this.changeListener;
    }

    /** {@inheritDoc} */
    public void setChangeListener(EntityInteractorChangeListener listener) {
        this.changeListener = listener;
    }

    /**
     * Adds a feature and registers this entity as its participant.
     *
     * @param feature the feature to add; null is ignored
     * @return true if the feature was added
     */
    public boolean addFeature(F feature) {
        if (feature == null){
            return false;
        }
        if (getFeatures().add(feature)){
            feature.setParticipant(this);
            return true;
        }
        return false;
    }

    /**
     * Removes a feature and clears its participant.
     *
     * @param feature the feature to remove; null is ignored
     * @return true if the feature was removed
     */
    public boolean removeFeature(F feature) {
        if (feature == null){
            return false;
        }
        if (getFeatures().remove(feature)){
            feature.setParticipant(null);
            return true;
        }
        return false;
    }

    /** {@inheritDoc} */
    public boolean addAllFeatures(Collection<? extends F> features) {
        if (features == null){
            return false;
        }
        boolean added = false;
        for (F feature : features){
            if (addFeature(feature)){
                added = true;
            }
        }
        return added;
    }

    /** {@inheritDoc} */
    public boolean removeAllFeatures(Collection<? extends F> features) {
        if (features == null){
            return false;
        }
        // Renamed from 'added' for clarity: tracks whether any feature was removed.
        boolean removed = false;
        for (F feature : features){
            if (removeFeature(feature)){
                removed = true;
            }
        }
        return removed;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return "Entity: "+getInteractor().toString() + (getStoichiometry() != null ? ", stoichiometry: " + getStoichiometry().toString() : "");
    }
}
| |
package com.eco.Economy.TileEntitys;
import com.eco.Economy.Items.Currency.CurrencyItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.tileentity.TileEntity;
import net.minecraftforge.common.util.Constants;
public class TileEntitySafe extends TileEntity implements IInventory {
public boolean top;
public boolean Open;
public float DoorRotate;
public int TotalAmountStored;
public String Placer = NULL_STRING;
public static String NULL_STRING ="ERROR";
public static String EMPTY_GUI_STRING = "ERROR_EMPTY_OWNER";
public void updateEntity(){
TileEntitySafe tile = null;
if(top){
if(worldObj.getTileEntity(xCoord, yCoord - 1, zCoord) instanceof TileEntitySafe){
tile = (TileEntitySafe)worldObj.getTileEntity(xCoord, yCoord - 1, zCoord);
}
}else{
if(worldObj.getTileEntity(xCoord, yCoord + 1, zCoord) instanceof TileEntitySafe){
tile = (TileEntitySafe)worldObj.getTileEntity(xCoord, yCoord + 1, zCoord);
}
}
if(tile != null) {
if (Open || tile.Open) {
if (DoorRotate > -2) {
DoorRotate -= 0.03;
}
} else if (!Open && !tile.Open) {
if (DoorRotate < 0)
DoorRotate += 0.03;
}
}
}
public String GetGuiOwner(){
if(!Placer.equalsIgnoreCase(NULL_STRING))
return Placer;
else
return EMPTY_GUI_STRING;
}
public void setOpen(){
Open = true;
if(top){
if(worldObj.getTileEntity(xCoord, yCoord - 1, zCoord) instanceof TileEntitySafe){
TileEntitySafe tile = (TileEntitySafe)worldObj.getTileEntity(xCoord, yCoord - 1, zCoord);
tile.Open = true;
}
}else{
if(worldObj.getTileEntity(xCoord, yCoord + 1, zCoord) instanceof TileEntitySafe){
TileEntitySafe tile = (TileEntitySafe)worldObj.getTileEntity(xCoord, yCoord + 1, zCoord);
tile.Open = true;
}
}
}
public void setClosed(){
Open = false;
if(top){
if(worldObj.getTileEntity(xCoord, yCoord - 1, zCoord) instanceof TileEntitySafe){
TileEntitySafe tile = (TileEntitySafe)worldObj.getTileEntity(xCoord, yCoord - 1, zCoord);
tile.Open = false;
}
}else{
if(worldObj.getTileEntity(xCoord, yCoord + 1, zCoord) instanceof TileEntitySafe){
TileEntitySafe tile = (TileEntitySafe)worldObj.getTileEntity(xCoord, yCoord + 1, zCoord);
tile.Open = false;
}
}
}
public void SetOwner(String n, boolean First){
if(!n.equalsIgnoreCase(NULL_STRING)){
if(First){
if(top){
if(worldObj.getTileEntity(xCoord, yCoord - 1, zCoord) instanceof TileEntitySafe){
TileEntitySafe tile = (TileEntitySafe)worldObj.getTileEntity(xCoord, yCoord - 1, zCoord);
tile.SetOwner(n, false);
}
}else if (!top){
if(worldObj.getTileEntity(xCoord, yCoord + 1, zCoord) instanceof TileEntitySafe){
TileEntitySafe tile = (TileEntitySafe)worldObj.getTileEntity(xCoord, yCoord + 1, zCoord);
tile.SetOwner(n, false);
}
}
}
Placer = n;
}
}
public TileEntitySafe(){
Items = new ItemStack[this.getSizeInventory()];
}
ItemStack[] Items;
public void writeToNBT(NBTTagCompound nbt)
{
super.writeToNBT(nbt);
nbt.setBoolean("IsTop", top);
nbt.setString("Pl", Placer);
nbt.setInteger("Amount", TotalAmountStored);
NBTTagList Items = new NBTTagList();
for (int i = 0; i < getSizeInventory(); i++){
ItemStack stack = getStackInSlot(i);
if(stack != null){
NBTTagCompound item = new NBTTagCompound();
item.setByte("Slot", (byte)i);
stack.writeToNBT(item);
Items.appendTag(item);
}
}
nbt.setTag("Items", Items);
}
public void readFromNBT(NBTTagCompound nbt)
{
super.readFromNBT(nbt);
top = nbt.getBoolean("IsTop");
Placer = nbt.getString("Pl");
TotalAmountStored = nbt.getInteger("Amount");
NBTTagList nbttaglist = nbt.getTagList("Items", Constants.NBT.TAG_COMPOUND);
Items = new ItemStack[getSizeInventory()];
for (int i = 0; i < nbttaglist.tagCount(); i++)
{
NBTTagCompound nbttagcompound1 = nbttaglist.getCompoundTagAt(i);
int j = nbttagcompound1.getByte("Slot") & 0xff;
if (j >= 0 && j < Items.length)
{
this.setInventorySlotContents(j, ItemStack.loadItemStackFromNBT(nbttagcompound1));
}
}
}
public int GetAmount(){
return TotalAmountStored;
}
public void SetAmount(int i){
TotalAmountStored = i;
}
@Override
public int getSizeInventory() {
return 54;
}
@Override
public ItemStack getStackInSlot(int i) {
return Items[i];
}
@Override
public ItemStack decrStackSize(int i, int j) {
ItemStack itemstack = getStackInSlot(i);
if(itemstack != null){
if(itemstack.stackSize <= j){
setInventorySlotContents(i, null);
}else{
itemstack = itemstack.splitStack(j);
}
}
return itemstack;
}
@Override
public ItemStack getStackInSlotOnClosing(int i) {
ItemStack item = getStackInSlot(i);
setInventorySlotContents(i, null);
return item;
}
@Override
public void setInventorySlotContents(int i, ItemStack itemstack) {
Items[i] = itemstack;
if(itemstack != null && itemstack.stackSize > getInventoryStackLimit()){
itemstack.stackSize = getInventoryStackLimit();
}
}
public void UpdateAmount(){
TotalAmountStored = 0;
for(int i = 0; i < Items.length; i++){
if(Items[i] != null && Items[i].getItem() != null && Items[i].getItem() instanceof CurrencyItem){
CurrencyItem item = (CurrencyItem)Items[i].getItem();
TotalAmountStored += (item.Value() * Items[i].stackSize);
}
}
}
public void InvChanged(){
UpdateAmount();
if(top){
if(worldObj.getTileEntity(xCoord, yCoord - 1, zCoord) instanceof TileEntitySafe){
TileEntitySafe tile = (TileEntitySafe)worldObj.getTileEntity(xCoord, yCoord - 1, zCoord);
tile.Items = Items;
tile.UpdateAmount();
}
}else{
if(worldObj.getTileEntity(xCoord, yCoord + 1, zCoord) instanceof TileEntitySafe){
TileEntitySafe tile = (TileEntitySafe)worldObj.getTileEntity(xCoord, yCoord + 1, zCoord);
tile.Items = Items;
tile.UpdateAmount();
}
}
}
@Override
public String getInventoryName() {
return "Safe";
}
@Override
public boolean hasCustomInventoryName() {
return false;
}
@Override
public int getInventoryStackLimit() {
return 128;
}
@Override
public boolean isUseableByPlayer(EntityPlayer var1) {
return var1.getDistanceSq(xCoord, yCoord, zCoord) <= 64;
}
@Override
public void openInventory() {
setOpen();
}
@Override
public void closeInventory() {
setClosed();
}
@Override
public boolean isItemValidForSlot(int var1, ItemStack var2) {
return var2.getItem() instanceof CurrencyItem;
}
}
| |
/*
* Copyright (c) 2002-2021, City of Paris
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright notice
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice
* and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* License 1.0
*/
package fr.paris.lutece.portal.service.content;
import java.sql.Timestamp;
/**
* This class provides a structure to build portal pages.
*/
public class PageData
{
// //////////////////////////////////////////////////////////////////////////
// Constants
private static final String EMPTY_STRING = "";
private String _strName;
private String _strFavourite;
private String _strCssUrl;
private String _strCustomizeCssUrl;
private String _strPluginsCssUrl;
private String _strMetaAuthor;
private String _strMetaCopyright;
private String _strMetaKeywords;
private String _strMetaDescription;
private String _strHeader;
private String _strMenu;
private String _strPagePath;
private String _strContent;
private String _strFavicon;
private String _strTreeMenu;
private String _strTheme;
private boolean _bIsHomePage;
private Timestamp _tsDateUpdate;
private boolean _bDisplayDateUpdate;
/**
* Returns the name of the page
*
* @return The name of the page as a string.
*/
public String getName( )
{
return _strName;
}
/**
* Sets the name of the page to the specified string.
*
* @param strName
* The new name of the page.
*/
public void setName( String strName )
{
_strName = strName;
}
/**
* Returns the favourite of the page
*
* @return The favourite of the page as a string.
*/
public String getFavourite( )
{
return _strFavourite;
}
/**
* Sets the favourite of the page to the specified string.
*
* @param strFavourite
* The new favourite of the page.
*/
public void setFavourite( String strFavourite )
{
_strFavourite = strFavourite;
}
/**
* Returns the URL of the Cascading Style Sheet associated to this page
*
* @return the URL of the Cascading Style Sheet associated to this page as a String.
*/
public String getCssUrl( )
{
return _strCssUrl;
}
/**
* Sets the URL of the Cascading Style Sheet associated to this page
*
* @param strCssUrl
* Sets the URL of the Cascading Style Sheet associated to this page to the specified string.
*/
public void setCssUrl( String strCssUrl )
{
_strCssUrl = strCssUrl;
}
/**
* Returns the URL of the Customize Cascading Style Sheet associated to this page
*
* @return the URL of the Customize Cascading Style Sheet associated to this page as a String.
*/
public String getCustomizeCssUrl( )
{
return _strCustomizeCssUrl;
}
/**
* Sets the URL of the Customize Cascading Style Sheet associated to this page
*
* @param strCustomizeCssUrl
* Sets the URL of the Customize Cascading Style Sheet associated to this page to the specified string.
*/
public void setCustomizeCssUrl( String strCustomizeCssUrl )
{
_strCustomizeCssUrl = strCustomizeCssUrl;
}
/**
* Returns the URL of the Plugins Cascading Style Sheet associated to this page
*
* @return the URL of the Plugins Cascading Style Sheet associated to this page as a String.
*/
public String getPluginsCssUrl( )
{
return _strPluginsCssUrl;
}
/**
* Sets the URL of the Plugins Cascading Style Sheet associated to this page
*
* @param strPluginsCssUrl
* Sets the URL of the Plugins Cascading Style Sheet associated to this page to the specified string.
*/
public void setPluginsCssUrl( String strPluginsCssUrl )
{
_strPluginsCssUrl = strPluginsCssUrl;
}
/**
* Returns Author to mention in the META tags of the page.
*
* @return Author to mention in the META tags of the page as a String.
*/
public String getMetaAuthor( )
{
return _strMetaAuthor;
}
/**
* Sets Author to mention in the META tags of the page.
*
* @param strMetaAuthor
* The Author to mention in the META tags of the page
*/
public void setMetaAuthor( String strMetaAuthor )
{
_strMetaAuthor = strMetaAuthor;
}
/**
* Returns Copyright to mention in the META tags of the page.
*
* @return Copyright to mention in the META tags of the page as a String.
*/
public String getMetaCopyright( )
{
return _strMetaCopyright;
}
/**
* Sets Copyright to mention in the META tags of the page.
*
* @param strMetaCopyright
* The Copyright to mention in the META tags of the page
*/
public void setMetaCopyright( String strMetaCopyright )
{
_strMetaCopyright = strMetaCopyright;
}
/**
* Returns Keywords to mention in the META tags of the page.
*
* @return Keywords to mention in the META tags of the page as a String.
*/
public String getMetaKeywords( )
{
return _strMetaKeywords;
}
/**
* Sets Keywords to mention in the META tags of the page.
*
* @param strMetaKeywords
* The Keywords to mention in the META tags of the page.
*/
public void setMetaKeywords( String strMetaKeywords )
{
_strMetaKeywords = strMetaKeywords;
}
/**
* Returns Description to mention in the META tags of the page.
*
* @return Description to mention in the META tags of the page as a String.
*/
public String getMetaDescription( )
{
return _strMetaDescription;
}
/**
* Sets Description to mention in the META tags of the page.
*
* @param strMetaDescription
* The Description to mention in the META tags of the page.
*/
public void setMetaDescription( String strMetaDescription )
{
_strMetaDescription = strMetaDescription;
}
/**
* Returns the header to display at the top of the page.
*
* @return The header HTML code as a String.
*/
public String getHeader( )
{
return _strHeader;
}
/**
* Sets the header to display at the top of the page.
*
* @param strHeader
* Sets the header to display at the top of the page.
*/
public void setHeader( String strHeader )
{
_strHeader = strHeader;
}
/**
* Returns the menu associated to the page
*
* @return The HTML code of the menu associated to the page as a String
*/
public String getMenu( )
{
return _strMenu;
}
/**
* Sets the menu associated to the page
*
* @param strMenu
* The HTML code of the menu to associate to the page as a String
*/
public void setMenu( String strMenu )
{
_strMenu = strMenu;
}
/**
* Returns the page path.
*
* @return the page path.
*/
public String getPagePath( )
{
return _strPagePath;
}
/**
* Set the page path.
*
* @param strPagePath
* the page path
*/
public void setPagePath( String strPagePath )
{
_strPagePath = strPagePath;
}
/**
* Returns the page path.
*
* @return the page path.
*/
public String getTreeMenu( )
{
return _strTreeMenu;
}
/**
* Set the page path.
*
* @param strTreeMenu
* the page path
*/
public void setTreeMenu( String strTreeMenu )
{
_strTreeMenu = ( strTreeMenu == null ) ? EMPTY_STRING : strTreeMenu;
}
/**
* Returns the page content.
*
* @return The HTML code of the page content as a String.
*/
public String getContent( )
{
return _strContent;
}
/**
* Sets the page content.
*
* @param strContent
* The HTML code of the page content as a String.
*/
public void setContent( String strContent )
{
_strContent = strContent;
}
/**
* Returns the favicon of the page
*
* @return The favicon of the page as a string.
*/
public String getFavicon( )
{
return _strFavicon;
}
/**
* Sets the Favicon of the page to the specified string.
*
* @param strFavicon
* The new Favicon of the page.
*/
public void setFavicon( String strFavicon )
{
_strFavicon = strFavicon;
}
/**
* Returns the theme of the page
*
* @return The theme of the page as a string.
*/
public String getTheme( )
{
return _strTheme;
}
/**
* Sets the Theme of the page to the specified string.
*
* @param strTheme
* The new Theme of the page.
*/
public void setTheme( String strTheme )
{
_strTheme = strTheme;
}
/**
* Returns weither the current page is an homepage or not.
*
* @return true if the page is an homepage, otherwise false.
*/
public boolean isHomePage( )
{
return _bIsHomePage;
}
/**
* Sets the homepage indicator.
*
* @param bHomePage
* Should be true if the page is an homepage, otherwise false.
*/
public void setHomePage( boolean bHomePage )
{
_bIsHomePage = bHomePage;
}
/**
* Returns the update date of the page
*
* @return The favicon of the page as a string.
*/
public Timestamp getDateUpdate( )
{
return _tsDateUpdate;
}
/**
* Sets the update date of the page.
*
* @param strFavicon
* The new Favicon of the page.
*/
public void setDateUpdate( Timestamp tsDateUpdate )
{
_tsDateUpdate = tsDateUpdate;
}
/**
* Returns the display update date boolean
*
* @return The display update date as a boolean.
*/
public boolean getDisplayDateUpdate( )
{
return _bDisplayDateUpdate;
}
/**
* Sets the display update date boolean.
*
* @param bDisplayDateUpdate
* The display update date boolean.
*/
public void setDisplayDateUpdate( boolean bDisplayDateUpdate )
{
_bDisplayDateUpdate = bDisplayDateUpdate;
}
}
| |
/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.file;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;
/**
 * Generated by camel build tools - do NOT edit this file!
 */
@SuppressWarnings("unchecked")
public class FileEndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {

    // Dispatches a named option value to the corresponding typed setter on the
    // FileEndpoint. Each option is matched by both its all-lowercase and its
    // camelCase spelling so that case-insensitive lookups resolve without extra work.
    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        FileEndpoint target = (FileEndpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "allownullbody":
        case "allowNullBody": target.setAllowNullBody(property(camelContext, boolean.class, value)); return true;
        case "antexclude":
        case "antExclude": target.setAntExclude(property(camelContext, java.lang.String.class, value)); return true;
        case "antfiltercasesensitive":
        case "antFilterCaseSensitive": target.setAntFilterCaseSensitive(property(camelContext, boolean.class, value)); return true;
        case "antinclude":
        case "antInclude": target.setAntInclude(property(camelContext, java.lang.String.class, value)); return true;
        case "appendchars":
        case "appendChars": target.setAppendChars(property(camelContext, java.lang.String.class, value)); return true;
        case "autocreate":
        case "autoCreate": target.setAutoCreate(property(camelContext, boolean.class, value)); return true;
        case "backofferrorthreshold":
        case "backoffErrorThreshold": target.setBackoffErrorThreshold(property(camelContext, int.class, value)); return true;
        case "backoffidlethreshold":
        case "backoffIdleThreshold": target.setBackoffIdleThreshold(property(camelContext, int.class, value)); return true;
        case "backoffmultiplier":
        case "backoffMultiplier": target.setBackoffMultiplier(property(camelContext, int.class, value)); return true;
        case "basicpropertybinding":
        case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
        case "buffersize":
        case "bufferSize": target.setBufferSize(property(camelContext, int.class, value)); return true;
        case "charset": target.setCharset(property(camelContext, java.lang.String.class, value)); return true;
        case "chmod": target.setChmod(property(camelContext, java.lang.String.class, value)); return true;
        case "chmoddirectory":
        case "chmodDirectory": target.setChmodDirectory(property(camelContext, java.lang.String.class, value)); return true;
        case "copyanddeleteonrenamefail":
        case "copyAndDeleteOnRenameFail": target.setCopyAndDeleteOnRenameFail(property(camelContext, boolean.class, value)); return true;
        case "delay": target.setDelay(property(camelContext, long.class, value)); return true;
        case "delete": target.setDelete(property(camelContext, boolean.class, value)); return true;
        case "directorymustexist":
        case "directoryMustExist": target.setDirectoryMustExist(property(camelContext, boolean.class, value)); return true;
        case "donefilename":
        case "doneFileName": target.setDoneFileName(property(camelContext, java.lang.String.class, value)); return true;
        case "eagerdeletetargetfile":
        case "eagerDeleteTargetFile": target.setEagerDeleteTargetFile(property(camelContext, boolean.class, value)); return true;
        case "eagermaxmessagesperpoll":
        case "eagerMaxMessagesPerPoll": target.setEagerMaxMessagesPerPoll(property(camelContext, boolean.class, value)); return true;
        case "exceptionhandler":
        case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
        case "exchangepattern":
        case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
        case "exclude": target.setExclude(property(camelContext, java.lang.String.class, value)); return true;
        case "exclusivereadlockstrategy":
        case "exclusiveReadLockStrategy": target.setExclusiveReadLockStrategy(property(camelContext, org.apache.camel.component.file.GenericFileExclusiveReadLockStrategy.class, value)); return true;
        case "extendedattributes":
        case "extendedAttributes": target.setExtendedAttributes(property(camelContext, java.lang.String.class, value)); return true;
        case "fileexist":
        case "fileExist": target.setFileExist(property(camelContext, org.apache.camel.component.file.GenericFileExist.class, value)); return true;
        case "filename":
        case "fileName": target.setFileName(property(camelContext, java.lang.String.class, value)); return true;
        case "filter": target.setFilter(property(camelContext, org.apache.camel.component.file.GenericFileFilter.class, value)); return true;
        case "filterdirectory":
        case "filterDirectory": target.setFilterDirectory(property(camelContext, java.lang.String.class, value)); return true;
        case "filterfile":
        case "filterFile": target.setFilterFile(property(camelContext, java.lang.String.class, value)); return true;
        case "flatten": target.setFlatten(property(camelContext, boolean.class, value)); return true;
        case "forcewrites":
        case "forceWrites": target.setForceWrites(property(camelContext, boolean.class, value)); return true;
        case "greedy": target.setGreedy(property(camelContext, boolean.class, value)); return true;
        case "idempotent": target.setIdempotent(property(camelContext, java.lang.Boolean.class, value)); return true;
        case "idempotentkey":
        case "idempotentKey": target.setIdempotentKey(property(camelContext, java.lang.String.class, value)); return true;
        case "idempotentrepository":
        case "idempotentRepository": target.setIdempotentRepository(property(camelContext, org.apache.camel.spi.IdempotentRepository.class, value)); return true;
        case "inprogressrepository":
        case "inProgressRepository": target.setInProgressRepository(property(camelContext, org.apache.camel.spi.IdempotentRepository.class, value)); return true;
        case "include": target.setInclude(property(camelContext, java.lang.String.class, value)); return true;
        case "initialdelay":
        case "initialDelay": target.setInitialDelay(property(camelContext, long.class, value)); return true;
        case "jailstartingdirectory":
        case "jailStartingDirectory": target.setJailStartingDirectory(property(camelContext, boolean.class, value)); return true;
        case "keeplastmodified":
        case "keepLastModified": target.setKeepLastModified(property(camelContext, boolean.class, value)); return true;
        case "lazystartproducer":
        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
        case "localworkdirectory":
        case "localWorkDirectory": target.setLocalWorkDirectory(property(camelContext, java.lang.String.class, value)); return true;
        case "maxdepth":
        case "maxDepth": target.setMaxDepth(property(camelContext, int.class, value)); return true;
        case "maxmessagesperpoll":
        case "maxMessagesPerPoll": target.setMaxMessagesPerPoll(property(camelContext, int.class, value)); return true;
        case "mindepth":
        case "minDepth": target.setMinDepth(property(camelContext, int.class, value)); return true;
        case "move": target.setMove(property(camelContext, java.lang.String.class, value)); return true;
        case "moveexisting":
        case "moveExisting": target.setMoveExisting(property(camelContext, java.lang.String.class, value)); return true;
        case "moveexistingfilestrategy":
        case "moveExistingFileStrategy": target.setMoveExistingFileStrategy(property(camelContext, org.apache.camel.component.file.strategy.FileMoveExistingStrategy.class, value)); return true;
        case "movefailed":
        case "moveFailed": target.setMoveFailed(property(camelContext, java.lang.String.class, value)); return true;
        case "noop": target.setNoop(property(camelContext, boolean.class, value)); return true;
        case "oncompletionexceptionhandler":
        case "onCompletionExceptionHandler": target.setOnCompletionExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
        case "pollstrategy":
        case "pollStrategy": target.setPollStrategy(property(camelContext, org.apache.camel.spi.PollingConsumerPollStrategy.class, value)); return true;
        case "premove":
        case "preMove": target.setPreMove(property(camelContext, java.lang.String.class, value)); return true;
        case "presort":
        case "preSort": target.setPreSort(property(camelContext, boolean.class, value)); return true;
        case "probecontenttype":
        case "probeContentType": target.setProbeContentType(property(camelContext, boolean.class, value)); return true;
        case "processstrategy":
        case "processStrategy": target.setProcessStrategy(property(camelContext, org.apache.camel.component.file.GenericFileProcessStrategy.class, value)); return true;
        case "readlock":
        case "readLock": target.setReadLock(property(camelContext, java.lang.String.class, value)); return true;
        case "readlockcheckinterval":
        case "readLockCheckInterval": target.setReadLockCheckInterval(property(camelContext, long.class, value)); return true;
        case "readlockdeleteorphanlockfiles":
        case "readLockDeleteOrphanLockFiles": target.setReadLockDeleteOrphanLockFiles(property(camelContext, boolean.class, value)); return true;
        case "readlockidempotentreleaseasync":
        case "readLockIdempotentReleaseAsync": target.setReadLockIdempotentReleaseAsync(property(camelContext, boolean.class, value)); return true;
        case "readlockidempotentreleaseasyncpoolsize":
        case "readLockIdempotentReleaseAsyncPoolSize": target.setReadLockIdempotentReleaseAsyncPoolSize(property(camelContext, int.class, value)); return true;
        case "readlockidempotentreleasedelay":
        case "readLockIdempotentReleaseDelay": target.setReadLockIdempotentReleaseDelay(property(camelContext, int.class, value)); return true;
        case "readlockidempotentreleaseexecutorservice":
        case "readLockIdempotentReleaseExecutorService": target.setReadLockIdempotentReleaseExecutorService(property(camelContext, java.util.concurrent.ScheduledExecutorService.class, value)); return true;
        case "readlocklogginglevel":
        case "readLockLoggingLevel": target.setReadLockLoggingLevel(property(camelContext, org.apache.camel.LoggingLevel.class, value)); return true;
        case "readlockmarkerfile":
        case "readLockMarkerFile": target.setReadLockMarkerFile(property(camelContext, boolean.class, value)); return true;
        case "readlockminage":
        case "readLockMinAge": target.setReadLockMinAge(property(camelContext, long.class, value)); return true;
        case "readlockminlength":
        case "readLockMinLength": target.setReadLockMinLength(property(camelContext, long.class, value)); return true;
        case "readlockremoveoncommit":
        case "readLockRemoveOnCommit": target.setReadLockRemoveOnCommit(property(camelContext, boolean.class, value)); return true;
        case "readlockremoveonrollback":
        case "readLockRemoveOnRollback": target.setReadLockRemoveOnRollback(property(camelContext, boolean.class, value)); return true;
        case "readlocktimeout":
        case "readLockTimeout": target.setReadLockTimeout(property(camelContext, long.class, value)); return true;
        case "recursive": target.setRecursive(property(camelContext, boolean.class, value)); return true;
        case "renameusingcopy":
        case "renameUsingCopy": target.setRenameUsingCopy(property(camelContext, boolean.class, value)); return true;
        case "repeatcount":
        case "repeatCount": target.setRepeatCount(property(camelContext, long.class, value)); return true;
        case "runlogginglevel":
        case "runLoggingLevel": target.setRunLoggingLevel(property(camelContext, org.apache.camel.LoggingLevel.class, value)); return true;
        case "scheduledexecutorservice":
        case "scheduledExecutorService": target.setScheduledExecutorService(property(camelContext, java.util.concurrent.ScheduledExecutorService.class, value)); return true;
        case "scheduler": target.setScheduler(property(camelContext, java.lang.Object.class, value)); return true;
        case "schedulerproperties":
        case "schedulerProperties": target.setSchedulerProperties(property(camelContext, java.util.Map.class, value)); return true;
        case "sendemptymessagewhenidle":
        case "sendEmptyMessageWhenIdle": target.setSendEmptyMessageWhenIdle(property(camelContext, boolean.class, value)); return true;
        case "shuffle": target.setShuffle(property(camelContext, boolean.class, value)); return true;
        case "sortby":
        case "sortBy": target.setSortBy(property(camelContext, java.lang.String.class, value)); return true;
        case "sorter": target.setSorter(property(camelContext, java.util.Comparator.class, value)); return true;
        case "startscheduler":
        case "startScheduler": target.setStartScheduler(property(camelContext, boolean.class, value)); return true;
        case "startingdirectorymustexist":
        case "startingDirectoryMustExist": target.setStartingDirectoryMustExist(property(camelContext, boolean.class, value)); return true;
        case "startingdirectorymusthaveaccess":
        case "startingDirectoryMustHaveAccess": target.setStartingDirectoryMustHaveAccess(property(camelContext, boolean.class, value)); return true;
        case "synchronous": target.setSynchronous(property(camelContext, boolean.class, value)); return true;
        case "tempfilename":
        case "tempFileName": target.setTempFileName(property(camelContext, java.lang.String.class, value)); return true;
        case "tempprefix":
        case "tempPrefix": target.setTempPrefix(property(camelContext, java.lang.String.class, value)); return true;
        case "timeunit":
        case "timeUnit": target.setTimeUnit(property(camelContext, java.util.concurrent.TimeUnit.class, value)); return true;
        case "usefixeddelay":
        case "useFixedDelay": target.setUseFixedDelay(property(camelContext, boolean.class, value)); return true;
        default: return false;
        }
    }

    // Returns every supported option name mapped to its declared type, backed by a
    // case-insensitive map so lookups match regardless of option-name casing.
    @Override
    public Map<String, Object> getAllOptions(Object target) {
        Map<String, Object> answer = new CaseInsensitiveMap();
        answer.put("allowNullBody", boolean.class);
        answer.put("antExclude", java.lang.String.class);
        answer.put("antFilterCaseSensitive", boolean.class);
        answer.put("antInclude", java.lang.String.class);
        answer.put("appendChars", java.lang.String.class);
        answer.put("autoCreate", boolean.class);
        answer.put("backoffErrorThreshold", int.class);
        answer.put("backoffIdleThreshold", int.class);
        answer.put("backoffMultiplier", int.class);
        answer.put("basicPropertyBinding", boolean.class);
        answer.put("bridgeErrorHandler", boolean.class);
        answer.put("bufferSize", int.class);
        answer.put("charset", java.lang.String.class);
        answer.put("chmod", java.lang.String.class);
        answer.put("chmodDirectory", java.lang.String.class);
        answer.put("copyAndDeleteOnRenameFail", boolean.class);
        answer.put("delay", long.class);
        answer.put("delete", boolean.class);
        answer.put("directoryMustExist", boolean.class);
        answer.put("doneFileName", java.lang.String.class);
        answer.put("eagerDeleteTargetFile", boolean.class);
        answer.put("eagerMaxMessagesPerPoll", boolean.class);
        answer.put("exceptionHandler", org.apache.camel.spi.ExceptionHandler.class);
        answer.put("exchangePattern", org.apache.camel.ExchangePattern.class);
        answer.put("exclude", java.lang.String.class);
        answer.put("exclusiveReadLockStrategy", org.apache.camel.component.file.GenericFileExclusiveReadLockStrategy.class);
        answer.put("extendedAttributes", java.lang.String.class);
        answer.put("fileExist", org.apache.camel.component.file.GenericFileExist.class);
        answer.put("fileName", java.lang.String.class);
        answer.put("filter", org.apache.camel.component.file.GenericFileFilter.class);
        answer.put("filterDirectory", java.lang.String.class);
        answer.put("filterFile", java.lang.String.class);
        answer.put("flatten", boolean.class);
        answer.put("forceWrites", boolean.class);
        answer.put("greedy", boolean.class);
        answer.put("idempotent", java.lang.Boolean.class);
        answer.put("idempotentKey", java.lang.String.class);
        answer.put("idempotentRepository", org.apache.camel.spi.IdempotentRepository.class);
        answer.put("inProgressRepository", org.apache.camel.spi.IdempotentRepository.class);
        answer.put("include", java.lang.String.class);
        answer.put("initialDelay", long.class);
        answer.put("jailStartingDirectory", boolean.class);
        answer.put("keepLastModified", boolean.class);
        answer.put("lazyStartProducer", boolean.class);
        answer.put("localWorkDirectory", java.lang.String.class);
        answer.put("maxDepth", int.class);
        answer.put("maxMessagesPerPoll", int.class);
        answer.put("minDepth", int.class);
        answer.put("move", java.lang.String.class);
        answer.put("moveExisting", java.lang.String.class);
        answer.put("moveExistingFileStrategy", org.apache.camel.component.file.strategy.FileMoveExistingStrategy.class);
        answer.put("moveFailed", java.lang.String.class);
        answer.put("noop", boolean.class);
        answer.put("onCompletionExceptionHandler", org.apache.camel.spi.ExceptionHandler.class);
        answer.put("pollStrategy", org.apache.camel.spi.PollingConsumerPollStrategy.class);
        answer.put("preMove", java.lang.String.class);
        answer.put("preSort", boolean.class);
        answer.put("probeContentType", boolean.class);
        answer.put("processStrategy", org.apache.camel.component.file.GenericFileProcessStrategy.class);
        answer.put("readLock", java.lang.String.class);
        answer.put("readLockCheckInterval", long.class);
        answer.put("readLockDeleteOrphanLockFiles", boolean.class);
        answer.put("readLockIdempotentReleaseAsync", boolean.class);
        answer.put("readLockIdempotentReleaseAsyncPoolSize", int.class);
        answer.put("readLockIdempotentReleaseDelay", int.class);
        answer.put("readLockIdempotentReleaseExecutorService", java.util.concurrent.ScheduledExecutorService.class);
        answer.put("readLockLoggingLevel", org.apache.camel.LoggingLevel.class);
        answer.put("readLockMarkerFile", boolean.class);
        answer.put("readLockMinAge", long.class);
        answer.put("readLockMinLength", long.class);
        answer.put("readLockRemoveOnCommit", boolean.class);
        answer.put("readLockRemoveOnRollback", boolean.class);
        answer.put("readLockTimeout", long.class);
        answer.put("recursive", boolean.class);
        answer.put("renameUsingCopy", boolean.class);
        answer.put("repeatCount", long.class);
        answer.put("runLoggingLevel", org.apache.camel.LoggingLevel.class);
        answer.put("scheduledExecutorService", java.util.concurrent.ScheduledExecutorService.class);
        answer.put("scheduler", java.lang.Object.class);
        answer.put("schedulerProperties", java.util.Map.class);
        answer.put("sendEmptyMessageWhenIdle", boolean.class);
        answer.put("shuffle", boolean.class);
        answer.put("sortBy", java.lang.String.class);
        answer.put("sorter", java.util.Comparator.class);
        answer.put("startScheduler", boolean.class);
        answer.put("startingDirectoryMustExist", boolean.class);
        answer.put("startingDirectoryMustHaveAccess", boolean.class);
        answer.put("synchronous", boolean.class);
        answer.put("tempFileName", java.lang.String.class);
        answer.put("tempPrefix", java.lang.String.class);
        answer.put("timeUnit", java.util.concurrent.TimeUnit.class);
        answer.put("useFixedDelay", boolean.class);
        return answer;
    }

    // Reads the current value of the named option from the endpoint via the
    // matching getter; returns null when the option name is unknown.
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        FileEndpoint target = (FileEndpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "allownullbody":
        case "allowNullBody": return target.isAllowNullBody();
        case "antexclude":
        case "antExclude": return target.getAntExclude();
        case "antfiltercasesensitive":
        case "antFilterCaseSensitive": return target.isAntFilterCaseSensitive();
        case "antinclude":
        case "antInclude": return target.getAntInclude();
        case "appendchars":
        case "appendChars": return target.getAppendChars();
        case "autocreate":
        case "autoCreate": return target.isAutoCreate();
        case "backofferrorthreshold":
        case "backoffErrorThreshold": return target.getBackoffErrorThreshold();
        case "backoffidlethreshold":
        case "backoffIdleThreshold": return target.getBackoffIdleThreshold();
        case "backoffmultiplier":
        case "backoffMultiplier": return target.getBackoffMultiplier();
        case "basicpropertybinding":
        case "basicPropertyBinding": return target.isBasicPropertyBinding();
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": return target.isBridgeErrorHandler();
        case "buffersize":
        case "bufferSize": return target.getBufferSize();
        case "charset": return target.getCharset();
        case "chmod": return target.getChmod();
        case "chmoddirectory":
        case "chmodDirectory": return target.getChmodDirectory();
        case "copyanddeleteonrenamefail":
        case "copyAndDeleteOnRenameFail": return target.isCopyAndDeleteOnRenameFail();
        case "delay": return target.getDelay();
        case "delete": return target.isDelete();
        case "directorymustexist":
        case "directoryMustExist": return target.isDirectoryMustExist();
        case "donefilename":
        case "doneFileName": return target.getDoneFileName();
        case "eagerdeletetargetfile":
        case "eagerDeleteTargetFile": return target.isEagerDeleteTargetFile();
        case "eagermaxmessagesperpoll":
        case "eagerMaxMessagesPerPoll": return target.isEagerMaxMessagesPerPoll();
        case "exceptionhandler":
        case "exceptionHandler": return target.getExceptionHandler();
        case "exchangepattern":
        case "exchangePattern": return target.getExchangePattern();
        case "exclude": return target.getExclude();
        case "exclusivereadlockstrategy":
        case "exclusiveReadLockStrategy": return target.getExclusiveReadLockStrategy();
        case "extendedattributes":
        case "extendedAttributes": return target.getExtendedAttributes();
        case "fileexist":
        case "fileExist": return target.getFileExist();
        case "filename":
        case "fileName": return target.getFileName();
        case "filter": return target.getFilter();
        case "filterdirectory":
        case "filterDirectory": return target.getFilterDirectory();
        case "filterfile":
        case "filterFile": return target.getFilterFile();
        case "flatten": return target.isFlatten();
        case "forcewrites":
        case "forceWrites": return target.isForceWrites();
        case "greedy": return target.isGreedy();
        case "idempotent": return target.getIdempotent();
        case "idempotentkey":
        case "idempotentKey": return target.getIdempotentKey();
        case "idempotentrepository":
        case "idempotentRepository": return target.getIdempotentRepository();
        case "inprogressrepository":
        case "inProgressRepository": return target.getInProgressRepository();
        case "include": return target.getInclude();
        case "initialdelay":
        case "initialDelay": return target.getInitialDelay();
        case "jailstartingdirectory":
        case "jailStartingDirectory": return target.isJailStartingDirectory();
        case "keeplastmodified":
        case "keepLastModified": return target.isKeepLastModified();
        case "lazystartproducer":
        case "lazyStartProducer": return target.isLazyStartProducer();
        case "localworkdirectory":
        case "localWorkDirectory": return target.getLocalWorkDirectory();
        case "maxdepth":
        case "maxDepth": return target.getMaxDepth();
        case "maxmessagesperpoll":
        case "maxMessagesPerPoll": return target.getMaxMessagesPerPoll();
        case "mindepth":
        case "minDepth": return target.getMinDepth();
        case "move": return target.getMove();
        case "moveexisting":
        case "moveExisting": return target.getMoveExisting();
        case "moveexistingfilestrategy":
        case "moveExistingFileStrategy": return target.getMoveExistingFileStrategy();
        case "movefailed":
        case "moveFailed": return target.getMoveFailed();
        case "noop": return target.isNoop();
        case "oncompletionexceptionhandler":
        case "onCompletionExceptionHandler": return target.getOnCompletionExceptionHandler();
        case "pollstrategy":
        case "pollStrategy": return target.getPollStrategy();
        case "premove":
        case "preMove": return target.getPreMove();
        case "presort":
        case "preSort": return target.isPreSort();
        case "probecontenttype":
        case "probeContentType": return target.isProbeContentType();
        case "processstrategy":
        case "processStrategy": return target.getProcessStrategy();
        case "readlock":
        case "readLock": return target.getReadLock();
        case "readlockcheckinterval":
        case "readLockCheckInterval": return target.getReadLockCheckInterval();
        case "readlockdeleteorphanlockfiles":
        case "readLockDeleteOrphanLockFiles": return target.isReadLockDeleteOrphanLockFiles();
        case "readlockidempotentreleaseasync":
        case "readLockIdempotentReleaseAsync": return target.isReadLockIdempotentReleaseAsync();
        case "readlockidempotentreleaseasyncpoolsize":
        case "readLockIdempotentReleaseAsyncPoolSize": return target.getReadLockIdempotentReleaseAsyncPoolSize();
        case "readlockidempotentreleasedelay":
        case "readLockIdempotentReleaseDelay": return target.getReadLockIdempotentReleaseDelay();
        case "readlockidempotentreleaseexecutorservice":
        case "readLockIdempotentReleaseExecutorService": return target.getReadLockIdempotentReleaseExecutorService();
        case "readlocklogginglevel":
        case "readLockLoggingLevel": return target.getReadLockLoggingLevel();
        case "readlockmarkerfile":
        case "readLockMarkerFile": return target.isReadLockMarkerFile();
        case "readlockminage":
        case "readLockMinAge": return target.getReadLockMinAge();
        case "readlockminlength":
        case "readLockMinLength": return target.getReadLockMinLength();
        case "readlockremoveoncommit":
        case "readLockRemoveOnCommit": return target.isReadLockRemoveOnCommit();
        case "readlockremoveonrollback":
        case "readLockRemoveOnRollback": return target.isReadLockRemoveOnRollback();
        case "readlocktimeout":
        case "readLockTimeout": return target.getReadLockTimeout();
        case "recursive": return target.isRecursive();
        case "renameusingcopy":
        case "renameUsingCopy": return target.isRenameUsingCopy();
        case "repeatcount":
        case "repeatCount": return target.getRepeatCount();
        case "runlogginglevel":
        case "runLoggingLevel": return target.getRunLoggingLevel();
        case "scheduledexecutorservice":
        case "scheduledExecutorService": return target.getScheduledExecutorService();
        case "scheduler": return target.getScheduler();
        case "schedulerproperties":
        case "schedulerProperties": return target.getSchedulerProperties();
        case "sendemptymessagewhenidle":
        case "sendEmptyMessageWhenIdle": return target.isSendEmptyMessageWhenIdle();
        case "shuffle": return target.isShuffle();
        case "sortby":
        case "sortBy": return target.getSortBy();
        case "sorter": return target.getSorter();
        case "startscheduler":
        case "startScheduler": return target.isStartScheduler();
        case "startingdirectorymustexist":
        case "startingDirectoryMustExist": return target.isStartingDirectoryMustExist();
        case "startingdirectorymusthaveaccess":
        case "startingDirectoryMustHaveAccess": return target.isStartingDirectoryMustHaveAccess();
        case "synchronous": return target.isSynchronous();
        case "tempfilename":
        case "tempFileName": return target.getTempFileName();
        case "tempprefix":
        case "tempPrefix": return target.getTempPrefix();
        case "timeunit":
        case "timeUnit": return target.getTimeUnit();
        case "usefixeddelay":
        case "useFixedDelay": return target.isUseFixedDelay();
        default: return null;
        }
    }
}
| |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.core5.http.impl.nio;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.charset.StandardCharsets;
import org.apache.hc.core5.http.ReadableByteChannelMock;
import org.apache.hc.core5.http.impl.BasicHttpTransportMetrics;
import org.apache.hc.core5.http.nio.SessionInputBuffer;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
/**
 * Simple tests for {@link IdentityDecoder}.
 */
public class TestIdentityDecoder {
private File tmpfile;
// Creates a fresh temp file and remembers it so deleteTempFile() can clean up.
protected File createTempFile() throws IOException {
    final File f = File.createTempFile("testFile", ".txt");
    this.tmpfile = f;
    return f;
}
// Removes the temp file created by createTempFile(), if any, after each test.
@After
public void deleteTempFile() {
    final File f = this.tmpfile;
    if (f != null && f.exists()) {
        f.delete();
    }
}
@Test
public void testBasicDecoding() throws Exception {
    // Channel yields two chunks: "stuff;" (6 bytes) then "more stuff" (10 bytes).
    final ReadableByteChannel channel = new ReadableByteChannelMock(
        new String[] {"stuff;", "more stuff"}, StandardCharsets.US_ASCII);
    final SessionInputBuffer inbuf = new SessionInputBufferImpl(1024, 256, 0, StandardCharsets.US_ASCII);
    final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();
    final IdentityDecoder decoder = new IdentityDecoder(channel, inbuf, metrics);
    final ByteBuffer dst = ByteBuffer.allocate(1024);
    // First read consumes the first chunk; metrics count the 6 bytes read.
    int bytesRead = decoder.read(dst);
    Assert.assertEquals(6, bytesRead);
    Assert.assertEquals("stuff;", CodecTestUtils.convert(dst));
    Assert.assertFalse(decoder.isCompleted());
    Assert.assertEquals(6, metrics.getBytesTransferred());
    dst.clear();
    // Second read consumes the second chunk; metrics accumulate to 16.
    bytesRead = decoder.read(dst);
    Assert.assertEquals(10, bytesRead);
    Assert.assertEquals("more stuff", CodecTestUtils.convert(dst));
    Assert.assertFalse(decoder.isCompleted());
    Assert.assertEquals(16, metrics.getBytesTransferred());
    dst.clear();
    // Channel is exhausted: read reports EOF (-1) and the decoder completes.
    bytesRead = decoder.read(dst);
    Assert.assertEquals(-1, bytesRead);
    Assert.assertTrue(decoder.isCompleted());
    Assert.assertEquals(16, metrics.getBytesTransferred());
    dst.clear();
    // Reading past EOF is idempotent: still -1, state and metrics unchanged.
    bytesRead = decoder.read(dst);
    Assert.assertEquals(-1, bytesRead);
    Assert.assertTrue(decoder.isCompleted());
    Assert.assertEquals(16, metrics.getBytesTransferred());
    Assert.assertEquals("[identity; completed: true]", decoder.toString());
}
@Test
public void testDecodingFromSessionBuffer() throws Exception {
    final ReadableByteChannel channel = new ReadableByteChannelMock(
        new String[] {"stuff;", "more stuff"}, StandardCharsets.US_ASCII);
    final SessionInputBuffer inbuf = new SessionInputBufferImpl(1024, 256, 0, StandardCharsets.US_ASCII);
    final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();
    // Pre-fill the session buffer with the first chunk before the decoder starts.
    inbuf.fill(channel);
    Assert.assertEquals(6, inbuf.length());
    final IdentityDecoder decoder = new IdentityDecoder(channel, inbuf, metrics);
    final ByteBuffer dst = ByteBuffer.allocate(1024);
    int bytesRead = decoder.read(dst);
    Assert.assertEquals(6, bytesRead);
    Assert.assertEquals("stuff;", CodecTestUtils.convert(dst));
    Assert.assertFalse(decoder.isCompleted());
    Assert.assertEquals(0, metrics.getBytesTransferred()); // doesn't count if from session buffer
    dst.clear();
    // Second chunk comes directly from the channel, so metrics now count it.
    bytesRead = decoder.read(dst);
    Assert.assertEquals(10, bytesRead);
    Assert.assertEquals("more stuff", CodecTestUtils.convert(dst));
    Assert.assertFalse(decoder.isCompleted());
    Assert.assertEquals(10, metrics.getBytesTransferred());
    dst.clear();
    // Channel exhausted: EOF (-1) and decoder completion.
    bytesRead = decoder.read(dst);
    Assert.assertEquals(-1, bytesRead);
    Assert.assertTrue(decoder.isCompleted());
    Assert.assertEquals(10, metrics.getBytesTransferred());
    dst.clear();
    // Subsequent reads past EOF stay at -1 with unchanged metrics.
    bytesRead = decoder.read(dst);
    Assert.assertEquals(-1, bytesRead);
    Assert.assertTrue(decoder.isCompleted());
    Assert.assertEquals(10, metrics.getBytesTransferred());
}
@Test
public void testBasicDecodingFile() throws Exception {
    final ReadableByteChannel channel = new ReadableByteChannelMock(
        new String[] {"stuff; ", "more stuff; ", "a lot more stuff!"}, StandardCharsets.US_ASCII);
    final SessionInputBuffer inbuf = new SessionInputBufferImpl(1024, 256, 0, StandardCharsets.US_ASCII);
    final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();
    final IdentityDecoder decoder = new IdentityDecoder(
        channel, inbuf, metrics);
    createTempFile();
    // try-with-resources (RandomAccessFile is Closeable) replaces the manual
    // try/finally close; the file is released even if an assertion fails.
    try (RandomAccessFile testfile = new RandomAccessFile(this.tmpfile, "rw")) {
        final FileChannel fchannel = testfile.getChannel();
        long pos = 0;
        // Transfer the decoded stream to the file in 10-byte slices until EOF.
        while (!decoder.isCompleted()) {
            final long bytesRead = decoder.transfer(fchannel, pos, 10);
            if (bytesRead > 0) {
                pos += bytesRead;
            }
        }
        // Everything came straight from the channel, so metrics match file size.
        Assert.assertEquals(testfile.length(), metrics.getBytesTransferred());
    }
    Assert.assertEquals("stuff; more stuff; a lot more stuff!",
        CodecTestUtils.readFromFile(this.tmpfile));
}
@Test
public void testDecodingFileWithBufferedSessionData() throws Exception {
    final ReadableByteChannel channel = new ReadableByteChannelMock(
        new String[] {"stuff; ", "more stuff; ", "a lot more stuff!"}, StandardCharsets.US_ASCII);
    final SessionInputBuffer inbuf = new SessionInputBufferImpl(1024, 256, 0, StandardCharsets.US_ASCII);
    final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();
    final IdentityDecoder decoder = new IdentityDecoder(
        channel, inbuf, metrics);
    // Pre-fill the session buffer with the first chunk ("stuff; " = 7 bytes).
    final int i = inbuf.fill(channel);
    Assert.assertEquals(7, i);
    createTempFile();
    // try-with-resources (RandomAccessFile is Closeable) replaces the manual
    // try/finally close; the file is released even if an assertion fails.
    try (RandomAccessFile testfile = new RandomAccessFile(this.tmpfile, "rw")) {
        final FileChannel fchannel = testfile.getChannel();
        long pos = 0;
        while (!decoder.isCompleted()) {
            final long bytesRead = decoder.transfer(fchannel, pos, 10);
            if (bytesRead > 0) {
                pos += bytesRead;
            }
        }
        // count everything except the initial 7 bytes that went to the session buffer
        Assert.assertEquals(testfile.length() - 7, metrics.getBytesTransferred());
    }
    Assert.assertEquals("stuff; more stuff; a lot more stuff!",
        CodecTestUtils.readFromFile(this.tmpfile));
}
/**
 * Decodes into a file that already contains a prefix, writing at an offset
 * past the current end of file. Bytes pre-buffered in the session buffer
 * and the pre-existing prefix must both be excluded from the metrics.
 */
@Test
public void testDecodingFileWithOffsetAndBufferedSessionData() throws Exception {
    final ReadableByteChannel channel = new ReadableByteChannelMock(
            new String[] {"stuff; ", "more stuff; ", "a lot more stuff!"}, StandardCharsets.US_ASCII);
    final SessionInputBuffer inbuf = new SessionInputBufferImpl(1024, 256, 0, StandardCharsets.US_ASCII);
    final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();
    final IdentityDecoder decoder = new IdentityDecoder(
            channel, inbuf, metrics);
    // pre-load the session buffer with the first chunk ("stuff; " = 7 bytes)
    final int i = inbuf.fill(channel);
    Assert.assertEquals(7, i);
    final byte[] beginning = "beginning; ".getBytes(StandardCharsets.US_ASCII);
    createTempFile();
    // write the prefix in its own try-with-resources so the file is
    // flushed and closed before the decoder reopens it
    try (RandomAccessFile prefixFile = new RandomAccessFile(this.tmpfile, "rw")) {
        prefixFile.write(beginning);
    }
    try (RandomAccessFile testfile = new RandomAccessFile(this.tmpfile, "rw")) {
        final FileChannel fchannel = testfile.getChannel();
        long pos = beginning.length;
        while (!decoder.isCompleted()) {
            // grow the file whenever the write position is past the current EOF,
            // since transfer() refuses to write beyond the file size
            if (testfile.length() < pos) {
                testfile.setLength(pos);
            }
            final long bytesRead = decoder.transfer(fchannel, pos, 10);
            if (bytesRead > 0) {
                pos += bytesRead;
            }
        }
        // count everything except the initial 7 bytes that went to the session buffer
        // and the pre-existing prefix
        Assert.assertEquals(testfile.length() - 7 - beginning.length, metrics.getBytesTransferred());
    }
    Assert.assertEquals("beginning; stuff; more stuff; a lot more stuff!",
            CodecTestUtils.readFromFile(this.tmpfile));
}
/**
 * Transferring to a position beyond the current file size must fail with
 * an IOException rather than silently extending the file.
 */
@Test
public void testWriteBeyondFileSize() throws Exception {
    final ReadableByteChannel channel = new ReadableByteChannelMock(
            new String[] {"a"}, StandardCharsets.US_ASCII);
    final SessionInputBuffer inbuf = new SessionInputBufferImpl(1024, 256, 0, StandardCharsets.US_ASCII);
    final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();
    final IdentityDecoder decoder = new IdentityDecoder(
            channel, inbuf, metrics);
    createTempFile();
    // try-with-resources guarantees the file is closed even when the
    // expected IOException propagates out of the transfer call
    try (RandomAccessFile testfile = new RandomAccessFile(this.tmpfile, "rw")) {
        Assert.assertEquals(0, testfile.length());
        final FileChannel fchannel = testfile.getChannel();
        try {
            // position 5 is past the end of the empty file
            decoder.transfer(fchannel, 5, 10);
            Assert.fail("expected IOException");
        } catch (final IOException expected) {
            // expected: the decoder must refuse to write beyond the file size
        }
    }
}
/**
 * Every constructor argument is mandatory: a null channel, session buffer
 * or metrics object must each be rejected with an IllegalArgumentException.
 */
@Test
public void testInvalidConstructor() {
    final ReadableByteChannel channel = new ReadableByteChannelMock(
            new String[] {"stuff;", "more stuff"}, StandardCharsets.US_ASCII);
    final SessionInputBuffer inbuf = new SessionInputBufferImpl(1024, 256, 0, StandardCharsets.US_ASCII);
    try {
        new IdentityDecoder(null, null, null);
        Assert.fail("IllegalArgumentException should have been thrown");
    } catch (final IllegalArgumentException expected) {
        // null channel rejected
    }
    try {
        new IdentityDecoder(channel, null, null);
        Assert.fail("IllegalArgumentException should have been thrown");
    } catch (final IllegalArgumentException expected) {
        // null session buffer rejected
    }
    try {
        new IdentityDecoder(channel, inbuf, null);
        Assert.fail("IllegalArgumentException should have been thrown");
    } catch (final IllegalArgumentException expected) {
        // null metrics rejected
    }
}
/**
 * Reading into a null destination buffer is a programming error and must
 * raise an IllegalArgumentException.
 */
@Test
public void testInvalidInput() throws Exception {
    final String content = "stuff";
    final ReadableByteChannel channel = new ReadableByteChannelMock(
            new String[] {content}, StandardCharsets.US_ASCII);
    final SessionInputBuffer inbuf = new SessionInputBufferImpl(1024, 256, 0, StandardCharsets.US_ASCII);
    final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();
    final IdentityDecoder decoder = new IdentityDecoder(channel, inbuf, metrics);
    try {
        decoder.read(null);
        Assert.fail("IllegalArgumentException should have been thrown");
    } catch (final IllegalArgumentException expected) {
        // a null buffer is not a valid read target
    }
}
}
| |
package org.esupportail.pstage.domain.beans;
import java.io.Serializable;
import java.util.Objects;
/**
* @author cleprous
* RoadMap : Chemin de navigation.
*/
public class RoadMap implements Serializable {
/*
******************* PROPERTIES ******************* */
/**
*
*/
private static final long serialVersionUID = 5819535348008163103L;
/**
* Rang du chemin.
*/
private Integer rang;
/**
* Nom long du chemin.
*/
private String title;
/**
* Nom court du chemin.
*/
private String label;
/**
* Action permettant la redirection.
*/
private String action;
/**
* A true if the current page.
*/
private Boolean isCurrentPage;
/*
******************* INIT ************************* */
/**
* Constructors.
*/
public RoadMap() {
super();
}
/**
* Constructors.
* @param action
* @param isCurrentPage
* @param label
* @param rang
* @param title
*/
public RoadMap(final String action, final Boolean isCurrentPage, final String label,
final Integer rang, final String title) {
super();
this.action = action;
this.isCurrentPage = isCurrentPage;
this.label = label;
this.rang = rang;
this.title = title;
}
/**
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((action == null) ? 0 : action.hashCode());
result = prime * result + ((label == null) ? 0 : label.hashCode());
result = prime * result + ((rang == null) ? 0 : rang.hashCode());
result = prime * result + ((isCurrentPage == null) ? 0 : isCurrentPage.hashCode());
result = prime * result + ((title == null) ? 0 : title.hashCode());
return result;
}
/**
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(final Object obj) {
if (this == obj) { return true; }
if (obj == null) { return false; }
if (!(obj instanceof RoadMap)) { return false; }
RoadMap other = (RoadMap) obj;
if (action == null) {
if (other.action != null) { return false; }
} else if (!action.equals(other.action)) { return false; }
if (label == null) {
if (other.label != null) { return false; }
} else if (!label.equals(other.label)) { return false; }
if (rang == null) {
if (other.rang != null) { return false; }
} else if (!rang.equals(other.rang)) { return false; }
if (isCurrentPage == null) {
if (other.isCurrentPage != null) { return false; }
} else if (!isCurrentPage.equals(other.isCurrentPage)) { return false; }
if (title == null) {
if (other.title != null) { return false; }
} else if (!title.equals(other.title)) { return false; }
return true;
}
/**
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "RoadMap#" + hashCode() + "[rang=[" + rang
+ "],[label=[" + label + "],[action=[" + action
+ "],[isCurrentPage=[" + isCurrentPage + "],[title=[" + title + "]]";
}
/*
******************* METHODS ********************** */
/*
******************* ACCESSORS ******************** */
/**
* @return the rang
*/
public Integer getRang() {
return rang;
}
/**
* @param rang the rang to set
*/
public void setRang(final Integer rang) {
this.rang = rang;
}
/**
* @return the label
*/
public String getLabel() {
return label;
}
/**
* @param label the label to set
*/
public void setLabel(final String label) {
this.label = label;
}
/**
* @return the action
*/
public String getAction() {
return action;
}
/**
* @param action the action to set
*/
public void setAction(final String action) {
this.action = action;
}
/**
* @return the isCurrentPage
*/
public Boolean getIsCurrentPage() {
return isCurrentPage;
}
/**
* @param isCurrentPage the isCurrentPage to set
*/
public void setIsCurrentPage(final Boolean isCurrentPage) {
this.isCurrentPage = isCurrentPage;
}
/**
* @return the title
*/
public String getTitle() {
return title;
}
/**
* @param title the title to set
*/
public void setTitle(final String title) {
this.title = title;
}
}
| |
/***
Copyright (c) 2015 CommonsWare, LLC
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.commonsware.cwac.cam2;
import android.content.pm.PackageInstaller;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.os.Build;
import android.view.View;
import com.commonsware.cwac.cam2.plugin.OrientationPlugin;
import com.commonsware.cwac.cam2.plugin.SizeAndFormatPlugin;
import com.commonsware.cwac.cam2.util.Size;
import com.commonsware.cwac.cam2.util.Utils;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Queue;
import de.greenrobot.event.EventBus;
/**
* Controller for camera-related functions, designed to be used
* by CameraFragment or the equivalent.
*/
public class CameraController implements CameraView.StateCallback {
  /** Engine implementation doing the actual camera I/O. */
  private CameraEngine engine;
  /** Currently open session, or null when no camera is open. */
  private CameraSession session;
  /** Descriptors delivered by the engine via CameraDescriptorsEvent. */
  private List<CameraDescriptor> cameras=null;
  /** Index into cameras of the camera currently in use. */
  private int currentCamera=0;
  /** Maps each camera to the CameraView assigned to render its preview. */
  private final HashMap<CameraDescriptor, CameraView> previews=
    new HashMap<CameraDescriptor, CameraView>();
  /** CameraViews not yet assigned to a camera; consumed by getPreview(). */
  private Queue<CameraView> availablePreviews=null;
  /** True while a camera switch is waiting for the old session to close. */
  private boolean switchPending=false;
  /** True while video recording is in progress. */
  private boolean isVideoRecording=false;

  /**
   * @return the engine being used by this fragment to access
   * the camera(s) on the device
   */
  public CameraEngine getEngine() {
    return(engine);
  }

  /**
   * Setter for the engine. Must be called before onCreateView()
   * is called, preferably shortly after constructing the
   * fragment.
   *
   * Also registers this controller on the EventBus and kicks off
   * asynchronous loading of the camera descriptors.
   *
   * @param engine the engine to be used by this fragment to access
   *               the camera(s) on the device
   * @param criteria selection criteria used to filter/order the cameras
   */
  public void setEngine(CameraEngine engine, CameraSelectionCriteria criteria) {
    this.engine=engine;

    EventBus.getDefault().register(this);
    engine.loadCameraDescriptors(criteria);
  }

  /**
   * @return the number of cameras reported by the engine, or 0 if the
   * descriptors have not been delivered yet
   */
  public int getNumberOfCameras() {
    return(cameras==null ? 0 : cameras.size());
  }

  /**
   * Call this from onStart() of an activity or fragment, or from
   * an equivalent point in time. If the CameraView is ready,
   * the preview should begin; otherwise, the preview will
   * begin after the CameraView is ready.
   */
  public void start() {
    if (cameras!=null) {
      CameraDescriptor camera=cameras.get(currentCamera);
      CameraView cv=getPreview(camera);

      if (cv.isAvailable()) {
        open();
      }
    }
  }

  /**
   * Call this from onStop() of an activity or fragment, or
   * from an equivalent point in time, to indicate that you want
   * the camera preview to stop.
   */
  public void stop() {
    if (session!=null) {
      // null out the field before closing so re-entrant callbacks
      // see "no session" while the close is in flight
      CameraSession temp=session;

      session=null;
      engine.close(temp);
      // session.destroy(); -- moved into engines
    }
  }

  /**
   * Call this from onDestroy() of an activity or fragment,
   * or from an equivalent point in time, to tear down the
   * entire controller. A fresh controller should
   * be created if you want to use the camera again in the future.
   */
  public void destroy() {
    EventBus.getDefault().post(new ControllerDestroyedEvent(this));
    EventBus.getDefault().unregister(this);
  }

  /**
   * Call to switch to the next camera in sequence. Most
   * devices have only two cameras, and so calling this will
   * switch the preview and pictures to the camera other than
   * the one presently being used.
   *
   * The actual switch completes asynchronously: the old session is
   * closed here, and onEventMainThread(ClosedEvent) opens the next
   * camera once the close is confirmed.
   */
  public void switchCamera() {
    if (session!=null) {
      getPreview(session.getDescriptor()).setVisibility(View.INVISIBLE);
      switchPending=true;
      stop();
    }
  }

  /**
   * Supplies CameraView objects for each camera. After this,
   * we can open() the camera.
   *
   * @param cameraViews a list of CameraViews
   */
  public void setCameraViews(Queue<CameraView> cameraViews) {
    availablePreviews=cameraViews;
    previews.clear();

    for (CameraView cv : cameraViews) {
      cv.setStateCallback(this);
    }

    open(); // in case visible CameraView is already ready
  }

  /**
   * Public because Java interfaces are intrinsically public.
   * This method is not part of the class' API and should not
   * be used by third-party developers.
   *
   * @param cv the CameraView that is now ready
   */
  @Override
  public void onReady(CameraView cv) {
    if (cameras!=null) {
      open();
    }
  }

  /**
   * Public because Java interfaces are intrinsically public.
   * This method is not part of the class' API and should not
   * be used by third-party developers.
   *
   * @param cv the CameraView that is now destroyed
   */
  @Override
  public void onDestroyed(CameraView cv) {
    stop();
  }

  /**
   * Takes a picture, in accordance with the details supplied
   * in the PictureTransaction. Subscribe to the
   * PictureTakenEvent to get the results of the picture.
   *
   * @param xact a PictureTransaction describing what should be taken
   */
  public void takePicture(PictureTransaction xact) {
    if (session!=null) {
      engine.takePicture(session, xact);
    }
  }

  /**
   * Starts video recording on the current session, if one is open.
   * Silently does nothing when no session exists.
   *
   * @param xact a VideoTransaction describing what should be recorded
   * @throws Exception if the engine fails to start recording
   */
  public void recordVideo(VideoTransaction xact) throws Exception {
    if (session!=null) {
      engine.recordVideo(session, xact);
      isVideoRecording=true;
    }
  }

  /**
   * Stops an in-progress video recording. The recording flag is
   * cleared even if the engine throws while stopping.
   *
   * @throws Exception if the engine fails to stop recording
   */
  public void stopVideoRecording() throws Exception {
    if (session!=null && isVideoRecording) {
      try {
        engine.stopVideoRecording(session);
      }
      finally {
        isVideoRecording=false;
      }
    }
  }

  /**
   * Returns the CameraView assigned to the given camera, assigning one
   * from the pool of available previews on first use.
   *
   * NOTE(review): assumes setCameraViews() has been called and the pool
   * still has an element; otherwise remove() throws (or NPEs if the pool
   * is null) -- confirm callers guarantee this ordering.
   */
  private CameraView getPreview(CameraDescriptor camera) {
    CameraView result=previews.get(camera);

    if (result==null) {
      result=availablePreviews.remove();
      previews.put(camera, result);
    }

    return(result);
  }

  /** @return the index of the next camera, wrapping back to 0 at the end */
  private int getNextCameraIndex() {
    int next=currentCamera+1;

    if (next==cameras.size()) {
      next=0;
    }

    return(next);
  }

  /**
   * Builds and opens a session for the current camera, once a preview
   * size can be computed and the CameraView's SurfaceTexture exists.
   * No-op if a session is already open or the view is not ready yet.
   */
  private void open() {
    if (session==null) {
      Size previewSize=null;
      CameraDescriptor camera=cameras.get(currentCamera);
      CameraView cv=getPreview(camera);
      Size largest=Utils.getLargestPictureSize(camera);

      // NOTE(review): the camera!=null guard below comes after camera has
      // already been passed to getPreview() and getLargestPictureSize()
      // above -- confirm whether the check is needed at all
      if (camera != null && cv.getWidth() > 0 && cv.getHeight() > 0) {
        previewSize=Utils.chooseOptimalSize(camera.getPreviewSizes(),
            cv.getWidth(), cv.getHeight(), largest);
        cv.setPreviewSize(previewSize);
      }

      SurfaceTexture texture=cv.getSurfaceTexture();

      if (previewSize != null && texture != null) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
          // match the texture's buffer to the chosen preview size
          texture.setDefaultBufferSize(previewSize.getWidth(),
              previewSize.getHeight());
        }

        session=engine
            .buildSession(cv.getContext(), camera)
            .addPlugin(new SizeAndFormatPlugin(previewSize,
                largest, ImageFormat.JPEG))
            .addPlugin(new OrientationPlugin(cv.getContext()))
            .build();

        engine.open(session, texture);
      }
    }
  }

  /**
   * EventBus callback: the engine delivered its camera descriptors.
   * Publishes ControllerReadyEvent on success, NoSuchCameraEvent when
   * the device has no usable camera.
   */
  @SuppressWarnings("unused")
  public void onEventMainThread(CameraEngine.CameraDescriptorsEvent event) {
    if (event.descriptors.size()>0) {
      cameras=event.descriptors;
      EventBus.getDefault().post(new ControllerReadyEvent(this, cameras.size()));
    }
    else {
      EventBus.getDefault().post(new NoSuchCameraEvent());
    }
  }

  /**
   * EventBus callback: the previous session finished closing. If a
   * camera switch was pending, advance to the next camera, re-show its
   * preview, and open it.
   */
  @SuppressWarnings("unused")
  public void onEventMainThread(CameraEngine.ClosedEvent event) {
    if (switchPending) {
      switchPending=false;
      currentCamera=getNextCameraIndex();
      getPreview(cameras.get(currentCamera)).setVisibility(View.VISIBLE);
      open();
    }
  }

  /**
   * Raised if there are no available cameras on this
   * device. Consider using uses-feature elements in the
   * manifest, so your app only runs on devices that have
   * a camera, if you need a camera.
   */
  public static class NoSuchCameraEvent {
  }

  /**
   * Event raised when the controller has its cameras
   * and is ready for use. Clients should then turn
   * around and call setCameraViews() to complete the process
   * and start showing the first preview.
   */
  public static class ControllerReadyEvent {
    final private int cameraCount;
    final private CameraController ctlr;

    private ControllerReadyEvent(CameraController ctlr, int cameraCount) {
      this.cameraCount=cameraCount;
      this.ctlr=ctlr;
    }

    public int getNumberOfCameras() {
      return(cameraCount);
    }

    public boolean isEventForController(CameraController ctlr) {
      return(this.ctlr==ctlr);
    }
  }

  /**
   * Event raised when the controller has been destroyed via
   * destroy(), carrying a reference to the destroyed controller
   * so subscribers can release anything tied to it.
   */
  public static class ControllerDestroyedEvent {
    private final CameraController ctlr;

    ControllerDestroyedEvent(CameraController ctlr) {
      this.ctlr=ctlr;
    }

    public CameraController getDestroyedController() {
      return(ctlr);
    }
  }
}
| |
package com.microsoft.bingads.examples.v9;
import java.rmi.*;
import com.microsoft.bingads.*;
import com.microsoft.bingads.campaignmanagement.*;
public class NegativeKeywords extends ExampleBaseV9 {
static AuthorizationData authorizationData;
static ServiceClient<ICampaignManagementService> CampaignService;
/**
 * End-to-end walkthrough of negative keyword management: creates a
 * campaign, attaches an exclusive set of negative keywords, then builds
 * a shared negative keyword list, associates it with the campaign, and
 * finally cleans everything up. All service faults are caught and
 * printed at the bottom.
 */
public static void main(java.lang.String[] args) {
    try
    {
        // Authenticate and build the Campaign Management service proxy.
        authorizationData = new AuthorizationData();
        authorizationData.setDeveloperToken(DeveloperToken);
        authorizationData.setAuthentication(new PasswordAuthentication(UserName, Password));
        authorizationData.setCustomerId(CustomerId);
        authorizationData.setAccountId(AccountId);

        CampaignService = new ServiceClient<ICampaignManagementService>(
                authorizationData,
                ICampaignManagementService.class);

        // Specify a campaign.
        ArrayOfCampaign campaigns = new ArrayOfCampaign();
        Campaign campaign = new Campaign();
        campaign.setName("Winter Clothing " + System.currentTimeMillis());
        campaign.setDescription("Winter clothing line.");
        campaign.setBudgetType(BudgetLimitType.MONTHLY_BUDGET_SPEND_UNTIL_DEPLETED);
        campaign.setMonthlyBudget(1000.00);
        campaign.setTimeZone("PacificTimeUSCanadaTijuana");
        campaign.setDaylightSaving(true);
        campaigns.getCampaigns().add(campaign);

        // Add the campaign
        ArrayOflong campaignIds = addCampaigns(AccountId, campaigns);
        printCampaignIdentifiers(campaignIds);

        // You may choose to associate an exclusive set of negative keywords to an individual campaign
        // or ad group. An exclusive set of negative keywords cannot be shared with other campaigns
        // or ad groups. This sample only associates negative keywords with a campaign.
        ArrayOfNegativeKeyword negativeKeywords = new ArrayOfNegativeKeyword();
        NegativeKeyword negativeKeyword = new NegativeKeyword();
        negativeKeyword.setMatchType(MatchType.PHRASE);
        negativeKeyword.setText("auto");
        negativeKeywords.getNegativeKeywords().add(negativeKeyword);

        ArrayOfEntityNegativeKeyword entityNegativeKeywords = new ArrayOfEntityNegativeKeyword();
        EntityNegativeKeyword entityNegativeKeyword = new EntityNegativeKeyword();
        entityNegativeKeyword.setEntityId(campaignIds.getLongs().get(0));
        entityNegativeKeyword.setEntityType("Campaign");
        entityNegativeKeyword.setNegativeKeywords(negativeKeywords);
        entityNegativeKeywords.getEntityNegativeKeywords().add(entityNegativeKeyword);

        AddNegativeKeywordsToEntitiesResponse addNegativeKeywordsToEntitiesResponse =
            addNegativeKeywordsToEntities(entityNegativeKeywords);
        if (addNegativeKeywordsToEntitiesResponse.getNestedPartialErrors() == null
            || addNegativeKeywordsToEntitiesResponse.getNestedPartialErrors().getBatchErrorCollections().size() == 0)
        {
            System.out.println("Added an exclusive set of negative keywords to the Campaign.\n");
            printNegativeKeywordIds(addNegativeKeywordsToEntitiesResponse.getNegativeKeywordIds());
        }
        else
        {
            printNestedPartialErrors(addNegativeKeywordsToEntitiesResponse.getNestedPartialErrors());
        }

        GetNegativeKeywordsByEntityIdsResponse getNegativeKeywordsByEntityIdsResponse =
            getNegativeKeywordsByEntityIds(campaignIds, "Campaign", AccountId);
        if (getNegativeKeywordsByEntityIdsResponse.getPartialErrors() == null
            || getNegativeKeywordsByEntityIdsResponse.getPartialErrors().getBatchErrors().size() == 0)
        {
            System.out.println("Retrieved an exclusive set of negative keywords for the Campaign.\n");
            printEntityNegativeKeywords(getNegativeKeywordsByEntityIdsResponse.getEntityNegativeKeywords());
        }
        else
        {
            printPartialErrors(getNegativeKeywordsByEntityIdsResponse.getPartialErrors());
        }

        // If you attempt to delete a negative keyword without an identifier the operation will
        // succeed but will return partial errors corresponding to the index of the negative keyword
        // that was not deleted.
        ArrayOfBatchErrorCollection nestedPartialErrors = deleteNegativeKeywordsFromEntities(entityNegativeKeywords);
        if (nestedPartialErrors == null || nestedPartialErrors.getBatchErrorCollections().size() == 0)
        {
            System.out.println("Deleted an exclusive set of negative keywords from the Campaign.\n");
        }
        else
        {
            System.out.println("Attempt to DeleteNegativeKeywordsFromEntities without NegativeKeyword identifier partially fails by design.\n");
            printNestedPartialErrors(nestedPartialErrors);
        }

        // Delete the negative keywords with identifiers that were returned above.
        nestedPartialErrors = deleteNegativeKeywordsFromEntities(
            getNegativeKeywordsByEntityIdsResponse.getEntityNegativeKeywords());
        if (nestedPartialErrors == null || nestedPartialErrors.getBatchErrorCollections().size() == 0)
        {
            System.out.println("Deleted an exclusive set of negative keywords from the Campaign.\n");
        }
        else
        {
            printNestedPartialErrors(nestedPartialErrors);
        }

        // Negative keywords can also be added and deleted from a shared negative keyword list.
        // The negative keyword list can be shared or associated with multiple campaigns.
        // NegativeKeywordList inherits from SharedList which inherits from SharedEntity.
        NegativeKeywordList negativeKeywordList = new NegativeKeywordList();
        negativeKeywordList.setName("My Negative Keyword List " + System.currentTimeMillis());
        negativeKeywordList.setType("NegativeKeywordList");

        ArrayOfSharedListItem sharedListItems = new ArrayOfSharedListItem();
        NegativeKeyword negativeKeyword1 = new NegativeKeyword();
        negativeKeyword1.setText("car");
        negativeKeyword1.setType("NegativeKeyword");
        negativeKeyword1.setMatchType(MatchType.EXACT);
        sharedListItems.getSharedListItems().add(negativeKeyword1);
        NegativeKeyword negativeKeyword2 = new NegativeKeyword();
        negativeKeyword2.setText("car");
        negativeKeyword2.setType("NegativeKeyword");
        negativeKeyword2.setMatchType(MatchType.PHRASE);
        sharedListItems.getSharedListItems().add(negativeKeyword2);

        // You can create a new list for negative keywords with or without negative keywords.
        AddSharedEntityResponse addSharedEntityResponse = addSharedEntity(negativeKeywordList, sharedListItems);
        long sharedEntityId = addSharedEntityResponse.getSharedEntityId();
        ArrayOflong listItemIds = addSharedEntityResponse.getListItemIds();

        System.out.printf("NegativeKeywordList successfully added to account library and assigned identifer %d\n\n", sharedEntityId);

        printNegativeKeywordResults(
            sharedEntityId,
            sharedListItems,
            listItemIds,
            addSharedEntityResponse.getPartialErrors());

        System.out.println("Negative keywords currently in NegativeKeywordList:");
        negativeKeywordList.setId(sharedEntityId);
        sharedListItems = getListItemsBySharedList(negativeKeywordList);
        if (sharedListItems == null || sharedListItems.getSharedListItems().size() == 0)
        {
            System.out.println("None\n");
        }
        else
        {
            printSharedListItems(sharedListItems);
        }

        // To update the list of negative keywords, you must either add or remove from the list
        // using the respective AddListItemsToSharedList or DeleteListItemsFromSharedList operations.
        // To remove the negative keywords from the list pass the negative keyword identifiers
        // and negative keyword list (SharedEntity) identifier.
        ArrayOfBatchError partialErrors = deleteListItemsFromSharedList(listItemIds, negativeKeywordList);
        if (partialErrors == null || partialErrors.getBatchErrors().size() == 0)
        {
            System.out.println("Deleted most recently added negative keywords from negative keyword list.\n");
        }
        else
        {
            printPartialErrors(partialErrors);
        }

        System.out.println("Negative keywords currently in NegativeKeywordList:");
        sharedListItems = getListItemsBySharedList(negativeKeywordList);
        if (sharedListItems == null || sharedListItems.getSharedListItems().size() == 0)
        {
            System.out.println("None\n");
        }
        else
        {
            printSharedListItems(sharedListItems);
        }

        // Whether you created the list with or without negative keywords, more can be added
        // using the AddListItemsToSharedList operation.
        sharedListItems = new ArrayOfSharedListItem();
        negativeKeyword1 = new NegativeKeyword();
        negativeKeyword1.setText("auto");
        negativeKeyword1.setType("NegativeKeyword");
        negativeKeyword1.setMatchType(MatchType.EXACT);
        sharedListItems.getSharedListItems().add(negativeKeyword1);
        negativeKeyword2 = new NegativeKeyword();
        negativeKeyword2.setText("auto");
        negativeKeyword2.setType("NegativeKeyword");
        negativeKeyword2.setMatchType(MatchType.PHRASE);
        sharedListItems.getSharedListItems().add(negativeKeyword2);

        AddListItemsToSharedListResponse addListItemsToSharedListResponse = addListItemsToSharedList(
            sharedListItems,
            negativeKeywordList);
        listItemIds = addListItemsToSharedListResponse.getListItemIds();

        printNegativeKeywordResults(
            sharedEntityId,
            sharedListItems,
            listItemIds,
            addListItemsToSharedListResponse.getPartialErrors());

        System.out.println("Negative keywords currently in NegativeKeywordList:");
        sharedListItems = getListItemsBySharedList(negativeKeywordList);
        if (sharedListItems == null || sharedListItems.getSharedListItems().size() == 0)
        {
            System.out.println("None\n");
        }
        else
        {
            printSharedListItems(sharedListItems);
        }

        // You can update the name of the negative keyword list.
        negativeKeywordList.setName("My Updated Negative Keyword List");
        negativeKeywordList.setType("NegativeKeywordList");
        ArrayOfSharedEntity sharedEntities = new ArrayOfSharedEntity();
        sharedEntities.getSharedEntities().add(negativeKeywordList);
        partialErrors = updateSharedEntities(sharedEntities);
        if (partialErrors == null || partialErrors.getBatchErrors().size() == 0)
        {
            System.out.printf("Updated Negative Keyword List Name to %s.\n\n", negativeKeywordList.getName());
        }
        else
        {
            printPartialErrors(partialErrors);
        }

        // Get and print the negative keyword lists and return the list of identifiers.
        final java.lang.String sharedEntityType = "NegativeKeywordList";
        ArrayOflong sharedEntityIds = getAndPrintSharedEntityIdentifiers(sharedEntityType);

        // Negative keywords were added to the negative keyword list above. You can associate the
        // shared list of negative keywords with a campaign with or without negative keywords.
        // Shared negative keyword lists cannot be associated with an ad group. An ad group can only
        // be assigned an exclusive set of negative keywords.
        ArrayOfSharedEntityAssociation associations = new ArrayOfSharedEntityAssociation();
        SharedEntityAssociation association = new SharedEntityAssociation();
        association.setEntityId(campaignIds.getLongs().get(0));
        association.setEntityType("Campaign");
        association.setSharedEntityId(sharedEntityId);
        association.setSharedEntityType("NegativeKeywordList");
        associations.getSharedEntityAssociations().add(association);
        partialErrors = setSharedEntityAssociations(associations);
        if (partialErrors == null || partialErrors.getBatchErrors().size() == 0)
        {
            System.out.printf("Associated CampaignId %d with Negative Keyword List Id %d.\n\n",
                campaignIds.getLongs().get(0), sharedEntityId);
        }
        else
        {
            printPartialErrors(partialErrors);
        }

        // Get and print the associations either by Campaign or NegativeKeywordList identifier.
        GetSharedEntityAssociationsByEntityIdsResponse getSharedEntityAssociationsByEntityIdsResponse =
            getSharedEntityAssociationsByEntityIds(campaignIds, "Campaign", "NegativeKeywordList");
        printSharedEntityAssociations(getSharedEntityAssociationsByEntityIdsResponse.getAssociations());
        printPartialErrors(getSharedEntityAssociationsByEntityIdsResponse.getPartialErrors());

        // Get the associations of the most recently added shared entity.
        ArrayOflong associatedSharedEntityIds = new ArrayOflong();
        associatedSharedEntityIds.getLongs().add(sharedEntityIds.getLongs().get(sharedEntityIds.getLongs().size()-1));
        GetSharedEntityAssociationsBySharedEntityIdsResponse getSharedEntityAssociationsBySharedEntityIdsResponse =
            getSharedEntityAssociationsBySharedEntityIds("Campaign", associatedSharedEntityIds, "NegativeKeywordList");
        printSharedEntityAssociations(getSharedEntityAssociationsBySharedEntityIdsResponse.getAssociations());
        printPartialErrors(getSharedEntityAssociationsBySharedEntityIdsResponse.getPartialErrors());

        // Explicitly delete the association between the campaign and the negative keyword list.
        partialErrors = deleteSharedEntityAssociations(associations);
        if (partialErrors == null || partialErrors.getBatchErrors().size() == 0)
        {
            System.out.println("Deleted NegativeKeywordList associations\n");
        }
        else
        {
            printPartialErrors(partialErrors);
        }

        // Delete the campaign and any remaining associations.
        deleteCampaigns(AccountId, campaignIds);
        System.out.printf("Deleted CampaignId %d\n\n", campaignIds.getLongs().get(0));

        // DeleteCampaigns does not delete the negative keyword list from the account's library.
        // Call the DeleteSharedEntities operation to delete the shared entities.
        partialErrors = deleteSharedEntities(sharedEntities);
        if (partialErrors == null || partialErrors.getBatchErrors().size() == 0)
        {
            System.out.printf("Deleted Negative Keyword List (SharedEntity) Id %d\n\n", sharedEntityId);
        }
        else
        {
            printPartialErrors(partialErrors);
        }

    // Campaign Management service operations can throw AdApiFaultDetail.
    } catch (AdApiFaultDetail_Exception ex) {
        System.out.println("The operation failed with the following faults:\n");

        for (AdApiError error : ex.getFaultInfo().getErrors().getAdApiErrors())
        {
            System.out.printf("AdApiError\n");
            System.out.printf("Code: %d\nError Code: %s\nMessage: %s\n\n", error.getCode(), error.getErrorCode(), error.getMessage());
        }

    // Campaign Management service operations can throw ApiFaultDetail.
    } catch (ApiFaultDetail_Exception ex) {
        System.out.println("The operation failed with the following faults:\n");

        for (BatchError error : ex.getFaultInfo().getBatchErrors().getBatchErrors())
        {
            System.out.printf("BatchError at Index: %d\n", error.getIndex());
            System.out.printf("Code: %d\nMessage: %s\n\n", error.getCode(), error.getMessage());
        }

        for (OperationError error : ex.getFaultInfo().getOperationErrors().getOperationErrors())
        {
            System.out.printf("OperationError\n");
            System.out.printf("Code: %d\nMessage: %s\n\n", error.getCode(), error.getMessage());
        }
    } catch (RemoteException ex) {
        System.out.println("Service communication error encountered: ");
        System.out.println(ex.getMessage());
        ex.printStackTrace();
    } catch (Exception ex) {
        System.out.println("Error encountered: ");
        System.out.println(ex.getMessage());
        ex.printStackTrace();
    }
}
// Adds one or more campaigns to the given account and returns the
// identifiers the service assigned to them.
static ArrayOflong addCampaigns(long accountId, ArrayOfCampaign campaigns) throws RemoteException, Exception
{
    final AddCampaignsRequest req = new AddCampaignsRequest();
    req.setCampaigns(campaigns);
    req.setAccountId(accountId);
    return CampaignService.getService().addCampaigns(req).getCampaignIds();
}
// Deletes the campaigns with the given identifiers from the account.
static void deleteCampaigns(long accountId, ArrayOflong campaignIds) throws RemoteException, Exception
{
    final DeleteCampaignsRequest req = new DeleteCampaignsRequest();
    req.setCampaignIds(campaignIds);
    req.setAccountId(accountId);
    CampaignService.getService().deleteCampaigns(req);
}
// Adds list items (e.g. negative keywords) to the given shared list.
static AddListItemsToSharedListResponse addListItemsToSharedList(
        ArrayOfSharedListItem listItems,
        SharedList sharedList) throws RemoteException, Exception
{
    final AddListItemsToSharedListRequest req = new AddListItemsToSharedListRequest();
    req.setSharedList(sharedList);
    req.setListItems(listItems);
    return CampaignService.getService().addListItemsToSharedList(req);
}
// Adds a shared entity such as a negative keyword list to the account's library.
// Lists in the account's library can be associated with any campaign within the account.
static AddSharedEntityResponse addSharedEntity(
SharedEntity sharedEntity,
ArrayOfSharedListItem listItems) throws RemoteException, Exception
{
AddSharedEntityRequest request = new AddSharedEntityRequest();
request.setSharedEntity(sharedEntity);
request.setListItems(listItems);
return CampaignService.getService().addSharedEntity(request);
}
// Deletes list items such as negative keywords from the corresponding list.
static ArrayOfBatchError deleteListItemsFromSharedList(
ArrayOflong listItemIds,
SharedList sharedList) throws RemoteException, Exception
{
DeleteListItemsFromSharedListRequest request = new DeleteListItemsFromSharedListRequest();
request.setListItemIds(listItemIds);
request.setSharedList(sharedList);
return CampaignService.getService().deleteListItemsFromSharedList(request).getPartialErrors();
}
// Deletes shared entities such as negative keyword lists from the account's library.
static ArrayOfBatchError deleteSharedEntities(ArrayOfSharedEntity sharedEntities) throws RemoteException, Exception
{
DeleteSharedEntitiesRequest request = new DeleteSharedEntitiesRequest();
request.setSharedEntities(sharedEntities);
return CampaignService.getService().deleteSharedEntities(request).getPartialErrors();
}
// Removes the association between a shared entity such as a negative keyword list and an entity such as a campaign.
static ArrayOfBatchError deleteSharedEntityAssociations(ArrayOfSharedEntityAssociation associations) throws RemoteException, Exception
{
DeleteSharedEntityAssociationsRequest request = new DeleteSharedEntityAssociationsRequest();
request.setAssociations(associations);
return CampaignService.getService().deleteSharedEntityAssociations(request).getPartialErrors();
}
// Gets the list items such as the negative keywords of a negative keyword list.
static ArrayOfSharedListItem getListItemsBySharedList(SharedList sharedList) throws RemoteException, Exception
{
GetListItemsBySharedListRequest request = new GetListItemsBySharedListRequest();
request.setSharedList(sharedList);
return CampaignService.getService().getListItemsBySharedList(request).getListItems();
}
// Gets the shared entities such as negative keyword lists from the account's library.
static ArrayOfSharedEntity getSharedEntitiesByAccountId(java.lang.String sharedEntityType) throws RemoteException, Exception
{
GetSharedEntitiesByAccountIdRequest request = new GetSharedEntitiesByAccountIdRequest();
request.setSharedEntityType(sharedEntityType);
return CampaignService.getService().getSharedEntitiesByAccountId(request).getSharedEntities();
}
// Gets associations between a campaign and a shared entity such as a negative keyword list.
// You can request associations by associated entity identifiers.
static GetSharedEntityAssociationsByEntityIdsResponse getSharedEntityAssociationsByEntityIds(
ArrayOflong entityIds,
java.lang.String entityType,
java.lang.String sharedEntityType) throws RemoteException, Exception
{
GetSharedEntityAssociationsByEntityIdsRequest request = new GetSharedEntityAssociationsByEntityIdsRequest();
request.setEntityIds(entityIds);
request.setEntityType(entityType);
request.setSharedEntityType(sharedEntityType);
return CampaignService.getService().getSharedEntityAssociationsByEntityIds(request);
}
// Gets associations between a campaign and a shared entity such as a negative keyword list.
// You can request associations by shared entity identifiers.
static GetSharedEntityAssociationsBySharedEntityIdsResponse getSharedEntityAssociationsBySharedEntityIds(
java.lang.String entityType,
ArrayOflong sharedEntityIds,
java.lang.String sharedEntityType) throws RemoteException, Exception
{
GetSharedEntityAssociationsBySharedEntityIdsRequest request = new GetSharedEntityAssociationsBySharedEntityIdsRequest();
request.setEntityType(entityType);
request.setSharedEntityIds(sharedEntityIds);
request.setSharedEntityType(sharedEntityType);
return CampaignService.getService().getSharedEntityAssociationsBySharedEntityIds(request);
}
// Sets the association between a campaign and a shared entity such as a negative keyword list.
static ArrayOfBatchError setSharedEntityAssociations(ArrayOfSharedEntityAssociation associations) throws RemoteException, Exception
{
SetSharedEntityAssociationsRequest request = new SetSharedEntityAssociationsRequest();
request.setAssociations(associations);
return CampaignService.getService().setSharedEntityAssociations(request).getPartialErrors();
}
// Updates shared entities such as negative keyword lists within the account's library.
static ArrayOfBatchError updateSharedEntities(ArrayOfSharedEntity sharedEntities) throws RemoteException, Exception
{
UpdateSharedEntitiesRequest request = new UpdateSharedEntitiesRequest();
request.setSharedEntities(sharedEntities);
return CampaignService.getService().updateSharedEntities(request).getPartialErrors();
}
// Adds negative keywords to the specified campaign or ad group.
static AddNegativeKeywordsToEntitiesResponse addNegativeKeywordsToEntities(ArrayOfEntityNegativeKeyword entityNegativeKeywords) throws RemoteException, Exception
{
AddNegativeKeywordsToEntitiesRequest request = new AddNegativeKeywordsToEntitiesRequest();
request.setEntityNegativeKeywords(entityNegativeKeywords);
return CampaignService.getService().addNegativeKeywordsToEntities(request);
}
// Deletes negative keywords from the specified campaign or ad group.
static ArrayOfBatchErrorCollection deleteNegativeKeywordsFromEntities(ArrayOfEntityNegativeKeyword entityNegativeKeywords) throws RemoteException, Exception
{
DeleteNegativeKeywordsFromEntitiesRequest request = new DeleteNegativeKeywordsFromEntitiesRequest();
request.setEntityNegativeKeywords(entityNegativeKeywords);
return CampaignService.getService().deleteNegativeKeywordsFromEntities(request).getNestedPartialErrors();
}
// Gets the negative keywords that are only associated with the specified campaigns or ad groups.
static GetNegativeKeywordsByEntityIdsResponse getNegativeKeywordsByEntityIds(
ArrayOflong entityIds,
java.lang.String entityType,
long parentEntityId) throws RemoteException, Exception
{
GetNegativeKeywordsByEntityIdsRequest request = new GetNegativeKeywordsByEntityIdsRequest();
request.setEntityIds(entityIds);
request.setEntityType(entityType);
request.setParentEntityId(parentEntityId);
return CampaignService.getService().getNegativeKeywordsByEntityIds(request);
}
// Prints the negative keyword identifiers added to each campaign or ad group entity.
// The IdCollection items are available by calling AddNegativeKeywordsToEntities.
static void printNegativeKeywordIds(ArrayOfIdCollection idCollections)
{
if (idCollections == null)
{
return;
}
for (int index = 0; index < idCollections.getIdCollections().size(); index++)
{
System.out.printf("NegativeKeyword Ids at entity index %d:\n\n", index);
for (long id : idCollections.getIdCollections().get(index).getIds().getLongs())
{
System.out.printf("\tId: %d\n\n", id);
}
}
}
// Prints the negative keywords
static void printNegativeKeywords(ArrayOfNegativeKeyword negativeKeywords)
{
if (negativeKeywords == null)
{
return;
}
for (NegativeKeyword negativeKeyword : negativeKeywords.getNegativeKeywords())
{
System.out.printf("\tNegativeKeyword Text: %s\n", negativeKeyword.getText());
System.out.printf("\tId: %d\n", negativeKeyword.getId());
System.out.printf("\tMatchType: %s\n\n", negativeKeyword.getMatchType());
}
}
// Prints the shared list items e.g. negative keywords
static void printSharedListItems(ArrayOfSharedListItem sharedListItems)
{
if (sharedListItems == null)
{
return;
}
for (SharedListItem sharedListItem : sharedListItems.getSharedListItems())
{
if(sharedListItem instanceof NegativeKeyword)
{
System.out.printf("\tNegativeKeyword Text: %s\n", ((NegativeKeyword)sharedListItem).getText());
System.out.printf("\tId: %d\n", ((NegativeKeyword)sharedListItem).getId());
System.out.printf("\tMatchType: %s\n\n", ((NegativeKeyword)sharedListItem).getMatchType());
}
}
}
// Prints a list of EntityNegativeKeyword objects
static void printEntityNegativeKeywords(ArrayOfEntityNegativeKeyword entityNegativeKeywords)
{
if (entityNegativeKeywords == null)
{
return;
}
System.out.println("EntityNegativeKeyword items:\n");
for (EntityNegativeKeyword entityNegativeKeyword : entityNegativeKeywords.getEntityNegativeKeywords())
{
System.out.printf("\tEntityId: %d\n", entityNegativeKeyword.getEntityId());
System.out.printf("\tEntityType: %s\n\n", entityNegativeKeyword.getEntityType());
printNegativeKeywords(entityNegativeKeyword.getNegativeKeywords());
}
}
// Prints a list of EntityNegativeKeyword objects
static ArrayOflong getAndPrintSharedEntityIdentifiers(java.lang.String sharedEntityType) throws RemoteException, Exception
{
ArrayOfSharedEntity sharedEntities = getSharedEntitiesByAccountId(sharedEntityType);
ArrayOflong sharedEntityIds = new ArrayOflong();
for (int index = 0; index < sharedEntities.getSharedEntities().size(); index++)
{
SharedEntity sharedEntity = sharedEntities.getSharedEntities().get(index);
if (sharedEntity.getId() != null)
{
sharedEntityIds.getLongs().add((long)sharedEntity.getId());
System.out.printf("SharedEntity[%d] (%s) has SharedEntity Id %d.\n\n",
index,
sharedEntities.getSharedEntities().get(index).getName(),
sharedEntities.getSharedEntities().get(index).getId());
}
}
return sharedEntityIds;
}
// Prints the campaign identifiers for each campaign added.
static void printCampaignIdentifiers(ArrayOflong campaignIds)
{
if (campaignIds == null)
{
return;
}
for (long id : campaignIds.getLongs())
{
System.out.printf("Campaign successfully added and assigned CampaignId %d\n\n", id);
}
}
// Prints a list of BatchError objects that represent partial errors while managing negative keywords.
static void printPartialErrors(ArrayOfBatchError partialErrors)
{
if (partialErrors == null || partialErrors.getBatchErrors().size() == 0)
{
return;
}
System.out.println("BatchError (PartialErrors) items:\n");
for (BatchError error : partialErrors.getBatchErrors())
{
System.out.printf("\tIndex: %d\n", error.getIndex());
System.out.printf("\tCode: %d\n", error.getCode());
System.out.printf("\tErrorCode: %s\n", error.getErrorCode());
System.out.printf("\tMessage: %s\n\n", error.getMessage());
// In the case of an EditorialError, more details are available
if (error.getType().equals("EditorialError") && error.getErrorCode().equals("CampaignServiceEditorialValidationError"))
{
System.out.printf("\tDisapprovedText: %s\n", ((EditorialError)(error)).getDisapprovedText());
System.out.printf("\tLocation: %s\n", ((EditorialError)(error)).getLocation());
System.out.printf("\tPublisherCountry: %s\n", ((EditorialError)(error)).getPublisherCountry());
System.out.printf("\tReasonCode: %d\n\n", ((EditorialError)(error)).getReasonCode());
}
}
}
// Prints the list item identifiers, as well as any partial errors
static void printNegativeKeywordResults(
long sharedListId,
ArrayOfSharedListItem sharedListItems,
ArrayOflong sharedListItemIds,
ArrayOfBatchError partialErrors)
{
if (sharedListItemIds == null)
{
return;
}
for (int index = 0; index < sharedListItems.getSharedListItems().size(); index++)
{
// Determine if the SharedListItem is a NegativeKeyword.
if (sharedListItems.getSharedListItems().get(index) instanceof NegativeKeyword)
{
// Determine if the corresponding index has a valid identifier
if(sharedListItemIds.getLongs().get(index) > 0)
{
System.out.printf("NegativeKeyword[%d] (%s) successfully added to NegativeKeywordList (%d) and assigned Negative Keyword Id %d.\n",
index,
((NegativeKeyword)(sharedListItems.getSharedListItems().get(index))).getText(),
sharedListId,
sharedListItemIds.getLongs().get(index));
}
}
else
{
System.out.println("SharedListItem is not a NegativeKeyword.");
}
}
System.out.println();
printPartialErrors(partialErrors);
}
// Prints a list of SharedEntityAssociation objects.
static void printSharedEntityAssociations(ArrayOfSharedEntityAssociation associations)
{
if (associations == null || associations.getSharedEntityAssociations().size() == 0)
{
return;
}
System.out.println("SharedEntityAssociation items:\n");
for (SharedEntityAssociation sharedEntityAssociation : associations.getSharedEntityAssociations())
{
System.out.printf("\tEntityId: %d\n", sharedEntityAssociation.getEntityId());
System.out.printf("\tEntityType: %s\n", sharedEntityAssociation.getEntityType());
System.out.printf("\tSharedEntityId: %d\n", sharedEntityAssociation.getSharedEntityId());
System.out.printf("\tSharedEntityType: %s\n\n", sharedEntityAssociation.getSharedEntityType());
}
}
// Prints a list of BatchErrorCollection objects that represent partial errors while managing
// negative keywords.
static void printNestedPartialErrors(ArrayOfBatchErrorCollection nestedPartialErrors)
{
if (nestedPartialErrors == null || nestedPartialErrors.getBatchErrorCollections().size() == 0)
{
return;
}
System.out.println("BatchErrorCollection (NestedPartialErrors) items:\n");
for (BatchErrorCollection collection : nestedPartialErrors.getBatchErrorCollections())
{
// The top level list index corresponds to the campaign or ad group index identifier.
if (collection != null)
{
if (collection.getCode() != null)
{
System.out.printf("\tIndex: %d\n", collection.getIndex());
System.out.printf("\tCode: %d\n", collection.getCode());
System.out.printf("\tErrorCode: %s\n", collection.getErrorCode());
System.out.printf("\tMessage: %s\n\n", collection.getMessage());
}
printPartialErrors(collection.getBatchErrors());
}
}
}
}
| |
package com.pushtorefresh.storio3.sqlite.operations.put;
import android.content.ContentValues;
import android.support.annotation.CheckResult;
import android.support.annotation.NonNull;
import com.pushtorefresh.storio3.StorIOException;
import com.pushtorefresh.storio3.operations.PreparedOperation;
import com.pushtorefresh.storio3.sqlite.Changes;
import com.pushtorefresh.storio3.Interceptor;
import com.pushtorefresh.storio3.sqlite.StorIOSQLite;
import com.pushtorefresh.storio3.sqlite.operations.internal.RxJavaUtils;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import io.reactivex.BackpressureStrategy;
import io.reactivex.Completable;
import io.reactivex.Flowable;
import io.reactivex.Single;
import static com.pushtorefresh.storio3.internal.Checks.checkNotNull;
public class PreparedPutContentValuesIterable extends PreparedPut<PutResults<ContentValues>, Iterable<ContentValues>> {

    // The rows to insert/update; iterated exactly once per execution.
    @NonNull
    private final Iterable<ContentValues> contentValuesIterable;

    // Decides, per row, whether to insert or update.
    @NonNull
    private final PutResolver<ContentValues> putResolver;

    // When true, all puts run in a single transaction and change
    // notifications are batched until the transaction ends.
    private final boolean useTransaction;

    PreparedPutContentValuesIterable(
            @NonNull StorIOSQLite storIOSQLite,
            @NonNull Iterable<ContentValues> contentValuesIterable,
            @NonNull PutResolver<ContentValues> putResolver,
            boolean useTransaction) {
        super(storIOSQLite);
        this.contentValuesIterable = contentValuesIterable;
        this.putResolver = putResolver;
        this.useTransaction = useTransaction;
    }

    /**
     * Creates {@link Flowable} which will perform Put Operation and send result to observer.
     * <p>
     * Returned {@link Flowable} will be "Cold Flowable", which means that it performs
     * put only after subscribing to it. Also, it emits the result once.
     * <p>
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>Operates on {@link StorIOSQLite#defaultRxScheduler()} if not {@code null}.</dd>
     * </dl>
     *
     * @return non-null {@link Flowable} which will perform Put Operation.
     * and send result to observer.
     */
    @NonNull
    @CheckResult
    @Override
    public Flowable<PutResults<ContentValues>> asRxFlowable(@NonNull BackpressureStrategy backpressureStrategy) {
        return RxJavaUtils.createFlowable(storIOSQLite, this, backpressureStrategy);
    }

    /**
     * Creates {@link Single} which will perform Put Operation lazily when somebody subscribes to it and send result to observer.
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>Operates on {@link StorIOSQLite#defaultRxScheduler()} if not {@code null}.</dd>
     * </dl>
     *
     * @return non-null {@link Single} which will perform Put Operation.
     * And send result to observer.
     */
    @NonNull
    @CheckResult
    @Override
    public Single<PutResults<ContentValues>> asRxSingle() {
        return RxJavaUtils.createSingle(storIOSQLite, this);
    }

    /**
     * Creates {@link Completable} which will perform Put Operation lazily when somebody subscribes to it.
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>Operates on {@link StorIOSQLite#defaultRxScheduler()} if not {@code null}.</dd>
     * </dl>
     *
     * @return non-null {@link Completable} which will perform Put Operation.
     */
    @NonNull
    @CheckResult
    @Override
    public Completable asRxCompletable() {
        return RxJavaUtils.createCompletable(storIOSQLite, this);
    }

    @NonNull
    @Override
    protected Interceptor getRealCallInterceptor() {
        return new RealCallInterceptor();
    }

    @NonNull
    @Override
    public Iterable<ContentValues> getData() {
        return contentValuesIterable;
    }

    // Performs the actual puts. In transaction mode, notifications about
    // affected tables/tags are collected and dispatched once, after the
    // transaction has ended; otherwise each successful put notifies
    // immediately.
    private class RealCallInterceptor implements Interceptor {
        @NonNull
        @Override
        public <Result, WrappedResult, Data> Result intercept(@NonNull PreparedOperation<Result, WrappedResult, Data> operation, @NonNull Chain chain) {
            try {
                final StorIOSQLite.LowLevel lowLevel = storIOSQLite.lowLevel();
                final Map<ContentValues, PutResult> putResults = new HashMap<ContentValues, PutResult>();
                if (useTransaction) {
                    lowLevel.beginTransaction();
                }
                boolean transactionSuccessful = false;
                try {
                    for (ContentValues contentValues : contentValuesIterable) {
                        final PutResult putResult = putResolver.performPut(storIOSQLite, contentValues);
                        putResults.put(contentValues, putResult);
                        // Without a transaction, notify per successful row.
                        if (!useTransaction && (putResult.wasInserted() || putResult.wasUpdated())) {
                            final Changes changes = Changes.newInstance(
                                    putResult.affectedTables(),
                                    putResult.affectedTags()
                            );
                            lowLevel.notifyAboutChanges(changes);
                        }
                    }
                    if (useTransaction) {
                        lowLevel.setTransactionSuccessful();
                        transactionSuccessful = true;
                    }
                } finally {
                    if (useTransaction) {
                        lowLevel.endTransaction();
                        if (transactionSuccessful) {
                            final Set<String> affectedTables = new HashSet<String>(1); // in most cases it will be 1 table
                            final Set<String> affectedTags = new HashSet<String>(1);
                            // Iterate entries directly instead of keySet() + get():
                            // a single pass with no second hash lookup per element.
                            for (final Map.Entry<ContentValues, PutResult> entry : putResults.entrySet()) {
                                final PutResult putResult = entry.getValue();
                                if (putResult.wasInserted() || putResult.wasUpdated()) {
                                    affectedTables.addAll(putResult.affectedTables());
                                    affectedTags.addAll(putResult.affectedTags());
                                }
                            }
                            // IMPORTANT: Notifying about change should be done after end of transaction
                            // It'll reduce number of possible deadlock situations
                            if (!affectedTables.isEmpty() || !affectedTags.isEmpty()) {
                                lowLevel.notifyAboutChanges(Changes.newInstance(affectedTables, affectedTags));
                            }
                        }
                    }
                }
                // Unchecked cast is imposed by the generic Interceptor contract;
                // Result is always PutResults<ContentValues> here.
                return (Result) PutResults.newInstance(putResults);
            } catch (Exception exception) {
                throw new StorIOException("Error has occurred during Put operation. contentValues = " + contentValuesIterable, exception);
            }
        }
    }

    /**
     * Builder for {@link PreparedPutContentValuesIterable}
     */
    public static class Builder {

        @NonNull
        private final StorIOSQLite storIOSQLite;

        @NonNull
        private final Iterable<ContentValues> contentValuesIterable;

        Builder(@NonNull StorIOSQLite storIOSQLite, @NonNull Iterable<ContentValues> contentValuesIterable) {
            this.storIOSQLite = storIOSQLite;
            this.contentValuesIterable = contentValuesIterable;
        }

        /**
         * Required: Specifies {@link PutResolver} for Put Operation
         * which allows you to customize behavior of Put Operation
         *
         * @param putResolver put resolver
         * @return builder
         * @see DefaultPutResolver
         */
        @NonNull
        public CompleteBuilder withPutResolver(@NonNull PutResolver<ContentValues> putResolver) {
            checkNotNull(putResolver, "Please specify put resolver");
            return new CompleteBuilder(
                    storIOSQLite,
                    contentValuesIterable,
                    putResolver
            );
        }
    }

    /**
     * Compile-time safe part of {@link Builder}
     */
    public static class CompleteBuilder {

        @NonNull
        private final StorIOSQLite storIOSQLite;

        @NonNull
        private final Iterable<ContentValues> contentValuesIterable;

        @NonNull
        private final PutResolver<ContentValues> putResolver;

        // Transactions are on unless the caller opts out.
        private boolean useTransaction = true;

        CompleteBuilder(@NonNull StorIOSQLite storIOSQLite, @NonNull Iterable<ContentValues> contentValuesIterable, @NonNull PutResolver<ContentValues> putResolver) {
            this.storIOSQLite = storIOSQLite;
            this.contentValuesIterable = contentValuesIterable;
            this.putResolver = putResolver;
        }

        /**
         * Optional: Defines that Put Operation will use transaction
         * if it is supported by implementation of {@link StorIOSQLite}
         * <p>
         * By default, transaction will be used
         *
         * @return builder
         */
        @NonNull
        public CompleteBuilder useTransaction(boolean useTransaction) {
            this.useTransaction = useTransaction;
            return this;
        }

        /**
         * Prepares Put Operation
         *
         * @return {@link PreparedPutContentValuesIterable} instance
         */
        @NonNull
        public PreparedPutContentValuesIterable prepare() {
            return new PreparedPutContentValuesIterable(
                    storIOSQLite,
                    contentValuesIterable,
                    putResolver,
                    useTransaction
            );
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.math;
/**
* Static library that provides all operations related with division and modular
* arithmetic to {@link BigInteger}. Some methods are provided in both mutable
* and immutable way. There are several variants provided listed below:
*
* <ul type="circle">
* <li><b>Division</b>
* <ul type="circle">
* <li>{@link BigInteger} division and remainder by {@link BigInteger}.</li>
* <li>{@link BigInteger} division and remainder by {@code int}.</li>
* <li><i>gcd</i> between {@link BigInteger} numbers.</li>
* </ul>
* </li>
* <li><b>Modular arithmetic </b>
* <ul type="circle">
* <li>Modular exponentiation between {@link BigInteger} numbers.</li>
 * <li>Modular inverse of a {@link BigInteger} number.</li>
* </ul>
* </li>
* </ul>
*/
class Division {
    /**
     * Divides the array 'a' by the array 'b' and gets the quotient and the
     * remainder. Implements the Knuth's division algorithm. See D. Knuth, The
     * Art of Computer Programming, vol. 2. Steps D1-D8 correspond to the steps
     * in the algorithm description. Arrays are little-endian magnitudes whose
     * elements are treated as unsigned 32-bit digits.
     *
     * @param quot
     *            the quotient array to fill, or {@code null} when the caller
     *            only needs the remainder
     * @param quotLength
     *            the quotient's length
     * @param a
     *            the dividend
     * @param aLength
     *            the dividend's length
     * @param b
     *            the divisor
     * @param bLength
     *            the divisor's length
     * @return the remainder
     */
    static int[] divide(int quot[], int quotLength, int a[], int aLength,
            int b[], int bLength) {
        int normA[] = new int[aLength + 1]; // the normalized dividend
        // an extra byte is needed for correct shift
        int normB[] = new int[bLength + 1]; // the normalized divisor;
        int normBLength = bLength;
        /*
         * Step D1: normalize a and b and put the results to a1 and b1 the
         * normalized divisor's first digit must be >= 2^31
         */
        int divisorShift = Integer.numberOfLeadingZeros(b[bLength - 1]);
        if (divisorShift != 0) {
            BitLevel.shiftLeft(normB, b, 0, divisorShift);
            BitLevel.shiftLeft(normA, a, 0, divisorShift);
        } else {
            System.arraycopy(a, 0, normA, 0, aLength);
            System.arraycopy(b, 0, normB, 0, bLength);
        }
        int firstDivisorDigit = normB[normBLength - 1];
        // Step D2: set the quotient index
        int i = quotLength - 1;
        int j = aLength;
        while (i >= 0) {
            // Step D3: calculate a guess digit guessDigit
            int guessDigit = 0;
            if (normA[j] == firstDivisorDigit) {
                // set guessDigit to the largest unsigned int value
                guessDigit = -1;
            } else {
                long product = (((normA[j] & 0xffffffffL) << 32) + (normA[j - 1] & 0xffffffffL));
                long res = Division.divideLongByInt(product, firstDivisorDigit);
                guessDigit = (int) res; // the quotient of divideLongByInt
                int rem = (int) (res >> 32); // the remainder of
                // divideLongByInt
                // decrease guessDigit by 1 while leftHand > rightHand
                if (guessDigit != 0) {
                    long leftHand = 0;
                    long rightHand = 0;
                    boolean rOverflowed = false;
                    guessDigit++; // to have the proper value in the loop
                    // below
                    do {
                        guessDigit--;
                        if (rOverflowed) {
                            break;
                        }
                        // leftHand always fits in an unsigned long
                        leftHand = (guessDigit & 0xffffffffL)
                                * (normB[normBLength - 2] & 0xffffffffL);
                        /*
                         * rightHand can overflow; in this case the loop
                         * condition will be true in the next step of the loop
                         */
                        rightHand = ((long) rem << 32)
                                + (normA[j - 2] & 0xffffffffL);
                        long longR = (rem & 0xffffffffL)
                                + (firstDivisorDigit & 0xffffffffL);
                        /*
                         * checks that longR does not fit in an unsigned int;
                         * this ensures that rightHand will overflow unsigned
                         * long in the next step
                         */
                        if (Integer.numberOfLeadingZeros((int) (longR >>> 32)) < 32) {
                            rOverflowed = true;
                        } else {
                            rem = (int) longR;
                        }
                        // XOR with the sign bit turns signed comparison into
                        // an unsigned one.
                    } while (((leftHand ^ 0x8000000000000000L) > (rightHand ^ 0x8000000000000000L)));
                }
            }
            // Step D4: multiply normB by guessDigit and subtract the production
            // from normA.
            if (guessDigit != 0) {
                int borrow = Division.multiplyAndSubtract(normA, j
                        - normBLength, normB, normBLength, guessDigit);
                // Step D5: check the borrow
                if (borrow != 0) {
                    // Step D6: compensating addition
                    guessDigit--;
                    long carry = 0;
                    for (int k = 0; k < normBLength; k++) {
                        carry += (normA[j - normBLength + k] & 0xffffffffL)
                                + (normB[k] & 0xffffffffL);
                        normA[j - normBLength + k] = (int) carry;
                        carry >>>= 32;
                    }
                }
            }
            if (quot != null) {
                quot[i] = guessDigit;
            }
            // Step D7
            j--;
            i--;
        }
        /*
         * Step D8: we got the remainder in normA. Denormalize it if needed
         */
        if (divisorShift != 0) {
            // reuse normB
            BitLevel.shiftRight(normB, normBLength, normA, 0, divisorShift);
            return normB;
        }
        // NOTE(review): the copy into normB looks dead here since normA is
        // returned — confirm against callers whether the copy is intentional.
        System.arraycopy(normA, 0, normB, 0, bLength);
        return normA;
    }
    /**
     * Divides an array (treated as an unsigned little-endian magnitude) by an
     * integer value treated as unsigned. Implements the Knuth's division
     * algorithm. See D. Knuth, The Art of Computer Programming, vol. 2.
     *
     * @param dest
     *            the quotient (filled digit by digit)
     * @param src
     *            the dividend
     * @param srcLength
     *            the length of the dividend
     * @param divisor
     *            the divisor
     * @return remainder
     */
    static int divideArrayByInt(int dest[], int src[], final int srcLength,
            final int divisor) {
        long rem = 0;
        long bLong = divisor & 0xffffffffL;
        // Process digits from most significant to least, carrying the
        // remainder down into the next 64-bit partial dividend.
        for (int i = srcLength - 1; i >= 0; i--) {
            long temp = (rem << 32) | (src[i] & 0xffffffffL);
            long quot;
            if (temp >= 0) {
                quot = (temp / bLong);
                rem = (temp % bLong);
            } else {
                /*
                 * make the dividend positive shifting it right by 1 bit then
                 * get the quotient and remainder and correct them properly
                 */
                long aPos = temp >>> 1;
                long bPos = divisor >>> 1;
                quot = aPos / bPos;
                rem = aPos % bPos;
                // double the remainder and add 1 if a is odd
                rem = (rem << 1) + (temp & 1);
                if ((divisor & 1) != 0) {
                    // the divisor is odd
                    if (quot <= rem) {
                        rem -= quot;
                    } else {
                        if (quot - rem <= bLong) {
                            rem += bLong - quot;
                            quot -= 1;
                        } else {
                            rem += (bLong << 1) - quot;
                            quot -= 2;
                        }
                    }
                }
            }
            dest[i] = (int) (quot & 0xffffffffL);
        }
        return (int) rem;
    }
/**
* Divides an array by an integer value. Implements the Knuth's division
* algorithm. See D. Knuth, The Art of Computer Programming, vol. 2.
*
* @param src
* the dividend
* @param srcLength
* the length of the dividend
* @param divisor
* the divisor
* @return remainder
*/
static int remainderArrayByInt(int src[], final int srcLength,
final int divisor) {
long result = 0;
for (int i = srcLength - 1; i >= 0; i--) {
long temp = (result << 32) + (src[i] & 0xffffffffL);
long res = divideLongByInt(temp, divisor);
result = (int) (res >> 32);
}
return (int) result;
}
/**
* Divides a <code>BigInteger</code> by a signed <code>int</code> and
* returns the remainder.
*
* @param dividend
* the BigInteger to be divided. Must be non-negative.
* @param divisor
* a signed int
* @return divide % divisor
*/
static int remainder(BigInteger dividend, int divisor) {
return remainderArrayByInt(dividend.digits, dividend.numberLength,
divisor);
}
    /**
     * Divides an unsigned long a by an unsigned int b. It is supposed that the
     * most significant bit of b is set to 1, i.e. b &lt; 0 when read as a
     * signed int.
     *
     * @param a
     *            the dividend (treated as unsigned)
     * @param b
     *            the divisor (treated as unsigned)
     * @return the long value containing the unsigned integer remainder in the
     *         left half and the unsigned integer quotient in the right half
     */
    static long divideLongByInt(long a, int b) {
        long quot;
        long rem;
        long bLong = b & 0xffffffffL;
        // A non-negative a fits plain signed 64-bit division directly.
        if (a >= 0) {
            quot = (a / bLong);
            rem = (a % bLong);
        } else {
            /*
             * Make the dividend positive shifting it right by 1 bit then get
             * the quotient and remainder and correct them properly
             */
            long aPos = a >>> 1;
            long bPos = b >>> 1;
            quot = aPos / bPos;
            rem = aPos % bPos;
            // double the remainder and add 1 if a is odd
            rem = (rem << 1) + (a & 1);
            if ((b & 1) != 0) { // the divisor is odd
                if (quot <= rem) {
                    rem -= quot;
                } else {
                    if (quot - rem <= bLong) {
                        rem += bLong - quot;
                        quot -= 1;
                    } else {
                        rem += (bLong << 1) - quot;
                        quot -= 2;
                    }
                }
            }
        }
        // Pack: remainder in the high 32 bits, quotient in the low 32 bits.
        return (rem << 32) | (quot & 0xffffffffL);
    }
/**
* Computes the quotient and the remainder after a division by an
* {@code int} number.
*
* @return an array of the form {@code [quotient, remainder]}.
*/
static BigInteger[] divideAndRemainderByInteger(BigInteger val,
int divisor, int divisorSign) {
// res[0] is a quotient and res[1] is a remainder:
int[] valDigits = val.digits;
int valLen = val.numberLength;
int valSign = val.sign;
if (valLen == 1) {
long a = (valDigits[0] & 0xffffffffL);
long b = (divisor & 0xffffffffL);
long quo = a / b;
long rem = a % b;
if (valSign != divisorSign) {
quo = -quo;
}
if (valSign < 0) {
rem = -rem;
}
return new BigInteger[] { BigInteger.valueOf(quo),
BigInteger.valueOf(rem) };
}
int quotientLength = valLen;
int quotientSign = ((valSign == divisorSign) ? 1 : -1);
int quotientDigits[] = new int[quotientLength];
int remainderDigits[];
remainderDigits = new int[] { Division.divideArrayByInt(quotientDigits,
valDigits, valLen, divisor) };
BigInteger result0 = new BigInteger(quotientSign, quotientLength,
quotientDigits);
BigInteger result1 = new BigInteger(valSign, 1, remainderDigits);
result0.cutOffLeadingZeroes();
result1.cutOffLeadingZeroes();
return new BigInteger[] { result0, result1 };
}
    /**
     * Multiplies an array by int and subtracts it from a subarray of another
     * array. The subtraction is performed in place on {@code a}.
     *
     * @param a
     *            the array to subtract from
     * @param start
     *            the start element of the subarray of a
     * @param b
     *            the array to be multiplied and subtracted
     * @param bLen
     *            the length of b
     * @param c
     *            the multiplier of b
     * @return the carry element of subtraction
     */
    static int multiplyAndSubtract(int a[], int start, int b[], int bLen, int c) {
        long carry0 = 0; // running carry of the unsigned multiply-accumulate
        long carry1 = 0; // running borrow of the subtraction
        for (int i = 0; i < bLen; i++) {
            carry0 = Multiplication
                    .unsignedMultAddAdd(b[i], c, (int) carry0, 0);
            carry1 = (a[start + i] & 0xffffffffL) - (carry0 & 0xffffffffL)
                    + carry1;
            a[start + i] = (int) carry1;
            carry1 >>= 32; // -1 or 0
            carry0 >>>= 32;
        }
        // Fold the final multiply carry into the digit above the subarray.
        carry1 = (a[start + bLen] & 0xffffffffL) - carry0 + carry1;
        a[start + bLen] = (int) carry1;
        return (int) (carry1 >> 32); // -1 or 0
    }
    /**
     * Returns the greatest common divisor of op1 and op2, combining the
     * binary GCD (shifting out factors of two) with Euclidean reduction
     * steps.
     * <p>
     * NOTE: both arguments are modified in place (via
     * {@code BitLevel.inplaceShiftRight} and
     * {@code Elementary.inplaceSubtract}); callers must pass values they own.
     *
     * @param op1
     *            must be greater than zero
     * @param op2
     *            must be greater than zero
     * @see BigInteger#gcd(BigInteger)
     * @return {@code GCD(op1, op2)}
     */
    static BigInteger gcdBinary(BigInteger op1, BigInteger op2) {
        // PRE: (op1 > 0) and (op2 > 0)
        /*
         * Divide both number the maximal possible times by 2 without rounding
         * gcd(2*a, 2*b) = 2 * gcd(a,b)
         */
        int lsb1 = op1.getLowestSetBit();
        int lsb2 = op2.getLowestSetBit();
        int pow2Count = Math.min(lsb1, lsb2);
        BitLevel.inplaceShiftRight(op1, lsb1);
        BitLevel.inplaceShiftRight(op2, lsb2);
        BigInteger swap;
        // I want op2 > op1
        if (op1.compareTo(op2) == BigInteger.GREATER) {
            swap = op1;
            op1 = op2;
            op2 = swap;
        }
        do { // INV: op2 >= op1 && both are odd unless op1 = 0
            // Optimization for small operands
            // (op2.bitLength() < 64) implies by INV (op1.bitLength() < 64)
            if ((op2.numberLength == 1)
                    || ((op2.numberLength == 2) && (op2.digits[1] > 0))) {
                op2 = BigInteger.valueOf(Division.gcdBinary(op1.longValue(),
                        op2.longValue()));
                break;
            }
            // Implements one step of the Euclidean algorithm
            // To reduce one operand if it's much smaller than the other one
            if (op2.numberLength > op1.numberLength * 1.2) {
                op2 = op2.remainder(op1);
                if (op2.signum() != 0) {
                    BitLevel.inplaceShiftRight(op2, op2.getLowestSetBit());
                }
            } else {
                // Use Knuth's algorithm of successive subtract and shifting
                do {
                    Elementary.inplaceSubtract(op2, op1); // both are odd
                    BitLevel.inplaceShiftRight(op2, op2.getLowestSetBit()); // op2
                    // is
                    // even
                } while (op2.compareTo(op1) >= BigInteger.EQUALS);
            }
            // now op1 >= op2
            swap = op2;
            op2 = op1;
            op1 = swap;
        } while (op1.sign != 0);
        // Restore the common factors of two removed up front.
        return op2.shiftLeft(pow2Count);
    }
/**
 * Performs the same as {@link #gcdBinary(BigInteger, BigInteger)}, but with
 * numbers of 63 bits, represented in positive values of {@code long} type.
 * Binary (Stein) GCD: the common power of two is factored out up front, then
 * the smaller odd value is repeatedly subtracted from the larger and the
 * trailing zeros introduced by the subtraction are stripped.
 *
 * @param op1
 *            a positive number
 * @param op2
 *            a positive number
 * @see #gcdBinary(BigInteger, BigInteger)
 * @return <code>GCD(op1, op2)</code>
 */
static long gcdBinary(long op1, long op2) {
    // PRE: (op1 > 0) and (op2 > 0)
    final int trailing1 = Long.numberOfTrailingZeros(op1);
    final int trailing2 = Long.numberOfTrailingZeros(op2);
    // gcd(2*a, 2*b) = 2 * gcd(a, b): remember the shared power of two.
    final int commonPowerOfTwo = Math.min(trailing1, trailing2);
    // Shifting by zero is a no-op, so no guard is needed here.
    op1 >>>= trailing1;
    op2 >>>= trailing2;
    while (true) {
        if (op1 >= op2) {
            op1 -= op2;
            if (op1 == 0) {
                // op1 reached zero: op2 now holds the odd part of the gcd.
                break;
            }
            op1 >>>= Long.numberOfTrailingZeros(op1);
        } else {
            op2 -= op1;
            op2 >>>= Long.numberOfTrailingZeros(op2);
        }
    }
    return (op2 << commonPowerOfTwo);
}
/**
 * Calculates {@code a.modInverse(p)} via the Montgomery modular inverse.
 * Based on: Savas, E; Koc, C "The Montgomery Modular Inverse - Revised".
 * The almost-inverse phase produces {@code r == a^(-1) * 2^k (mod p)}; the
 * trailing {@code monPro} calls remove the {@code 2^k} factor.
 * For an even modulus this delegates to
 * {@link #modInverseHars(BigInteger, BigInteger)}.
 *
 * @throws ArithmeticException if {@code a} is zero or not coprime with {@code p}
 */
static BigInteger modInverseMontgomery(BigInteger a, BigInteger p) {
    if (a.sign == 0) {
        // ZERO hasn't inverse
        // math.19: BigInteger not invertible
        // (message previously misspelled "BigInterger")
        throw new ArithmeticException("BigInteger not invertible");
    }
    if (!p.testBit(0)) {
        // The Montgomery inverse requires an odd modulus; fall back to the
        // shifting-Euclidean algorithm for an even p.
        return modInverseHars(a, p);
    }
    int m = p.numberLength * 32;
    // PRE: a \in [1, p - 1]
    BigInteger u, v, r, s;
    u = p.copy(); // make copy to use inplace method
    v = a.copy();
    int max = Math.max(v.numberLength, u.numberLength);
    r = new BigInteger(1, 1, new int[max + 1]);
    s = new BigInteger(1, 1, new int[max + 1]);
    s.digits[0] = 1;
    // s == 1 && v == 0
    int k = 0; // accumulated power of two: at the end r == a^(-1) * 2^k (mod p)
    int lsbu = u.getLowestSetBit();
    int lsbv = v.getLowestSetBit();
    int toShift;
    // Strip trailing zeros from u and v, compensating in r or s.
    if (lsbu > lsbv) {
        BitLevel.inplaceShiftRight(u, lsbu);
        BitLevel.inplaceShiftRight(v, lsbv);
        BitLevel.inplaceShiftLeft(r, lsbv);
        k += lsbu - lsbv;
    } else {
        BitLevel.inplaceShiftRight(u, lsbu);
        BitLevel.inplaceShiftRight(v, lsbv);
        BitLevel.inplaceShiftLeft(s, lsbu);
        k += lsbv - lsbu;
    }
    r.sign = 1;
    while (v.signum() > 0) {
        // INV v >= 0, u >= 0, v odd, u odd (except last iteration when v is
        // even (0))
        while (u.compareTo(v) > BigInteger.EQUALS) {
            Elementary.inplaceSubtract(u, v);
            toShift = u.getLowestSetBit();
            BitLevel.inplaceShiftRight(u, toShift);
            Elementary.inplaceAdd(r, s);
            BitLevel.inplaceShiftLeft(s, toShift);
            k += toShift;
        }
        while (u.compareTo(v) <= BigInteger.EQUALS) {
            Elementary.inplaceSubtract(v, u);
            if (v.signum() == 0)
                break;
            toShift = v.getLowestSetBit();
            BitLevel.inplaceShiftRight(v, toShift);
            Elementary.inplaceAdd(s, r);
            BitLevel.inplaceShiftLeft(r, toShift);
            k += toShift;
        }
    }
    if (!u.isOne()) {
        // in u is stored the gcd; gcd != 1 means a and p are not coprime
        // math.19: BigInteger not invertible.
        // (message previously misspelled "BigInterger")
        throw new ArithmeticException("BigInteger not invertible");
    }
    if (r.compareTo(p) >= BigInteger.EQUALS) {
        Elementary.inplaceSubtract(r, p);
    }
    r = p.subtract(r);
    // Have pair: ((BigInteger)r, (Integer)k) where r == a^(-1) * 2^k mod
    // (module)
    int n1 = calcN(p);
    if (k > m) {
        // Strip a full word-size power of two first, then the remainder.
        r = monPro(r, BigInteger.ONE, p, n1);
        k = k - m;
    }
    r = monPro(r, BigInteger.getPowerOfTwo(m - k), p, n1);
    return r;
}
/**
 * Calculates the first digit of the Montgomery inverse, i.e. the value
 * {@code n'} satisfying {@code a.digits[0] * n' == -1 (mod 2^32)}. The low
 * digit of {@code a} must be odd for this inverse to exist.
 */
private static int calcN(BigInteger a) {
    final long m0 = a.digits[0] & 0xFFFFFFFFL;
    // Build the inverse of m0 modulo 2^32 one bit at a time. Bit 0 is
    // always 1 because m0 is odd.
    long inverse = 1L;
    for (long bit = 2L; bit < 0x100000000L; bit <<= 1) {
        if (((m0 * inverse) & bit) != 0) {
            // The product is wrong at this bit position; flip it in the
            // inverse to clear it.
            inverse |= bit;
        }
    }
    // Montgomery reduction needs the negated inverse.
    return (int) (-inverse & 0xFFFFFFFFL);
}
/**
 * Left-to-right binary exponentiation in the Montgomery domain: the
 * accumulator is squared for every exponent bit and multiplied by
 * {@code a2} whenever that bit is set. {@code x2} is the Montgomery
 * representation of 1 and seeds the accumulator.
 */
static BigInteger squareAndMultiply(BigInteger x2, BigInteger a2,
        BigInteger exponent, BigInteger modulus, int n2) {
    BigInteger acc = x2;
    int bit = exponent.bitLength() - 1;
    while (bit >= 0) {
        // Square step (performed for every bit).
        acc = monPro(acc, acc, modulus, n2);
        if (BitLevel.testBit(exponent, bit)) {
            // Multiply step for a set exponent bit.
            acc = monPro(acc, a2, modulus, n2);
        }
        bit--;
    }
    return acc;
}
/**
 * Implements the "Shifting Euclidean modular inverse algorithm". "Laszlo
 * Hars - Modular Inverse Algorithms Without Multiplications for
 * Cryptographic Applications"
 * <p>
 * The loop maintains the congruences {@code u == r*a (mod m)} and
 * {@code v == s*a (mod m)} (both hold for the initial assignments below and
 * are preserved by the paired updates of (u,r) and (v,s)); when v is reduced
 * to +/-1, s (possibly negated and brought into [0, m)) is the inverse.
 *
 * @see BigInteger#modInverse(BigInteger)
 * @param a
 *            a positive number
 * @param m
 *            a positive modulus
 * @return {@code a^(-1) (mod m)}, or {@code ZERO} when the gcd is not 1
 */
static BigInteger modInverseHars(BigInteger a, BigInteger m) {
    // PRE: (a > 0) and (m > 0)
    BigInteger u, v, r, s, temp;
    // u = MAX(a,m), v = MIN(a,m)
    if (a.compareTo(m) == BigInteger.LESS) {
        u = m;
        v = a;
        r = BigInteger.ZERO;
        s = BigInteger.ONE;
    } else {
        v = m;
        u = a;
        s = BigInteger.ZERO;
        r = BigInteger.ONE;
    }
    int uLen = u.bitLength();
    int vLen = v.bitLength();
    // f aligns the top bits of v with u so each step cancels the high bit.
    int f = uLen - vLen;
    while (vLen > 1) {
        // Same signs: subtracting shrinks |u|; different signs: adding does.
        if (u.sign == v.sign) {
            u = u.subtract(v.shiftLeft(f));
            r = r.subtract(s.shiftLeft(f));
        } else {
            u = u.add(v.shiftLeft(f));
            r = r.add(s.shiftLeft(f));
        }
        uLen = u.abs().bitLength();
        vLen = v.abs().bitLength();
        f = uLen - vLen;
        if (f < 0) {
            // u became smaller than v: swap the pairs to restore |u| >= |v|.
            // SWAP(u,v)
            temp = u;
            u = v;
            v = temp;
            // SWAP(r,s)
            temp = r;
            r = s;
            s = temp;
            f = -f;
            vLen = uLen;
        }
    }
    if (v.sign == 0) {
        // gcd(a, m) != 1: no inverse exists.
        return BigInteger.ZERO;
    }
    if (v.sign < 0) {
        // v == -1: negate s so that s*a == +1 (mod m).
        s = s.negate();
    }
    // Normalize s into the range [0, m).
    if (s.compareTo(m) == BigInteger.GREATER) {
        return s.subtract(m);
    }
    if (s.sign < 0) {
        return s.add(m);
    }
    return s; // a^(-1) mod m
}
/*
 * Implements the Montgomery modular exponentiation based in <i>The sliding
 * windows algorithm and the MontgomeryReduction</i>.
 *
 * Windows are at most 4 bits wide (bit i plus up to three lower bits) and
 * always end in a set bit, so the window value is odd; pows caches the odd
 * Montgomery powers a2^1, a2^3, ..., a2^15 so that a window of value w is
 * applied with the single multiplication by pows[(w - 1) / 2].
 *
 * @ar.org.fitc.ref
 * "A. Menezes,P. van Oorschot, S. Vanstone - Handbook of Applied Cryptography"
 * ;
 *
 * @see #oddModPow(BigInteger, BigInteger, BigInteger)
 */
static BigInteger slidingWindow(BigInteger x2, BigInteger a2,
        BigInteger exponent, BigInteger modulus, int n2) {
    // fill odd low pows of a2: pows[i] == a2^(2*i+1) in the Montgomery domain
    BigInteger pows[] = new BigInteger[8];
    BigInteger res = x2; // accumulator, starts at the Montgomery form of 1
    int lowexp;          // value of the current window (always odd)
    BigInteger x3;       // a2^2, the step between consecutive odd powers
    int acc3;            // index of the lowest set bit inside the window
    pows[0] = a2;
    x3 = monPro(a2, a2, modulus, n2);
    for (int i = 1; i <= 7; i++) {
        pows[i] = monPro(pows[i - 1], x3, modulus, n2);
    }
    for (int i = exponent.bitLength() - 1; i >= 0; i--) {
        if (BitLevel.testBit(exponent, i)) {
            // Greedily collect up to three more set bits below bit i into
            // the window, tracking the lowest one in acc3 and building the
            // window's value in lowexp.
            lowexp = 1;
            acc3 = i;
            for (int j = Math.max(i - 3, 0); j <= i - 1; j++) {
                if (BitLevel.testBit(exponent, j)) {
                    if (j < acc3) {
                        acc3 = j;
                        // A new lowest bit rescales the window value.
                        lowexp = (lowexp << (i - j)) ^ 1;
                    } else {
                        lowexp = lowexp ^ (1 << (j - acc3));
                    }
                }
            }
            // One squaring per bit covered by the window.
            for (int j = acc3; j <= i; j++) {
                res = monPro(res, res, modulus, n2);
            }
            // Apply the whole window with one multiplication by the cached
            // odd power a2^lowexp.
            res = monPro(pows[(lowexp - 1) >> 1], res, modulus, n2);
            // Resume scanning below the window.
            i = acc3;
        } else {
            // Zero bit outside a window: plain squaring.
            res = monPro(res, res, modulus, n2);
        }
    }
    return res;
}
/**
 * Performs modular exponentiation using the Montgomery Reduction. It
 * requires that all parameters be positive and the modulus be odd.
 *
 * @see BigInteger#modPow(BigInteger, BigInteger)
 * @see #monPro(BigInteger, BigInteger, BigInteger, int)
 * @see #slidingWindow(BigInteger, BigInteger, BigInteger, BigInteger, int)
 * @see #squareAndMultiply(BigInteger, BigInteger, BigInteger, BigInteger,
 *      int)
 */
static BigInteger oddModPow(BigInteger base, BigInteger exponent,
        BigInteger modulus) {
    // PRE: (base > 0), (exponent > 0), (modulus > 0) and (odd modulus)
    final int rExponent = modulus.numberLength << 5; // r = 2^rExponent
    // Map base and 1 into the Montgomery domain (their n-residues).
    final BigInteger baseResidue = base.shiftLeft(rExponent).mod(modulus);
    final BigInteger oneResidue = BigInteger.getPowerOfTwo(rExponent).mod(modulus);
    // n2 = -(modulus[0]^(-1)) (mod 2^32), needed by every reduction step.
    final int n2 = calcN(modulus);
    // Single-digit moduli are cheapest with plain square-and-multiply;
    // larger ones benefit from the sliding-window scan.
    final BigInteger accumulated = (modulus.numberLength == 1)
            ? squareAndMultiply(oneResidue, baseResidue, exponent, modulus, n2)
            : slidingWindow(oneResidue, baseResidue, exponent, modulus, n2);
    // Map the result back out of the Montgomery domain.
    return monPro(accumulated, BigInteger.ONE, modulus, n2);
}
/**
 * Performs modular exponentiation using the Montgomery Reduction. It
 * requires that all parameters be positive and the modulus be even. Based on
 * <i>The square and multiply algorithm and the Montgomery Reduction, C. K.
 * Koc - Montgomery Reduction with Even Modulus</i>.
 *
 * @ar.org.fitc.ref "C. K. Koc - Montgomery Reduction with Even Modulus"
 * @see BigInteger#modPow(BigInteger, BigInteger)
 */
static BigInteger evenModPow(BigInteger base, BigInteger exponent,
        BigInteger modulus) {
    // PRE: (base > 0), (exponent > 0), (modulus > 0) and (modulus even)
    // Factor the even modulus as oddPart * 2^j with oddPart odd.
    final int j = modulus.getLowestSetBit();
    final BigInteger oddPart = modulus.shiftRight(j);
    // Solve the congruence modulo the odd part ...
    final BigInteger oddResult = oddModPow(base, exponent, oddPart);
    // ... and modulo the power of two ...
    final BigInteger pow2Result = pow2ModPow(base, exponent, j);
    // ... then recombine with the CRT-style formula:
    // result = oddResult + oddPart * y,
    // y = (pow2Result - oddResult) * oddPart^(-1) (mod 2^j).
    final BigInteger oddPartInverse = modPow2Inverse(oddPart, j);
    BigInteger y = (pow2Result.subtract(oddResult)).multiply(oddPartInverse);
    inplaceModPow2(y, j);
    if (y.sign < 0) {
        // Bring y into [0, 2^j).
        y = y.add(BigInteger.getPowerOfTwo(j));
    }
    return oddResult.add(oddPart.multiply(y));
}
/**
 * Computes {@code base^exponent mod 2^j} with square-and-multiply, reducing
 * modulo {@code 2^j} after each step. It requires that all parameters be
 * positive.
 *
 * @return {@code base<sup>exponent</sup> mod (2<sup>j</sup>)}.
 * @see BigInteger#modPow(BigInteger, BigInteger)
 */
static BigInteger pow2ModPow(BigInteger base, BigInteger exponent, int j) {
    // PRE: (base > 0), (exponent > 0) and (j > 0)
    BigInteger result = BigInteger.ONE;
    BigInteger reducedExponent = exponent.copy();
    BigInteger reducedBase = base.copy();
    BigInteger reducedResult;
    /*
     * An odd base is coprime with 2^j and phi(2^j) = 2^(j-1), so the
     * exponent may first be reduced modulo 2^(j-1).
     */
    if (base.testBit(0)) {
        inplaceModPow2(reducedExponent, j - 1);
    }
    inplaceModPow2(reducedBase, j);
    for (int bit = reducedExponent.bitLength() - 1; bit >= 0; bit--) {
        // Square step: multiply the accumulator by a copy of itself that
        // has been reduced modulo 2^j.
        reducedResult = result.copy();
        inplaceModPow2(reducedResult, j);
        result = result.multiply(reducedResult);
        if (BitLevel.testBit(reducedExponent, bit)) {
            // Multiply step for a set exponent bit.
            result = result.multiply(reducedBase);
            inplaceModPow2(result, j);
        }
    }
    inplaceModPow2(result, j);
    return result;
}
/**
 * In-place Montgomery reduction of the double-length accumulator
 * {@code res}: adds multiples of the modulus so the low half becomes zero,
 * then shifts the high half down (the division by r = 2^(32*modulusLen)).
 */
private static void monReduction(int[] res, BigInteger modulus, int n2) {
    /* res + m*modulus_digits */
    final int[] modulusDigits = modulus.digits;
    final int modulusLen = modulus.numberLength;
    long outerCarry = 0;
    for (int i = 0; i < modulusLen; i++) {
        long innerCarry = 0;
        // m is chosen so that digit i of (res + m*modulus) becomes zero.
        final int m = (int) Multiplication.unsignedMultAddAdd(res[i], n2, 0, 0);
        for (int j = 0; j < modulusLen; j++) {
            innerCarry = Multiplication.unsignedMultAddAdd(m,
                    modulusDigits[j], res[i + j], (int) innerCarry);
            res[i + j] = (int) innerCarry;
            innerCarry >>>= 32;
        }
        // Propagate the row carry into the digit just above this row.
        outerCarry += (res[i + modulusLen] & 0xFFFFFFFFL) + innerCarry;
        res[i + modulusLen] = (int) outerCarry;
        outerCarry >>>= 32;
    }
    res[modulusLen << 1] = (int) outerCarry;
    /* res / r: drop the (now zero) low digits */
    for (int j = 0; j < modulusLen + 1; j++) {
        res[j] = res[j + modulusLen];
    }
}
/**
 * Implements the Montgomery Product of two integers represented by
 * {@code int} arrays. The arrays are supposed in <i>little endian</i>
 * notation.
 *
 * @param a
 *            The first factor of the product.
 * @param b
 *            The second factor of the product.
 * @param modulus
 *            The modulus of the operations. Z<sub>modulus</sub>.
 * @param n2
 *            The digit modulus'[0].
 * @ar.org.fitc.ref "C. K. Koc - Analyzing and Comparing Montgomery
 *                  Multiplication Algorithms"
 * @see #oddModPow(BigInteger, BigInteger, BigInteger)
 */
static BigInteger monPro(BigInteger a, BigInteger b, BigInteger modulus,
        int n2) {
    final int modulusLen = modulus.numberLength;
    // Double-length product plus one guard digit for the reduction carry.
    final int[] product = new int[(modulusLen << 1) + 1];
    // Factors longer than the modulus only contribute their low digits.
    Multiplication.multArraysPAP(a.digits,
            Math.min(modulusLen, a.numberLength), b.digits,
            Math.min(modulusLen, b.numberLength), product);
    monReduction(product, modulus, n2);
    return finalSubtraction(product, modulus);
}
/**
 * Performs the final reduction of the Montgomery algorithm: the reduced
 * value can exceed the modulus by at most one multiple, so a single
 * conditional subtraction brings it into [0, modulus).
 *
 * @see #monPro(BigInteger, BigInteger, BigInteger, int)
 */
static BigInteger finalSubtraction(int res[], BigInteger modulus) {
    // skipping leading zeros
    final int modulusLen = modulus.numberLength;
    // res >= modulus iff the guard digit is set, or the most significant
    // differing digit of res is larger when compared as unsigned values.
    boolean subtractNeeded = res[modulusLen] != 0;
    if (!subtractNeeded) {
        final int[] modulusDigits = modulus.digits;
        // Equal digit arrays (res == modulus) also require a subtraction.
        subtractNeeded = true;
        for (int i = modulusLen - 1; i >= 0; i--) {
            if (res[i] != modulusDigits[i]) {
                subtractNeeded = (res[i] != 0)
                        && ((res[i] & 0xFFFFFFFFL) > (modulusDigits[i] & 0xFFFFFFFFL));
                break;
            }
        }
    }
    BigInteger result = new BigInteger(1, modulusLen + 1, res);
    // if (res >= modulusDigits) compute (res - modulusDigits)
    if (subtractNeeded) {
        Elementary.inplaceSubtract(result, modulus);
    }
    result.cutOffLeadingZeroes();
    return result;
}
/**
 * Computes the multiplicative inverse of {@code x} modulo {@code 2^n} by
 * Hensel lifting: after iteration {@code i}, {@code x * y == 1 (mod 2^(i+1))}.
 *
 * @param x
 *            an odd positive number.
 * @param n
 *            the exponent by which 2 is raised.
 * @return {@code x<sup>-1</sup> (mod 2<sup>n</sup>)}.
 */
static BigInteger modPow2Inverse(BigInteger x, int n) {
    // PRE: (x > 0), (x is odd), and (n > 0)
    // Allocate exactly enough digits to hold n bits. The previous
    // "new int[1 << n]" allocated 2^n ints and, worse, produced a negative
    // array size for n == 31 (and any n congruent to 31 mod 32) because
    // Java masks int shift counts to 5 bits.
    BigInteger y = new BigInteger(1, new int[((n - 1) >> 5) + 1]);
    y.numberLength = 1;
    y.digits[0] = 1;
    y.sign = 1;
    for (int i = 1; i < n; i++) {
        // If bit i of x*y is set, the product is wrong at that position;
        // adding 2^i to y clears it.
        if (BitLevel.testBit(x.multiply(y), i)) {
            // Adding 2^i to y (setting the i-th bit)
            y.digits[i >> 5] |= (1 << (i & 31));
            // Keep numberLength covering the highest set digit; otherwise
            // bits at positions >= 32 would be invisible to multiply().
            if ((i >> 5) >= y.numberLength) {
                y.numberLength = (i >> 5) + 1;
            }
        }
    }
    return y;
}
/**
 * Performs {@code x = x mod (2<sup>n</sup>)} in place by truncating x to its
 * low n bits.
 *
 * @param x
 *            a positive number, it will store the result.
 * @param n
 *            a positive exponent of {@code 2}.
 */
static void inplaceModPow2(BigInteger x, int n) {
    // PRE: (x > 0) and (n >= 0)
    int fd = n >> 5; // index of the digit containing bit n
    int leadingZeros;
    if ((x.numberLength < fd) || (x.bitLength() <= n)) {
        // x is already smaller than 2^n: nothing to truncate.
        return;
    }
    // Number of bits to clear at the top of digit fd; equals 32 when n is a
    // multiple of 32, in which case digit fd is cleared entirely (the
    // ternary below avoids the undefined ">>> 32" shift).
    leadingZeros = 32 - (n & 31);
    x.numberLength = fd + 1;
    x.digits[fd] &= (leadingZeros < 32) ? (-1 >>> leadingZeros) : 0;
    x.cutOffLeadingZeroes();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.scr.impl.config;
import java.io.IOException;
import java.util.Dictionary;
import java.util.Hashtable;
import org.apache.felix.scr.impl.Activator;
import org.apache.felix.scr.impl.BundleComponentActivator;
import org.apache.felix.scr.impl.ComponentRegistry;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.cm.Configuration;
import org.osgi.service.cm.ConfigurationAdmin;
import org.osgi.service.cm.ConfigurationEvent;
import org.osgi.service.cm.ConfigurationListener;
import org.osgi.service.log.LogService;
/**
 * Bridges the OSGi Configuration Admin service and the Declarative Services
 * component registry: it registers itself as a {@code ConfigurationListener}
 * and forwards configuration updates/deletions to the matching
 * {@link ComponentHolder}s.
 */
public class ConfigurationSupport implements ConfigurationListener
{

    // the registry of components to be configured
    private final ComponentRegistry m_registry;

    // the service m_registration of the ConfigurationListener service
    private ServiceRegistration m_registration;

    public ConfigurationSupport(final BundleContext bundleContext, final ComponentRegistry registry)
    {
        this.m_registry = registry;

        // register as listener for configurations
        Dictionary props = new Hashtable();
        props.put(Constants.SERVICE_DESCRIPTION, "Declarative Services Configuration Support Listener");
        props.put(Constants.SERVICE_VENDOR, "The Apache Software Foundation");
        this.m_registration = bundleContext.registerService(new String[]
            { "org.osgi.service.cm.ConfigurationListener" }, this, props);
    }

    /**
     * Unregisters the ConfigurationListener service. Safe to call more than
     * once.
     */
    public void dispose()
    {
        if (this.m_registration != null)
        {
            this.m_registration.unregister();
            this.m_registration = null;
        }
    }

    // ---------- BaseConfigurationSupport overwrites

    /**
     * Provides the holder's component with its initial configuration, looking
     * up factory configurations first and falling back to a singleton
     * configuration with the component's name as PID.
     */
    public void configureComponentHolder(final ComponentHolder holder)
    {
        // 112.7 configure unless configuration not required
        if (!holder.getComponentMetadata().isConfigurationIgnored())
        {
            final BundleContext bundleContext = holder.getActivator().getBundleContext();
            final String bundleLocation = bundleContext.getBundle().getLocation();
            final String name = holder.getComponentMetadata().getName();

            final ServiceReference caRef = bundleContext.getServiceReference(ComponentRegistry.CONFIGURATION_ADMIN);
            if (caRef != null)
            {
                final ConfigurationAdmin ca = (ConfigurationAdmin) bundleContext.getService(caRef);
                if (ca != null)
                {
                    try
                    {
                        final Configuration[] factory = findFactoryConfigurations(ca, name);
                        if (factory != null)
                        {
                            for (int i = 0; i < factory.length; i++)
                            {
                                final String pid = factory[i].getPid();
                                // props may be null if the configuration belongs
                                // to another bundle (see getConfiguration)
                                final Dictionary props = getConfiguration(ca, pid, bundleLocation);
                                holder.configurationUpdated(pid, props);
                            }
                        }
                        else
                        {
                            // check for configuration and configure the holder
                            final Configuration singleton = findSingletonConfiguration(ca, name);
                            if (singleton != null)
                            {
                                final Dictionary props = getConfiguration(ca, name, bundleLocation);
                                holder.configurationUpdated(name, props);
                            }
                        }
                    }
                    finally
                    {
                        try
                        {
                            bundleContext.ungetService( caRef );
                        }
                        catch ( IllegalStateException e )
                        {
                            // ignore, bundle context was shut down during the above.
                        }
                    }
                }
            }
        }
    }

    // ---------- ServiceListener

    /**
     * Replays all existing configurations as synthetic CM_UPDATED events,
     * typically called when the Configuration Admin service appears after
     * components have already been registered.
     */
    public void configureComponentHolders(final ServiceReference configurationAdminReference,
        final Object configurationAdmin)
    {
        if (configurationAdmin instanceof ConfigurationAdmin)
        {
            Configuration[] configs = findConfigurations((ConfigurationAdmin) configurationAdmin, null);
            if (configs != null)
            {
                for (int i = 0; i < configs.length; i++)
                {
                    ConfigurationEvent cfgEvent = new ConfigurationEvent(configurationAdminReference,
                        ConfigurationEvent.CM_UPDATED, configs[i].getFactoryPid(), configs[i].getPid());
                    configurationEvent(cfgEvent);
                }
            }
        }
    }

    // ---------- ConfigurationListener

    /**
     * Called by the Configuration Admin service if a configuration is updated
     * or removed.
     * <p>
     * This method is really only called upon configuration changes; it is not
     * called for existing configurations upon startup of the Configuration
     * Admin service. To bridge this gap, the
     * {@link #configureComponentHolders(ServiceReference, Object)} method is
     * called when the Configuration Admin service is registered and invokes
     * this method for all existing configurations to be able to forward
     * existing configurations to components.
     *
     * @param event The configuration change event
     */
    public void configurationEvent(ConfigurationEvent event)
    {
        final String pid = event.getPid();
        final String factoryPid = event.getFactoryPid();

        // a factory event targets the holder registered under the factory
        // PID; a plain event targets the holder registered under the PID
        final ComponentHolder cm;
        if (factoryPid == null)
        {
            cm = this.m_registry.getComponentHolder(pid);
        }
        else
        {
            cm = this.m_registry.getComponentHolder(factoryPid);
        }

        Activator.log(LogService.LOG_DEBUG, null, "configurationEvent: Handling "
            + ((event.getType() == ConfigurationEvent.CM_DELETED) ? "DELETE" : "UPDATE") + " of Configuration PID="
            + pid, null);

        if (cm != null && !cm.getComponentMetadata().isConfigurationIgnored())
        {
            switch (event.getType())
            {
                case ConfigurationEvent.CM_DELETED:
                    cm.configurationDeleted(pid);
                    break;

                case ConfigurationEvent.CM_UPDATED:
                    // the component's bundle may already be stopping; bail out
                    // quietly if the activator or its context is gone
                    final BundleComponentActivator activator = cm.getActivator();
                    if (activator == null)
                    {
                        break;
                    }

                    final BundleContext bundleContext = activator.getBundleContext();
                    if (bundleContext == null)
                    {
                        break;
                    }

                    final ServiceReference caRef = bundleContext
                        .getServiceReference(ComponentRegistry.CONFIGURATION_ADMIN);
                    if (caRef != null)
                    {
                        try
                        {
                            final ConfigurationAdmin ca = (ConfigurationAdmin) bundleContext.getService(caRef);
                            if (ca != null)
                            {
                                try
                                {
                                    final Dictionary dict = getConfiguration(ca, pid, bundleContext.getBundle()
                                        .getLocation());
                                    if (dict != null)
                                    {
                                        cm.configurationUpdated(pid, dict);
                                    }
                                }
                                finally
                                {
                                    bundleContext.ungetService(caRef);
                                }
                            }
                        }
                        catch (IllegalStateException ise)
                        {
                            // If the bundle has been stopped concurrently
                        }
                    }
                    break;

                default:
                    Activator.log(LogService.LOG_WARNING, null, "Unknown ConfigurationEvent type " + event.getType(),
                        null);
            }
        }
    }

    /**
     * Returns the properties of the configuration with the given PID, or
     * {@code null} when the configuration belongs to a different bundle (the
     * mismatch is logged as an error) or reading it fails with an
     * {@code IOException} (logged as a warning).
     */
    private Dictionary getConfiguration(final ConfigurationAdmin ca, final String pid, final String bundleLocation)
    {
        try
        {
            final Configuration cfg = ca.getConfiguration(pid);
            if (bundleLocation.equals(cfg.getBundleLocation()) || Activator.hasCtWorkaround())
            {
                return cfg.getProperties();
            }

            // configuration belongs to another bundle, cannot be used here
            Activator.log(LogService.LOG_ERROR, null, "Cannot use configuration pid=" + pid + " for bundle "
                + bundleLocation + " because it belongs to bundle " + cfg.getBundleLocation(), null);
        }
        catch (IOException ioe)
        {
            Activator.log(LogService.LOG_WARNING, null, "Failed reading configuration for pid=" + pid, ioe);
        }

        return null;
    }

    /**
     * Returns the configuration whose PID equals the given pid. If no such
     * configuration exists, <code>null</code> is returned.
     *
     * @param ca the Configuration Admin service to query
     * @param pid the service PID to match exactly
     * @return the matching configuration or <code>null</code>
     */
    public Configuration findSingletonConfiguration(final ConfigurationAdmin ca, final String pid)
    {
        final String filter = "(service.pid=" + pid + ")";
        final Configuration[] cfg = findConfigurations(ca, filter);
        return (cfg == null || cfg.length == 0) ? null : cfg[0];
    }

    /**
     * Returns all configurations whose factory PID equals the given factory PID
     * or <code>null</code> if no such configurations exist
     *
     * @param ca the Configuration Admin service to query
     * @param factoryPid the factory PID to match exactly
     * @return the matching configurations or <code>null</code>
     */
    public Configuration[] findFactoryConfigurations(final ConfigurationAdmin ca, final String factoryPid)
    {
        final String filter = "(service.factoryPid=" + factoryPid + ")";
        return findConfigurations(ca, filter);
    }

    /**
     * Lists configurations matching the given filter, returning {@code null}
     * (and logging) on I/O or filter-syntax problems.
     */
    private Configuration[] findConfigurations(final ConfigurationAdmin ca, final String filter)
    {
        try
        {
            return ca.listConfigurations(filter);
        }
        catch (IOException ioe)
        {
            Activator.log(LogService.LOG_WARNING, null, "Problem listing configurations for filter=" + filter, ioe);
        }
        catch (InvalidSyntaxException ise)
        {
            Activator.log(LogService.LOG_ERROR, null, "Invalid Configuration selection filter " + filter, ise);
        }

        // no factories in case of problems
        return null;
    }
}
| |
/*
* Copyright 2005 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.geometry;
import com.google.common.annotations.GwtCompatible;
import com.google.common.io.BaseEncoding;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/** Tests for S2Cap. */
@GwtCompatible
public strictfp class S2CapTest extends GeometryTestCase {
public S2Point getLatLngPoint(double latDegrees, double lngDegrees) {
return S2LatLng.fromDegrees(latDegrees, lngDegrees).toPoint();
}
// About 9 times the double-precision roundoff relative error.
public static final double EPS = 1e-15;
public void testBasic() {
// Test basic properties of empty and full caps.
S2Cap empty = S2Cap.empty();
S2Cap full = S2Cap.full();
assertTrue(empty.isValid());
assertTrue(empty.isEmpty());
assertTrue(empty.complement().isFull());
assertTrue(full.isValid());
assertTrue(full.isFull());
assertTrue(full.complement().isEmpty());
assertEquals(2.0, full.height());
assertDoubleNear(full.angle().degrees(), 180);
// Test the S1Angle constructor using out-of-range arguments.
assertTrue(S2Cap.fromAxisAngle(S2Point.X_POS, S1Angle.radians(-20)).isEmpty());
assertTrue(S2Cap.fromAxisAngle(S2Point.X_POS, S1Angle.radians(5)).isFull());
assertTrue(S2Cap.fromAxisAngle(S2Point.X_POS, S1Angle.INFINITY).isFull());
// Containment and intersection of empty and full caps.
assertTrue(empty.contains(empty));
assertTrue(full.contains(empty));
assertTrue(full.contains(full));
assertTrue(!empty.interiorIntersects(empty));
assertTrue(full.interiorIntersects(full));
assertTrue(!full.interiorIntersects(empty));
// Singleton cap containing the x-axis.
S2Cap xaxis = S2Cap.fromAxisHeight(new S2Point(1, 0, 0), 0);
assertTrue(xaxis.contains(new S2Point(1, 0, 0)));
assertTrue(!xaxis.contains(new S2Point(1, 1e-20, 0)));
assertEquals(0.0, xaxis.angle().radians());
// Singleton cap containing the y-axis.
S2Cap yaxis = S2Cap.fromAxisAngle(new S2Point(0, 1, 0), S1Angle.radians(0));
assertTrue(!yaxis.contains(xaxis.axis()));
assertEquals(0.0, xaxis.height());
// Check that the complement of a singleton cap is the full cap.
S2Cap xcomp = xaxis.complement();
assertTrue(xcomp.isValid());
assertTrue(xcomp.isFull());
assertTrue(xcomp.contains(xaxis.axis()));
// Check that the complement of the complement is *not* the original.
assertTrue(xcomp.complement().isValid());
assertTrue(xcomp.complement().isEmpty());
assertTrue(!xcomp.complement().contains(xaxis.axis()));
// Check that very small caps can be represented accurately.
// Here "kTinyRad" is small enough that unit vectors perturbed by this
// amount along a tangent do not need to be renormalized.
final double kTinyRad = 1e-10;
S2Cap tiny =
S2Cap.fromAxisAngle(S2Point.normalize(new S2Point(1, 2, 3)), S1Angle.radians(kTinyRad));
S2Point tangent = S2Point.normalize(S2Point.crossProd(tiny.axis(), new S2Point(3, 2, 1)));
assertTrue(tiny.contains(S2Point.add(tiny.axis(), S2Point.mul(tangent, 0.99 * kTinyRad))));
assertTrue(!tiny.contains(S2Point.add(tiny.axis(), S2Point.mul(tangent, 1.01 * kTinyRad))));
// Basic tests on a hemispherical cap.
S2Cap hemi = S2Cap.fromAxisHeight(S2Point.normalize(new S2Point(1, 0, 1)), 1);
assertEquals(hemi.complement().axis(), S2Point.neg(hemi.axis()));
assertEquals(1.0, hemi.complement().height());
assertTrue(hemi.contains(new S2Point(1, 0, 0)));
assertTrue(!hemi.complement().contains(new S2Point(1, 0, 0)));
assertTrue(hemi.contains(S2Point.normalize(new S2Point(1, 0, -(1 - EPS)))));
assertTrue(!hemi.interiorContains(S2Point.normalize(new S2Point(1, 0, -(1 + EPS)))));
// A concave cap.
S2Cap concave = S2Cap.fromAxisAngle(getLatLngPoint(80, 10), S1Angle.degrees(150));
assertTrue(concave.contains(getLatLngPoint(-70 * (1 - EPS), 10)));
assertTrue(!concave.contains(getLatLngPoint(-70 * (1 + EPS), 10)));
assertTrue(concave.contains(getLatLngPoint(-50 * (1 - EPS), -170)));
assertTrue(!concave.contains(getLatLngPoint(-50 * (1 + EPS), -170)));
// Cap containment tests.
assertTrue(!empty.contains(xaxis));
assertTrue(!empty.interiorIntersects(xaxis));
assertTrue(full.contains(xaxis));
assertTrue(full.interiorIntersects(xaxis));
assertTrue(!xaxis.contains(full));
assertTrue(!xaxis.interiorIntersects(full));
assertTrue(xaxis.contains(xaxis));
assertTrue(!xaxis.interiorIntersects(xaxis));
assertTrue(xaxis.contains(empty));
assertTrue(!xaxis.interiorIntersects(empty));
assertTrue(hemi.contains(tiny));
assertTrue(
hemi.contains(S2Cap.fromAxisAngle(new S2Point(1, 0, 0), S1Angle.radians(S2.M_PI_4 - EPS))));
assertTrue(
!hemi.contains(
S2Cap.fromAxisAngle(new S2Point(1, 0, 0), S1Angle.radians(S2.M_PI_4 + EPS))));
assertTrue(concave.contains(hemi));
assertTrue(concave.interiorIntersects(hemi.complement()));
assertTrue(!concave.contains(S2Cap.fromAxisHeight(S2Point.neg(concave.axis()), 0.1)));
}
public void testAddEmptyCapToNonEmptyCap() {
S2Cap nonEmptyCap = S2Cap.fromAxisAngle(S2Point.X_POS, S1Angle.degrees(10));
assertEquals(nonEmptyCap, nonEmptyCap.addCap(S2Cap.empty()));
}
public void testAddNonEmptyCapToEmptyCap() {
S2Cap nonEmptyCap = S2Cap.fromAxisAngle(S2Point.X_POS, S1Angle.degrees(10));
assertEquals(nonEmptyCap, S2Cap.empty().addCap(nonEmptyCap));
}
public void testCustomEmpty() {
// Verifies that clients can still create custom negative-height empty caps.
S2Cap empty = S2Cap.fromAxisHeight(S2Point.X_POS, -1);
assertEquals(-1.0, empty.height());
assertTrue(empty.isEmpty());
}
public void testRectBound() {
// Empty and full caps.
assertTrue(S2Cap.empty().getRectBound().isEmpty());
assertTrue(S2Cap.full().getRectBound().isFull());
final double kDegreeEps = 1e-13;
// Maximum allowable error for latitudes and longitudes measured in
// degrees. (assertDoubleNear uses a fixed tolerance that is too small.)
// Cap that includes the south pole.
S2LatLngRect rect =
S2Cap.fromAxisAngle(getLatLngPoint(-45, 57), S1Angle.degrees(50)).getRectBound();
assertDoubleNear(rect.latLo().degrees(), -90, kDegreeEps);
assertDoubleNear(rect.latHi().degrees(), 5, kDegreeEps);
assertTrue(rect.lng().isFull());
// Cap that is tangent to the north pole.
rect =
S2Cap.fromAxisAngle(S2Point.normalize(new S2Point(1, 0, 1)), S1Angle.radians(S2.M_PI_4))
.getRectBound();
assertDoubleNear(rect.lat().lo(), 0);
assertDoubleNear(rect.lat().hi(), S2.M_PI_2);
assertTrue(rect.lng().isFull());
rect =
S2Cap.fromAxisAngle(S2Point.normalize(new S2Point(1, 0, 1)), S1Angle.degrees(45))
.getRectBound();
assertDoubleNear(rect.latLo().degrees(), 0, kDegreeEps);
assertDoubleNear(rect.latHi().degrees(), 90, kDegreeEps);
assertTrue(rect.lng().isFull());
// The eastern hemisphere.
rect =
S2Cap.fromAxisAngle(new S2Point(0, 1, 0), S1Angle.radians(S2.M_PI_2 + 5e-16))
.getRectBound();
assertDoubleNear(rect.latLo().degrees(), -90, kDegreeEps);
assertDoubleNear(rect.latHi().degrees(), 90, kDegreeEps);
assertTrue(rect.lng().isFull());
// A cap centered on the equator.
rect = S2Cap.fromAxisAngle(getLatLngPoint(0, 50), S1Angle.degrees(20)).getRectBound();
assertDoubleNear(rect.latLo().degrees(), -20, kDegreeEps);
assertDoubleNear(rect.latHi().degrees(), 20, kDegreeEps);
assertDoubleNear(rect.lngLo().degrees(), 30, kDegreeEps);
assertDoubleNear(rect.lngHi().degrees(), 70, kDegreeEps);
// A cap centered on the north pole.
rect = S2Cap.fromAxisAngle(getLatLngPoint(90, 123), S1Angle.degrees(10)).getRectBound();
assertDoubleNear(rect.latLo().degrees(), 80, kDegreeEps);
assertDoubleNear(rect.latHi().degrees(), 90, kDegreeEps);
assertTrue(rect.lng().isFull());
}
public void testCells() {
// For each cube face, we construct some cells on
// that face and some caps whose positions are relative to that face,
// and then check for the expected intersection/containment results.
// The distance from the center of a face to one of its vertices.
final double kFaceRadius = Math.atan(S2.M_SQRT2);
for (int face = 0; face < 6; ++face) {
// The cell consisting of the entire face.
S2Cell rootCell = S2Cell.fromFace(face);
// A leaf cell at the midpoint of the v=1 edge.
S2Cell edgeCell = new S2Cell(S2Projections.faceUvToXyz(face, 0, 1 - EPS));
// A leaf cell at the u=1, v=1 corner.
S2Cell cornerCell = new S2Cell(S2Projections.faceUvToXyz(face, 1 - EPS, 1 - EPS));
// Quick check for full and empty caps.
assertTrue(S2Cap.full().contains(rootCell));
assertTrue(!S2Cap.empty().mayIntersect(rootCell));
// Check intersections with the bounding caps of the leaf cells that are
// adjacent to 'corner_cell' along the Hilbert curve. Because this corner
// is at (u=1,v=1), the curve stays locally within the same cube face.
S2CellId first = cornerCell.id().prev().prev().prev();
S2CellId last = cornerCell.id().next().next().next().next();
for (S2CellId id = first; id.lessThan(last); id = id.next()) {
S2Cell cell = new S2Cell(id);
assertEquals(cell.getCapBound().contains(cornerCell), id.equals(cornerCell.id()));
assertEquals(
cell.getCapBound().mayIntersect(cornerCell), id.parent().contains(cornerCell.id()));
}
int antiFace = (face + 3) % 6; // Opposite face.
for (int capFace = 0; capFace < 6; ++capFace) {
// A cap that barely contains all of 'cap_face'.
S2Point center = S2Projections.getNorm(capFace);
S2Cap covering = S2Cap.fromAxisAngle(center, S1Angle.radians(kFaceRadius + EPS));
assertEquals(covering.contains(rootCell), capFace == face);
assertEquals(covering.mayIntersect(rootCell), capFace != antiFace);
assertEquals(covering.contains(edgeCell), center.dotProd(edgeCell.getCenter()) > 0.1);
assertEquals(covering.contains(edgeCell), covering.mayIntersect(edgeCell));
assertEquals(covering.contains(cornerCell), capFace == face);
assertEquals(covering.mayIntersect(cornerCell), center.dotProd(cornerCell.getCenter()) > 0);
// A cap that barely intersects the edges of 'cap_face'.
S2Cap bulging = S2Cap.fromAxisAngle(center, S1Angle.radians(S2.M_PI_4 + EPS));
assertTrue(!bulging.contains(rootCell));
assertEquals(bulging.mayIntersect(rootCell), capFace != antiFace);
assertEquals(bulging.contains(edgeCell), capFace == face);
assertEquals(bulging.mayIntersect(edgeCell), center.dotProd(edgeCell.getCenter()) > 0.1);
assertTrue(!bulging.contains(cornerCell));
assertTrue(!bulging.mayIntersect(cornerCell));
// A singleton cap.
S2Cap singleton = S2Cap.fromAxisAngle(center, S1Angle.radians(0));
assertEquals(singleton.mayIntersect(rootCell), capFace == face);
assertTrue(!singleton.mayIntersect(edgeCell));
assertTrue(!singleton.mayIntersect(cornerCell));
}
}
}
public void testSerialization() throws IOException {
  // Round-trip an arbitrary cap through encode()/decode() and verify equality.
  S2Cap original = S2Cap.fromAxisHeight(S2Point.X_NEG, 0.123);
  ByteArrayOutputStream encoded = new ByteArrayOutputStream();
  original.encode(encoded);
  ByteArrayInputStream in = new ByteArrayInputStream(encoded.toByteArray());
  assertEquals(original, S2Cap.decode(in));
}
public void testDecodeEmptyCap() throws IOException {
  // The empty cap must round-trip through its fixed wire representation.
  S2Cap empty = S2Cap.empty();
  checkCoder("000000000000F03F00000000000000000000000000000000000000000000F0BF", empty);
}
public void testDecodeFullCap() throws IOException {
  // The full cap must round-trip through its fixed wire representation.
  S2Cap full = S2Cap.full();
  checkCoder("000000000000F03F000000000000000000000000000000000000000000001040", full);
}
public void testDecodeCapWithHeight() throws IOException {
  // A cap around the +Z axis built from an explicit height must match the
  // recorded wire bytes in both directions.
  S2Point zAxis = new S2Point(0, 0, 1);
  S2Cap cap = S2Cap.fromAxisHeight(zAxis, 5.0);
  checkCoder("00000000000000000000000000000000000000000000F03F0000000000001040", cap);
}
/**
 * Asserts that {@code hex} decodes to {@code expected} and, conversely, that
 * encoding {@code expected} reproduces {@code hex} exactly.
 */
private static void checkCoder(String hex, S2Cap expected) throws IOException {
  BaseEncoding codec = BaseEncoding.base16();
  // Decoding direction: the recorded bytes must yield the expected cap.
  byte[] raw = codec.decode(hex);
  assertEquals(expected, S2Cap.decode(new ByteArrayInputStream(raw)));
  // Encoding direction: the cap must serialize to exactly the recorded bytes.
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  expected.encode(out);
  assertEquals(hex, codec.encode(out.toByteArray()));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.io.CharSource;
import com.google.common.io.LineProcessor;
import com.google.common.io.Resources;
import org.apache.druid.data.input.impl.DelimitedParseSpec;
import org.apache.druid.data.input.impl.DimensionSchema;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.DoubleDimensionSchema;
import org.apache.druid.data.input.impl.FloatDimensionSchema;
import org.apache.druid.data.input.impl.LongDimensionSchema;
import org.apache.druid.data.input.impl.StringDimensionSchema;
import org.apache.druid.data.input.impl.StringInputRowParser;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.query.aggregation.FloatMaxAggregatorFactory;
import org.apache.druid.query.aggregation.FloatMinAggregatorFactory;
import org.apache.druid.query.aggregation.FloatSumAggregatorFactory;
import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.serde.ComplexMetrics;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.joda.time.Interval;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Shared test fixture that builds incremental ("realtime") and persisted
 * (mmapped) segments from the bundled {@code druid.sample.numeric.tsv}
 * resources. Every index flavor is built lazily and memoized, so the
 * expensive parse/persist work happens at most once per JVM no matter how
 * many tests use it.
 */
public class TestIndex
{
  // Column order of the tab-delimited sample files; consumed by the parser
  // built in loadIncrementalIndex(IncrementalIndex, CharSource).
  public static final String[] COLUMNS = new String[]{
      "ts",
      "market",
      "quality",
      "qualityLong",
      "qualityFloat",
      "qualityDouble",
      "qualityNumericString",
      "longNumericNull",
      "floatNumericNull",
      "doubleNumericNull",
      "placement",
      "placementish",
      "index",
      "partial_null_column",
      "null_column",
      "quality_uniques",
      "indexMin",
      "indexMaxPlusTen"
  };
  public static final List<DimensionSchema> DIMENSION_SCHEMAS = Arrays.asList(
      new StringDimensionSchema("market"),
      new StringDimensionSchema("quality"),
      new LongDimensionSchema("qualityLong"),
      new FloatDimensionSchema("qualityFloat"),
      new DoubleDimensionSchema("qualityDouble"),
      new StringDimensionSchema("qualityNumericString"),
      new LongDimensionSchema("longNumericNull"),
      new FloatDimensionSchema("floatNumericNull"),
      new DoubleDimensionSchema("doubleNumericNull"),
      new StringDimensionSchema("placement"),
      new StringDimensionSchema("placementish"),
      new StringDimensionSchema("partial_null_column"),
      new StringDimensionSchema("null_column")
  );
  // Same dimensions as above, but with the third constructor argument (the
  // bitmap-index flag) disabled for the string columns.
  public static final List<DimensionSchema> DIMENSION_SCHEMAS_NO_BITMAP = Arrays.asList(
      new StringDimensionSchema("market", null, false),
      new StringDimensionSchema("quality", null, false),
      new LongDimensionSchema("qualityLong"),
      new FloatDimensionSchema("qualityFloat"),
      new DoubleDimensionSchema("qualityDouble"),
      new StringDimensionSchema("qualityNumericString", null, false),
      new LongDimensionSchema("longNumericNull"),
      new FloatDimensionSchema("floatNumericNull"),
      new DoubleDimensionSchema("doubleNumericNull"),
      new StringDimensionSchema("placement", null, false),
      new StringDimensionSchema("placementish", null, false),
      new StringDimensionSchema("partial_null_column", null, false),
      new StringDimensionSchema("null_column", null, false)
  );
  public static final DimensionsSpec DIMENSIONS_SPEC = new DimensionsSpec(
      DIMENSION_SCHEMAS,
      null,
      null
  );
  public static final DimensionsSpec DIMENSIONS_SPEC_NO_BITMAPS = new DimensionsSpec(
      DIMENSION_SCHEMAS_NO_BITMAP,
      null,
      null
  );
  public static final String[] DOUBLE_METRICS = new String[]{"index", "indexMin", "indexMaxPlusTen"};
  public static final String[] FLOAT_METRICS = new String[]{"indexFloat", "indexMinFloat", "indexMaxFloat"};
  private static final Logger log = new Logger(TestIndex.class);
  private static final Interval DATA_INTERVAL = Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z");
  // "expr" is a virtual column computed from "index"; its output name feeds
  // the DoubleMax aggregator for "indexMaxPlusTen" below. Note: METRIC_AGGS
  // reads this field, so declaration order here matters.
  private static final VirtualColumns VIRTUAL_COLUMNS = VirtualColumns.create(
      Collections.singletonList(
          new ExpressionVirtualColumn("expr", "index + 10", ValueType.FLOAT, TestExprMacroTable.INSTANCE)
      )
  );
  public static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{
      new DoubleSumAggregatorFactory(DOUBLE_METRICS[0], "index"),
      new FloatSumAggregatorFactory(FLOAT_METRICS[0], "index"),
      new DoubleMinAggregatorFactory(DOUBLE_METRICS[1], "index"),
      new FloatMinAggregatorFactory(FLOAT_METRICS[1], "index"),
      new FloatMaxAggregatorFactory(FLOAT_METRICS[2], "index"),
      new DoubleMaxAggregatorFactory(DOUBLE_METRICS[2], VIRTUAL_COLUMNS.getVirtualColumns()[0].getOutputName()),
      new HyperUniquesAggregatorFactory("quality_uniques", "quality")
  };
  private static final IndexSpec INDEX_SPEC = new IndexSpec();
  private static final IndexMerger INDEX_MERGER =
      TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance());
  private static final IndexIO INDEX_IO = TestHelper.getTestIndexIO();
  static {
    // Register the complex-metric serde so the "quality_uniques" hyperUnique
    // aggregator in METRIC_AGGS can be (de)serialized.
    ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());
  }
  // Memoized suppliers: each index flavor is parsed/persisted at most once.
  private static Supplier<IncrementalIndex> realtimeIndex = Suppliers.memoize(
      () -> makeRealtimeIndex("druid.sample.numeric.tsv")
  );
  private static Supplier<IncrementalIndex> noRollupRealtimeIndex = Suppliers.memoize(
      () -> makeRealtimeIndex("druid.sample.numeric.tsv", false)
  );
  private static Supplier<IncrementalIndex> noBitmapRealtimeIndex = Suppliers.memoize(
      () -> makeRealtimeIndex("druid.sample.numeric.tsv", false, false)
  );
  private static Supplier<QueryableIndex> mmappedIndex = Suppliers.memoize(
      () -> persistRealtimeAndLoadMMapped(realtimeIndex.get())
  );
  private static Supplier<QueryableIndex> noRollupMmappedIndex = Suppliers.memoize(
      () -> persistRealtimeAndLoadMMapped(noRollupRealtimeIndex.get())
  );
  private static Supplier<QueryableIndex> noBitmapMmappedIndex = Suppliers.memoize(
      () -> persistRealtimeAndLoadMMapped(noBitmapRealtimeIndex.get())
  );
  // Two halves of the sample data persisted into separate temp directories,
  // then merged and loaded back as a single queryable index.
  private static Supplier<QueryableIndex> mergedRealtime = Suppliers.memoize(() -> {
    try {
      IncrementalIndex top = makeRealtimeIndex("druid.sample.numeric.tsv.top");
      IncrementalIndex bottom = makeRealtimeIndex("druid.sample.numeric.tsv.bottom");
      // createTempFile gives us a unique path; delete the file so the name
      // can be reused as a parent directory for the three segment dirs.
      File tmpFile = File.createTempFile("yay", "who");
      tmpFile.delete();
      File topFile = new File(tmpFile, "top");
      File bottomFile = new File(tmpFile, "bottom");
      File mergedFile = new File(tmpFile, "merged");
      topFile.mkdirs();
      topFile.deleteOnExit();
      bottomFile.mkdirs();
      bottomFile.deleteOnExit();
      mergedFile.mkdirs();
      mergedFile.deleteOnExit();
      INDEX_MERGER.persist(top, DATA_INTERVAL, topFile, INDEX_SPEC, null);
      INDEX_MERGER.persist(bottom, DATA_INTERVAL, bottomFile, INDEX_SPEC, null);
      return INDEX_IO.loadIndex(
          INDEX_MERGER.mergeQueryableIndex(
              Arrays.asList(INDEX_IO.loadIndex(topFile), INDEX_IO.loadIndex(bottomFile)),
              true,
              METRIC_AGGS,
              mergedFile,
              INDEX_SPEC,
              null
          )
      );
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  });
  /** Rolled-up in-memory index over the full sample file. */
  public static IncrementalIndex getIncrementalTestIndex()
  {
    return realtimeIndex.get();
  }
  /** In-memory index built without rollup. */
  public static IncrementalIndex getNoRollupIncrementalTestIndex()
  {
    return noRollupRealtimeIndex.get();
  }
  /** In-memory index built without rollup and without bitmap indexes. */
  public static IncrementalIndex getNoBitmapIncrementalTestIndex()
  {
    return noBitmapRealtimeIndex.get();
  }
  /** Persisted, memory-mapped version of {@link #getIncrementalTestIndex()}. */
  public static QueryableIndex getMMappedTestIndex()
  {
    return mmappedIndex.get();
  }
  /** Persisted, memory-mapped version of {@link #getNoRollupIncrementalTestIndex()}. */
  public static QueryableIndex getNoRollupMMappedTestIndex()
  {
    return noRollupMmappedIndex.get();
  }
  /** Persisted, memory-mapped version of {@link #getNoBitmapIncrementalTestIndex()}. */
  public static QueryableIndex getNoBitmapMMappedTestIndex()
  {
    return noBitmapMmappedIndex.get();
  }
  /** Queryable index produced by merging the .top and .bottom sample halves. */
  public static QueryableIndex mergedRealtimeIndex()
  {
    return mergedRealtime.get();
  }
  /** Builds a rolled-up, bitmap-indexed in-memory index from a classpath resource. */
  public static IncrementalIndex makeRealtimeIndex(final String resourceFilename)
  {
    return makeRealtimeIndex(resourceFilename, true);
  }
  /** As above, with rollup configurable; bitmap indexes stay enabled. */
  public static IncrementalIndex makeRealtimeIndex(final String resourceFilename, boolean rollup)
  {
    return makeRealtimeIndex(resourceFilename, rollup, true);
  }
  /** As above, with both rollup and bitmap indexing configurable. */
  public static IncrementalIndex makeRealtimeIndex(final String resourceFilename, boolean rollup, boolean bitmap)
  {
    CharSource stream = getResourceCharSource(resourceFilename);
    return makeRealtimeIndex(stream, rollup, bitmap);
  }
  /**
   * Resolves a classpath resource to a UTF-8 CharSource.
   *
   * @throws IllegalArgumentException if the resource does not exist
   */
  public static CharSource getResourceCharSource(final String resourceFilename)
  {
    final URL resource = TestIndex.class.getClassLoader().getResource(resourceFilename);
    if (resource == null) {
      throw new IllegalArgumentException("cannot find resource " + resourceFilename);
    }
    log.info("Realtime loading index file[%s]", resource);
    return Resources.asByteSource(resource).asCharSource(StandardCharsets.UTF_8);
  }
  /** Builds a rolled-up, bitmap-indexed in-memory index from an arbitrary CharSource. */
  public static IncrementalIndex makeRealtimeIndex(final CharSource source)
  {
    return makeRealtimeIndex(source, true, true);
  }
  /**
   * Core builder: constructs an on-heap IncrementalIndex with the shared
   * schema (dimensions, virtual columns, metrics) and loads all rows from
   * {@code source} into it.
   */
  public static IncrementalIndex makeRealtimeIndex(final CharSource source, boolean rollup, boolean bitmap)
  {
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis())
        .withTimestampSpec(new TimestampSpec("ds", "auto", null))
        .withDimensionsSpec(bitmap ? DIMENSIONS_SPEC : DIMENSIONS_SPEC_NO_BITMAPS)
        .withVirtualColumns(VIRTUAL_COLUMNS)
        .withMetrics(METRIC_AGGS)
        .withRollup(rollup)
        .build();
    final IncrementalIndex retVal = new IncrementalIndex.Builder()
        .setIndexSchema(schema)
        .setMaxRowCount(10000)
        .buildOnheap();
    try {
      return loadIncrementalIndex(retVal, source);
    }
    catch (Exception e) {
      // NOTE(review): nulling the memoized supplier leaves any later get()
      // call to fail with an NPE rather than retry, and the no-bitmap
      // supplier is never reset at all — presumably intended to avoid
      // caching a half-built index; confirm this failure path is exercised.
      if (rollup) {
        realtimeIndex = null;
      } else {
        noRollupRealtimeIndex = null;
      }
      throw new RuntimeException(e);
    }
  }
  /**
   * Loads {@code source} into {@code retVal} using the canonical
   * tab-delimited parser over {@link #COLUMNS} (timestamp column "ts", ISO
   * format; "\u0001" as the list delimiter for multi-valued fields).
   */
  public static IncrementalIndex loadIncrementalIndex(
      final IncrementalIndex retVal,
      final CharSource source
  ) throws IOException
  {
    final StringInputRowParser parser = new StringInputRowParser(
        new DelimitedParseSpec(
            new TimestampSpec("ts", "iso", null),
            new DimensionsSpec(DIMENSION_SCHEMAS, null, null),
            "\t",
            "\u0001",
            Arrays.asList(COLUMNS),
            false,
            0
        ),
        "utf8"
    );
    return loadIncrementalIndex(() -> retVal, source, parser);
  }
  /**
   * Streams every line of {@code source} through {@code parser} into the
   * supplied index, logging the row count and elapsed wall-clock time.
   */
  public static IncrementalIndex loadIncrementalIndex(
      final Supplier<IncrementalIndex> indexSupplier,
      final CharSource source,
      final StringInputRowParser parser
  ) throws IOException
  {
    final IncrementalIndex retVal = indexSupplier.get();
    // Start time is captured on the first processed line, not before the
    // read begins, so resource-opening overhead is excluded.
    final AtomicLong startTime = new AtomicLong();
    int lineCount = source.readLines(
        new LineProcessor<Integer>()
        {
          boolean runOnce = false;
          int lineCount = 0;
          @Override
          public boolean processLine(String line) throws IOException
          {
            if (!runOnce) {
              startTime.set(System.currentTimeMillis());
              runOnce = true;
            }
            retVal.add(parser.parse(line));
            ++lineCount;
            return true;
          }
          @Override
          public Integer getResult()
          {
            return lineCount;
          }
        }
    );
    log.info("Loaded %,d lines in %,d millis.", lineCount, System.currentTimeMillis() - startTime.get());
    return retVal;
  }
  /**
   * Persists an incremental index into a fresh temp directory and loads it
   * back as a memory-mapped QueryableIndex.
   */
  public static QueryableIndex persistRealtimeAndLoadMMapped(IncrementalIndex index)
  {
    try {
      // Reuse the unique temp-file name as a directory for the segment.
      File someTmpFile = File.createTempFile("billy", "yay");
      someTmpFile.delete();
      someTmpFile.mkdirs();
      someTmpFile.deleteOnExit();
      INDEX_MERGER.persist(index, someTmpFile, INDEX_SPEC, null);
      return INDEX_IO.loadIndex(someTmpFile);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
| |
/*
* Copyright (c) 2010 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.classloader;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import org.broadinstitute.sting.utils.exceptions.DynamicClassResolutionException;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.reflections.Reflections;
import org.reflections.scanners.SubTypesScanner;
import org.reflections.util.ConfigurationBuilder;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
/**
 * Manage plugins and plugin configuration.
 *
 * <p>Plugins are discovered by classpath scanning: any named, concrete subclass
 * of {@code PluginType} is registered as a plugin, while abstract subclasses
 * and interfaces are tracked separately via {@link #getInterfaces()}.
 *
 * @author mhanna
 * @version 0.1
 */
public class PluginManager<PluginType> {
    /**
     * A reference into our introspection utility, shared by all managers that
     * do not supply a custom classpath. Built once because scanning is costly.
     */
    private static final Reflections defaultReflections;

    static {
        // turn off logging in the reflections library - they talk too much (to the wrong logger factory as well, logback)
        Logger logger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(Reflections.class);
        logger.setLevel(Level.OFF);

        Set<URL> classPathUrls = new LinkedHashSet<URL>();

        URL cwd;
        try {
            cwd = new File(".").getAbsoluteFile().toURI().toURL();
        } catch (MalformedURLException e) {
            throw new RuntimeException(e);
        }

        // NOTE: Reflections also scans directories for classes.
        // Meanwhile some of the jar MANIFEST.MF Bundle-ClassPath properties contain "."
        // Do NOT let reflections scan the CWD where it often picks up test classes when
        // they weren't explicitly in the classpath, for example the UninstantiableWalker
        for (URL url: JVMUtils.getClasspathURLs())
            if (!url.equals(cwd))
                classPathUrls.add(url);

        defaultReflections = new Reflections( new ConfigurationBuilder()
                .setUrls(classPathUrls)
                .setScanners(new SubTypesScanner()));
    }

    /**
     * Defines the category of plugin defined by the subclass.
     */
    protected final String pluginCategory;

    /**
     * Define common strings to trim off the end of the name.
     */
    protected final String pluginSuffix;

    /**
     * Plugins stored based on their name (sorted for stable iteration order).
     */
    private final SortedMap<String, Class<? extends PluginType>> pluginsByName;

    /** Concrete, instantiable plugin classes. */
    private final List<Class<? extends PluginType>> plugins;
    /** Abstract subclasses and interfaces of the plugin type. */
    private final List<Class<? extends PluginType>> interfaces;

    /**
     * Create a new plugin manager.
     * @param pluginType Core type for a plugin.
     */
    public PluginManager(Class<PluginType> pluginType) {
        this(pluginType, pluginType.getSimpleName().toLowerCase(), pluginType.getSimpleName(), null);
    }

    /**
     * Create a new plugin manager.
     * @param pluginType Core type for a plugin.
     * @param classpath Custom class path to search for classes.
     */
    public PluginManager(Class<PluginType> pluginType, List<URL> classpath) {
        this(pluginType, pluginType.getSimpleName().toLowerCase(), pluginType.getSimpleName(), classpath);
    }

    /**
     * Create a new plugin manager.
     * @param pluginType Core type for a plugin.
     * @param pluginCategory Provides a category name to the plugin. Must not be null.
     * @param pluginSuffix Provides a suffix that will be trimmed off when converting to a plugin name. Can be null.
     */
    public PluginManager(Class<PluginType> pluginType, String pluginCategory, String pluginSuffix) {
        this(pluginType, pluginCategory, pluginSuffix, null);
    }

    /**
     * Create a new plugin manager.
     * @param pluginType Core type for a plugin.
     * @param pluginCategory Provides a category name to the plugin. Must not be null.
     * @param pluginSuffix Provides a suffix that will be trimmed off when converting to a plugin name. Can be null.
     * @param classpath Custom class path to search for classes; null means the default (pre-scanned) classpath.
     */
    public PluginManager(Class<PluginType> pluginType, String pluginCategory, String pluginSuffix, List<URL> classpath) {
        this.pluginCategory = pluginCategory;
        this.pluginSuffix = pluginSuffix;

        this.plugins = new ArrayList<Class<? extends PluginType>>();
        this.interfaces = new ArrayList<Class<? extends PluginType>>();

        Reflections reflections;
        if (classpath == null) {
            reflections = defaultReflections;
        } else {
            // A custom classpath must also be visible to the system class
            // loader, or the discovered classes could not be instantiated.
            addClasspath(classpath);
            reflections = new Reflections( new ConfigurationBuilder()
                    .setUrls(classpath)
                    .setScanners(new SubTypesScanner()));
        }

        // Load all classes types filtering them by concrete.
        Set<Class<? extends PluginType>> allTypes = reflections.getSubTypesOf(pluginType);
        for( Class<? extends PluginType> type: allTypes ) {
            // The plugin manager does not support anonymous classes; to be a plugin, a class must have a name.
            if(JVMUtils.isAnonymous(type))
                continue;

            if( JVMUtils.isConcrete(type) )
                plugins.add(type);
            else
                interfaces.add(type);
        }

        pluginsByName = new TreeMap<String, Class<? extends PluginType>>();
        for (Class<? extends PluginType> pluginClass : plugins) {
            String pluginName = getName(pluginClass);
            pluginsByName.put(pluginName, pluginClass);
        }
    }

    /**
     * Adds the URL to the system class loader classpath using reflection.
     * HACK: Uses reflection to modify the class path, and assumes loader is a URLClassLoader.
     * @param urls URLs to add to the system class loader classpath.
     */
    private static void addClasspath(List<URL> urls) {
        Set<URL> existing = JVMUtils.getClasspathURLs();
        // Resolve and open the addURL method once; it is the same for every URL.
        Method method;
        try {
            method = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
            if (!method.isAccessible())
                method.setAccessible(true);
        } catch (Exception e) {
            throw new ReviewedStingException("Error adding url to the current classloader.", e);
        }
        for (URL url : urls) {
            if (existing.contains(url))
                continue;
            try {
                method.invoke(ClassLoader.getSystemClassLoader(), url);
            } catch (Exception e) {
                throw new ReviewedStingException("Error adding url to the current classloader.", e);
            }
        }
    }

    /** Read-only view of the name-to-class registry. */
    public Map<String, Class<? extends PluginType>> getPluginsByName() {
        return Collections.unmodifiableMap(pluginsByName);
    }

    /**
     * Does a plugin with the given name exist?
     *
     * @param pluginName Name of the plugin for which to search.
     * @return True if the plugin exists, false otherwise.
     */
    public boolean exists(String pluginName) {
        return pluginsByName.containsKey(pluginName);
    }

    /**
     * Does a plugin with the given name exist?
     *
     * @param plugin Name of the plugin for which to search.
     * @return True if the plugin exists, false otherwise.
     */
    public boolean exists(Class<?> plugin) {
        return pluginsByName.containsValue(plugin);
    }

    /**
     * Returns the plugin classes
     * @return the plugin classes
     */
    public List<Class<? extends PluginType>> getPlugins() {
        return plugins;
    }

    /**
     * Returns the interface classes
     * @return the interface classes
     */
    public List<Class<? extends PluginType>> getInterfaces() {
        return interfaces;
    }

    /**
     * Returns the plugin classes implementing interface or base clase
     * @param type type of interface or base class
     * @return the plugin classes implementing interface or base class
     */
    public List<Class<? extends PluginType>> getPluginsImplementing(Class<?> type) {
        List<Class<? extends PluginType>> implementing = new ArrayList<Class<? extends PluginType>>();
        for (Class<? extends PluginType> plugin: getPlugins())
            if (type.isAssignableFrom(plugin))
                implementing.add(plugin);
        return implementing;
    }

    /**
     * Gets a plugin with the given name
     *
     * @param pluginName Name of the plugin to retrieve.
     * @return The plugin object if found; null otherwise.
     * @throws UserException if no plugin with that name is registered.
     */
    public PluginType createByName(String pluginName) {
        Class<? extends PluginType> plugin = pluginsByName.get(pluginName);
        if( plugin == null )
            throw new UserException(String.format("Could not find %s with name: %s", pluginCategory,pluginName));
        try {
            return plugin.newInstance();
        } catch (Exception e) {
            throw new DynamicClassResolutionException(plugin, e);
        }
    }

    /**
     * create a plugin with the given type
     *
     * @param pluginType type of the plugin to create.
     * @return The plugin object if created; null otherwise.
     */
    public PluginType createByType(Class<? extends PluginType> pluginType) {
        try {
            // Use the no-args constructor directly so even non-public
            // constructors can be invoked.
            Constructor<? extends PluginType> noArgsConstructor = pluginType.getDeclaredConstructor((Class[])null);
            noArgsConstructor.setAccessible(true);
            return noArgsConstructor.newInstance();
        } catch (Exception e) {
            throw new DynamicClassResolutionException(pluginType, e);
        }
    }

    /**
     * Returns concrete instances of the plugins
     * @return concrete instances of the plugins
     */
    public List<PluginType> createAllTypes() {
        List<PluginType> instances = new ArrayList<PluginType>();
        for ( Class<? extends PluginType> c : getPlugins() ) {
            instances.add(createByType(c));
        }
        return instances;
    }

    /**
     * Create a name for this type of plugin: the simple class name with the
     * configured suffix (if any) trimmed from the end.
     *
     * @param pluginType The type of plugin.
     * @return A name for this type of plugin.
     */
    public String getName(Class<? extends PluginType> pluginType) {
        // The previous implementation guarded on an always-empty local; the
        // guard was dead code and has been removed (behavior is unchanged).
        String pluginName = pluginType.getSimpleName();
        if (pluginSuffix != null && pluginName.endsWith(pluginSuffix))
            pluginName = pluginName.substring(0, pluginName.lastIndexOf(pluginSuffix));
        return pluginName;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators;
import org.apache.pig.PigWarning;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhyPlanVisitor;
import org.apache.pig.data.DataType;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.impl.plan.NodeIdGenerator;
import org.apache.pig.impl.plan.VisitorException;
/**
 * Physical operator implementing binary division over Pig's numeric types.
 * Division by zero does not raise an error: the result becomes null and a
 * DIVIDE_BY_ZERO warning is emitted through the pig logger.
 */
public class Divide extends BinaryExpressionOperator {

    /** Serialization version; this operator carries no state beyond its base class. */
    private static final long serialVersionUID = 1L;

    public Divide(OperatorKey k) {
        super(k);
    }

    public Divide(OperatorKey k, int rp) {
        super(k, rp);
    }

    @Override
    public void visit(PhyPlanVisitor v) throws VisitorException {
        v.visitDivide(this);
    }

    @Override
    public String name() {
        return "Divide" + "[" + DataType.findTypeName(resultType) + "]" + " - " + mKey.toString();
    }

    /**
     * Emits the shared divide-by-zero warning through the attached pig
     * logger, if one is configured. Extracted from the four getNext
     * overloads, which previously duplicated this check.
     */
    private void warnDivideByZero() {
        if (pigLogger != null) {
            pigLogger.warn(this, "Divide by zero. Converting it to NULL.", PigWarning.DIVIDE_BY_ZERO);
        }
    }

    @Override
    public Result getNext(Double d) throws ExecException {
        // Accumulative-evaluation short circuit; non-null means the child
        // already produced the result.
        Result r = accumChild(null, d);
        if (r != null) {
            return r;
        }

        byte status;
        Result res;
        Double left = null, right = null;

        // Left operand: propagate errors and nulls unchanged.
        res = lhs.getNext(left);
        status = res.returnStatus;
        if (status != POStatus.STATUS_OK || res.result == null) {
            return res;
        }
        left = (Double) res.result;

        // Right operand: same propagation rules.
        res = rhs.getNext(right);
        status = res.returnStatus;
        if (status != POStatus.STATUS_OK || res.result == null) {
            return res;
        }
        right = (Double) res.result;

        if (right == 0) {
            warnDivideByZero();
            res.result = null;
        } else {
            // valueOf replaces the deprecated boxing constructor; behavior is identical.
            res.result = Double.valueOf(left / right);
        }
        return res;
    }

    @Override
    public Result getNext(Float f) throws ExecException {
        Result r = accumChild(null, f);
        if (r != null) {
            return r;
        }

        byte status;
        Result res;
        Float left = null, right = null;

        res = lhs.getNext(left);
        status = res.returnStatus;
        if (status != POStatus.STATUS_OK || res.result == null) {
            return res;
        }
        left = (Float) res.result;

        res = rhs.getNext(right);
        status = res.returnStatus;
        if (status != POStatus.STATUS_OK || res.result == null) {
            return res;
        }
        right = (Float) res.result;

        if (right == 0) {
            warnDivideByZero();
            res.result = null;
        } else {
            res.result = Float.valueOf(left / right);
        }
        return res;
    }

    @Override
    public Result getNext(Integer i) throws ExecException {
        Result r = accumChild(null, i);
        if (r != null) {
            return r;
        }

        byte status;
        Result res;
        Integer left = null, right = null;

        res = lhs.getNext(left);
        status = res.returnStatus;
        if (status != POStatus.STATUS_OK || res.result == null) {
            return res;
        }
        left = (Integer) res.result;

        res = rhs.getNext(right);
        status = res.returnStatus;
        if (status != POStatus.STATUS_OK || res.result == null) {
            return res;
        }
        right = (Integer) res.result;

        if (right == 0) {
            warnDivideByZero();
            res.result = null;
        } else {
            res.result = Integer.valueOf(left / right);
        }
        return res;
    }

    @Override
    public Result getNext(Long l) throws ExecException {
        Result r = accumChild(null, l);
        if (r != null) {
            return r;
        }

        byte status;
        Result res;
        Long left = null, right = null;

        res = lhs.getNext(left);
        status = res.returnStatus;
        if (status != POStatus.STATUS_OK || res.result == null) {
            return res;
        }
        left = (Long) res.result;

        res = rhs.getNext(right);
        status = res.returnStatus;
        if (status != POStatus.STATUS_OK || res.result == null) {
            return res;
        }
        right = (Long) res.result;

        if (right == 0) {
            warnDivideByZero();
            res.result = null;
        } else {
            res.result = Long.valueOf(left / right);
        }
        return res;
    }

    /** Deep copy with a freshly generated operator key in the same scope. */
    @Override
    public Divide clone() throws CloneNotSupportedException {
        Divide clone = new Divide(new OperatorKey(mKey.scope,
            NodeIdGenerator.getGenerator().getNextNodeId(mKey.scope)));
        clone.cloneHelper(this);
        return clone;
    }
}
| |
/* Generated By:JavaCC: Do not edit this line. SSPTParserTokenManager.java */
package org.apache.camel.component.sql.stored.template.generated;
import java.io.Reader;
import org.apache.camel.spi.ClassResolver;
import org.apache.camel.component.sql.stored.template.ast.*;
/** Token Manager. */
public class SSPTParserTokenManager implements SSPTParserConstants
{
/** Debug output stream for the generated token manager (defaults to stdout). */
public java.io.PrintStream debugStream = System.out;
/** Set debug output. */
public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
// Generated by JavaCC — do not hand-edit logic; comments added for review only.
// Called when the string-literal DFA can no longer advance at position 'pos'
// with the still-active literals encoded in bitmask 'active0'. Records the
// best fallback match seen so far (kind 17 — presumably the identifier-like
// token; see SSPTParserConstants) and returns the NFA state to resume from,
// or -1 when there is no NFA continuation.
private final int jjStopStringLiteralDfa_0(int pos, long active0)
{
   switch (pos)
   {
      case 0:
         if ((active0 & 0xcL) != 0L)
         {
            jjmatchedKind = 17;
            return 15;
         }
         if ((active0 & 0x2L) != 0L)
            return 25;
         return -1;
      case 1:
         if ((active0 & 0xcL) != 0L)
         {
            jjmatchedKind = 17;
            jjmatchedPos = 1;
            return 15;
         }
         return -1;
      case 2:
         if ((active0 & 0xcL) != 0L)
         {
            jjmatchedKind = 17;
            jjmatchedPos = 2;
            return 15;
         }
         return -1;
      case 3:
         if ((active0 & 0x8L) != 0L)
         {
            jjmatchedKind = 17;
            jjmatchedPos = 3;
            return 15;
         }
         return -1;
      case 4:
         if ((active0 & 0x8L) != 0L)
         {
            jjmatchedKind = 17;
            jjmatchedPos = 4;
            return 15;
         }
         return -1;
      default :
         return -1;
   }
}
// Generated by JavaCC. Hands control from the string-literal DFA over to the
// NFA, resuming at the state chosen by jjStopStringLiteralDfa_0.
private final int jjStartNfa_0(int pos, long active0)
{
   return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1);
}
// Generated by JavaCC. Records a definitive match of token 'kind' ending at
// 'pos' and returns the number of characters consumed.
private int jjStopAtPos(int pos, int kind)
{
   jjmatchedKind = kind;
   jjmatchedPos = pos;
   return pos + 1;
}
// Generated by JavaCC. First step of the string-literal DFA: dispatch on the
// first input character. 'I' (73) starts the literal tracked by bit 0x8 and
// 'O' (79) the one tracked by 0x4 — spelling out "INOUT " and "OUT " per the
// character codes in the later Dfa steps. A space (32) both matches kind 1
// and continues in NFA state 25; anything else goes straight to the NFA.
private int jjMoveStringLiteralDfa0_0()
{
   switch(curChar)
   {
      case 32:
         return jjStartNfaWithStates_0(0, 1, 25);
      case 73:
         return jjMoveStringLiteralDfa1_0(0x8L);
      case 79:
         return jjMoveStringLiteralDfa1_0(0x4L);
      default :
         return jjMoveNfa_0(8, 0);
   }
}
// Generated by JavaCC. Second character of the literal DFA: 'N' (78)
// continues "INOUT " (bit 0x8), 'U' (85) continues "OUT " (bit 0x4). End of
// input or any other character falls back to the NFA.
private int jjMoveStringLiteralDfa1_0(long active0)
{
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(0, active0);
      return 1;
   }
   switch(curChar)
   {
      case 78:
         return jjMoveStringLiteralDfa2_0(active0, 0x8L);
      case 85:
         return jjMoveStringLiteralDfa2_0(active0, 0x4L);
      default :
         break;
   }
   return jjStartNfa_0(0, active0);
}
// Generated by JavaCC. Third character: 'O' (79) continues "INOUT " (0x8),
// 'T' (84) continues "OUT " (0x4). The '(active0 &= old0) == 0' guard stops
// when no tracked literal remains viable.
private int jjMoveStringLiteralDfa2_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(0, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(1, active0);
      return 2;
   }
   switch(curChar)
   {
      case 79:
         return jjMoveStringLiteralDfa3_0(active0, 0x8L);
      case 84:
         return jjMoveStringLiteralDfa3_0(active0, 0x4L);
      default :
         break;
   }
   return jjStartNfa_0(1, active0);
}
// Generated by JavaCC. Fourth character: a space (32) completes "OUT " and
// accepts token kind 2; 'U' (85) continues "INOUT " (0x8).
private int jjMoveStringLiteralDfa3_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(1, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(2, active0);
      return 3;
   }
   switch(curChar)
   {
      case 32:
         if ((active0 & 0x4L) != 0L)
            return jjStopAtPos(3, 2);
         break;
      case 85:
         return jjMoveStringLiteralDfa4_0(active0, 0x8L);
      default :
         break;
   }
   return jjStartNfa_0(2, active0);
}
/** Fifth literal character: 'T' continues "INOUT " (0x8). */
private int jjMoveStringLiteralDfa4_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(2, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(3, active0);
      return 4;
   }
   switch(curChar)
   {
      case 84:
         return jjMoveStringLiteralDfa5_0(active0, 0x8L);
      default :
         break;
   }
   return jjStartNfa_0(3, active0);
}
/** Sixth literal character: a trailing space (32) completes "INOUT " (token 3). */
private int jjMoveStringLiteralDfa5_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(3, old0);
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) {
      jjStopStringLiteralDfa_0(4, active0);
      return 5;
   }
   switch(curChar)
   {
      case 32:
         if ((active0 & 0x8L) != 0L)
            return jjStopAtPos(5, 3);
         break;
      default :
         break;
   }
   return jjStartNfa_0(4, active0);
}
/** Records a literal match of {@code kind} at {@code pos}, then continues NFA
    simulation from {@code state} (a longer NFA match may still supersede it).
    On EOF the literal match stands and pos + 1 characters are consumed. */
private int jjStartNfaWithStates_0(int pos, int kind, int state)
{
   jjmatchedKind = kind;
   jjmatchedPos = pos;
   try { curChar = input_stream.readChar(); }
   catch(java.io.IOException e) { return pos + 1; }
   return jjMoveNfa_0(state, pos + 1);
}
/**
 * Generated NFA simulator for lexical state DEFAULT (JavaCC output — regenerate
 * from the grammar instead of hand-editing). Runs all live NFA states against
 * each input character, tracking the lowest-numbered token kind matched so far
 * (jjmatchedKind/jjmatchedPos), until no state survives or EOF is reached.
 * Returns the number of characters consumed.
 *
 * The 64-bit masks are character-class bitsets over the current 64-char block,
 * e.g. {@code 0x100002600L} over chars 0..63 selects whitespace
 * (TAB, LF, FF, CR, space).
 */
private int jjMoveNfa_0(int startState, int curPos)
{
   int startsAt = 0;
   jjnewStateCnt = 25;           // 25 NFA states; jjstateSet holds two generations of 25
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;        // "no match yet" sentinel
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();         // round counter wrapped; invalidate all stamps
      if (curChar < 64)
      {
         // ASCII 0..63: membership tested via bit l in the masks below.
         long l = 1L << curChar;
         do
         {
            switch(jjstateSet[--i])
            {
               case 8:
                  if ((0x7ff609c00000000L & l) != 0L)
                  {
                     if (kind > 17)
                        kind = 17;
                     jjCheckNAdd(15);
                  }
                  else if ((0x100002600L & l) != 0L)
                  {
                     if (kind > 9)
                        kind = 9;
                     jjCheckNAddStates(0, 5);
                  }
                  else if (curChar == 40)            // '('
                  {
                     if (kind > 12)
                        kind = 12;
                     jjCheckNAddTwoStates(17, 19);
                  }
                  else if (curChar == 41)            // ')'
                  {
                     if (kind > 13)
                        kind = 13;
                     jjCheckNAdd(4);
                  }
                  else if (curChar == 44)            // ','
                  {
                     if (kind > 11)
                        kind = 11;
                     jjCheckNAdd(2);
                  }
                  if ((0x3ff200000000000L & l) != 0L)
                  {
                     if (kind > 5)
                        kind = 5;
                     jjCheckNAdd(0);
                  }
                  else if (curChar == 39)            // '\''
                     jjCheckNAdd(13);
                  else if (curChar == 58)            // ':'
                     jjstateSet[jjnewStateCnt++] = 9;
                  else if (curChar == 36)            // '$'
                     jjstateSet[jjnewStateCnt++] = 5;
                  break;
               case 25:
                  if ((0x100002600L & l) != 0L)
                     jjCheckNAddTwoStates(24, 3);
                  else if (curChar == 41)
                  {
                     if (kind > 13)
                        kind = 13;
                     jjCheckNAdd(4);
                  }
                  else if (curChar == 40)
                  {
                     if (kind > 12)
                        kind = 12;
                     jjCheckNAdd(19);
                  }
                  else if (curChar == 44)
                  {
                     if (kind > 11)
                        kind = 11;
                     jjCheckNAdd(2);
                  }
                  if ((0x100002600L & l) != 0L)
                     jjCheckNAddTwoStates(22, 23);
                  if ((0x100002600L & l) != 0L)
                     jjCheckNAddTwoStates(21, 1);
                  break;
               case 0:
                  if ((0x3ff200000000000L & l) == 0L)
                     break;
                  if (kind > 5)
                     kind = 5;
                  jjCheckNAdd(0);
                  break;
               case 1:
                  if (curChar != 44)
                     break;
                  // NOTE(review): unlike sibling states, kind is assigned here
                  // without a "kind > 11" guard — generated code, left as-is.
                  kind = 11;
                  jjCheckNAdd(2);
                  break;
               case 2:
                  if ((0x100002600L & l) == 0L)
                     break;
                  if (kind > 11)
                     kind = 11;
                  jjCheckNAdd(2);
                  break;
               case 3:
                  if (curChar != 41)
                     break;
                  if (kind > 13)
                     kind = 13;
                  jjCheckNAdd(4);
                  break;
               case 4:
                  if ((0x100002600L & l) == 0L)
                     break;
                  if (kind > 13)
                     kind = 13;
                  jjCheckNAdd(4);
                  break;
               case 6:
                  if ((0x7ff609d00000000L & l) != 0L)
                     jjAddStates(6, 7);
                  break;
               case 9:
                  if (curChar == 35)                 // '#'
                     jjCheckNAdd(10);
                  break;
               case 10:
                  if ((0x7ff609c00000000L & l) == 0L)
                     break;
                  if (kind > 15)
                     kind = 15;
                  jjCheckNAdd(10);
                  break;
               case 11:
                  if (curChar == 58)
                     jjstateSet[jjnewStateCnt++] = 9;
                  break;
               case 12:
                  if (curChar == 39)
                     jjCheckNAdd(13);
                  break;
               case 13:
                  if ((0x7ff609c00000000L & l) != 0L)
                     jjCheckNAddTwoStates(13, 14);
                  break;
               case 14:
                  if (curChar == 39 && kind > 16)
                     kind = 16;
                  break;
               case 15:
                  if ((0x7ff609c00000000L & l) == 0L)
                     break;
                  if (kind > 17)
                     kind = 17;
                  jjCheckNAdd(15);
                  break;
               case 16:
                  if (curChar != 40)
                     break;
                  if (kind > 12)
                     kind = 12;
                  jjCheckNAddTwoStates(17, 19);
                  break;
               case 17:
                  if ((0x3ff200000000000L & l) != 0L)
                     jjCheckNAddTwoStates(17, 18);
                  break;
               case 18:
                  if (curChar == 41 && kind > 4)
                     kind = 4;
                  break;
               case 19:
                  if ((0x100002600L & l) == 0L)
                     break;
                  if (kind > 12)
                     kind = 12;
                  jjCheckNAdd(19);
                  break;
               case 20:
                  if ((0x100002600L & l) == 0L)
                     break;
                  if (kind > 9)
                     kind = 9;
                  jjCheckNAddStates(0, 5);
                  break;
               case 21:
                  if ((0x100002600L & l) != 0L)
                     jjCheckNAddTwoStates(21, 1);
                  break;
               case 22:
                  if ((0x100002600L & l) != 0L)
                     jjCheckNAddTwoStates(22, 23);
                  break;
               case 23:
                  if (curChar != 40)
                     break;
                  if (kind > 12)
                     kind = 12;
                  jjCheckNAdd(19);
                  break;
               case 24:
                  if ((0x100002600L & l) != 0L)
                     jjCheckNAddTwoStates(24, 3);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else if (curChar < 128)
      {
         // ASCII 64..127: same scheme, masks indexed by (curChar & 077).
         long l = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               case 8:
               case 15:
                  if ((0x2ffffffeaffffffeL & l) == 0L)
                     break;
                  if (kind > 17)
                     kind = 17;
                  jjCheckNAdd(15);
                  break;
               case 5:
                  if (curChar == 123)                // '{'
                     jjCheckNAdd(6);
                  break;
               case 6:
                  if ((0x2ffffffeaffffffeL & l) != 0L)
                     jjCheckNAddTwoStates(6, 7);
                  break;
               case 7:
                  if (curChar == 125 && kind > 14)   // '}'
                     kind = 14;
                  break;
               case 10:
                  if ((0x2ffffffeaffffffeL & l) == 0L)
                     break;
                  if (kind > 15)
                     kind = 15;
                  jjstateSet[jjnewStateCnt++] = 10;
                  break;
               case 13:
                  if ((0x2ffffffeaffffffeL & l) != 0L)
                     jjAddStates(8, 9);
                  break;
               default : break;
            }
         } while(i != startsAt);
      }
      else
      {
         // Non-ASCII: no state accepts anything here; all states die.
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch(jjstateSet[--i])
            {
               default : break;
            }
         } while(i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         // A (new) longest match ends at this position.
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      // Swap the two state-set generations; stop when no state survived.
      if ((i = jjnewStateCnt) == (startsAt = 25 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch(java.io.IOException e) { return curPos; }
   }
}
// Flattened successor-state lists referenced by jjAddStates/jjCheckNAddStates
// via (start, end) index pairs.
static final int[] jjnextStates = {
   21, 1, 22, 23, 24, 3, 6, 7, 13, 14,
};
/** Token literal values (octal escapes): " ", "OUT ", "INOUT "; null for
    non-literal tokens. Indexed by token kind. */
public static final String[] jjstrLiteralImages = {
"", "\40", "\117\125\124\40", "\111\116\117\125\124\40", null, null, null,
null, null, null, null, null, null, null, null, null, null, null, };
/** Lexer state names (this grammar has a single DEFAULT state). */
public static final String[] lexStateNames = {
   "DEFAULT",
};
// Character source being tokenized.
protected SimpleCharStream input_stream;
// Per-NFA-state round stamps: jjrounds[s] == jjround means state s is already
// in the current generation of jjstateSet.
private final int[] jjrounds = new int[25];
// Two back-to-back generations of 25 NFA states each.
private final int[] jjstateSet = new int[50];
// Character currently being examined.
protected char curChar;
/** Constructor. Rejects a static CharStream build because this token manager
    keeps per-instance state. */
public SSPTParserTokenManager(SimpleCharStream stream){
   if (SimpleCharStream.staticFlag)
      throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
   input_stream = stream;
}
/** Constructor with an explicit initial lexical state. */
public SSPTParserTokenManager(SimpleCharStream stream, int lexState){
   this(stream);
   SwitchTo(lexState);
}
/** Reinitialise the token manager on a new character stream, resetting all
    match state and NFA round stamps. */
public void ReInit(SimpleCharStream stream)
{
   jjmatchedPos = jjnewStateCnt = 0;
   curLexState = defaultLexState;
   input_stream = stream;
   ReInitRounds();
}
/** Resets the round counter and invalidates every per-state round stamp so no
    NFA state reads as "already added in the current round". */
private void ReInitRounds()
{
   jjround = 0x80000001;
   for (int state = 0; state < 25; state++)
      jjrounds[state] = 0x80000000;
}
/** Reinitialise on a new stream and switch to the given lexical state. */
public void ReInit(SimpleCharStream stream, int lexState)
{
   ReInit(stream);
   SwitchTo(lexState);
}
/** Switch to the specified lex state; only state 0 (DEFAULT) exists, so any
    other value raises a TokenMgrError. */
public void SwitchTo(int lexState)
{
   if (lexState >= 1 || lexState < 0)
      throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
   else
      curLexState = lexState;
}
/** Builds a Token for the most recent match: the image comes from the literal
    table when the kind is a fixed literal, otherwise from the stream's matched
    image; begin/end positions are copied from the stream. */
protected Token jjFillToken()
{
   final String literal = jjstrLiteralImages[jjmatchedKind];
   final String image = (literal == null) ? input_stream.GetImage() : literal;
   final Token token = Token.newToken(jjmatchedKind, image);
   token.beginLine = input_stream.getBeginLine();
   token.beginColumn = input_stream.getBeginColumn();
   token.endLine = input_stream.getEndLine();
   token.endColumn = input_stream.getEndColumn();
   return token;
}
int curLexState = 0;        // current lexical state (only DEFAULT = 0 exists)
int defaultLexState = 0;    // state restored by ReInit
int jjnewStateCnt;          // fill pointer into the active jjstateSet generation
int jjround;                // current NFA round, compared against jjrounds[]
int jjmatchedPos;           // end position of the longest match so far
int jjmatchedKind;          // token kind of the longest match so far
/** Get the next Token. Returns an EOF token (kind 0) at end of input; on a
    successful match backs the stream up to just past the matched lexeme and
    returns the token; otherwise throws a TokenMgrError describing the
    offending character and position. */
public Token getNextToken()
{
  Token matchedToken;
  int curPos = 0;
  EOFLoop :
  for (;;)
  {
   try
   {
      curChar = input_stream.BeginToken();
   }
   catch(java.io.IOException e)
   {
      // EOF before any character: synthesize the EOF token.
      jjmatchedKind = 0;
      matchedToken = jjFillToken();
      return matchedToken;
   }
   jjmatchedKind = 0x7fffffff;
   jjmatchedPos = 0;
   curPos = jjMoveStringLiteralDfa0_0();
   if (jjmatchedKind != 0x7fffffff)
   {
      // The scanner may have read past the match end; rewind the excess.
      if (jjmatchedPos + 1 < curPos)
         input_stream.backup(curPos - jjmatchedPos - 1);
      matchedToken = jjFillToken();
      return matchedToken;
   }
   // No token matched: build a precise lexical-error report.
   int error_line = input_stream.getEndLine();
   int error_column = input_stream.getEndColumn();
   String error_after = null;
   boolean EOFSeen = false;
   try { input_stream.readChar(); input_stream.backup(1); }
   catch (java.io.IOException e1) {
      EOFSeen = true;
      error_after = curPos <= 1 ? "" : input_stream.GetImage();
      if (curChar == '\n' || curChar == '\r') {
         error_line++;
         error_column = 0;
      }
      else
         error_column++;
   }
   if (!EOFSeen) {
      input_stream.backup(1);
      error_after = curPos <= 1 ? "" : input_stream.GetImage();
   }
   throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
  }
}
/** Adds {@code state} to the next NFA generation unless it was already added
    in the current round (round-stamp deduplication). */
private void jjCheckNAdd(int state)
{
   if (jjrounds[state] != jjround)
   {
      jjstateSet[jjnewStateCnt++] = state;
      jjrounds[state] = jjround;
   }
}
/** Appends jjnextStates[start..end] (inclusive) without deduplication. */
private void jjAddStates(int start, int end)
{
   do {
      jjstateSet[jjnewStateCnt++] = jjnextStates[start];
   } while (start++ != end);
}
/** Deduplicated add of two states (see jjCheckNAdd). */
private void jjCheckNAddTwoStates(int state1, int state2)
{
   jjCheckNAdd(state1);
   jjCheckNAdd(state2);
}
/** Deduplicated add of jjnextStates[start..end] (inclusive). */
private void jjCheckNAddStates(int start, int end)
{
   do {
      jjCheckNAdd(jjnextStates[start]);
   } while (start++ != end);
}
}
| |
package io.reactivesw.category.api.executor.schema;
import graphql.language.IntValue;
import graphql.language.StringValue;
import graphql.schema.Coercing;
import graphql.schema.GraphQLScalarType;
import graphql.schema.GraphQLType;
import java.time.ZonedDateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.math.BigInteger;
import java.text.DateFormat;
import java.text.ParseException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeParseException;
import java.util.Date;
import java.util.TimeZone;
/**
*
*/
/**
 * Custom GraphQL scalar types for common Java date/time classes
 * ({@link java.util.Date}, {@link LocalDateTime}, {@link ZonedDateTime},
 * {@link LocalDate}), each accepting String, Long/Integer (epoch) and the
 * native type as input. Raw {@code Coercing} is used here; the generic
 * {@code Coercing<I,O>} form exists in newer graphql-java versions —
 * NOTE(review): confirm against the graphql-java version on the classpath.
 */
class JavaScalars {

  static private final Logger log = LoggerFactory.getLogger(JavaScalars.class);

  /**
   * Returns the GraphQL scalar matching {@code javaType}, or {@code null}
   * when the type has no scalar mapping. First match wins.
   */
  static GraphQLType buildGqlScalar(Class<?> javaType) {
    GraphQLType graphQLType = null;
    if (Date.class.isAssignableFrom(javaType)) {
      graphQLType = JavaScalars.GraphQLDate;
    } else if (LocalDateTime.class.isAssignableFrom(javaType)) {
      graphQLType = JavaScalars.GraphQLLocalDateTime;
    } else if (ZonedDateTime.class.isAssignableFrom(javaType)) {
      graphQLType = JavaScalars.GraphQLZonedDateTime;
    } else if (LocalDate.class.isAssignableFrom(javaType)) {
      graphQLType = JavaScalars.GraphQLLocalDate;
    }
    return graphQLType;
  }

  /**
   * LocalDateTime scalar. Numeric input is interpreted as epoch *seconds* in
   * the JVM default zone — NOTE(review): GraphQLDate below treats longs as
   * epoch *milliseconds*; confirm this asymmetry is intended.
   */
  private static final GraphQLScalarType GraphQLLocalDateTime = new GraphQLScalarType(
      "LocalDateTime", "Date type", new Coercing() {

        @Override
        public Object serialize(Object input) {
          // Accepts String (ISO-8601), LocalDateTime pass-through, or epoch seconds.
          if (input instanceof String) {
            return parseStringToLocalDateTime((String) input);
          } else if (input instanceof LocalDateTime) {
            return input;
          } else if (input instanceof Long) {
            return parseLongToLocalDateTime((Long) input);
          } else if (input instanceof Integer) {
            return parseLongToLocalDateTime((Integer) input);
          }
          // Unsupported input type: swallowed as null rather than an error.
          return null;
        }

        @Override
        public Object parseValue(Object input) {
          return serialize(input);
        }

        @Override
        public Object parseLiteral(Object input) {
          // AST literals: quoted strings or integer epoch-second values.
          if (input instanceof StringValue) {
            return parseStringToLocalDateTime(((StringValue) input).getValue());
          } else if (input instanceof IntValue) {
            BigInteger value = ((IntValue) input).getValue();
            return parseLongToLocalDateTime(value.longValue());
          }
          return null;
        }

        // Epoch seconds -> LocalDateTime in the JVM default time zone.
        private LocalDateTime parseLongToLocalDateTime(long input) {
          return LocalDateTime
              .ofInstant(Instant.ofEpochSecond(input), TimeZone.getDefault().toZoneId());
        }

        // ISO-8601 local date-time string; returns null (with a warning) on bad input.
        private LocalDateTime parseStringToLocalDateTime(String input) {
          try {
            return LocalDateTime.parse(input);
          } catch (DateTimeParseException e) {
            log.warn("Failed to parse Date from input: " + input, e);
            return null;
          }
        }
      });

  /** ZonedDateTime scalar; same input conventions as GraphQLLocalDateTime. */
  private static final GraphQLScalarType GraphQLZonedDateTime = new GraphQLScalarType(
      "ZonedDateTime", "ZonedDateTime type", new Coercing() {

        @Override
        public Object serialize(Object input) {
          if (input instanceof String) {
            return parseStringToZonedDateTime((String) input);
          } else if (input instanceof ZonedDateTime) {
            return input;
          } else if (input instanceof Long) {
            return parseLongToZonedDateTime((Long) input);
          } else if (input instanceof Integer) {
            return parseLongToZonedDateTime((Integer) input);
          }
          return null;
        }

        @Override
        public Object parseValue(Object input) {
          return serialize(input);
        }

        @Override
        public Object parseLiteral(Object input) {
          if (input instanceof StringValue) {
            return parseStringToZonedDateTime(((StringValue) input).getValue());
          } else if (input instanceof IntValue) {
            BigInteger value = ((IntValue) input).getValue();
            return parseLongToZonedDateTime(value.longValue());
          }
          return null;
        }

        // Epoch seconds -> ZonedDateTime in the JVM default time zone.
        private ZonedDateTime parseLongToZonedDateTime(long input) {
          return ZonedDateTime
              .ofInstant(Instant.ofEpochSecond(input), TimeZone.getDefault().toZoneId());
        }

        private ZonedDateTime parseStringToZonedDateTime(String input) {
          try {
            return ZonedDateTime.parse(input);
          } catch (DateTimeParseException e) {
            log.warn("Failed to parse Zoned Date from input: " + input, e);
            return null;
          }
        }
      });

  /** LocalDate scalar; numeric input is epoch seconds truncated to a date. */
  private static final GraphQLScalarType GraphQLLocalDate = new GraphQLScalarType(
      "LocalDate", "LocalDate type",
      new Coercing() {

        @Override
        public Object serialize(Object input) {
          if (input instanceof String) {
            return parseStringToLocalDate((String) input);
          } else if (input instanceof LocalDate) {
            return input;
          } else if (input instanceof Long) {
            return parseLongToLocalDate((Long) input);
          } else if (input instanceof Integer) {
            return parseLongToLocalDate((Integer) input);
          }
          return null;
        }

        @Override
        public Object parseValue(Object input) {
          return serialize(input);
        }

        @Override
        public Object parseLiteral(Object input) {
          if (input instanceof StringValue) {
            return parseStringToLocalDate(((StringValue) input).getValue());
          } else if (input instanceof IntValue) {
            BigInteger value = ((IntValue) input).getValue();
            return parseLongToLocalDate(value.longValue());
          }
          return null;
        }

        // Epoch seconds -> calendar date in the JVM default time zone.
        private LocalDate parseLongToLocalDate(long input) {
          return LocalDateTime
              .ofInstant(Instant.ofEpochSecond(input), TimeZone.getDefault().toZoneId())
              .toLocalDate();
        }

        private LocalDate parseStringToLocalDate(String input) {
          try {
            return LocalDate.parse(input);
          } catch (DateTimeParseException e) {
            log.warn("Failed to parse Date from input: " + input, e);
            return null;
          }
        }
      });

  /**
   * java.util.Date scalar. Numeric input is epoch *milliseconds* (Date's own
   * convention). String parsing uses {@code DateFormat.getInstance()}, which is
   * locale-dependent — NOTE(review): consider whether an ISO format was intended.
   */
  private static final GraphQLScalarType GraphQLDate = new GraphQLScalarType(
      "Date", "Date type", new Coercing() {

        @Override
        public Object serialize(Object input) {
          if (input instanceof String) {
            return parseStringToDate((String) input);
          } else if (input instanceof Date) {
            return input;
          } else if (input instanceof Long) {
            return new Date((Long) input);
          } else if (input instanceof Integer) {
            return new Date(((Integer) input).longValue());
          }
          return null;
        }

        @Override
        public Object parseValue(Object input) {
          return serialize(input);
        }

        @Override
        public Object parseLiteral(Object input) {
          if (input instanceof StringValue) {
            return parseStringToDate(((StringValue) input).getValue());
          } else if (input instanceof IntValue) {
            BigInteger value = ((IntValue) input).getValue();
            return new Date(value.longValue());
          }
          return null;
        }

        // Locale-default short date/time format; a fresh DateFormat per call,
        // so no shared-instance thread-safety issue here.
        private Date parseStringToDate(String input) {
          try {
            return DateFormat.getInstance().parse(input);
          } catch (ParseException e) {
            log.warn("Failed to parse Date from input: " + input, e);
            return null;
          }
        }
      });
}
| |
//******************************************************************
//
// Copyright 2015 Samsung Electronics All Rights Reserved.
//
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
/// @file Activator.java
package oic.plugin.hue;
import java.util.EnumSet;
import java.util.List;
import oic.plugin.hue.AccessPointListAdapter;
import oic.plugin.hue.HueSharedPreferences;
import org.iotivity.base.ModeType;
import org.iotivity.base.OcException;
import org.iotivity.base.OcPlatform;
import org.iotivity.base.OcResource;
import org.iotivity.base.ObserveType;
import org.iotivity.base.OcResourceHandle;
import org.iotivity.base.PlatformConfig;
import org.iotivity.base.QualityOfService;
import org.iotivity.base.ResourceProperty;
import org.iotivity.base.ServiceType;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import com.philips.lighting.hue.sdk.PHAccessPoint;
import com.philips.lighting.hue.sdk.PHBridgeSearchManager;
import com.philips.lighting.hue.sdk.PHHueSDK;
import com.philips.lighting.hue.sdk.PHSDKListener;
import com.philips.lighting.model.PHBridge;
import com.philips.lighting.model.PHHueError;
import com.philips.lighting.model.PHHueParsingError;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.util.Log;
/**
 * OSGi {@link BundleActivator} (doubling as an Android {@link Activity}) for the
 * IoTivity Philips Hue plug-in. On {@link #start} it connects to the last known
 * Hue bridge (or starts a UPnP bridge search) and, once a bridge connects,
 * registers an OIC resource at {@code /a/huebulb}.
 *
 * Fix: {@code onAccessPointsFound} previously logged {@code accessPoint.size()}
 * BEFORE its own {@code accessPoint != null} check, so a null callback argument
 * would throw a NullPointerException on the log line.
 */
public class Activator extends Activity implements BundleActivator {

    private static BundleContext context;
    private static Context AppContext;
    private PHHueSDK phHueSDK;
    public static final String TAG = "Hue Plugin";
    private HueSharedPreferences prefs;
    private AccessPointListAdapter adapter;

    /** Local cache of the bulb state exposed through the /a/huebulb resource. */
    class Light {
        public String m_power;
        public int m_brightness;
        public int m_color;
        public String m_name;

        public Light() {
            m_power = "off";
            m_brightness = 0;
            m_color = 0;
            m_name = "device.light";
        }
    }

    static int OBSERVE_TYPE_TO_USE = ObserveType.OBSERVE.getValue();

    public static Light myLight;
    public static OcResource curResource;
    public static OcResourceHandle resourceHandle;
    static int oc = 0;
    static Activity mActivity;

    static BundleContext getContext() {
        return context;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mActivity = this;
    }

    /**
     * OSGi bundle start: obtains the Android Context service, configures the Hue
     * SDK, and either reconnects to the last bridge or starts a bridge search.
     */
    public void start(BundleContext bundleContext) throws Exception {
        Activator.context = bundleContext;
        ServiceReference<Context> ref =
                Activator.context.getServiceReference(Context.class);
        AppContext = Activator.context.getService(ref);

        // Gets an instance of the Hue SDK.
        phHueSDK = PHHueSDK.getInstance();

        // Set the Device Name (name of your app). This will be stored in your
        // bridge whitelist entry.
        phHueSDK.setAppName("QuickStartApp");
        phHueSDK.setDeviceName(android.os.Build.MODEL);

        // Register the PHSDKListener to receive callbacks from the bridge.
        phHueSDK.getNotificationManager().registerSDKListener(listener);
        adapter = new AccessPointListAdapter(AppContext,
                phHueSDK.getAccessPointsFound());

        // Try to automatically connect to the last known bridge. For first time
        // use this will be empty so a bridge search is automatically started.
        prefs = HueSharedPreferences.getInstance(AppContext);
        String lastIpAddress = prefs.getLastConnectedIPAddress();
        String lastUsername = prefs.getUsername();

        // Automatically try to connect to the last connected IP Address. For
        // multiple bridge support a different implementation is required.
        if (lastIpAddress != null && !lastIpAddress.equals("")) {
            PHAccessPoint lastAccessPoint = new PHAccessPoint();
            lastAccessPoint.setIpAddress(lastIpAddress);
            lastAccessPoint.setUsername(lastUsername);
            if (!phHueSDK.isAccessPointConnected(lastAccessPoint)) {
                phHueSDK.connect(lastAccessPoint);
            }
        } else { // First time use, so perform a bridge search.
            doBridgeSearch();
            Log.w(TAG, "Searching for Bridges.");
        }
        myLight = new Light();
    }

    /**
     * OSGi bundle stop: unregisters the OIC resource and releases static state.
     * NOTE(review): resourceHandle may still be null if no bridge ever
     * connected — confirm unregisterResource tolerates that.
     */
    public void stop(BundleContext bundleContext) throws Exception {
        OcPlatform.unregisterResource(resourceHandle);
        context = null;
        myLight = null;
        // phHueSDK = null;
    }

    /** Starts a UPnP search for Hue bridges on the local network. */
    public void doBridgeSearch() {
        PHBridgeSearchManager sm = (PHBridgeSearchManager) phHueSDK
                .getSDKService(PHHueSDK.SEARCH_BRIDGE);
        // Start the UPNP Searching of local bridges.
        sm.search(true, true);
    }

    /** Hue SDK callbacks: bridge discovery, connect/disconnect and errors. */
    private PHSDKListener listener = new PHSDKListener() {

        @Override
        public void onAccessPointsFound(List<PHAccessPoint> accessPoint) {
            // Fix: guard before dereferencing — the SDK may deliver null/empty.
            if (accessPoint != null && accessPoint.size() > 0) {
                Log.w(TAG, "Access Points Found. " + accessPoint.size());
                phHueSDK.getAccessPointsFound().clear();
                phHueSDK.getAccessPointsFound().addAll(accessPoint);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        adapter.updateData(phHueSDK.getAccessPointsFound());
                    }
                });
                // Auto-connect to the first discovered bridge using the stored
                // whitelist username.
                HueSharedPreferences prefs2 = HueSharedPreferences
                        .getInstance(AppContext);
                PHAccessPoint accessPoint2 = (PHAccessPoint) adapter.getItem(0);
                accessPoint2.setUsername(prefs2.getUsername());
                PHBridge connectedBridge = phHueSDK.getSelectedBridge();
                if (connectedBridge != null) {
                    String connectedIP = connectedBridge.getResourceCache()
                            .getBridgeConfiguration().getIpAddress();
                    Log.w(TAG, "Connected IP: " + connectedIP);
                    if (connectedIP != null) { // We are already connected here:-
                        Log.w(TAG, "Connected IP != null");
                        phHueSDK.disableHeartbeat(connectedBridge);
                        phHueSDK.disconnect(connectedBridge);
                    }
                }
                phHueSDK.connect(accessPoint2);
            }
        }

        @Override
        public void onCacheUpdated(List<Integer> arg0, PHBridge bridge) {
            Log.w(TAG, "On CacheUpdated.");
        }

        @Override
        public void onBridgeConnected(PHBridge b) {
            Log.w(TAG, "Connected to the Bridge.");
            phHueSDK.setSelectedBridge(b);
            phHueSDK.enableHeartbeat(b, PHHueSDK.HB_INTERVAL);
            phHueSDK.getLastHeartbeat().put(
                    b.getResourceCache().getBridgeConfiguration().getIpAddress(),
                    System.currentTimeMillis());
            // NOTE(review): `this` below is the anonymous PHSDKListener, not an
            // Android Context — confirm the PlatformConfig overload in use
            // accepts it before changing.
            PlatformConfig cfg = new PlatformConfig(
                    this,
                    ServiceType.IN_PROC,
                    ModeType.CLIENT_SERVER,
                    "0.0.0.0", 0,
                    QualityOfService.LOW);
            OcPlatform.Configure(cfg);
            EntityHandlerHue entitycb = new EntityHandlerHue();
            try {
                // Expose the bulb as a discoverable OIC resource.
                resourceHandle = OcPlatform.registerResource(
                        "/a/huebulb",
                        "device.light",
                        "oc.mi.def",
                        entitycb,
                        EnumSet.of(ResourceProperty.DISCOVERABLE));
            } catch (OcException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onAuthenticationRequired(PHAccessPoint accessPoint) {
            // Bridge link button must be pressed; start push-link authentication.
            Log.w(TAG, "Authentication Required.");
            phHueSDK.startPushlinkAuthentication(accessPoint);
            Log.w(TAG, "Access Point IP addr: " + accessPoint.getIpAddress());
            Log.w(TAG, "Access Point MAC addr: " + accessPoint.getMacAddress());
            Log.w(TAG, "Access Point key: " + accessPoint.getUsername());
        }

        @Override
        public void onConnectionResumed(PHBridge bridge) {
            // Log.v(TAG, "onConnectionResumed "
            // +
            // bridge.getResourceCache().getBridgeConfiguration().getIpAddress());
            phHueSDK.getLastHeartbeat().put(
                    bridge.getResourceCache().getBridgeConfiguration()
                            .getIpAddress(),
                    System.currentTimeMillis());
            // Drop this bridge from the disconnected list now that it is back.
            for (int i = 0; i < phHueSDK.getDisconnectedAccessPoint().size(); i++) {
                if (phHueSDK.getDisconnectedAccessPoint().get(i).getIpAddress()
                        .equals(bridge.getResourceCache()
                                .getBridgeConfiguration().getIpAddress())) {
                    phHueSDK.getDisconnectedAccessPoint().remove(i);
                }
            }
        }

        @Override
        public void onConnectionLost(PHAccessPoint accessPoint) {
            Log.v(TAG, "onConnectionLost : " + accessPoint.getIpAddress());
            if (!phHueSDK.getDisconnectedAccessPoint().contains(accessPoint)) {
                phHueSDK.getDisconnectedAccessPoint().add(accessPoint);
            }
        }

        @Override
        public void onError(int code, final String message) {
            Log.e(TAG, "on Error Called : " + code + ":" + message);
            if (code == PHHueError.NO_CONNECTION) {
                Log.w(TAG, "On No Connection");
            } else if (code == PHHueError.AUTHENTICATION_FAILED
                    || code == 1158) {
                // Authentication failures are deliberately ignored here.
            } else if (code == PHHueError.BRIDGE_NOT_RESPONDING) {
                Log.w(TAG, "Bridge Not Responding . . . ");
            }
        }

        @Override
        public void onParsingErrors(List<PHHueParsingError> parsingErrorsList) {
            for (PHHueParsingError parsingError : parsingErrorsList) {
                Log.e(TAG, "ParsingError : " + parsingError.getMessage());
            }
        }
    };
}
| |
package apoc.periodic;
import org.neo4j.procedure.*;
import apoc.Pools;
import apoc.util.Util;
import org.neo4j.graphdb.Result;
import org.neo4j.helpers.collection.Iterators;
import org.neo4j.kernel.api.KernelTransaction;
import org.neo4j.kernel.internal.GraphDatabaseAPI;
import org.neo4j.logging.Log;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import static apoc.util.Util.merge;
import static java.lang.System.nanoTime;
import static java.util.Collections.singletonMap;
public class Periodic {
@Context public GraphDatabaseAPI db;   // injected database handle
@Context public Log log;               // injected Neo4j user log
// Registry of named background jobs, shared across all procedure invocations.
final static Map<JobInfo,Future> list = new ConcurrentHashMap<>();
static {
    // Every 10s, purge finished/cancelled jobs so the registry does not grow
    // without bound.
    Runnable runnable = () -> {
        for (Iterator<Map.Entry<JobInfo, Future>> it = list.entrySet().iterator(); it.hasNext(); ) {
            Map.Entry<JobInfo, Future> entry = it.next();
            if (entry.getValue().isDone() || entry.getValue().isCancelled()) it.remove();
        }
    };
    Pools.SCHEDULED.scheduleAtFixedRate(runnable,10,10,TimeUnit.SECONDS);
}
@Context
public KernelTransaction tx;           // injected kernel transaction
@Procedure
@Description("apoc.periodic.list - list all jobs")
public Stream<JobInfo> list() {
    // Refresh each JobInfo's status from its Future before returning it.
    return list.entrySet().stream().map( (e) -> e.getKey().update(e.getValue()));
}
@Procedure(mode = Mode.WRITE)
@Description("apoc.periodic.commit(statement,params) - runs the given statement in separate transactions until it returns 0")
public Stream<RundownResult> commit(@Name("statement") String statement, @Name("params") Map<String,Object> parameters) throws ExecutionException, InterruptedException {
    Map<String,Object> params = parameters == null ? Collections.emptyMap() : parameters;
    long total = 0, executions = 0, updates = 0;
    long start = nanoTime();
    AtomicInteger batches = new AtomicInteger();
    AtomicInteger failedCommits = new AtomicInteger();
    Map<String,Long> commitErrors = new ConcurrentHashMap<>();
    AtomicInteger failedBatches = new AtomicInteger();
    Map<String,Long> batchErrors = new ConcurrentHashMap<>();
    do {
        // Expose progress to the statement via the _count/_total parameters.
        Map<String, Object> window = Util.map("_count", updates, "_total", total);
        // Run each batch on the scheduler so it gets its own transaction;
        // a failed batch records its error and reports 0 updates, which also
        // terminates the loop.
        updates = Util.getFuture(Pools.SCHEDULED.submit(() -> {
            batches.incrementAndGet();
            try {
                return executeNumericResultStatement(statement, merge(window, params));
            } catch(Exception e) {
                failedBatches.incrementAndGet();
                recordError(batchErrors, e);
                return 0L;
            }
        }), commitErrors, failedCommits, 0L);
        total += updates;
        if (updates > 0) executions++;
    } while (updates > 0);  // repeat until the statement reports no more work
    long timeTaken = TimeUnit.NANOSECONDS.toSeconds(nanoTime() - start);
    return Stream.of(new RundownResult(total,executions, timeTaken, batches.get(),failedBatches.get(),batchErrors, failedCommits.get(), commitErrors));
}
/** Tallies one more occurrence of {@code e}'s (deduplicated) message chain. */
private void recordError(Map<String, Long> executionErrors, Exception e) {
    executionErrors.merge(getMessages(e), 1L, Long::sum);
}
/**
 * Collects the distinct messages along {@code e}'s cause chain, joined by
 * newlines (LinkedHashSet preserves order and removes duplicates).
 *
 * Fix: the previous do/while evaluated {@code e.getCause()} after stepping
 * onto a null cause, throwing a NullPointerException for any exception
 * without a cause, and it also dropped the message of the final cause.
 */
private String getMessages(Throwable e) {
    Set<String> errors = new LinkedHashSet<>();
    while (e != null) {
        errors.add(e.getMessage());
        Throwable cause = e.getCause();
        // Stop at the end of the chain or on a self-referential cause.
        if (cause == null || cause == e) break;
        e = cause;
    }
    return String.join("\n", errors);
}
/** Result row for apoc.periodic.commit: totals, timing and per-message error
    counts for both batch execution and commit failures. */
public static class RundownResult {
    public final long updates;               // total rows updated across all batches
    public final long executions;            // batches that reported > 0 updates
    public final long runtime;               // wall-clock seconds
    public final long batches;               // batches attempted
    public final long failedBatches;         // batches that threw
    public final Map<String, Long> batchErrors;   // error message -> occurrence count
    public final long failedCommits;         // commit/future failures
    public final Map<String, Long> commitErrors;  // error message -> occurrence count
    public RundownResult(long total, long executions, long timeTaken, long batches, long failedBatches, Map<String, Long> batchErrors, long failedCommits, Map<String, Long> commitErrors) {
        this.updates = total;
        this.executions = executions;
        this.runtime = timeTaken;
        this.batches = batches;
        this.failedBatches = failedBatches;
        this.batchErrors = batchErrors;
        this.failedCommits = failedCommits;
        this.commitErrors = commitErrors;
    }
}
/**
 * Runs the statement and sums every numeric value across all rows and columns
 * of its result — used by commit() to read "rows updated" style counters.
 */
private long executeNumericResultStatement(@Name("statement") String statement, @Name("params") Map<String, Object> parameters) {
    long sum = 0;
    try (Result result = db.execute(statement, parameters)) {
        while (result.hasNext()) {
            for (Object cell : result.next().values()) {
                if (cell instanceof Number) {
                    sum += ((Number) cell).longValue();
                }
            }
        }
    }
    return sum;
}
@Procedure
@Description("apoc.periodic.cancel(name) - cancel job with the given name")
public Stream<JobInfo> cancel(@Name("name") String name) {
    // Remove the job from the registry; unknown names yield an empty stream.
    JobInfo info = new JobInfo(name);
    Future future = list.remove(info);
    if (future == null) {
        return Stream.empty();
    }
    future.cancel(true);
    return Stream.of(info.update(future));
}
@Procedure
@Description("apoc.periodic.submit('name',statement) - submit a one-off background statement")
public Stream<JobInfo> submit(@Name("name") String name, @Name("statement") String statement) {
    // Delegate to the static submit(); the task drains the result to force
    // full execution of the statement.
    JobInfo info = submit(name, () -> {
        try {
            Iterators.count(db.execute(statement));
        } catch(Exception e) {
            // Rethrow unchecked so the Future surfaces the failure.
            throw new RuntimeException(e);
        }
    });
    return Stream.of(info);
}
@Procedure
@Description("apoc.periodic.repeat('name',statement,repeat-rate-in-seconds) submit a repeatedly-called background statement")
public Stream<JobInfo> repeat(@Name("name") String name, @Name("statement") String statement, @Name("rate") long rate) {
    // Schedule with no initial delay; each run fully drains the result.
    JobInfo info = schedule(name, () -> Iterators.count(db.execute(statement)),0,rate);
    return Stream.of(info);
}
@Procedure(mode = Mode.WRITE)
@Description("apoc.periodic.countdown('name',statement,repeat-rate-in-seconds) submit a repeatedly-called background statement until it returns 0")
public Stream<JobInfo> countdown(@Name("name") String name, @Name("statement") String statement, @Name("rate") long rate) {
    // Countdown reschedules itself until the statement reports 0.
    JobInfo info = submit(name, new Countdown(name, statement, rate));
    info.rate = rate;
    return Stream.of(info);
}
/**
 * Submits a one-off background task under {@code name}, cancelling (without
 * interruption) any unfinished job previously registered under the same name.
 * Call from a procedure that gets a <code>@Context GraphDatabaseAPI db;</code>
 * injected and provide that db to the runnable.
 * (Raw {@code Future<T>} local replaced by {@code Future<?>} to avoid an
 * unchecked assignment; the public signature is unchanged.)
 */
public static <T> JobInfo submit(String name, Runnable task) {
    JobInfo info = new JobInfo(name);
    Future<?> future = list.remove(info);
    if (future != null && !future.isDone()) future.cancel(false);
    Future<?> newFuture = Pools.SCHEDULED.submit(task);
    list.put(info, newFuture);
    return info;
}
/**
 * Schedules {@code task} under {@code name} with fixed delay {@code repeat}
 * seconds after an initial {@code delay}, replacing any unfinished job of the
 * same name. Call from a procedure that gets a
 * <code>@Context GraphDatabaseAPI db;</code> injected and provide that db to
 * the runnable.
 * (Raw {@code Future} local replaced by {@code Future<?>}; signature unchanged.)
 */
public static JobInfo schedule(String name, Runnable task, long delay, long repeat) {
    JobInfo info = new JobInfo(name, delay, repeat);
    Future<?> future = list.remove(info);
    if (future != null && !future.isDone()) future.cancel(false);
    ScheduledFuture<?> newFuture = Pools.SCHEDULED.scheduleWithFixedDelay(task, delay, repeat, TimeUnit.SECONDS);
    list.put(info, newFuture);
    return info;
}
/**
 * As long as cypherLoop does not return 0, null, false, or the empty string as
 * 'value': invoke cypherAction in batched transactions fed from cypherIterate
 * running in the main thread. The previous loop value is available to
 * cypherLoop as the {@code previous} parameter.
 *
 * @param cypherLoop    statement returning a 'loop' column used as the guard
 * @param cypherIterate statement producing the items to process
 * @param cypherAction  statement executed per batch of items
 * @param batchSize     number of items per batch
 */
@Procedure(mode = Mode.WRITE)
@Description("apoc.periodic.rock_n_roll_while('some cypher for knowing when to stop', 'some cypher for iteration', 'some cypher as action on each iteration', 10000) YIELD batches, total - run the action statement in batches over the iterator statement's results in a separate thread. Returns number of batches and total processed rows")
public Stream<LoopingBatchAndTotalResult> rock_n_roll_while(
        @Name("cypherLoop") String cypherLoop,
        @Name("cypherIterate") String cypherIterate,
        @Name("cypherAction") String cypherAction,
        @Name("batchSize") long batchSize) {
    Stream<LoopingBatchAndTotalResult> allResults = Stream.empty();
    Map<String,Object> loopParams = new HashMap<>(1);
    Object value = null;
    while (true) {
        loopParams.put("previous", value);
        // NOTE(review): assumes the loop statement always returns at least one
        // row with a 'loop' column — result.next() would throw otherwise.
        try (Result result = db.execute(cypherLoop, loopParams)) {
            value = result.next().get("loop");
            if (!Util.toBoolean(value)) return allResults;
        }
        log.info("starting batched operation using iteration `%s` in separate thread", cypherIterate);
        try (Result result = db.execute(cypherIterate)) {
            // Accumulate each pass's results lazily; streams are only consumed
            // by the caller after the guard finally fails.
            Stream<BatchAndTotalResult> oneResult =
                iterateAndExecuteBatchedInSeparateThread((int) batchSize, false, false,0, result, params -> db.execute(cypherAction, params));
            final Object loopParam = value;
            allResults = Stream.concat(allResults, oneResult.map(r -> r.inLoop(loopParam)));
        }
    }
}
/**
 * Invokes cypherAction in batched transactions fed from cypherIterate running
 * in the main thread.
 *
 * @param cypherIterate statement producing the items to process
 * @param cypherAction  statement executed per item (or per batch when
 *                      iterateList is set)
 */
@Procedure(mode = Mode.WRITE)
@Description("apoc.periodic.iterate('statement returning items', 'statement per item', {batchSize:1000,iterateList:false,parallel:true}) YIELD batches, total - run the second statement for each item returned by the first statement. Returns number of batches and total processed rows")
public Stream<BatchAndTotalResult> iterate(
        @Name("cypherIterate") String cypherIterate,
        @Name("cypherAction") String cypherAction,
        @Name("config") Map<String,Object> config) {

    long batchSize = Util.toLong(config.getOrDefault("batchSize", 10000));
    boolean parallel = Util.toBoolean(config.getOrDefault("parallel", false));
    boolean iterateList = Util.toBoolean(config.getOrDefault("iterateList", false));
    long retries = Util.toLong(config.getOrDefault("retries", 0)); // todo sleep/delay or push to end of batch to try again or immediate ?
    Map<String,Object> params = (Map)config.getOrDefault("params", Collections.emptyMap());
    try (Result result = db.execute(cypherIterate,params)) {
        // Rewrite the action so it can consume the iterator's columns per batch.
        String innerStatement = prepareInnerStatement(cypherAction, iterateList, result.columns(), "_batch");
        log.info("starting batching from `%s` operation using iteration `%s` in separate thread", cypherIterate,cypherAction);
        return iterateAndExecuteBatchedInSeparateThread((int)batchSize, parallel, iterateList, retries, result, (p) -> db.execute(innerStatement, merge(params, p)).close());
    }
}
/**
 * Runs the executor, retrying on failure up to maxRetries times (100ms pause between
 * attempts, current attempt exposed to the statement as parameter '_retry').
 * Returns the attempt number that finally succeeded; rethrows once retries are exhausted.
 */
public long retry(Consumer<Map<String, Object>> executor, Map<String, Object> params, long retry, long maxRetries) {
    long attempt = retry;
    while (true) {
        try {
            executor.accept(merge(params, singletonMap("_retry", attempt)));
            return attempt;
        } catch (Exception e) {
            if (attempt >= maxRetries) throw e;
            log.warn("Retrying operation "+attempt+" of "+maxRetries);
            Util.sleep(100);
            attempt++;
        }
    }
}
// Detects statements that already bind their own parameters via WITH/UNWIND {param} or $param.
static Pattern CONTAINS_PARAM_MAPPING = Pattern.compile("(WITH|UNWIND)\\s*[{$]",Pattern.CASE_INSENSITIVE|Pattern.MULTILINE|Pattern.DOTALL);

/**
 * Prefixes the action statement with a parameter mapping for the iterated columns,
 * unless the statement already declares its own WITH/UNWIND parameter mapping.
 * With iterateList the whole batch is unwound from the given iterator parameter.
 */
public String prepareInnerStatement(String cypherAction, boolean iterateList, List<String> columns, String iterator) {
    boolean declaresOwnMapping = CONTAINS_PARAM_MAPPING.matcher(cypherAction).find();
    if (declaresOwnMapping) {
        return cypherAction;
    }
    if (!iterateList) {
        // bind each column directly from a same-named parameter
        return Util.withMapping(columns.stream(), (c) -> Util.param(c) + " AS " + Util.quote(c)) + cypherAction;
    }
    // unwind the batch list and project each column off the iterator row
    String columnMapping = Util.withMapping(columns.stream(), (c) -> Util.quote(iterator) + "." + Util.quote(c) + " AS " + Util.quote(c));
    return "UNWIND "+ Util.param(iterator)+" AS "+ Util.quote(iterator) + columnMapping + " " + cypherAction;
}
/**
 * Runs the action statement in batches over the iteration statement's rows in a
 * separate thread; yields the number of batches and total processed rows.
 */
@Procedure(mode = Mode.WRITE)
@Description("apoc.periodic.rock_n_roll('some cypher for iteration', 'some cypher as action on each iteration', 10000) YIELD batches, total - run the action statement in batches over the iterator statement's results in a separate thread. Returns number of batches and total processed rows")
public Stream<BatchAndTotalResult> rock_n_roll(
    @Name("cypherIterate") String cypherIterate,
    @Name("cypherAction") String cypherAction,
    @Name("batchSize") long batchSize) {
    log.info("starting batched operation using iteration `%s` in separate thread", cypherIterate);
    try (Result iterationResult = db.execute(cypherIterate)) {
        Consumer<Map<String, Object>> action = params -> db.execute(cypherAction, params).close();
        return iterateAndExecuteBatchedInSeparateThread((int) batchSize, false, false, 0, iterationResult, action);
    }
}
/**
 * Drains the iterator into batches of batchsize rows and submits each batch as a
 * transactional task to a thread pool (shared parallel pool or a single-thread pool).
 * When iterateList is true the consumer is invoked once per batch with the whole batch
 * bound as '_batch'; otherwise it is invoked once per row. Failures are retried up to
 * 'retries' times and counted; per-exception messages are aggregated into error maps.
 *
 * @return a single-element stream with the aggregated BatchAndTotalResult
 */
private Stream<BatchAndTotalResult> iterateAndExecuteBatchedInSeparateThread(int batchsize, boolean parallel, boolean iterateList, long retries,
                                                                             Iterator<Map<String,Object>> iterator, Consumer<Map<String,Object>> consumer) {
    ExecutorService pool = parallel ? Pools.DEFAULT : Pools.SINGLE;
    List<Future<Long>> futures = new ArrayList<>(1000);
    long batches = 0;
    long start = System.nanoTime();
    AtomicLong count = new AtomicLong();
    AtomicInteger failedOps = new AtomicInteger();
    AtomicLong retried = new AtomicLong();
    Map<String,Long> operationErrors = new ConcurrentHashMap<>();
    do {
        if (log.isDebugEnabled()) log.debug("execute in batch no " + batches + " batch size " + batchsize);
        List<Map<String,Object>> batch = Util.take(iterator, batchsize);
        long currentBatchSize = batch.size();
        Callable<Long> task;
        if (iterateList) {
            task = () -> {
                long c = count.addAndGet(currentBatchSize);
                try {
                    Map<String, Object> params = Util.map("_count", c, "_batch", batch);
                    retried.addAndGet(retry(consumer,params,0,retries));
                } catch (Exception e) {
                    // BUGFIX: count only the rows actually in this batch; the final batch
                    // may be smaller than batchsize, which previously overcounted failures
                    failedOps.addAndGet((int) currentBatchSize);
                    recordError(operationErrors, e);
                }
                return currentBatchSize;
            };
        } else {
            task = () -> batch.stream().map(
                p -> {
                    long c = count.incrementAndGet();
                    try {
                        Map<String, Object> params = merge(p, Util.map("_count", c, "_batch", batch));
                        retried.addAndGet(retry(consumer,params,0,retries));
                    } catch (Exception e) {
                        failedOps.incrementAndGet();
                        recordError(operationErrors, e);
                    }
                    return 1;
                }).mapToLong(l -> l).sum();
        }
        futures.add(Util.inTxFuture(pool, db, task));
        batches++;
    } while (iterator.hasNext());
    // collect all task results; failed transactions are counted separately from failed operations
    AtomicInteger failedBatches = new AtomicInteger();
    Map<String,Long> batchErrors = new HashMap<>();
    long successes = futures.stream().mapToLong(f -> Util.getFuture(f, batchErrors, failedBatches, 0L)).sum();
    Util.logErrors("Error during iterate.commit:", batchErrors, log);
    Util.logErrors("Error during iterate.execute:", operationErrors, log);
    long timeTaken = TimeUnit.NANOSECONDS.toSeconds(System.nanoTime() - start);
    BatchAndTotalResult result =
        new BatchAndTotalResult(batches, count.get(), timeTaken, successes, failedOps.get(), failedBatches.get(), retried.get(), operationErrors, batchErrors);
    return Stream.of(result);
}
/**
 * Aggregated outcome of one batched run: batch/operation counts, timing, retry count,
 * and error-message histograms, plus convenience maps grouping the batch- and
 * operation-level statistics for procedure output.
 */
public static class BatchAndTotalResult {
    public final long batches;              // number of batches submitted
    public final long total;                // total rows fed to the consumer
    public final long timeTaken;            // wall-clock seconds for the whole run
    public final long committedOperations;  // operations in successfully committed transactions
    public final long failedOperations;     // operations whose consumer threw
    public final long failedBatches;        // transactions that failed to commit
    public final long retries;              // sum of retry attempt numbers that eventually succeeded
    public final Map<String,Long> errorMessages;  // operation error message -> occurrence count
    public final Map<String,Object> batch;        // {total, failed, committed, errors} at batch level
    public final Map<String,Object> operations;   // {total, failed, committed, errors} at operation level
    public BatchAndTotalResult(long batches, long total, long timeTaken, long committedOperations, long failedOperations, long failedBatches,long retries, Map<String, Long> operationErrors, Map<String, Long> batchErrors) {
        this.batches = batches;
        this.total = total;
        this.timeTaken = timeTaken;
        this.committedOperations = committedOperations;
        this.failedOperations = failedOperations;
        this.failedBatches = failedBatches;
        this.retries = retries;
        this.errorMessages = operationErrors;
        this.batch = Util.map("total",batches,"failed",failedBatches,"committed",batches-failedBatches,"errors",batchErrors);
        this.operations = Util.map("total",total,"failed",failedOperations,"committed", committedOperations,"errors",operationErrors);
    }
    /** Wraps this result with the loop value that produced it (used by rock_n_roll_while). */
    public LoopingBatchAndTotalResult inLoop(Object loop) {
        return new LoopingBatchAndTotalResult(loop, batches, total);
    }
}
/** Per-loop-iteration result: the loop condition value plus batch and row counts. */
public static class LoopingBatchAndTotalResult {
    public Object loop;    // value of the 'loop' column that allowed this iteration
    public long batches;   // batches executed in this iteration
    public long total;     // rows processed in this iteration
    public LoopingBatchAndTotalResult(Object loop, long batches, long total) {
        this.loop = loop;
        this.batches = batches;
        this.total = total;
    }
}
/**
 * Schedules a one-shot task under the given name, replacing any job previously
 * registered under the same name (JobInfo equality is by name only).
 * Call from a procedure that gets a <code>@Context GraphDatabaseAPI db;</code> injected and provide that db to the runnable.
 *
 * @param name  job name; an existing, still-running job with this name is cancelled
 * @param task  work to run once after the delay
 * @param delay delay in seconds before the task runs
 * @return the JobInfo key under which the new future is registered
 */
public static JobInfo schedule(String name, Runnable task, long delay) {
    JobInfo info = new JobInfo(name,delay,0);
    Future<?> future = list.remove(info);
    // only cancel if still pending/running, matching the fixed-delay scheduler above
    if (future != null && !future.isDone()) future.cancel(false);
    ScheduledFuture<?> newFuture = Pools.SCHEDULED.schedule(task, delay, TimeUnit.SECONDS);
    list.put(info,newFuture);
    return info;
}
/**
 * Key and status holder for a scheduled job. NOTE: equality and hashCode are based on
 * the name ONLY, so re-registering a job under the same name replaces the old entry
 * regardless of delay/rate.
 */
public static class JobInfo {
    public final String name;
    public long delay;      // initial delay in seconds
    public long rate;       // repeat rate in seconds; 0 for one-shot jobs
    public boolean done;       // snapshot of Future.isDone(), set via update()
    public boolean cancelled;  // snapshot of Future.isCancelled(), set via update()
    public JobInfo(String name) {
        this.name = name;
    }
    public JobInfo(String name, long delay, long rate) {
        this.name = name;
        this.delay = delay;
        this.rate = rate;
    }
    /** Copies the current completion/cancellation state of the future into this info. */
    public JobInfo update(Future future) {
        this.done = future.isDone();
        this.cancelled = future.isCancelled();
        return this;
    }
    @Override
    public boolean equals(Object o) {
        // identity by name only (see class comment)
        return this == o || o instanceof JobInfo && name.equals(((JobInfo) o).name);
    }
    @Override
    public int hashCode() {
        return name.hashCode();
    }
}
/**
 * Self-rescheduling task: runs the statement, and as long as its numeric result is
 * positive, re-submits itself after 'rate' seconds. Stops once the statement returns
 * a non-positive value.
 */
private class Countdown implements Runnable {
    private final String name;
    private final String statement;
    private final long rate;
    public Countdown(String name, String statement, long rate) {
        this.name = name;
        this.statement = statement;
        this.rate = rate;
    }
    @Override
    public void run() {
        // keep counting down while the statement reports remaining work (> 0)
        if (Periodic.this.executeNumericResultStatement(statement, Collections.emptyMap()) > 0) {
            Pools.SCHEDULED.schedule(() -> submit(name, this), rate, TimeUnit.SECONDS);
        }
    }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.search.SortField;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.LatLonPointFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.GeoValidationMethod;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.test.geo.RandomGeoGenerator;
import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
/**
 * Unit tests for {@code GeoDistanceSortBuilder}: randomized round-trip construction and
 * mutation (for the equals/serialization machinery in AbstractSortTestCase), XContent
 * parsing edge cases, deprecation warnings, and checks that the common sort case uses
 * the optimized LatLonDocValuesField distance sort.
 */
public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanceSortBuilder> {
    @Override
    protected GeoDistanceSortBuilder createTestItem() {
        return randomGeoDistanceSortBuilder();
    }
    /** Builds a randomized sort builder using one of three point-initialisation strategies. */
    public static GeoDistanceSortBuilder randomGeoDistanceSortBuilder() {
        String fieldName = randomAlphaOfLengthBetween(1, 10);
        GeoDistanceSortBuilder result = null;
        int id = randomIntBetween(0, 2);
        switch(id) {
            case 0:
                // strategy 0: one or more geohash strings
                int count = randomIntBetween(1, 10);
                String[] geohashes = new String[count];
                for (int i = 0; i < count; i++) {
                    geohashes[i] = RandomGeoGenerator.randomPoint(random()).geohash();
                }
                result = new GeoDistanceSortBuilder(fieldName, geohashes);
                break;
            case 1:
                // strategy 1: a single lat/lon pair
                GeoPoint pt = RandomGeoGenerator.randomPoint(random());
                result = new GeoDistanceSortBuilder(fieldName, pt.getLat(), pt.getLon());
                break;
            case 2:
                // strategy 2: an array of GeoPoints
                result = new GeoDistanceSortBuilder(fieldName, points(new GeoPoint[0]));
                break;
            default:
                throw new IllegalStateException("one of three geo initialisation strategies must be used");
        }
        // randomly flip each optional setting; SUM is excluded because it is rejected for geo sorts
        if (randomBoolean()) {
            result.geoDistance(geoDistance(result.geoDistance()));
        }
        if (randomBoolean()) {
            result.unit(randomValueOtherThan(result.unit(), () -> randomFrom(DistanceUnit.values())));
        }
        if (randomBoolean()) {
            result.order(randomFrom(SortOrder.values()));
        }
        if (randomBoolean()) {
            result.sortMode(randomValueOtherThan(SortMode.SUM, () -> randomFrom(SortMode.values())));
        }
        if (randomBoolean()) {
            result.setNestedFilter(new MatchAllQueryBuilder());
        }
        if (randomBoolean()) {
            result.setNestedPath(
                    randomValueOtherThan(
                            result.getNestedPath(),
                            () -> randomAlphaOfLengthBetween(1, 10)));
        }
        if (randomBoolean()) {
            result.validation(randomValueOtherThan(result.validation(), () -> randomFrom(GeoValidationMethod.values())));
        }
        return result;
    }
    @Override
    protected MappedFieldType provideMappedFieldType(String name) {
        MappedFieldType clone = LatLonPointFieldMapper.Defaults.FIELD_TYPE.clone();
        clone.setName(name);
        return clone;
    }
    /** Returns a random non-empty GeoPoint array guaranteed to differ from {@code original}. */
    private static GeoPoint[] points(GeoPoint[] original) {
        GeoPoint[] result = null;
        while (result == null || Arrays.deepEquals(original, result)) {
            int count = randomIntBetween(1, 10);
            result = new GeoPoint[count];
            for (int i = 0; i < count; i++) {
                result[i] = RandomGeoGenerator.randomPoint(random());
            }
        }
        return result;
    }
    /** Returns a random GeoDistance different from {@code original}. */
    private static GeoDistance geoDistance(GeoDistance original) {
        int id = -1;
        while (id == -1 || (original != null && original.ordinal() == id)) {
            id = randomIntBetween(0, GeoDistance.values().length - 1);
        }
        return GeoDistance.values()[id];
    }
    @Override
    protected GeoDistanceSortBuilder mutate(GeoDistanceSortBuilder original) throws IOException {
        // change exactly one randomly-chosen property so the result is unequal to the original
        GeoDistanceSortBuilder result = new GeoDistanceSortBuilder(original);
        int parameter = randomIntBetween(0, 8);
        switch (parameter) {
            case 0:
                while (Arrays.deepEquals(original.points(), result.points())) {
                    GeoPoint pt = RandomGeoGenerator.randomPoint(random());
                    result.point(pt.getLat(), pt.getLon());
                }
                break;
            case 1:
                result.points(points(original.points()));
                break;
            case 2:
                result.geoDistance(geoDistance(original.geoDistance()));
                break;
            case 3:
                result.unit(randomValueOtherThan(result.unit(), () -> randomFrom(DistanceUnit.values())));
                break;
            case 4:
                result.order(randomValueOtherThan(original.order(), () -> randomFrom(SortOrder.values())));
                break;
            case 5:
                result.sortMode(randomValueOtherThanMany(
                        Arrays.asList(SortMode.SUM, result.sortMode())::contains,
                        () -> randomFrom(SortMode.values())));
                break;
            case 6:
                result.setNestedFilter(randomValueOtherThan(
                        original.getNestedFilter(),
                        () -> randomNestedFilter()));
                break;
            case 7:
                result.setNestedPath(randomValueOtherThan(
                        result.getNestedPath(),
                        () -> randomAlphaOfLengthBetween(1, 10)));
                break;
            case 8:
                result.validation(randomValueOtherThan(result.validation(), () -> randomFrom(GeoValidationMethod.values())));
                break;
        }
        return result;
    }
    @Override
    protected void sortFieldAssertions(GeoDistanceSortBuilder builder, SortField sortField, DocValueFormat format) throws IOException {
        assertEquals(builder.order() == SortOrder.ASC ? false : true, sortField.getReverse());
        assertEquals(builder.fieldName(), sortField.getField());
    }
    public void testSortModeSumIsRejectedInSetter() {
        GeoDistanceSortBuilder builder = new GeoDistanceSortBuilder("testname", -1, -1);
        GeoPoint point = RandomGeoGenerator.randomPoint(random());
        builder.point(point.getLat(), point.getLon());
        try {
            builder.sortMode(SortMode.SUM);
            fail("sort mode sum should not be supported");
        } catch (IllegalArgumentException e) {
            // all good
        }
    }
    public void testReverseOptionFailsWhenNonStringField() throws IOException {
        // 'reverse' is not a supported option; a non-string value is reported as a bad geohash
        String json = "{\n" +
                "  \"testname\" : [ {\n" +
                "    \"lat\" : -6.046997540714173,\n" +
                "    \"lon\" : -51.94128329747579\n" +
                "  } ],\n" +
                "  \"reverse\" : true\n" +
                "}";
        XContentParser itemParser = createParser(JsonXContent.jsonXContent, json);
        itemParser.nextToken();
        QueryParseContext context = new QueryParseContext(itemParser);
        try {
            GeoDistanceSortBuilder.fromXContent(context, "");
            fail("adding reverse sorting option should fail with an exception");
        } catch (ParsingException e) {
            assertEquals("Only geohashes of type string supported for field [reverse]", e.getMessage());
        }
    }
    public void testReverseOptionFailsWhenStringFieldButResetting() throws IOException {
        // a string-valued 'reverse' is interpreted as a second field name, which is rejected
        String json = "{\n" +
                "  \"testname\" : [ {\n" +
                "    \"lat\" : -6.046997540714173,\n" +
                "    \"lon\" : -51.94128329747579\n" +
                "  } ],\n" +
                "  \"reverse\" : \"true\"\n" +
                "}";
        XContentParser itemParser = createParser(JsonXContent.jsonXContent, json);
        itemParser.nextToken();
        QueryParseContext context = new QueryParseContext(itemParser);
        try {
            GeoDistanceSortBuilder.fromXContent(context, "");
            fail("adding reverse sorting option should fail with an exception");
        } catch (ParsingException e) {
            assertEquals("Trying to reset fieldName to [reverse], already set to [testname].", e.getMessage());
        }
    }
    public void testReverseOptionFailsBuildWhenInvalidGeoHashString() throws IOException {
        // 'reverse' becomes the field name and "false" is parsed as a (bogus) geohash;
        // STRICT validation then rejects the resulting out-of-range latitude at build time
        String json = "{\n" +
                "  \"reverse\" : \"false\"\n" +
                "}";
        XContentParser itemParser = createParser(JsonXContent.jsonXContent, json);
        itemParser.nextToken();
        QueryParseContext context = new QueryParseContext(itemParser);
        try {
            GeoDistanceSortBuilder item = GeoDistanceSortBuilder.fromXContent(context, "");
            item.validation(GeoValidationMethod.STRICT);
            item.build(createMockShardContext());
            fail("adding reverse sorting option should fail with an exception");
        } catch (ElasticsearchParseException e) {
            assertEquals("illegal latitude value [269.384765625] for [GeoDistanceSort] for field [reverse].", e.getMessage());
        }
    }
    public void testCoerceIsDeprecated() throws IOException {
        String json = "{\n" +
                "  \"testname\" : [ {\n" +
                "    \"lat\" : -6.046997540714173,\n" +
                "    \"lon\" : -51.94128329747579\n" +
                "  } ],\n" +
                "  \"unit\" : \"m\",\n" +
                "  \"distance_type\" : \"arc\",\n" +
                "  \"mode\" : \"MAX\",\n" +
                "  \"coerce\" : true\n" +
                "}";
        XContentParser itemParser = createParser(JsonXContent.jsonXContent, json);
        itemParser.nextToken();
        QueryParseContext context = new QueryParseContext(itemParser);
        GeoDistanceSortBuilder.fromXContent(context, "");
        assertWarnings("Deprecated field [coerce] used, replaced by [validation_method]");
    }
    public void testIgnoreMalformedIsDeprecated() throws IOException {
        String json = "{\n" +
                "  \"testname\" : [ {\n" +
                "    \"lat\" : -6.046997540714173,\n" +
                "    \"lon\" : -51.94128329747579\n" +
                "  } ],\n" +
                "  \"unit\" : \"m\",\n" +
                "  \"distance_type\" : \"arc\",\n" +
                "  \"mode\" : \"MAX\",\n" +
                "  \"ignore_malformed\" : true\n" +
                "}";
        XContentParser itemParser = createParser(JsonXContent.jsonXContent, json);
        itemParser.nextToken();
        QueryParseContext context = new QueryParseContext(itemParser);
        GeoDistanceSortBuilder.fromXContent(context, "");
        assertWarnings("Deprecated field [ignore_malformed] used, replaced by [validation_method]");
    }
    public void testSortModeSumIsRejectedInJSON() throws IOException {
        String json = "{\n" +
                "  \"testname\" : [ {\n" +
                "    \"lat\" : -6.046997540714173,\n" +
                "    \"lon\" : -51.94128329747579\n" +
                "  } ],\n" +
                "  \"unit\" : \"m\",\n" +
                "  \"distance_type\" : \"arc\",\n" +
                "  \"mode\" : \"SUM\"\n" +
                "}";
        XContentParser itemParser = createParser(JsonXContent.jsonXContent, json);
        itemParser.nextToken();
        QueryParseContext context = new QueryParseContext(itemParser);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> GeoDistanceSortBuilder.fromXContent(context, ""));
        assertEquals("sort_mode [sum] isn't supported for sorting by geo distance", e.getMessage());
    }
    public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException {
        String json = "{\n" +
                "    \"VDcvDuFjE\" : [ \"7umzzv8eychg\", \"dmdgmt5z13uw\", " +
                "    \"ezu09wxw6v4c\", \"kc7s3515p6k6\", \"jgeuvjwrmfzn\", \"kcpcfj7ruyf8\" ],\n" +
                "    \"unit\" : \"m\",\n" +
                "    \"distance_type\" : \"arc\",\n" +
                "    \"mode\" : \"MAX\",\n" +
                "    \"nested_filter\" : {\n" +
                "      \"ids\" : {\n" +
                "        \"type\" : [ ],\n" +
                "        \"values\" : [ ],\n" +
                "        \"boost\" : 5.711116\n" +
                "      }\n" +
                "    },\n" +
                "    \"validation_method\" : \"STRICT\"\n" +
                "  }";
        XContentParser itemParser = createParser(JsonXContent.jsonXContent, json);
        itemParser.nextToken();
        QueryParseContext context = new QueryParseContext(itemParser);
        GeoDistanceSortBuilder result = GeoDistanceSortBuilder.fromXContent(context, json);
        // the geohashes above decode to exactly these lat/lon pairs
        assertEquals("[-19.700583312660456, -2.8225036337971687, "
                + "31.537466906011105, -74.63590376079082, "
                + "43.71844606474042, -5.548660643398762, "
                + "-37.20467280596495, 38.71751043945551, "
                + "-69.44606635719538, 84.25200328230858, "
                + "-39.03717711567879, 44.74099852144718]", Arrays.toString(result.points()));
    }
    public void testGeoDistanceSortParserManyPointsNoException() throws Exception {
        // every supported point notation (nested arrays, GeoPoint values, "lat,lon" strings,
        // geohashes, flat arrays, single values, and mixes thereof) must parse without error
        XContentBuilder sortBuilder = jsonBuilder();
        sortBuilder.startObject();
        sortBuilder.startArray("location");
        sortBuilder.startArray().value(1.2).value(3).endArray().startArray().value(5).value(6).endArray();
        sortBuilder.endArray();
        sortBuilder.field("order", "desc");
        sortBuilder.field("unit", "km");
        sortBuilder.field("mode", "max");
        sortBuilder.endObject();
        parse(sortBuilder);
        sortBuilder = jsonBuilder();
        sortBuilder.startObject();
        sortBuilder.startArray("location");
        sortBuilder.value(new GeoPoint(1.2, 3)).value(new GeoPoint(1.2, 3));
        sortBuilder.endArray();
        sortBuilder.field("order", "desc");
        sortBuilder.field("unit", "km");
        sortBuilder.field("mode", "max");
        sortBuilder.endObject();
        parse(sortBuilder);
        sortBuilder = jsonBuilder();
        sortBuilder.startObject();
        sortBuilder.startArray("location");
        sortBuilder.value("1,2").value("3,4");
        sortBuilder.endArray();
        sortBuilder.field("order", "desc");
        sortBuilder.field("unit", "km");
        sortBuilder.field("mode", "max");
        sortBuilder.endObject();
        parse(sortBuilder);
        sortBuilder = jsonBuilder();
        sortBuilder.startObject();
        sortBuilder.startArray("location");
        sortBuilder.value("s3y0zh7w1z0g").value("s6wjr4et3f8v");
        sortBuilder.endArray();
        sortBuilder.field("order", "desc");
        sortBuilder.field("unit", "km");
        sortBuilder.field("mode", "max");
        sortBuilder.endObject();
        parse(sortBuilder);
        sortBuilder = jsonBuilder();
        sortBuilder.startObject();
        sortBuilder.startArray("location");
        sortBuilder.value(1.2).value(3);
        sortBuilder.endArray();
        sortBuilder.field("order", "desc");
        sortBuilder.field("unit", "km");
        sortBuilder.field("mode", "max");
        sortBuilder.endObject();
        parse(sortBuilder);
        sortBuilder = jsonBuilder();
        sortBuilder.startObject();
        sortBuilder.field("location", new GeoPoint(1, 2));
        sortBuilder.field("order", "desc");
        sortBuilder.field("unit", "km");
        sortBuilder.field("mode", "max");
        sortBuilder.endObject();
        parse(sortBuilder);
        sortBuilder = jsonBuilder();
        sortBuilder.startObject();
        sortBuilder.field("location", "1,2");
        sortBuilder.field("order", "desc");
        sortBuilder.field("unit", "km");
        sortBuilder.field("mode", "max");
        sortBuilder.endObject();
        parse(sortBuilder);
        sortBuilder = jsonBuilder();
        sortBuilder.startObject();
        sortBuilder.field("location", "s3y0zh7w1z0g");
        sortBuilder.field("order", "desc");
        sortBuilder.field("unit", "km");
        sortBuilder.field("mode", "max");
        sortBuilder.endObject();
        parse(sortBuilder);
        sortBuilder = jsonBuilder();
        sortBuilder.startObject();
        sortBuilder.startArray("location");
        sortBuilder.value(new GeoPoint(1, 2)).value("s3y0zh7w1z0g").startArray().value(1).value(2).endArray().value("1,2");
        sortBuilder.endArray();
        sortBuilder.field("order", "desc");
        sortBuilder.field("unit", "km");
        sortBuilder.field("mode", "max");
        sortBuilder.endObject();
        parse(sortBuilder);
    }
    public void testGeoDistanceSortDeprecatedSortModeException() throws Exception {
        XContentBuilder sortBuilder = jsonBuilder();
        sortBuilder.startObject();
        sortBuilder.startArray("location");
        sortBuilder.startArray().value(1.2).value(3).endArray().startArray().value(5).value(6).endArray();
        sortBuilder.endArray();
        sortBuilder.field("order", "desc");
        sortBuilder.field("unit", "km");
        sortBuilder.field("sort_mode", "max");
        sortBuilder.endObject();
        parse(sortBuilder);
        assertWarnings("Deprecated field [sort_mode] used, expected [mode] instead");
    }
    /** Parses the given builder's JSON into a GeoDistanceSortBuilder. */
    private GeoDistanceSortBuilder parse(XContentBuilder sortBuilder) throws Exception {
        XContentParser parser = createParser(sortBuilder);
        QueryParseContext parseContext = new QueryParseContext(parser);
        parser.nextToken();
        return GeoDistanceSortBuilder.fromXContent(parseContext, null);
    }
    @Override
    protected GeoDistanceSortBuilder fromXContent(QueryParseContext context, String fieldName) throws IOException {
        return GeoDistanceSortBuilder.fromXContent(context, fieldName);
    }
    public void testCommonCaseIsOptimized() throws IOException {
        // make sure the below tests test something...
        assertFalse(SortField.class.equals(LatLonDocValuesField.newDistanceSort("random_field_name", 3.5, 2.1).getClass()));
        QueryShardContext context = createMockShardContext();
        // The common case should use LatLonDocValuesField.newDistanceSort
        // NOTE(review): the field name here is the empty string while the reference sort uses
        // "random_field_name"; the class-equality assertion passes either way — confirm intent.
        GeoDistanceSortBuilder builder = new GeoDistanceSortBuilder("", new GeoPoint(3.5, 2.1));
        SortFieldAndFormat sort = builder.build(context);
        assertEquals(LatLonDocValuesField.newDistanceSort("random_field_name", 3.5, 2.1).getClass(), sort.field.getClass());
        // however this might be disabled by fancy options
        builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1), new GeoPoint(3.0, 4));
        sort = builder.build(context);
        assertEquals(SortField.class, sort.field.getClass()); // 2 points -> plain SortField with a custom comparator
        builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1));
        builder.unit(DistanceUnit.KILOMETERS);
        sort = builder.build(context);
        assertEquals(SortField.class, sort.field.getClass()); // km rather than m -> plain SortField with a custom comparator
        builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1));
        builder.order(SortOrder.DESC);
        sort = builder.build(context);
        assertEquals(SortField.class, sort.field.getClass()); // descending means the max value should be considered rather than min
        builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1));
        builder.setNestedPath("some_nested_path");
        sort = builder.build(context);
        assertEquals(SortField.class, sort.field.getClass()); // can't use LatLon optimized sorting with nested fields
        // NOTE(review): this case duplicates the DESC case above; possibly another option was
        // meant to be exercised here — confirm against upstream history.
        builder = new GeoDistanceSortBuilder("random_field_name", new GeoPoint(3.5, 2.1));
        builder.order(SortOrder.DESC);
        sort = builder.build(context);
        assertEquals(SortField.class, sort.field.getClass()); // can't use LatLon optimized sorting with DESC sorting
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.converter.crypto;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.NoSuchProviderException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.support.ResourceHelper;
import org.apache.camel.util.IOHelper;
import org.bouncycastle.bcpg.sig.KeyFlags;
import org.bouncycastle.openpgp.PGPException;
import org.bouncycastle.openpgp.PGPPrivateKey;
import org.bouncycastle.openpgp.PGPPublicKey;
import org.bouncycastle.openpgp.PGPPublicKeyRing;
import org.bouncycastle.openpgp.PGPPublicKeyRingCollection;
import org.bouncycastle.openpgp.PGPSecretKey;
import org.bouncycastle.openpgp.PGPSecretKeyRing;
import org.bouncycastle.openpgp.PGPSecretKeyRingCollection;
import org.bouncycastle.openpgp.PGPSignature;
import org.bouncycastle.openpgp.PGPSignatureSubpacketVector;
import org.bouncycastle.openpgp.PGPUtil;
import org.bouncycastle.openpgp.operator.bc.BcKeyFingerprintCalculator;
import org.bouncycastle.openpgp.operator.jcajce.JcePBESecretKeyDecryptorBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.bouncycastle.bcpg.PublicKeyAlgorithmTags.DSA;
import static org.bouncycastle.bcpg.PublicKeyAlgorithmTags.ECDSA;
import static org.bouncycastle.bcpg.PublicKeyAlgorithmTags.ELGAMAL_GENERAL;
import static org.bouncycastle.bcpg.PublicKeyAlgorithmTags.RSA_GENERAL;
import static org.bouncycastle.bcpg.PublicKeyAlgorithmTags.RSA_SIGN;
public final class PGPDataFormatUtil {
private static final Logger LOG = LoggerFactory.getLogger(PGPDataFormatUtil.class);

// utility class: no instances
private PGPDataFormatUtil() {
}
/**
 * Resolves the public key ring (from file name or byte array — exactly one must be set)
 * and returns the public keys whose primary-key user IDs match the given user-id parts.
 * The key ring stream is always closed.
 */
public static List<PGPPublicKey> findPublicKeys(CamelContext context, String filename, byte[] keyRing, List<String> userids,
        boolean forEncryption) throws IOException, PGPException, NoSuchProviderException {
    InputStream is = determineKeyRingInputStream(context, filename, keyRing, forEncryption);
    try {
        return findPublicKeys(is, userids, forEncryption);
    } finally {
        // IOHelper.close swallows close failures rather than masking the primary exception
        IOHelper.close(is);
    }
}
/**
 * Loads the whole public key ring collection from the file name or byte array
 * (exactly one must be set); the underlying stream is always closed.
 */
public static PGPPublicKeyRingCollection getPublicKeyRingCollection(CamelContext context, String filename, byte[] keyRing, boolean forEncryption) throws IOException, PGPException {
    InputStream is = determineKeyRingInputStream(context, filename, keyRing, forEncryption);
    try {
        return new PGPPublicKeyRingCollection(PGPUtil.getDecoderStream(is), new BcKeyFingerprintCalculator());
    } finally {
        IOHelper.close(is);
    }
}
/**
 * Resolves the secret key ring (from file name or byte array — exactly one must be set)
 * and extracts the private key with the given key id, unlocking it with the passphrase
 * or one obtained from the passphrase accessor. The key ring stream is always closed.
 *
 * @return the private key, or null if no matching unlockable key is found
 */
public static PGPPrivateKey findPrivateKeyWithKeyId(CamelContext context, String filename, byte[] secretKeyRing, long keyid,
        String passphrase, PGPPassphraseAccessor passphraseAccessor, String provider) throws IOException, PGPException,
        NoSuchProviderException {
    InputStream is = determineKeyRingInputStream(context, filename, secretKeyRing, true);
    try {
        return findPrivateKeyWithKeyId(is, keyid, passphrase, passphraseAccessor, provider);
    } finally {
        IOHelper.close(is);
    }
}
/** Reads a secret key ring collection from the stream and delegates the key lookup. */
private static PGPPrivateKey findPrivateKeyWithKeyId(InputStream keyringInput, long keyid, String passphrase,
        PGPPassphraseAccessor passphraseAccessor, String provider) throws IOException, PGPException {
    InputStream decoded = PGPUtil.getDecoderStream(keyringInput);
    PGPSecretKeyRingCollection secretKeyRings = new PGPSecretKeyRingCollection(decoded, new BcKeyFingerprintCalculator());
    return findPrivateKeyWithkeyId(keyid, passphrase, passphraseAccessor, provider, secretKeyRings);
}
/**
 * Searches all secret key rings for the key with the given id and extracts its private
 * key. If no passphrase is given, one is looked up via the accessor using the primary
 * key's user IDs; with neither passphrase nor accessor, an empty passphrase is tried.
 *
 * @return the private key, or null if not found or not unlockable
 */
public static PGPPrivateKey findPrivateKeyWithkeyId(long keyid, String passphrase, PGPPassphraseAccessor passphraseAccessor,
        String provider, PGPSecretKeyRingCollection pgpSec) throws PGPException {
    for (Iterator<?> i = pgpSec.getKeyRings(); i.hasNext();) {
        Object data = i.next();
        if (!(data instanceof PGPSecretKeyRing)) {
            continue;
        }
        PGPSecretKeyRing keyring = (PGPSecretKeyRing) data;
        PGPSecretKey secKey = keyring.getSecretKey(keyid);
        if (secKey == null) {
            continue;
        }
        if (passphrase == null && passphraseAccessor != null) {
            // get passphrase from accessor; only the primary/master key has user IDs
            @SuppressWarnings("unchecked")
            Iterator<String> userIDs = keyring.getSecretKey().getUserIDs();
            while (passphrase == null && userIDs.hasNext()) {
                passphrase = passphraseAccessor.getPassphrase(userIDs.next());
            }
        }
        if (passphrase != null) {
            PGPPrivateKey privateKey = extractPrivateKey(secKey, passphrase, provider);
            if (privateKey != null) {
                return privateKey;
            }
        }
        if (passphrase == null && passphraseAccessor == null) {
            // no way to obtain a passphrase: fall back to the empty passphrase
            passphrase = "";
            PGPPrivateKey privateKey = extractPrivateKey(secKey, passphrase, provider);
            if (privateKey != null) {
                return privateKey;
            }
        }
    }
    return null;
}

/** Unlocks the secret key with the given passphrase using the JCE provider. */
private static PGPPrivateKey extractPrivateKey(PGPSecretKey secKey, String passphrase, String provider) throws PGPException {
    return secKey.extractPrivateKey(new JcePBESecretKeyDecryptorBuilder().setProvider(provider)
            .build(passphrase.toCharArray()));
}
/**
 * Opens the key ring as a stream, either from the given byte array or by resolving the
 * file name against the Camel context. Specifying both sources is an error; the
 * forEncryption flag only selects the wording of that error message.
 */
private static InputStream determineKeyRingInputStream(CamelContext context, String filename, byte[] keyRing, boolean forEncryption)
        throws IOException {
    if (filename != null && keyRing != null) {
        String encryptionOrSignature = forEncryption ? "encryption" : "signature";
        throw new IllegalStateException(String.format("Either specify %s file name or key ring byte array. You can not specify both.",
                encryptionOrSignature));
    }
    if (keyRing != null) {
        return new ByteArrayInputStream(keyRing);
    }
    return ResourceHelper.resolveMandatoryResourceAsInputStream(context, filename);
}
private static List<PGPPublicKey> findPublicKeys(InputStream input, List<String> userids, boolean forEncryption) throws IOException,
PGPException, NoSuchProviderException {
PGPPublicKeyRingCollection pgpSec = new PGPPublicKeyRingCollection(PGPUtil.getDecoderStream(input),
new BcKeyFingerprintCalculator());
return findPublicKeys(userids, forEncryption, pgpSec);
}
public static List<PGPPublicKey> findPublicKeys(List<String> useridParts, boolean forEncryption, PGPPublicKeyRingCollection pgpPublicKeyringCollection) {
List<PGPPublicKey> result = new ArrayList<>(useridParts.size());
for (Iterator<PGPPublicKeyRing> keyRingIter = pgpPublicKeyringCollection.getKeyRings(); keyRingIter.hasNext();) {
PGPPublicKeyRing keyRing = keyRingIter.next();
PGPPublicKey primaryKey = keyRing.getPublicKey();
String[] foundKeyUserIdForUserIdPart = findFirstKeyUserIdContainingOneOfTheParts(useridParts, primaryKey);
if (foundKeyUserIdForUserIdPart == null) {
LOG.debug("No User ID found in primary key with key ID {} containing one of the parts {}", primaryKey.getKeyID(),
useridParts);
continue;
}
LOG.debug("User ID {} found in primary key with key ID {} containing one of the parts {}",
foundKeyUserIdForUserIdPart[0], primaryKey.getKeyID(), useridParts);
// add adequate keys to the result
for (Iterator<PGPPublicKey> keyIter = keyRing.getPublicKeys(); keyIter.hasNext();) {
PGPPublicKey key = keyIter.next();
if (forEncryption) {
if (isEncryptionKey(key)) {
LOG.debug("Public encryption key with key user ID {} and key ID {} added to the encryption keys",
foundKeyUserIdForUserIdPart[0], Long.toString(key.getKeyID()));
result.add(key);
}
} else if (!forEncryption && isSignatureKey(key)) {
// not used!
result.add(key);
LOG.debug("Public key with key user ID {} and key ID {} added to the signing keys", foundKeyUserIdForUserIdPart[0],
Long.toString(key.getKeyID()));
}
}
}
return result;
}
private static boolean isEncryptionKey(PGPPublicKey key) {
if (!key.isEncryptionKey()) {
return false;
}
//check keyflags
Boolean hasEncryptionKeyFlags = hasOneOfExpectedKeyFlags(key, new int[] {KeyFlags.ENCRYPT_COMMS, KeyFlags.ENCRYPT_STORAGE });
if (hasEncryptionKeyFlags != null && !hasEncryptionKeyFlags) {
LOG.debug(
"Public key with key key ID {} found for specified user ID. But this key will not be used for the encryption, because its key flags are not encryption key flags.",
Long.toString(key.getKeyID()));
return false;
} else {
// also without keyflags (hasEncryptionKeyFlags = null), true is returned!
return true;
}
}
// Within a public keyring, the master / primary key has the user ID(s); the subkeys don't
// have user IDs associated directly to them, but the subkeys are implicitly associated with
// the user IDs of the master / primary key. The master / primary key is the first key in
// the keyring, and the rest of the keys are subkeys.
// http://bouncy-castle.1462172.n4.nabble.com/How-to-find-PGP-subkeys-td1465289.html
private static String[] findFirstKeyUserIdContainingOneOfTheParts(List<String> useridParts, PGPPublicKey primaryKey) {
String[] foundKeyUserIdForUserIdPart = null;
for (@SuppressWarnings("unchecked")
Iterator<String> iterator = primaryKey.getUserIDs(); iterator.hasNext();) {
String keyUserId = iterator.next();
for (String userIdPart : useridParts) {
if (keyUserId.contains(userIdPart)) {
foundKeyUserIdForUserIdPart = new String[] {keyUserId, userIdPart };
}
}
}
return foundKeyUserIdForUserIdPart;
}
private static boolean isSignatureKey(PGPPublicKey key) {
int algorithm = key.getAlgorithm();
return algorithm == RSA_GENERAL || algorithm == RSA_SIGN || algorithm == DSA || algorithm == ECDSA || algorithm == ELGAMAL_GENERAL;
}
public static List<PGPSecretKeyAndPrivateKeyAndUserId> findSecretKeysWithPrivateKeyAndUserId(CamelContext context,
String keychainFilename, byte[] secKeyRing, Map<String, String> sigKeyUserId2Password, String provider) throws IOException,
PGPException, NoSuchProviderException {
InputStream keyChainInputStream = determineKeyRingInputStream(context, keychainFilename, secKeyRing, false);
try {
return findSecretKeysWithPrivateKeyAndUserId(keyChainInputStream, sigKeyUserId2Password, provider);
} finally {
IOHelper.close(keyChainInputStream);
}
}
private static List<PGPSecretKeyAndPrivateKeyAndUserId> findSecretKeysWithPrivateKeyAndUserId(InputStream keyringInput,
Map<String, String> sigKeyUserId2Password, String provider) throws IOException, PGPException, NoSuchProviderException {
PGPSecretKeyRingCollection pgpSec =
new PGPSecretKeyRingCollection(PGPUtil.getDecoderStream(keyringInput),
new BcKeyFingerprintCalculator());
return findSecretKeysWithPrivateKeyAndUserId(sigKeyUserId2Password, provider, pgpSec);
}
    /**
     * Determines from the secret key ring collection the signing keys whose primary
     * key has a user ID containing one of the map's user ID parts, and extracts the
     * corresponding private keys using the passwords from the map.
     *
     * @param sigKeyUserId2Password maps a user ID part to the password used for keys matched by that part
     * @param provider JCE security provider used for the secret key decryption
     * @param pgpSec secret key ring collection to search
     * @return list of (secret key, private key, user ID) triples; can be empty
     * @throws PGPException if a private key cannot be extracted (e.g. wrong password)
     */
    public static List<PGPSecretKeyAndPrivateKeyAndUserId> findSecretKeysWithPrivateKeyAndUserId(Map<String, String> sigKeyUserId2Password,
            String provider, PGPSecretKeyRingCollection pgpSec) throws PGPException {
        List<PGPSecretKeyAndPrivateKeyAndUserId> result = new ArrayList<>(sigKeyUserId2Password.size());
        for (Iterator<?> i = pgpSec.getKeyRings(); i.hasNext();) {
            Object data = i.next();
            if (data instanceof PGPSecretKeyRing) {
                PGPSecretKeyRing keyring = (PGPSecretKeyRing) data;
                // only the primary/master key carries the user IDs
                PGPSecretKey primaryKey = keyring.getSecretKey();
                List<String> useridParts = new ArrayList<>(sigKeyUserId2Password.keySet());
                String[] foundKeyUserIdForUserIdPart = findFirstKeyUserIdContainingOneOfTheParts(useridParts, primaryKey.getPublicKey());
                if (foundKeyUserIdForUserIdPart == null) {
                    LOG.debug("No User ID found in primary key with key ID {} containing one of the parts {}", primaryKey.getKeyID(),
                            useridParts);
                    continue;
                }
                LOG.debug("User ID {} found in primary key with key ID {} containing one of the parts {}",
                        foundKeyUserIdForUserIdPart[0], primaryKey.getKeyID(), useridParts);
                // add all signing keys
                for (Iterator<PGPSecretKey> iterKey = keyring.getSecretKeys(); iterKey.hasNext();) {
                    PGPSecretKey secKey = iterKey.next();
                    if (isSigningKey(secKey)) {
                        // decrypt with the password registered for the matched user ID part
                        PGPPrivateKey privateKey = secKey.extractPrivateKey(new JcePBESecretKeyDecryptorBuilder().setProvider(provider)
                                .build(sigKeyUserId2Password.get(foundKeyUserIdForUserIdPart[1]).toCharArray()));
                        if (privateKey != null) {
                            result.add(new PGPSecretKeyAndPrivateKeyAndUserId(secKey, privateKey, foundKeyUserIdForUserIdPart[0]));
                            LOG.debug("Private key with user ID {} and key ID {} added to the signing keys",
                                    foundKeyUserIdForUserIdPart[0], Long.toString(privateKey.getKeyID()));
                        }
                    }
                }
            }
        }
        return result;
    }
private static boolean isSigningKey(PGPSecretKey secKey) {
if (!secKey.isSigningKey()) {
return false;
}
Boolean hasSigningKeyFlag = hasOneOfExpectedKeyFlags(secKey.getPublicKey(), new int[] {KeyFlags.SIGN_DATA });
if (hasSigningKeyFlag != null && !hasSigningKeyFlag) {
// not a signing key --> ignore
LOG.debug(
"Secret key with key ID {} found for specified user ID part. But this key will not be used for signing because of its key flags.",
Long.toString(secKey.getKeyID()));
return false;
} else {
// also if there are not any keyflags (hasSigningKeyFlag=null), true is returned!
return true;
}
}
/**
* Checks whether one of the signatures of the key has one of the expected
* key flags
*
* @param key
* @return {@link Boolean#TRUE} if key has one of the expected flag,
* <code>null</code> if the key does not have any key flags,
* {@link Boolean#FALSE} if the key has none of the expected flags
*/
private static Boolean hasOneOfExpectedKeyFlags(PGPPublicKey key, int[] expectedKeyFlags) {
boolean containsKeyFlags = false;
for (@SuppressWarnings("unchecked")
Iterator<PGPSignature> itsig = key.getSignatures(); itsig.hasNext();) {
PGPSignature sig = itsig.next();
PGPSignatureSubpacketVector subPacks = sig.getHashedSubPackets();
if (subPacks != null) {
int keyFlag = subPacks.getKeyFlags();
if (keyFlag > 0 && !containsKeyFlags) {
containsKeyFlags = true;
}
for (int expectdKeyFlag : expectedKeyFlags) {
int result = keyFlag & expectdKeyFlag;
if (result == expectdKeyFlag) {
return Boolean.TRUE;
}
}
}
}
if (containsKeyFlags) {
return Boolean.FALSE;
}
return null; // no key flag
}
/**
* Determines a public key from the keyring collection which has a certain
* key ID and which has a User ID which contains at least one of the User ID
* parts.
*
* @param keyId
* key ID
* @param userIdParts
* user ID parts, can be empty, than no filter on the User ID is
* executed
* @param publicKeyringCollection
* keyring collection
* @return public key or <code>null</code> if no fitting key is found
* @throws PGPException
*/
@SuppressWarnings("unchecked")
public static PGPPublicKey getPublicKeyWithKeyIdAndUserID(long keyId, List<String> userIdParts, PGPPublicKeyRingCollection publicKeyringCollection)
throws PGPException {
PGPPublicKeyRing publicKeyring = publicKeyringCollection.getPublicKeyRing(keyId);
if (publicKeyring == null) {
LOG.debug("No public key found for key ID {}.", Long.toString(keyId));
return null;
}
// publicKey can be a subkey the user IDs must therefore be provided by the primary/master key
if (isAllowedKey(userIdParts, publicKeyring.getPublicKey().getUserIDs())) {
return publicKeyring.getPublicKey(keyId);
} else {
return null;
}
}
private static boolean isAllowedKey(List<String> allowedUserIds, Iterator<String> verifyingPublicKeyUserIds) {
if (allowedUserIds == null || allowedUserIds.isEmpty()) {
// no restrictions specified
return true;
}
String keyUserId = null;
for (; verifyingPublicKeyUserIds.hasNext();) {
keyUserId = verifyingPublicKeyUserIds.next();
for (String userid : allowedUserIds) {
if (keyUserId != null && keyUserId.contains(userid)) {
LOG.debug(
"Public key with user ID {} fulfills the User ID restriction.",
keyUserId, allowedUserIds);
return true;
}
}
}
LOG.warn(
"Public key with User ID {} does not fulfill the User ID restriction.",
keyUserId, allowedUserIds);
return false;
}
}
| |
package main;
import gui.LoginGUI;
import gui.SalonGUI;
import gui.config.ConfigGUI;
import gui.config.KeyConfigPnl;
import gui.game.PartyGUI;
import java.awt.FlowLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectOutput;
import java.io.ObjectOutputStream;
import java.util.Observable;
import java.util.Observer;
import javax.swing.ImageIcon;
import javax.swing.JApplet;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.UIManager;
import message.puyo.ActionPuyo;
import message.puyo.EMovePlateau;
import model.Party;
import model.Puyo.PuyoColor;
import model.players.IAPlayer;
import network.NetworkClient;
import network.NetworkClient.ENetworkClient;
import commun.Player;
import controller.ManageKeyboard;
/**
 * Applet and stand-alone entry point of the Puyo Puyo game. Builds the menu bar
 * (game, network, options), tracks the network connection state via the
 * Observer callback, and hosts the running party's GUI.
 */
public class Main extends JApplet implements Observer {

    private static final long serialVersionUID = 1L;
    private JMenuBar menuBar;
    private JMenu mnGame;
    private JMenuItem mntmPlayerVsIA;
    private JMenu mnOptions;
    private JMenuItem mntmKeyConfig;
    // connection indicator in the menu bar (serv_ON / serv_OFF icon)
    private JLabel lblConnectstatut;
    private JMenu mnNetwork;
    private JMenuItem mntmJoinSalon;
    // lobby window, created lazily on first use
    private SalonGUI salon;
    private ConfigGUI configGUI = new ConfigGUI();
    // singleton-style reference to the running applet instance
    public static Main himself;
    // current party
    private Party p = new Party();

    /**
     * Reads the applet parameters NAME and PWD into the global config,
     * keeping the existing defaults when a parameter is absent.
     */
    public void loadParameter(){
        try {
            Config.NAME = (getParameter("NAME") != null) ? getParameter("NAME") : Config.NAME;
            Config.PWD = (getParameter("PWD") != null) ? getParameter("PWD") : Config.PWD;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public void init() {
        loadParameter();
    }

    @Override
    public void destroy() {
        // stop the network client thread when the applet goes away
        NetworkClient.get().interrupt();
    }

    /**
     * Stand-alone entry point: installs the look and feel, writes a sample
     * serialized ActionPuyo to the file "serial", then opens the login window.
     */
    public static void main(String[] args) {
        try {
            UIManager.setLookAndFeel("com.nilo.plaf.nimrod.NimRODLookAndFeel");
        } catch (Throwable e) {
            e.printStackTrace();
        }
        ActionPuyo ac = new ActionPuyo(EMovePlateau.DOWN, null);
        try{
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            ObjectOutput out = new ObjectOutputStream(bos);
            out.writeObject(ac);
            byte[] acBytes = bos.toByteArray();
            out.close();
            bos.close();
            FileOutputStream f = new FileOutputStream(new File("serial"));
            f.write(acBytes);
            f.close();
        }catch (Exception e) {
            // failure used to be silently swallowed; report it so serialization problems are visible
            e.printStackTrace();
        }
        LoginGUI log = new LoginGUI();
    }

    /**
     * Create the applet: installs the look and feel, builds the menu bar and
     * registers this instance as observer of the network client.
     */
    public Main () {
        himself = this;
        setName("Puyo Puyo");
        try {
            UIManager.setLookAndFeel("com.nilo.plaf.nimrod.NimRODLookAndFeel");
        } catch (Throwable e) {
            e.printStackTrace();
        }
        menuBar = new JMenuBar();
        setJMenuBar(menuBar);
        mnGame = new JMenu("Jeu");
        menuBar.add(mnGame);
        mntmPlayerVsIA = new JMenuItem("Joueur VS IA");
        mntmPlayerVsIA.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent arg0) {
                // restart a fresh party populated with AI players only
                if ( p != null ) {
                    p.stopGame();
                    p = new Party();
                    //p.addPlayer(new Player(Config.NAME, 1000));
                    p.addPlayer(new IAPlayer());
                    p.addPlayer(new IAPlayer());
                    p.addPlayer(new IAPlayer());
                    p.addPlayer(new IAPlayer());
                    p.addPlayer(new IAPlayer());
                    p.addPlayer(new IAPlayer());
                    startParty(p);
                }
            }
        });
        mnGame.add(mntmPlayerVsIA);
        mnNetwork = new JMenu("R\u00E9seau");
        menuBar.add(mnNetwork);
        mntmJoinSalon = new JMenuItem("Rejoindre le salon");
        // disabled until the network client reports CONNECTED
        mntmJoinSalon.setEnabled(false);
        mntmJoinSalon.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                if ( salon == null ) {
                    salon = new SalonGUI();
                }
                salon.setVisible(true);
            }
        });
        mnNetwork.add(mntmJoinSalon);
        mnOptions = new JMenu("Options");
        menuBar.add(mnOptions);
        mntmKeyConfig = new JMenuItem("Configurations");
        mntmKeyConfig.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                configGUI.setVisible(true);
            }
        });
        mnOptions.add(mntmKeyConfig);
        lblConnectstatut = new JLabel("");
        lblConnectstatut.setIcon(new ImageIcon(Main.class.getResource("/res/images/serv_OFF.png")));
        menuBar.add(lblConnectstatut);
        getContentPane().setLayout(new FlowLayout(FlowLayout.CENTER, 5, 5));
        NetworkClient.get().addObserver(this);
    }

    /**
     * Reacts to connection state changes of the network client: toggles the
     * lobby menu item and swaps the status icon; hides the lobby in both cases.
     */
    @Override
    public void update(Observable arg0, Object arg1) {
        if (arg1 instanceof ENetworkClient){
            switch ((ENetworkClient)arg1){
            case DISCONNECTED:
                if ( salon != null ) {
                    salon.setVisible(false);
                }
                mntmJoinSalon.setEnabled(false);
                lblConnectstatut.setIcon(new ImageIcon(Main.class.getResource("/res/images/serv_OFF.png")));
                break;
            case CONNECTED:
                if ( salon != null ) {
                    salon.setVisible(false);
                }
                mntmJoinSalon.setEnabled(true);
                lblConnectstatut.setIcon(new ImageIcon(Main.class.getResource("/res/images/serv_ON.png")));
                break;
            }
        }
    }

    /**
     * Replaces the content pane with the GUI of the given party, wires the
     * keyboard controller for the local player (if present) and starts the game.
     */
    public void startParty(Party p) {
        getContentPane().removeAll();
        Player him = p.getPlayer(Config.NAME);
        if ( him != null ) {
            // attach keyboard handling only when the local player takes part
            ManageKeyboard mnk = new ManageKeyboard(p.getPlayersMap().get(him));
            addKeyListener(mnk);
        }
        getContentPane().add(new PartyGUI(p));
        p.startGame();
        validate();
        repaint();
        requestFocus();
    }

    /** Convenience accessor for applet parameters via the singleton instance. */
    public static String getParamApplet(String param){
        return himself.getParameter(param);
    }
}
| |
/*
* Copyright (C) 2010 Olafur Gauti Gudmundsson
* <p/>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.mongodb.morphia;
import com.mongodb.WriteConcern;
import com.mongodb.client.model.UpdateOptions;
import org.junit.Assert;
import org.junit.Test;
import org.mongodb.morphia.entities.version.AbstractVersionedBase;
import org.mongodb.morphia.entities.version.Versioned;
import org.mongodb.morphia.entities.version.VersionedChildEntity;
import org.mongodb.morphia.mapping.MappedClass;
import org.mongodb.morphia.query.Query;
import org.mongodb.morphia.query.UpdateOperations;
import java.util.ArrayList;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.List;
import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
 * Tests for the Morphia version annotation: version increments on save/update,
 * optimistic-locking conflicts, mapping of versioned class hierarchies, and
 * versioned bulk/upsert operations.
 * <p>
 * The deprecated boxing constructor {@code new Long(n)} has been replaced by
 * {@code Long.valueOf(n)} throughout; behavior of the assertions is unchanged.
 */
public class TestVersionAnnotation extends TestBase {
    @Test
    public void testBulkUpdate() {
        final Datastore datastore = getDatastore();
        Versioned entity = new Versioned();
        entity.setName("Value 1");
        datastore.save(entity);
        entity = datastore.get(Versioned.class, entity.getId());
        Assert.assertEquals("Value 1", entity.getName());
        Assert.assertEquals(1, entity.getVersion().longValue());
        entity.setName("Value 2");
        datastore.save(entity);
        entity = datastore.get(Versioned.class, entity.getId());
        Assert.assertEquals("Value 2", entity.getName());
        Assert.assertEquals(2, entity.getVersion().longValue());
        // a bulk update must bump the version as well
        Query<Versioned> query = datastore.find(Versioned.class);
        query.filter("id", entity.getId());
        UpdateOperations<Versioned> ops = datastore.createUpdateOperations(Versioned.class);
        ops.set("name", "Value 3");
        datastore.updateMany(query, ops);
        entity = datastore.get(Versioned.class, entity.getId());
        Assert.assertEquals("Value 3", entity.getName());
        Assert.assertEquals(3, entity.getVersion().longValue());
    }
    @Test
    public void testCanMapAPackageContainingAVersionedAbstractBaseClass() {
        getMapper().mapPackage("org.mongodb.morphia.entities.version");
        Collection<MappedClass> mappedClasses = getMapper().getMappedClasses();
        assertThat(mappedClasses.size(), is(3));
        List<Class<?>> list = new ArrayList<>();
        for (MappedClass mappedClass : mappedClasses) {
            list.add(mappedClass.getClazz());
        }
        assertTrue(list.contains(VersionedChildEntity.class));
        assertTrue(list.contains(AbstractVersionedBase.class));
        assertTrue(list.contains(Versioned.class));
    }
    @Test
    public void testCanMapAnEntityWithAnAbstractVersionedParent() {
        getMapper().map(VersionedChildEntity.class);
        Collection<MappedClass> mappedClasses = getMapper().getMappedClasses();
        assertThat(mappedClasses.size(), is(2));
        List<Class<?>> list = new ArrayList<>();
        for (MappedClass mappedClass : mappedClasses) {
            list.add(mappedClass.getClazz());
        }
        assertTrue(list.contains(VersionedChildEntity.class));
        assertTrue(list.contains(AbstractVersionedBase.class));
    }
    @Test
    public void testEntityUpdate() {
        final Datastore datastore = getDatastore();
        Versioned entity = new Versioned();
        entity.setName("Value 1");
        datastore.save(entity);
        entity = datastore.get(Versioned.class, entity.getId());
        Assert.assertEquals("Value 1", entity.getName());
        Assert.assertEquals(1, entity.getVersion().longValue());
        entity.setName("Value 2");
        datastore.save(entity);
        entity = datastore.get(Versioned.class, entity.getId());
        Assert.assertEquals("Value 2", entity.getName());
        Assert.assertEquals(2, entity.getVersion().longValue());
        UpdateOperations<Versioned> ops = datastore.createUpdateOperations(Versioned.class);
        ops.set("name", "Value 3");
        // the second update targets the stale version and must modify nothing
        Assert.assertEquals(1, datastore.update(entity, ops).getModifiedCount());
        Assert.assertEquals(0, datastore.update(entity, ops).getModifiedCount());
        entity = datastore.get(Versioned.class, entity.getId());
        Assert.assertEquals("Value 3", entity.getName());
        Assert.assertEquals(3, entity.getVersion().longValue());
        ops = datastore.createUpdateOperations(Versioned.class);
        ops.set("name", "Value 4");
        datastore.update(datastore.getKey(entity), ops, new UpdateOptions(), WriteConcern.ACKNOWLEDGED);
        entity = datastore.get(Versioned.class, entity.getId());
        Assert.assertEquals("Value 4", entity.getName());
        Assert.assertEquals(4, entity.getVersion().longValue());
    }
    @Test
    public void testIncVersionNotOverridingOtherInc() {
        final Versioned version1 = new Versioned();
        version1.setCount(0);
        getDatastore().save(version1);
        assertEquals(Long.valueOf(1), version1.getVersion());
        assertEquals(0, version1.getCount());
        Query<Versioned> query = getDatastore().find(Versioned.class);
        query.field("_id").equal(version1.getId());
        // incrementing "count" must not suppress the automatic version increment
        UpdateOperations<Versioned> up = getDatastore().createUpdateOperations(Versioned.class).inc("count");
        getDatastore().updateOne(query, up, new UpdateOptions().upsert(true), getDatastore().getDefaultWriteConcern());
        final Versioned version2 = getDatastore().get(Versioned.class, version1.getId());
        assertEquals(Long.valueOf(2), version2.getVersion());
        assertEquals(1, version2.getCount());
    }
    @Test(expected = ConcurrentModificationException.class)
    public void testThrowsExceptionWhenTryingToSaveAnOldVersion() {
        final Versioned version1 = new Versioned();
        getDatastore().save(version1);
        // re-saving the fetched copy bumps the stored version ...
        getDatastore().save(getDatastore().get(Versioned.class, version1.getId()));
        // ... so saving the stale original must fail
        getDatastore().save(version1);
    }
    @Test
    public void testUpdatesToVersionedFileAreReflectedInTheDatastore() {
        final Versioned version1 = new Versioned();
        version1.setName("foo");
        this.getDatastore().save(version1);
        final Versioned version1Updated = getDatastore().get(Versioned.class, version1.getId());
        version1Updated.setName("bar");
        this.getDatastore().merge(version1Updated);
        final Versioned versionedEntityFromDs = this.getDatastore().get(Versioned.class, version1.getId());
        assertEquals(version1Updated.getName(), versionedEntityFromDs.getName());
    }
    @Test
    public void testVersionNumbersIncrementWithEachSave() {
        final Versioned version1 = new Versioned();
        getDatastore().save(version1);
        assertEquals(Long.valueOf(1), version1.getVersion());
        final Versioned version2 = getDatastore().get(Versioned.class, version1.getId());
        getDatastore().save(version2);
        assertEquals(Long.valueOf(2), version2.getVersion());
    }
    @Test
    public void testVersionedInserts() {
        List<Versioned> list = asList(new Versioned(), new Versioned(), new Versioned(), new Versioned(), new Versioned());
        getAds().insertMany(list);
        for (Versioned versioned : list) {
            assertNotNull(versioned.getVersion());
        }
    }
    @Test
    public void testVersionedUpsert() {
        final Datastore datastore = getDatastore();
        Versioned entity = new Versioned();
        entity.setName("Value 1");
        // upsert on a non-existing document must insert it with version 1
        Query<Versioned> query = datastore.find(Versioned.class);
        query.filter("name", "Value 1");
        UpdateOperations<Versioned> ops = datastore.createUpdateOperations(Versioned.class);
        ops.set("name", "Value 3");
        datastore.updateOne(query, ops, new UpdateOptions().upsert(true), getDatastore().getDefaultWriteConcern());
        entity = datastore.find(Versioned.class).get();
        Assert.assertEquals("Value 3", entity.getName());
        Assert.assertEquals(1, entity.getVersion().longValue());
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.structuralsearch.impl.matcher.compiler;
import com.intellij.dupLocator.iterators.NodeIterator;
import com.intellij.psi.*;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.javadoc.PsiDocTag;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PsiShortNamesCache;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.search.searches.ClassInheritorsSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.structuralsearch.MalformedPatternException;
import com.intellij.structuralsearch.SSRBundle;
import com.intellij.structuralsearch.UnsupportedPatternException;
import com.intellij.structuralsearch.impl.matcher.CompiledPattern;
import com.intellij.structuralsearch.impl.matcher.JavaCompiledPattern;
import com.intellij.structuralsearch.impl.matcher.filters.*;
import com.intellij.structuralsearch.impl.matcher.handlers.*;
import com.intellij.structuralsearch.impl.matcher.iterators.DocValuesIterator;
import com.intellij.structuralsearch.impl.matcher.predicates.RegExpPredicate;
import com.intellij.structuralsearch.impl.matcher.strategies.CommentMatchingStrategy;
import com.intellij.structuralsearch.impl.matcher.strategies.ExprMatchingStrategy;
import com.intellij.structuralsearch.impl.matcher.strategies.JavaDocMatchingStrategy;
import com.intellij.structuralsearch.impl.matcher.strategies.MatchingStrategy;
import com.intellij.util.Processor;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author Eugene.Kudelevsky
*/
public class JavaCompilingVisitor extends JavaRecursiveElementWalkingVisitor {
private final GlobalCompilingVisitor myCompilingVisitor;
@NonNls private static final String COMMENT = "\\s*(__\\$_\\w+)\\s*";
private static final Pattern ourPattern = Pattern.compile("//" + COMMENT, Pattern.DOTALL);
private static final Pattern ourPattern2 = Pattern.compile("/\\*" + COMMENT + "\\*/", Pattern.DOTALL);
private static final Pattern ourPattern3 = Pattern.compile("/\\*\\*" + COMMENT + "\\*/", Pattern.DOTALL);
  /**
   * Creates a compiling visitor that delegates shared compilation state to the given global visitor.
   */
  public JavaCompilingVisitor(GlobalCompilingVisitor compilingVisitor) {
    this.myCompilingVisitor = compilingVisitor;
  }
@Override
public void visitDocTag(PsiDocTag psiDocTag) {
super.visitDocTag(psiDocTag);
NodeIterator sons = new DocValuesIterator(psiDocTag.getFirstChild());
while (sons.hasNext()) {
myCompilingVisitor.setHandler(sons.current(), new DocDataHandler());
sons.advance();
}
}
  @Override
  public void visitComment(PsiComment comment) {
    super.visitComment(comment);
    final String text = comment.getText();
    // Try the three typed-variable comment shapes in turn:
    // //__$_x, /*__$_x*/ and /**__$_x*/ (see ourPattern/ourPattern2/ourPattern3).
    Matcher matcher = ourPattern.matcher(text);
    boolean matches = false;
    if (!matcher.matches()) {
      matcher = ourPattern2.matcher(text);
      if (!matcher.matches()) {
        // last candidate; its result is checked below via matcher.matches()
        matcher = ourPattern3.matcher(text);
      }
      else {
        matches = true;
      }
    }
    else {
      matches = true;
    }
    if (matches || matcher.matches()) {
      // the whole comment is a single typed pattern variable
      String str = matcher.group(1);
      comment.putUserData(CompiledPattern.HANDLER_KEY, str);
      GlobalCompilingVisitor.setFilter(
        myCompilingVisitor.getContext().getPattern().getHandler(comment),
        CommentFilter.getInstance()
      );
      SubstitutionHandler handler = (SubstitutionHandler)myCompilingVisitor.getContext().getPattern().getHandler(str);
      if (handler == null) {
        throw new MalformedPatternException();
      }
      if (handler.getPredicate() != null) {
        // comments can span lines, so the variable's regexp must match multiline text
        // NOTE(review): unchecked cast — assumes the predicate is always a RegExpPredicate here; confirm
        ((RegExpPredicate)handler.getPredicate()).setMultiline(true);
      }
      RegExpPredicate predicate = MatchingHandler.getSimpleRegExpPredicate(handler);
      if (GlobalCompilingVisitor.isSuitablePredicate(predicate, handler)) {
        myCompilingVisitor.processTokenizedName(predicate.getRegExp(), true, GlobalCompilingVisitor.OccurenceKind.COMMENT);
      }
      matches = true;
    }
    if (!matches) {
      // ordinary comment text, possibly containing embedded pattern fragments
      MatchingHandler handler = myCompilingVisitor.processPatternStringWithFragments(text, GlobalCompilingVisitor.OccurenceKind.COMMENT);
      if (handler != null) comment.putUserData(CompiledPattern.HANDLER_KEY, handler);
    }
  }
@Override
public void visitLiteralExpression(PsiLiteralExpression expression) {
String value = expression.getText();
if (value.length() > 2 && value.charAt(0) == '"' && value.charAt(value.length() - 1) == '"') {
@Nullable MatchingHandler handler =
myCompilingVisitor.processPatternStringWithFragments(value, GlobalCompilingVisitor.OccurenceKind.LITERAL);
if (handler != null) {
expression.putUserData(CompiledPattern.HANDLER_KEY, handler);
}
}
super.visitLiteralExpression(expression);
}
@Override
public void visitField(PsiField psiField) {
super.visitField(psiField);
CompiledPattern pattern = myCompilingVisitor.getContext().getPattern();
final MatchingHandler handler = pattern.getHandler(psiField);
if (needsSupers(psiField, handler)) {
assert pattern instanceof JavaCompiledPattern;
((JavaCompiledPattern)pattern).setRequestsSuperFields(true);
}
}
@Override
public void visitMethod(PsiMethod psiMethod) {
super.visitMethod(psiMethod);
CompiledPattern pattern = myCompilingVisitor.getContext().getPattern();
final MatchingHandler handler = pattern.getHandler(psiMethod);
if (needsSupers(psiMethod, handler)) {
assert pattern instanceof JavaCompiledPattern;
((JavaCompiledPattern)pattern).setRequestsSuperMethods(true);
}
GlobalCompilingVisitor.setFilter(handler, MethodFilter.getInstance());
handleReferenceText(psiMethod.getName(), myCompilingVisitor.getContext());
}
  @Override
  public void visitReferenceExpression(PsiReferenceExpression reference) {
    visitElement(reference);
    boolean typedVarProcessed = false;
    final PsiElement referenceParent = reference.getParent();
    if ((myCompilingVisitor.getContext().getPattern().isRealTypedVar(reference)) &&
        reference.getQualifierExpression() == null &&
        !(referenceParent instanceof PsiExpressionStatement)
      ) {
      // typed var for expression (but not top level)
      MatchingHandler handler = myCompilingVisitor.getContext().getPattern().getHandler(reference);
      GlobalCompilingVisitor.setFilter(handler, ExpressionFilter.getInstance());
      typedVarProcessed = true;
    }
    if (!(referenceParent instanceof PsiMethodCallExpression)) {
      // method-call callees are handled separately in visitMethodCallExpression
      handleReference(reference);
    }
    MatchingHandler handler = myCompilingVisitor.getContext().getPattern().getHandler(reference);
    // We want to merge qname related to class to find it in any form
    final String referencedName = reference.getReferenceName();
    if (!typedVarProcessed &&
        !(handler instanceof SubstitutionHandler)) {
      final PsiElement resolve = reference.resolve();
      PsiElement referenceQualifier = reference.getQualifier();
      // treat the reference as a class reference if it resolves to a class, or is
      // unresolved but looks like one (capitalized simple name or no qualifier)
      if (resolve instanceof PsiClass ||
          (resolve == null &&
           ((referencedName != null && Character.isUpperCase(referencedName.charAt(0))) ||
            referenceQualifier == null
           )
          )
        ) {
        boolean hasNoNestedSubstitutionHandlers = false;
        PsiExpression qualifier;
        PsiReferenceExpression currentReference = reference;
        // walk the qualifier chain; bail out if any link is a non-reference or a substitution variable
        while ((qualifier = currentReference.getQualifierExpression()) != null) {
          if (!(qualifier instanceof PsiReferenceExpression) ||
              myCompilingVisitor.getContext().getPattern().getHandler(qualifier) instanceof SubstitutionHandler
            ) {
            hasNoNestedSubstitutionHandlers = true;
            break;
          }
          currentReference = (PsiReferenceExpression)qualifier;
        }
        if (!hasNoNestedSubstitutionHandlers && PsiTreeUtil.getChildOfType(reference, PsiAnnotation.class) == null) {
          // install a handler on the fully qualified name so the class matches in any written form
          createAndSetSubstitutionHandlerFromReference(
            reference,
            resolve != null ? ((PsiClass)resolve).getQualifiedName() : reference.getText(),
            referenceParent instanceof PsiReferenceExpression
          );
        }
      }
      else if (referenceQualifier != null && reference.getParent() instanceof PsiExpressionStatement) {
        // intentionally empty: the qualifier-handler propagation below was disabled
        //Handler qualifierHandler = context.pattern.getHandler(referenceQualifier);
        //if (qualifierHandler instanceof SubstitutionHandler &&
        //    !context.pattern.isRealTypedVar(reference)
        //   ) {
        //  createAndSetSubstitutionHandlerFromReference(reference, referencedName);
        //
        //  SubstitutionHandler substitutionHandler = (SubstitutionHandler)qualifierHandler;
        //  RegExpPredicate expPredicate = Handler.getSimpleRegExpPredicate(substitutionHandler);
        //  //if (expPredicate != null)
        //  //  substitutionHandler.setPredicate(new ExprTypePredicate(expPredicate.getRegExp(), null, true, true, false));
        //}
      }
    }
  }
  @Override
  public void visitMethodCallExpression(PsiMethodCallExpression expression) {
    // register the callee reference before descending into the call's children
    handleReference(expression.getMethodExpression());
    super.visitMethodCallExpression(expression);
  }
  @Override
  public void visitBlockStatement(PsiBlockStatement psiBlockStatement) {
    super.visitBlockStatement(psiBlockStatement);
    // block statements may only match other blocks
    myCompilingVisitor.getContext().getPattern().getHandler(psiBlockStatement).setFilter(BlockFilter.getInstance());
  }
  @Override
  public void visitVariable(PsiVariable psiVariable) {
    super.visitVariable(psiVariable);
    // variables may only match other variables; also index the variable name for word search
    myCompilingVisitor.getContext().getPattern().getHandler(psiVariable).setFilter(VariableFilter.getInstance());
    handleReferenceText(psiVariable.getName(), myCompilingVisitor.getContext());
  }
@Override
public void visitDeclarationStatement(PsiDeclarationStatement psiDeclarationStatement) {
  super.visitDeclarationStatement(psiDeclarationStatement);
  final PsiElement firstChild = psiDeclarationStatement.getFirstChild();
  if (firstChild instanceof PsiTypeElement) {
    // search for expression or symbol
    final PsiJavaCodeReferenceElement reference = ((PsiTypeElement)firstChild).getInnermostComponentReferenceElement();
    if (reference != null && reference.getParameterList().getTypeParameterElements().length > 0) {
      // A lone generic type like "List<$T$>" is compiled as a "typed symbol":
      // it matches any occurrence of the symbol with those type parameters.
      myCompilingVisitor.setHandler(psiDeclarationStatement, new TypedSymbolHandler());
      final MatchingHandler handler = myCompilingVisitor.getContext().getPattern().getHandler(psiDeclarationStatement);
      // typed symbol
      handler.setFilter(TypedSymbolNodeFilter.getInstance());
      // Each type argument that is itself a pattern variable only matches
      // type-parameter positions in the target code.
      final PsiTypeElement[] params = reference.getParameterList().getTypeParameterElements();
      for (PsiTypeElement param : params) {
        if (param.getInnermostComponentReferenceElement() != null &&
            (myCompilingVisitor.getContext().getPattern().isRealTypedVar(
              param.getInnermostComponentReferenceElement().getReferenceNameElement()))
        ) {
          myCompilingVisitor.getContext().getPattern().getHandler(param).setFilter(
            TypeParameterFilter.getInstance()
          );
        }
      }
      return;
    }
  }
  else if (firstChild instanceof PsiModifierList) {
    // A declaration statement starting with a modifier list is only a legal
    // pattern when it consists of exactly one annotation and no keywords
    // (e.g. "@Deprecated") — it then matches annotation occurrences.
    final PsiModifierList modifierList = (PsiModifierList)firstChild;
    final PsiAnnotation[] annotations = modifierList.getAnnotations();
    if (annotations.length != 1) {
      throw new MalformedPatternException();
    }
    for (String modifier : PsiModifier.MODIFIERS) {
      if (modifierList.hasExplicitModifier(modifier)) {
        throw new MalformedPatternException();
      }
    }
    myCompilingVisitor.setHandler(psiDeclarationStatement, new AnnotationHandler());
    final MatchingHandler handler = myCompilingVisitor.getContext().getPattern().getHandler(psiDeclarationStatement);
    handler.setFilter(AnnotationFilter.getInstance());
    return;
  }
  // Ordinary declaration: install a DeclarationStatementHandler. A preceding
  // comment is attached to the same handler so it is matched together with
  // the declaration.
  final MatchingHandler handler = new DeclarationStatementHandler();
  myCompilingVisitor.getContext().getPattern().setHandler(psiDeclarationStatement, handler);
  final PsiElement previousNonWhiteSpace = PsiTreeUtil.skipSiblingsBackward(psiDeclarationStatement, PsiWhiteSpace.class);
  if (previousNonWhiteSpace instanceof PsiComment) {
    ((DeclarationStatementHandler)handler).setCommentHandler(myCompilingVisitor.getContext().getPattern().getHandler(previousNonWhiteSpace));
    myCompilingVisitor.getContext().getPattern().setHandler(previousNonWhiteSpace, handler);
  }
  // detect typed symbol, it will have no variable
  handler.setFilter(DeclarationFilter.getInstance());
}
@Override
public void visitDocComment(PsiDocComment psiDocComment) {
  super.visitDocComment(psiDocComment);
  // Javadoc comments in the pattern only match javadoc nodes in the code.
  final MatchingHandler docHandler =
    myCompilingVisitor.getContext().getPattern().getHandler(psiDocComment);
  docHandler.setFilter(JavaDocFilter.getInstance());
}
@Override
public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
  super.visitReferenceElement(reference);
  // References whose grandparent is a class (e.g. extends/implements list
  // entries) are restricted to matching type positions.
  final PsiElement parent = reference.getParent();
  if (parent != null && parent.getParent() instanceof PsiClass) {
    GlobalCompilingVisitor.setFilter(myCompilingVisitor.getContext().getPattern().getHandler(reference), TypeFilter.getInstance());
  }
  handleReference(reference);
}
@Override
public void visitClass(PsiClass psiClass) {
  super.visitClass(psiClass);
  final CompiledPattern compiledPattern = myCompilingVisitor.getContext().getPattern();
  final MatchingHandler classHandler = compiledPattern.getHandler(psiClass);
  // Class variables matched with subtype semantics require the matcher to
  // also visit members inherited from super classes.
  if (needsSupers(psiClass, classHandler)) {
    ((JavaCompiledPattern)compiledPattern).setRequestsSuperInners(true);
  }
  GlobalCompilingVisitor.setFilter(classHandler, ClassFilter.getInstance());
  handleReferenceText(psiClass.getName(), myCompilingVisitor.getContext());
}
private SubstitutionHandler createAndSetSubstitutionHandlerFromReference(final PsiElement expr, final String referenceText,
                                                                         boolean classQualifier) {
  // Synthesize a substitution variable named after the reference text (dots
  // become underscores). Class qualifiers are optional (min occurs 0).
  final String variableName = "__" + referenceText.replace('.', '_');
  final int minOccurs = classQualifier ? 0 : 1;
  final SubstitutionHandler handler = new SubstitutionHandler(variableName, false, minOccurs, 1, false);
  // Escape dots so the reference text is matched literally by the regexp predicate.
  final String regexp = referenceText.replace(".", "\\.");
  final boolean caseSensitive = myCompilingVisitor.getContext().getOptions().isCaseSensitiveMatch();
  handler.setPredicate(new RegExpPredicate(regexp, caseSensitive, null, false, false));
  myCompilingVisitor.getContext().getPattern().setHandler(expr, handler);
  return handler;
}
@Override
public void visitExpressionStatement(PsiExpressionStatement expr) {
  myCompilingVisitor.handle(expr);
  super.visitExpressionStatement(expr);
  // A statement without a trailing semicolon/comment is an incomplete pattern
  // fragment: treat it as a bare symbol or expression rather than a statement.
  final PsiElement child = expr.getLastChild();
  if (!(child instanceof PsiJavaToken) && !(child instanceof PsiComment)) {
    // search for expression or symbol
    final PsiElement reference = expr.getFirstChild();
    MatchingHandler referenceHandler = myCompilingVisitor.getContext().getPattern().getHandler(reference);
    if (referenceHandler instanceof SubstitutionHandler && (reference instanceof PsiReferenceExpression)) {
      // symbol: a lone pattern variable like "$x$" matches any symbol node
      myCompilingVisitor.getContext().getPattern().setHandler(expr, referenceHandler);
      referenceHandler.setFilter(SymbolNodeFilter.getInstance());
      myCompilingVisitor.setHandler(expr, new SymbolHandler((SubstitutionHandler)referenceHandler));
    }
    else if (reference instanceof PsiLiteralExpression) {
      // literal constant: matched against constant nodes only
      MatchingHandler handler = new ExpressionHandler();
      myCompilingVisitor.setHandler(expr, handler);
      handler.setFilter(ConstantFilter.getInstance());
    }
    else {
      // just expression
      MatchingHandler handler;
      myCompilingVisitor.setHandler(expr, handler = new ExpressionHandler());
      handler.setFilter(ExpressionFilter.getInstance());
    }
  }
  else if (expr.getExpression() instanceof PsiReferenceExpression &&
           (myCompilingVisitor.getContext().getPattern().isRealTypedVar(expr.getExpression()))) {
    // search for statement: a complete statement "$s$;" whose expression is a
    // pattern variable matches whole statements
    final MatchingHandler exprHandler = myCompilingVisitor.getContext().getPattern().getHandler(expr);
    if (exprHandler instanceof SubstitutionHandler) {
      SubstitutionHandler handler = (SubstitutionHandler)exprHandler;
      handler.setFilter(new StatementFilter());
      handler.setMatchHandler(new StatementHandler());
    }
  }
}
@Override
public void visitElement(PsiElement element) {
  // Generic fallback: let the compiling visitor register the element before
  // recursing into its children (handle() must run first).
  myCompilingVisitor.handle(element);
  super.visitElement(element);
}
private void handleReference(PsiJavaCodeReferenceElement reference) {
  // Optional throws-list entries must not contribute required search words
  // (see shouldOccur).
  if (!shouldOccur(reference)) {
    return;
  }
  handleReferenceText(reference.getReferenceName(), myCompilingVisitor.getContext());
}
private boolean shouldOccur(PsiJavaCodeReferenceElement reference) {
  // A reference inside the throws list of a method bound to a pattern
  // variable with min occurs 0 is optional, so it must not be indexed as a
  // required search word. Every other reference is indexed.
  final PsiElement parent = reference.getParent();
  if (!(parent instanceof PsiReferenceList)) {
    return true;
  }
  final PsiElement grandParent = parent.getParent();
  if (!(grandParent instanceof PsiMethod)) {
    return true;
  }
  final PsiMethod method = (PsiMethod)grandParent;
  if (method.getThrowsList() != parent) {
    return true;
  }
  final CompileContext compileContext = myCompilingVisitor.getContext();
  final String methodName = method.getName();
  if (!compileContext.getPattern().isTypedVar(methodName)) {
    return true;
  }
  final SubstitutionHandler handler = (SubstitutionHandler)compileContext.getPattern().getHandler(methodName);
  return handler == null || handler.getMinOccurs() != 0;
}
/**
 * Indexes {@code refname} for the word-based file prefilter. For pattern
 * variables, the variable's simple regexp (if suitable) is indexed instead;
 * variables with subtype semantics index the names of all descendant classes.
 */
private static void handleReferenceText(String refname, CompileContext compileContext) {
  if (refname == null) return;
  if (compileContext.getPattern().isTypedVar(refname)) {
    SubstitutionHandler handler = (SubstitutionHandler)compileContext.getPattern().getHandler(refname);
    RegExpPredicate predicate = MatchingHandler.getSimpleRegExpPredicate(handler);
    if (!GlobalCompilingVisitor.isSuitablePredicate(predicate, handler)) {
      // predicate too complex to be reduced to a plain word — cannot prefilter
      return;
    }
    refname = predicate.getRegExp();
    if (handler.isStrictSubtype() || handler.isSubtype()) {
      // subtype match: index every descendant class name instead of refname
      final OptimizingSearchHelper searchHelper = compileContext.getSearchHelper();
      if (addDescendantsOf(refname, handler.isSubtype(), searchHelper, compileContext)) {
        searchHelper.endTransaction();
      }
      return;
    }
  }
  GlobalCompilingVisitor.addFilesToSearchForGivenWord(refname, true, GlobalCompilingVisitor.OccurenceKind.CODE, compileContext);
}
/**
 * Adds the names of all descendants of the class named {@code refname} to the
 * search helper's code-word index. Returns whether any descendant was found.
 */
public static boolean addDescendantsOf(final String refname, final boolean subtype, OptimizingSearchHelper searchHelper, CompileContext context) {
  final List<PsiClass> classes = buildDescendants(refname, subtype, searchHelper, context);
  for (final PsiClass descendant : classes) {
    // Anonymous classes have no name of their own; use the base class reference.
    final String word = descendant instanceof PsiAnonymousClass
                        ? ((PsiAnonymousClass)descendant).getBaseClassReference().getReferenceName()
                        : descendant.getName();
    searchHelper.addWordToSearchInCode(word);
  }
  return !classes.isEmpty();
}
/**
 * Collects the inheritors of every class named {@code className} within the
 * current search scope; optionally includes the named classes themselves.
 */
private static List<PsiClass> buildDescendants(String className,
                                               boolean includeSelf,
                                               OptimizingSearchHelper searchHelper,
                                               CompileContext context) {
  if (!searchHelper.doOptimizing()) {
    return Collections.emptyList();
  }
  final SearchScope scope = context.getOptions().getScope();
  if (!(scope instanceof GlobalSearchScope)) {
    return Collections.emptyList();
  }
  final PsiShortNamesCache cache = PsiShortNamesCache.getInstance(context.getProject());
  final PsiClass[] classes = cache.getClassesByName(className, (GlobalSearchScope)scope);
  final List<PsiClass> descendants = new ArrayList<>();
  // List.add always returns true, so the inheritor search never stops early.
  final Processor<PsiClass> collector = descendants::add;
  for (PsiClass aClass : classes) {
    ClassInheritorsSearch.search(aClass, scope, true).forEach(collector);
  }
  if (includeSelf) {
    Collections.addAll(descendants, classes);
  }
  return descendants;
}
@Override
public void visitCodeBlock(PsiCodeBlock block) {
  myCompilingVisitor.setCodeBlockLevel(myCompilingVisitor.getCodeBlockLevel() + 1);
  MatchingStrategy strategy = null;
  for (PsiElement el = block.getFirstChild(); el != null; el = el.getNextSibling()) {
    if (GlobalCompilingVisitor.getFilter().accepts(el)) {
      // lexical nodes (e.g. whitespace) are recorded but not compiled
      if (el instanceof PsiWhiteSpace) {
        myCompilingVisitor.addLexicalNode(el);
      }
    }
    else {
      el.accept(this);
      if (myCompilingVisitor.getCodeBlockLevel() == 1) {
        // Top-level statements get wrapped in TopLevelMatchingHandler and must
        // agree on one matching strategy; comment/javadoc strategies yield to
        // the strategy of subsequent "real" statements.
        MatchingStrategy newstrategy = findStrategy(el);
        final MatchingHandler matchingHandler = myCompilingVisitor.getContext().getPattern().getHandler(el);
        myCompilingVisitor.getContext().getPattern().setHandler(el, new TopLevelMatchingHandler(matchingHandler));
        if (strategy == null || (strategy instanceof JavaDocMatchingStrategy)) {
          strategy = newstrategy;
        }
        else {
          if (strategy.getClass() != newstrategy.getClass()) {
            if (!(strategy instanceof CommentMatchingStrategy)) {
              throw new UnsupportedPatternException(SSRBundle.message("different.strategies.for.top.level.nodes.error.message"));
            }
            strategy = newstrategy;
          }
        }
      }
    }
  }
  if (myCompilingVisitor.getCodeBlockLevel() == 1) {
    if (strategy == null) {
      // this should happen only for error patterns
      strategy = ExprMatchingStrategy.getInstance();
    }
    myCompilingVisitor.getContext().getPattern().setStrategy(strategy);
  }
  myCompilingVisitor.setCodeBlockLevel(myCompilingVisitor.getCodeBlockLevel() - 1);
}
private static MatchingStrategy findStrategy(PsiElement el) {
  // PsiDocComment is a PsiComment subtype, so the javadoc check must come first.
  if (el instanceof PsiDocComment) {
    return JavaDocMatchingStrategy.getInstance();
  }
  if (el instanceof PsiComment) {
    return CommentMatchingStrategy.getInstance();
  }
  return ExprMatchingStrategy.getInstance();
}
private static boolean needsSupers(final PsiElement element, final MatchingHandler handler) {
  // Only class members bound to a substitution variable with subtype
  // semantics require matching against inherited (super) members.
  if (!(element.getParent() instanceof PsiClass) || !(handler instanceof SubstitutionHandler)) {
    return false;
  }
  final SubstitutionHandler substitutionHandler = (SubstitutionHandler)handler;
  return substitutionHandler.isStrictSubtype() || substitutionHandler.isSubtype();
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.analyzer;
import com.google.common.collect.ImmutableList;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
import io.airlift.configuration.DefunctConfig;
import io.airlift.units.DataSize;
import javax.validation.constraints.Min;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import static com.facebook.presto.sql.analyzer.RegexLibrary.JONI;
@DefunctConfig({
"resource-group-manager",
"experimental-syntax-enabled",
"analyzer.experimental-syntax-enabled"
})
/**
 * Feature flags and tuning knobs for the SQL analyzer/optimizer, bound from
 * configuration properties via airlift's {@code @Config} annotations.
 * Method names and config keys form the binding contract: all setters return
 * {@code this} to support fluent configuration binding.
 */
public class FeaturesConfig
{
    /**
     * Allowed values for the "optimizer.processing-optimization" property.
     */
    public static class ProcessingOptimization
    {
        public static final String DISABLED = "disabled";
        public static final String COLUMNAR = "columnar";
        public static final String COLUMNAR_DICTIONARY = "columnar_dictionary";
        public static final List<String> AVAILABLE_OPTIONS = ImmutableList.of(DISABLED, COLUMNAR, COLUMNAR_DICTIONARY);
    }

    // Join planning and write distribution flags.
    private boolean distributedIndexJoinsEnabled;
    private boolean distributedJoinsEnabled = true;
    private boolean colocatedJoinsEnabled;
    private boolean reorderJoins;
    private boolean redistributeWrites = true;
    // Optimizer toggles.
    private boolean optimizeMetadataQueries;
    private boolean optimizeHashGeneration = true;
    private boolean optimizeSingleDistinct = true;
    private boolean optimizerReorderWindows = true;
    private boolean pushTableWriteThroughUnion = true;
    // Deprecated legacy-semantics switches.
    private boolean legacyArrayAgg;
    private boolean legacyOrderBy;
    private boolean legacyMapSubscript;
    private boolean optimizeMixedDistinctAggregations;
    private String processingOptimization = ProcessingOptimization.DISABLED;
    private boolean dictionaryAggregation;
    private boolean resourceGroups;
    // RE2/J regex engine limits.
    private int re2JDfaStatesLimit = Integer.MAX_VALUE;
    private int re2JDfaRetries = 5;
    private RegexLibrary regexLibrary = JONI;
    // Experimental spill-to-disk settings.
    private boolean spillEnabled;
    private DataSize operatorMemoryLimitBeforeSpill = new DataSize(4, DataSize.Unit.MEGABYTE);
    private Path spillerSpillPath = Paths.get(System.getProperty("java.io.tmpdir"), "presto", "spills");
    private int spillerThreads = 4;
    private boolean iterativeOptimizerEnabled;

    public boolean isResourceGroupsEnabled()
    {
        return resourceGroups;
    }

    @Config("experimental.resource-groups-enabled")
    public FeaturesConfig setResourceGroupsEnabled(boolean enabled)
    {
        resourceGroups = enabled;
        return this;
    }

    public boolean isDistributedIndexJoinsEnabled()
    {
        return distributedIndexJoinsEnabled;
    }

    @Config("distributed-index-joins-enabled")
    public FeaturesConfig setDistributedIndexJoinsEnabled(boolean distributedIndexJoinsEnabled)
    {
        this.distributedIndexJoinsEnabled = distributedIndexJoinsEnabled;
        return this;
    }

    public boolean isDistributedJoinsEnabled()
    {
        return distributedJoinsEnabled;
    }

    @Config("deprecated.legacy-array-agg")
    public FeaturesConfig setLegacyArrayAgg(boolean legacyArrayAgg)
    {
        this.legacyArrayAgg = legacyArrayAgg;
        return this;
    }

    public boolean isLegacyArrayAgg()
    {
        return legacyArrayAgg;
    }

    @Config("deprecated.legacy-order-by")
    public FeaturesConfig setLegacyOrderBy(boolean value)
    {
        this.legacyOrderBy = value;
        return this;
    }

    public boolean isLegacyOrderBy()
    {
        return legacyOrderBy;
    }

    @Config("deprecated.legacy-map-subscript")
    public FeaturesConfig setLegacyMapSubscript(boolean value)
    {
        this.legacyMapSubscript = value;
        return this;
    }

    public boolean isLegacyMapSubscript()
    {
        return legacyMapSubscript;
    }

    @Config("distributed-joins-enabled")
    public FeaturesConfig setDistributedJoinsEnabled(boolean distributedJoinsEnabled)
    {
        this.distributedJoinsEnabled = distributedJoinsEnabled;
        return this;
    }

    public boolean isColocatedJoinsEnabled()
    {
        return colocatedJoinsEnabled;
    }

    @Config("colocated-joins-enabled")
    @ConfigDescription("Experimental: Use a colocated join when possible")
    public FeaturesConfig setColocatedJoinsEnabled(boolean colocatedJoinsEnabled)
    {
        this.colocatedJoinsEnabled = colocatedJoinsEnabled;
        return this;
    }

    public boolean isJoinReorderingEnabled()
    {
        return reorderJoins;
    }

    @Config("reorder-joins")
    @ConfigDescription("Experimental: Reorder joins to optimize plan")
    public FeaturesConfig setJoinReorderingEnabled(boolean reorderJoins)
    {
        this.reorderJoins = reorderJoins;
        return this;
    }

    public boolean isRedistributeWrites()
    {
        return redistributeWrites;
    }

    @Config("redistribute-writes")
    public FeaturesConfig setRedistributeWrites(boolean redistributeWrites)
    {
        this.redistributeWrites = redistributeWrites;
        return this;
    }

    public boolean isOptimizeMetadataQueries()
    {
        return optimizeMetadataQueries;
    }

    @Config("optimizer.optimize-metadata-queries")
    public FeaturesConfig setOptimizeMetadataQueries(boolean optimizeMetadataQueries)
    {
        this.optimizeMetadataQueries = optimizeMetadataQueries;
        return this;
    }

    public boolean isOptimizeHashGeneration()
    {
        return optimizeHashGeneration;
    }

    @Config("optimizer.optimize-hash-generation")
    public FeaturesConfig setOptimizeHashGeneration(boolean optimizeHashGeneration)
    {
        this.optimizeHashGeneration = optimizeHashGeneration;
        return this;
    }

    public boolean isOptimizeSingleDistinct()
    {
        return optimizeSingleDistinct;
    }

    @Config("optimizer.optimize-single-distinct")
    public FeaturesConfig setOptimizeSingleDistinct(boolean optimizeSingleDistinct)
    {
        this.optimizeSingleDistinct = optimizeSingleDistinct;
        return this;
    }

    public boolean isReorderWindows()
    {
        return optimizerReorderWindows;
    }

    @Config("optimizer.reorder-windows")
    public FeaturesConfig setReorderWindows(boolean reorderWindows)
    {
        this.optimizerReorderWindows = reorderWindows;
        return this;
    }

    public boolean isPushTableWriteThroughUnion()
    {
        return pushTableWriteThroughUnion;
    }

    @Config("optimizer.push-table-write-through-union")
    public FeaturesConfig setPushTableWriteThroughUnion(boolean pushTableWriteThroughUnion)
    {
        this.pushTableWriteThroughUnion = pushTableWriteThroughUnion;
        return this;
    }

    public String getProcessingOptimization()
    {
        return processingOptimization;
    }

    @Config("optimizer.processing-optimization")
    public FeaturesConfig setProcessingOptimization(String processingOptimization)
    {
        // Validate against the closed set of options before accepting the value.
        // NOTE(review): IllegalArgumentException would be the conventional type
        // for a bad argument here — confirm nothing depends on IllegalStateException.
        if (!ProcessingOptimization.AVAILABLE_OPTIONS.contains(processingOptimization)) {
            throw new IllegalStateException(String.format("Value %s is not valid for processingOptimization.", processingOptimization));
        }
        this.processingOptimization = processingOptimization;
        return this;
    }

    public boolean isDictionaryAggregation()
    {
        return dictionaryAggregation;
    }

    @Config("optimizer.dictionary-aggregation")
    public FeaturesConfig setDictionaryAggregation(boolean dictionaryAggregation)
    {
        this.dictionaryAggregation = dictionaryAggregation;
        return this;
    }

    @Min(2)
    public int getRe2JDfaStatesLimit()
    {
        return re2JDfaStatesLimit;
    }

    @Config("re2j.dfa-states-limit")
    public FeaturesConfig setRe2JDfaStatesLimit(int re2JDfaStatesLimit)
    {
        this.re2JDfaStatesLimit = re2JDfaStatesLimit;
        return this;
    }

    @Min(0)
    public int getRe2JDfaRetries()
    {
        return re2JDfaRetries;
    }

    @Config("re2j.dfa-retries")
    public FeaturesConfig setRe2JDfaRetries(int re2JDfaRetries)
    {
        this.re2JDfaRetries = re2JDfaRetries;
        return this;
    }

    public RegexLibrary getRegexLibrary()
    {
        return regexLibrary;
    }

    @Config("regex-library")
    public FeaturesConfig setRegexLibrary(RegexLibrary regexLibrary)
    {
        this.regexLibrary = regexLibrary;
        return this;
    }

    public boolean isSpillEnabled()
    {
        return spillEnabled;
    }

    @Config("experimental.spill-enabled")
    public FeaturesConfig setSpillEnabled(boolean spillEnabled)
    {
        this.spillEnabled = spillEnabled;
        return this;
    }

    public boolean isIterativeOptimizerEnabled()
    {
        return iterativeOptimizerEnabled;
    }

    @Config("experimental.iterative-optimizer-enabled")
    public FeaturesConfig setIterativeOptimizerEnabled(boolean value)
    {
        this.iterativeOptimizerEnabled = value;
        return this;
    }

    public DataSize getOperatorMemoryLimitBeforeSpill()
    {
        return operatorMemoryLimitBeforeSpill;
    }

    @Config("experimental.operator-memory-limit-before-spill")
    public FeaturesConfig setOperatorMemoryLimitBeforeSpill(DataSize operatorMemoryLimitBeforeSpill)
    {
        this.operatorMemoryLimitBeforeSpill = operatorMemoryLimitBeforeSpill;
        return this;
    }

    public Path getSpillerSpillPath()
    {
        return spillerSpillPath;
    }

    @Config("experimental.spiller-spill-path")
    public FeaturesConfig setSpillerSpillPath(String spillPath)
    {
        this.spillerSpillPath = Paths.get(spillPath);
        return this;
    }

    public int getSpillerThreads()
    {
        return spillerThreads;
    }

    @Config("experimental.spiller-threads")
    public FeaturesConfig setSpillerThreads(int spillerThreads)
    {
        this.spillerThreads = spillerThreads;
        return this;
    }

    public boolean isOptimizeMixedDistinctAggregations()
    {
        return optimizeMixedDistinctAggregations;
    }

    @Config("optimizer.optimize-mixed-distinct-aggregations")
    public FeaturesConfig setOptimizeMixedDistinctAggregations(boolean value)
    {
        this.optimizeMixedDistinctAggregations = value;
        return this;
    }
}
| |
/*
* Copyright 2015 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package im.vector.activity;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.http.SslError;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.KeyEvent;
import android.webkit.SslErrorHandler;
import android.webkit.ValueCallback;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import com.google.gson.Gson;
import com.google.gson.internal.LinkedTreeMap;
import com.google.gson.reflect.TypeToken;
import im.vector.R;
import java.net.URLDecoder;
import java.util.HashMap;
/**
 * FallbackLoginActivity is the fallback login activity,
 * i.e. this activity is created when the client does not support the
 * homeserver's login flow natively: the homeserver's own web-based login
 * page is loaded in a WebView and the resulting credentials are handed back
 * to the caller through a JS bridge and the activity result.
 */
public class FallbackLoginActivity extends Activity {
    private static final String LOG_TAG = "FallbackLoginAct";

    // Intent extra carrying the home server URL to log in against.
    public static String EXTRA_HOME_SERVER_ID = "FallbackLoginActivity.EXTRA_HOME_SERVER_ID";

    // WebView hosting the homeserver's fallback login page.
    WebView mWebView = null;

    // Home server URL; normalized in onCreate to end with a trailing '/'.
    private String mHomeServerUrl = null;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login_fallback);

        mWebView = (WebView) findViewById(R.id.account_creation_webview);
        // The fallback login page relies on javascript (JS bridge below).
        mWebView.getSettings().setJavaScriptEnabled(true);

        Intent intent = getIntent();
        // default to matrix.org when no home server is provided
        mHomeServerUrl = "https://matrix.org/";
        if (intent.hasExtra(EXTRA_HOME_SERVER_ID)) {
            mHomeServerUrl = intent.getStringExtra(EXTRA_HOME_SERVER_ID);
        }

        // check the trailing slash
        if (!mHomeServerUrl.endsWith("/")) {
            mHomeServerUrl += "/";
        }

        // Stale session cookies must be cleared before showing the login page.
        // The clearing API differs before/after Lollipop (sync vs async), so the
        // webview launch is deferred accordingly.
        android.webkit.CookieManager cookieManager = android.webkit.CookieManager.getInstance();

        // nothing to clear: launch immediately
        if ((null != cookieManager) && !cookieManager.hasCookies()) {
            launchWebView();
        } else if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
            // pre-Lollipop: synchronous (deprecated) clearing API
            try {
                cookieManager.removeAllCookie();
            } catch (Exception e) {
                Log.e(LOG_TAG, " cookieManager.removeAllCookie() fails " + e.getLocalizedMessage());
            }
            launchWebView();
        } else {
            // Lollipop+: asynchronous clearing; launch from the completion callback
            try {
                cookieManager.removeAllCookies(new ValueCallback<Boolean>() {
                    @Override
                    public void onReceiveValue(Boolean value) {
                        launchWebView();
                    }
                });
            } catch (Exception e) {
                Log.e(LOG_TAG, " cookieManager.removeAllCookie() fails " + e.getLocalizedMessage());
                launchWebView();
            }
        }
    }

    /**
     * Loads the homeserver's fallback login page and installs the WebView
     * client that handles SSL prompts, injects the JS bridge and intercepts
     * the bridge's "js:" callback URLs.
     */
    private void launchWebView() {
        mWebView.loadUrl(mHomeServerUrl + "_matrix/static/client/login/");

        mWebView.setWebViewClient(new WebViewClient(){
            @Override
            public void onReceivedSslError(WebView view, SslErrorHandler handler,
                                           SslError error) {
                // Ask the user whether to trust the unverified certificate.
                final SslErrorHandler fHander = handler;

                AlertDialog.Builder builder = new AlertDialog.Builder(FallbackLoginActivity.this);
                builder.setMessage(R.string.ssl_could_not_verify);

                builder.setPositiveButton(R.string.ssl_trust, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        fHander.proceed();
                    }
                });

                builder.setNegativeButton(R.string.ssl_do_not_trust, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        fHander.cancel();
                    }
                });

                // back key cancels, same as "do not trust"
                builder.setOnKeyListener(new DialogInterface.OnKeyListener() {
                    @Override
                    public boolean onKey(DialogInterface dialog, int keyCode, KeyEvent event) {
                        if (event.getAction() == KeyEvent.ACTION_UP && keyCode == KeyEvent.KEYCODE_BACK) {
                            fHander.cancel();
                            dialog.dismiss();
                            return true;
                        }
                        return false;
                    }
                });

                AlertDialog dialog = builder.create();
                dialog.show();
            }

            @Override
            public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) {
                super.onReceivedError(view, errorCode, description, failingUrl);

                // on error case, close this activity
                FallbackLoginActivity.this.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        FallbackLoginActivity.this.finish();
                    }
                });
            }

            @Override
            public void onPageFinished(WebView view, String url) {
                // avoid infinite onPageFinished call
                if (url.startsWith("http")) {
                    // Generic method to make a bridge between JS and the UIWebView:
                    // serialized parameters are smuggled out through a transient
                    // iframe whose "js:" src is intercepted in shouldOverrideUrlLoading.
                    final String MXCJavascriptSendObjectMessage = "javascript:window.matrixLogin.sendObjectMessage = function(parameters) { var iframe = document.createElement('iframe'); iframe.setAttribute('src', 'js:' + JSON.stringify(parameters)); document.documentElement.appendChild(iframe); iframe.parentNode.removeChild(iframe); iframe = null; };";

                    view.loadUrl(MXCJavascriptSendObjectMessage);

                    // The function the fallback page calls when the registration is complete
                    final String MXCJavascriptOnRegistered = "javascript:window.matrixLogin.onLogin = function(homeserverUrl, userId, accessToken) { matrixLogin.sendObjectMessage({ 'action': 'onLogin', 'homeServer': homeserverUrl,'userId': userId, 'accessToken': accessToken }); };";

                    view.loadUrl(MXCJavascriptOnRegistered);
                }
            }

            @Override
            public boolean shouldOverrideUrlLoading(android.webkit.WebView view, java.lang.String url) {
                // Bridge callback: "js:" urls carry a url-encoded JSON payload.
                if ((null != url) && url.startsWith("js:")) {
                    String json = url.substring(3);
                    HashMap<String, Object> serverParams = null;

                    try {
                        // URL decode
                        json = URLDecoder.decode(json, "UTF-8");
                        serverParams = new Gson().fromJson(json, new TypeToken<HashMap<String, Object>>() {}.getType());
                    } catch (Exception e) {
                        Log.e(LOG_TAG, "## shouldOverrideUrlLoading() : fromJson failed " + e.getMessage());
                    }

                    // succeeds to parse parameters
                    if (null != serverParams) {
                        try {
                            String action = (String) serverParams.get("action");
                            // NOTE(review): the credentials map is read from the
                            // "homeServer" key — confirm against the JS bridge payload.
                            LinkedTreeMap<String, String> parameters = (LinkedTreeMap<String, String>)serverParams.get("homeServer");

                            if (TextUtils.equals("onLogin", action) && (null != parameters)) {
                                final String userId = parameters.get("user_id");
                                final String accessToken = parameters.get("access_token");
                                final String homeServer = parameters.get("home_server");

                                // remove the trailing /
                                if (mHomeServerUrl.endsWith("/")) {
                                    mHomeServerUrl = mHomeServerUrl.substring(0, mHomeServerUrl.length() - 1);
                                }

                                // check if the parameters are defined
                                if ((null != homeServer) && (null != userId) && (null != accessToken)) {
                                    // hand the credentials back to the caller and close
                                    FallbackLoginActivity.this.runOnUiThread(new Runnable() {
                                        @Override
                                        public void run() {
                                            Intent returnIntent = new Intent();
                                            returnIntent.putExtra("homeServerUrl", mHomeServerUrl);
                                            returnIntent.putExtra("homeServer", homeServer);
                                            returnIntent.putExtra("userId", userId);
                                            returnIntent.putExtra("accessToken", accessToken);
                                            setResult(RESULT_OK, returnIntent);

                                            FallbackLoginActivity.this.finish();
                                        }
                                    });
                                }
                            }
                        } catch (Exception e) {
                            Log.e(LOG_TAG, "## shouldOverrideUrlLoading() : failed " + e.getMessage());
                        }
                    }
                    // bridge urls are consumed, never loaded
                    return true;
                }
                return false;
            }
        });
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (keyCode == KeyEvent.KEYCODE_MENU) {
            // This is to fix a bug in the v7 support lib. If there is no options menu and you hit MENU, it will crash with a
            // NPE @ android.support.v7.app.ActionBarImplICS.getThemedContext(ActionBarImplICS.java:274)
            // This can safely be removed if we add in menu options on this screen
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    @Override
    public void onLowMemory() {
        super.onLowMemory();
        CommonActivityUtils.onLowMemory(this);
    }

    @Override
    public void onTrimMemory(int level) {
        super.onTrimMemory(level);
        CommonActivityUtils.onTrimMemory(this, level);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.quartz;
import java.util.Date;
import java.util.Map;
import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.camel.AsyncProcessor;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.Route;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.DefaultEndpoint;
import org.apache.camel.support.EndpointHelper;
import org.apache.camel.util.ObjectHelper;
import org.quartz.Calendar;
import org.quartz.CronTrigger;
import org.quartz.Job;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.ObjectAlreadyExistsException;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleTrigger;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.TriggerKey;
import static org.quartz.CronScheduleBuilder.cronSchedule;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
/**
* Provides a scheduled delivery of messages using the Quartz 2.x scheduler.
*/
@UriEndpoint(firstVersion = "2.12.0", scheme = "quartz", title = "Quartz", syntax = "quartz:groupName/triggerName", consumerOnly = true, label = "scheduling")
public class QuartzEndpoint extends DefaultEndpoint {
private TriggerKey triggerKey;
private volatile AsyncProcessor processor;
// Internal flags tracking whether the job has been added to the scheduler and whether it is currently paused.
private final AtomicBoolean jobAdded = new AtomicBoolean(false);
private final AtomicBoolean jobPaused = new AtomicBoolean(false);
@UriPath(description = "The quartz group name to use. The combination of group name and timer name should be unique.", defaultValue = "Camel")
private String groupName;
@UriPath @Metadata(required = true)
private String triggerName;
@UriParam
private String cron;
@UriParam
private boolean stateful;
@UriParam(label = "scheduler")
private boolean fireNow;
@UriParam(defaultValue = "true")
private boolean deleteJob = true;
@UriParam
private boolean pauseJob;
@UriParam
private boolean durableJob;
@UriParam
private boolean recoverableJob;
@UriParam(label = "scheduler", defaultValue = "500")
private long triggerStartDelay = 500;
@UriParam(label = "scheduler")
private int startDelayedSeconds;
@UriParam(label = "scheduler", defaultValue = "true")
private boolean autoStartScheduler = true;
@UriParam(label = "advanced")
private boolean usingFixedCamelContextName;
@UriParam(label = "advanced")
private boolean prefixJobNameWithEndpointId;
@UriParam(prefix = "trigger.", multiValue = true, label = "advanced")
private Map<String, Object> triggerParameters;
@UriParam(prefix = "job.", multiValue = true, label = "advanced")
private Map<String, Object> jobParameters;
@UriParam(label = "advanced")
private Calendar customCalendar;
public QuartzEndpoint(String uri, QuartzComponent quartzComponent) {
super(uri, quartzComponent);
}
public String getGroupName() {
return triggerKey.getName();
}
public String getTriggerName() {
return triggerKey.getName();
}
/**
* The quartz timer name to use. The combination of group name and timer name should be unique.
*/
public void setTriggerName(String triggerName) {
this.triggerName = triggerName;
}
public String getCron() {
return cron;
}
public boolean isStateful() {
return stateful;
}
public boolean isFireNow() {
return fireNow;
}
public long getTriggerStartDelay() {
return triggerStartDelay;
}
public boolean isDeleteJob() {
return deleteJob;
}
public boolean isPauseJob() {
return pauseJob;
}
/**
* If set to true, then the trigger automatically pauses when route stop.
* Else if set to false, it will remain in scheduler. When set to false, it will also mean user may reuse
* pre-configured trigger with camel Uri. Just ensure the names match.
* Notice you cannot have both deleteJob and pauseJob set to true.
*/
public void setPauseJob(boolean pauseJob) {
this.pauseJob = pauseJob;
}
/**
* In case of scheduler has already started, we want the trigger start slightly after current time to
* ensure endpoint is fully started before the job kicks in.
*/
public void setTriggerStartDelay(long triggerStartDelay) {
this.triggerStartDelay = triggerStartDelay;
}
/**
* If set to true, then the trigger automatically delete when route stop.
* Else if set to false, it will remain in scheduler. When set to false, it will also mean user may reuse
* pre-configured trigger with camel Uri. Just ensure the names match.
* Notice you cannot have both deleteJob and pauseJob set to true.
*/
public void setDeleteJob(boolean deleteJob) {
this.deleteJob = deleteJob;
}
/**
* If it is true will fire the trigger when the route is start when using SimpleTrigger.
*/
public void setFireNow(boolean fireNow) {
this.fireNow = fireNow;
}
/**
* Uses a Quartz @PersistJobDataAfterExecution and @DisallowConcurrentExecution instead of the default job.
*/
public void setStateful(boolean stateful) {
this.stateful = stateful;
}
public boolean isDurableJob() {
return durableJob;
}
/**
* Whether or not the job should remain stored after it is orphaned (no triggers point to it).
*/
public void setDurableJob(boolean durableJob) {
this.durableJob = durableJob;
}
public boolean isRecoverableJob() {
return recoverableJob;
}
/**
* Instructs the scheduler whether or not the job should be re-executed if a 'recovery' or 'fail-over' situation is encountered.
*/
public void setRecoverableJob(boolean recoverableJob) {
this.recoverableJob = recoverableJob;
}
public boolean isUsingFixedCamelContextName() {
return usingFixedCamelContextName;
}
/**
* If it is true, JobDataMap uses the CamelContext name directly to reference the CamelContext,
* if it is false, JobDataMap uses use the CamelContext management name which could be changed during the deploy time.
*/
public void setUsingFixedCamelContextName(boolean usingFixedCamelContextName) {
this.usingFixedCamelContextName = usingFixedCamelContextName;
}
public Map<String, Object> getTriggerParameters() {
return triggerParameters;
}
/**
* To configure additional options on the trigger.
*/
public void setTriggerParameters(Map<String, Object> triggerParameters) {
this.triggerParameters = triggerParameters;
}
public Map<String, Object> getJobParameters() {
return jobParameters;
}
/**
* To configure additional options on the job.
*/
public void setJobParameters(Map<String, Object> jobParameters) {
this.jobParameters = jobParameters;
}
public int getStartDelayedSeconds() {
return startDelayedSeconds;
}
/**
* Seconds to wait before starting the quartz scheduler.
*/
public void setStartDelayedSeconds(int startDelayedSeconds) {
this.startDelayedSeconds = startDelayedSeconds;
}
public boolean isAutoStartScheduler() {
return autoStartScheduler;
}
/**
* Whether or not the scheduler should be auto started.
*/
public void setAutoStartScheduler(boolean autoStartScheduler) {
this.autoStartScheduler = autoStartScheduler;
}
public boolean isPrefixJobNameWithEndpointId() {
return prefixJobNameWithEndpointId;
}
/**
* Whether the job name should be prefixed with endpoint id
* @param prefixJobNameWithEndpointId
*/
public void setPrefixJobNameWithEndpointId(boolean prefixJobNameWithEndpointId) {
this.prefixJobNameWithEndpointId = prefixJobNameWithEndpointId;
}
/**
* Specifies a cron expression to define when to trigger.
*/
public void setCron(String cron) {
this.cron = cron;
}
public TriggerKey getTriggerKey() {
return triggerKey;
}
public void setTriggerKey(TriggerKey triggerKey) {
this.triggerKey = triggerKey;
}
public Calendar getCustomCalendar() {
return customCalendar;
}
/**
* Specifies a custom calendar to avoid specific range of date
*/
public void setCustomCalendar(Calendar customCalendar) {
this.customCalendar = customCalendar;
}
@Override
public Producer createProducer() throws Exception {
throw new UnsupportedOperationException("Quartz producer is not supported.");
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
QuartzConsumer result = new QuartzConsumer(this, processor);
configureConsumer(result);
return result;
}
@Override
protected void doStart() throws Exception {
if (isDeleteJob() && isPauseJob()) {
throw new IllegalArgumentException("Cannot have both options deleteJob and pauseJob enabled");
}
if (ObjectHelper.isNotEmpty(customCalendar)) {
getComponent().getScheduler().addCalendar(QuartzConstants.QUARTZ_CAMEL_CUSTOM_CALENDAR, customCalendar, true, false);
}
addJobInScheduler();
}
@Override
protected void doStop() throws Exception {
removeJobInScheduler();
}
private void removeJobInScheduler() throws Exception {
Scheduler scheduler = getComponent().getScheduler();
if (scheduler == null) {
return;
}
if (deleteJob) {
boolean isClustered = scheduler.getMetaData().isJobStoreClustered();
if (!scheduler.isShutdown() && !isClustered) {
log.info("Deleting job {}", triggerKey);
scheduler.unscheduleJob(triggerKey);
jobAdded.set(false);
}
} else if (pauseJob) {
pauseTrigger();
}
// Decrement camel job count for this endpoint
AtomicInteger number = (AtomicInteger) scheduler.getContext().get(QuartzConstants.QUARTZ_CAMEL_JOBS_COUNT);
if (number != null) {
number.decrementAndGet();
}
}
private void addJobInScheduler() throws Exception {
// Add or use existing trigger to/from scheduler
Scheduler scheduler = getComponent().getScheduler();
JobDetail jobDetail;
Trigger oldTrigger = scheduler.getTrigger(triggerKey);
boolean triggerExisted = oldTrigger != null;
if (triggerExisted && !isRecoverableJob()) {
ensureNoDupTriggerKey();
}
jobDetail = createJobDetail();
Trigger trigger = createTrigger(jobDetail);
QuartzHelper.updateJobDataMap(getCamelContext(), jobDetail, getEndpointUri(), isUsingFixedCamelContextName());
if (triggerExisted) {
// Reschedule job if trigger settings were changed
if (hasTriggerChanged(oldTrigger, trigger)) {
scheduler.rescheduleJob(triggerKey, trigger);
}
} else {
try {
// Schedule it now. Remember that scheduler might not be started it, but we can schedule now.
scheduler.scheduleJob(jobDetail, trigger);
} catch (ObjectAlreadyExistsException ex) {
// some other VM might may have stored the job & trigger in DB in clustered mode, in the mean time
if (!(getComponent().isClustered())) {
throw ex;
} else {
trigger = scheduler.getTrigger(triggerKey);
if (trigger == null) {
throw new SchedulerException("Trigger could not be found in quartz scheduler.");
}
}
}
}
if (log.isInfoEnabled()) {
log.info("Job {} (triggerType={}, jobClass={}) is scheduled. Next fire date is {}",
new Object[] {trigger.getKey(), trigger.getClass().getSimpleName(),
jobDetail.getJobClass().getSimpleName(), trigger.getNextFireTime()});
}
// Increase camel job count for this endpoint
AtomicInteger number = (AtomicInteger) scheduler.getContext().get(QuartzConstants.QUARTZ_CAMEL_JOBS_COUNT);
if (number != null) {
number.incrementAndGet();
}
jobAdded.set(true);
}
private boolean hasTriggerChanged(Trigger oldTrigger, Trigger newTrigger) {
if (newTrigger instanceof CronTrigger && oldTrigger instanceof CronTrigger) {
CronTrigger newCron = (CronTrigger) newTrigger;
CronTrigger oldCron = (CronTrigger) oldTrigger;
return !newCron.getCronExpression().equals(oldCron.getCronExpression());
} else if (newTrigger instanceof SimpleTrigger && oldTrigger instanceof SimpleTrigger) {
SimpleTrigger newSimple = (SimpleTrigger) newTrigger;
SimpleTrigger oldSimple = (SimpleTrigger) oldTrigger;
return newSimple.getRepeatInterval() != oldSimple.getRepeatInterval()
|| newSimple.getRepeatCount() != oldSimple.getRepeatCount();
} else {
return !newTrigger.getClass().equals(oldTrigger.getClass()) || !newTrigger.equals(oldTrigger);
}
}
private void ensureNoDupTriggerKey() {
for (Route route : getCamelContext().getRoutes()) {
if (route.getEndpoint() instanceof QuartzEndpoint) {
QuartzEndpoint quartzEndpoint = (QuartzEndpoint) route.getEndpoint();
TriggerKey checkTriggerKey = quartzEndpoint.getTriggerKey();
if (triggerKey.equals(checkTriggerKey)) {
throw new IllegalArgumentException("Trigger key " + triggerKey + " is already in use by " + quartzEndpoint);
}
}
}
}
private Trigger createTrigger(JobDetail jobDetail) throws Exception {
Trigger result;
Date startTime = new Date();
if (getComponent().getScheduler().isStarted()) {
startTime = new Date(System.currentTimeMillis() + triggerStartDelay);
}
if (cron != null) {
log.debug("Creating CronTrigger: {}", cron);
String timeZone = (String)triggerParameters.get("timeZone");
if (timeZone != null) {
if (ObjectHelper.isNotEmpty(customCalendar)) {
result = TriggerBuilder.newTrigger()
.withIdentity(triggerKey)
.startAt(startTime)
.withSchedule(cronSchedule(cron)
.withMisfireHandlingInstructionFireAndProceed()
.inTimeZone(TimeZone.getTimeZone(timeZone)))
.modifiedByCalendar(QuartzConstants.QUARTZ_CAMEL_CUSTOM_CALENDAR)
.build();
} else {
result = TriggerBuilder.newTrigger()
.withIdentity(triggerKey)
.startAt(startTime)
.withSchedule(cronSchedule(cron)
.withMisfireHandlingInstructionFireAndProceed()
.inTimeZone(TimeZone.getTimeZone(timeZone)))
.build();
}
jobDetail.getJobDataMap().put(QuartzConstants.QUARTZ_TRIGGER_CRON_TIMEZONE, timeZone);
} else {
if (ObjectHelper.isNotEmpty(customCalendar)) {
result = TriggerBuilder.newTrigger()
.withIdentity(triggerKey)
.startAt(startTime)
.withSchedule(cronSchedule(cron)
.withMisfireHandlingInstructionFireAndProceed())
.modifiedByCalendar(QuartzConstants.QUARTZ_CAMEL_CUSTOM_CALENDAR)
.build();
} else {
result = TriggerBuilder.newTrigger()
.withIdentity(triggerKey)
.startAt(startTime)
.withSchedule(cronSchedule(cron)
.withMisfireHandlingInstructionFireAndProceed())
.build();
}
}
// enrich job map with details
jobDetail.getJobDataMap().put(QuartzConstants.QUARTZ_TRIGGER_TYPE, "cron");
jobDetail.getJobDataMap().put(QuartzConstants.QUARTZ_TRIGGER_CRON_EXPRESSION, cron);
} else {
log.debug("Creating SimpleTrigger.");
int repeat = SimpleTrigger.REPEAT_INDEFINITELY;
String repeatString = (String) triggerParameters.get("repeatCount");
if (repeatString != null) {
repeat = EndpointHelper.resolveParameter(getCamelContext(), repeatString, Integer.class);
// need to update the parameters
triggerParameters.put("repeatCount", repeat);
}
// default use 1 sec interval
long interval = 1000;
String intervalString = (String) triggerParameters.get("repeatInterval");
if (intervalString != null) {
interval = EndpointHelper.resolveParameter(getCamelContext(), intervalString, Long.class);
// need to update the parameters
triggerParameters.put("repeatInterval", interval);
}
TriggerBuilder<SimpleTrigger> triggerBuilder;
if (ObjectHelper.isNotEmpty(customCalendar)) {
triggerBuilder = TriggerBuilder.newTrigger()
.withIdentity(triggerKey)
.startAt(startTime)
.withSchedule(simpleSchedule().withMisfireHandlingInstructionFireNow()
.withRepeatCount(repeat).withIntervalInMilliseconds(interval)).modifiedByCalendar(QuartzConstants.QUARTZ_CAMEL_CUSTOM_CALENDAR);
} else {
triggerBuilder = TriggerBuilder.newTrigger()
.withIdentity(triggerKey)
.startAt(startTime)
.withSchedule(simpleSchedule().withMisfireHandlingInstructionFireNow()
.withRepeatCount(repeat).withIntervalInMilliseconds(interval));
}
if (fireNow) {
triggerBuilder = triggerBuilder.startNow();
}
result = triggerBuilder.build();
// enrich job map with details
jobDetail.getJobDataMap().put(QuartzConstants.QUARTZ_TRIGGER_TYPE, "simple");
jobDetail.getJobDataMap().put(QuartzConstants.QUARTZ_TRIGGER_SIMPLE_REPEAT_COUNTER, repeat);
jobDetail.getJobDataMap().put(QuartzConstants.QUARTZ_TRIGGER_SIMPLE_REPEAT_INTERVAL, interval);
}
if (triggerParameters != null && triggerParameters.size() > 0) {
log.debug("Setting user extra triggerParameters {}", triggerParameters);
setProperties(result, triggerParameters);
}
log.debug("Created trigger={}", result);
return result;
}
private JobDetail createJobDetail() throws Exception {
// Camel endpoint timer will assume one to one for JobDetail and Trigger, so let's use same name as trigger
String name = triggerKey.getName();
String group = triggerKey.getGroup();
Class<? extends Job> jobClass = stateful ? StatefulCamelJob.class : CamelJob.class;
log.debug("Creating new {}.", jobClass.getSimpleName());
JobBuilder builder = JobBuilder.newJob(jobClass)
.withIdentity(name, group);
if (durableJob) {
builder = builder.storeDurably();
}
if (recoverableJob) {
builder = builder.requestRecovery();
}
JobDetail result = builder.build();
// Let user parameters to further set JobDetail properties.
if (jobParameters != null && jobParameters.size() > 0) {
log.debug("Setting user extra jobParameters {}", jobParameters);
setProperties(result, jobParameters);
}
log.debug("Created jobDetail={}", result);
return result;
}
@Override
public QuartzComponent getComponent() {
return (QuartzComponent)super.getComponent();
}
public void pauseTrigger() throws Exception {
Scheduler scheduler = getComponent().getScheduler();
boolean isClustered = scheduler.getMetaData().isJobStoreClustered();
if (jobPaused.get() || isClustered) {
return;
}
jobPaused.set(true);
if (!scheduler.isShutdown()) {
log.info("Pausing trigger {}", triggerKey);
scheduler.pauseTrigger(triggerKey);
}
}
public void resumeTrigger() throws Exception {
if (!jobPaused.get()) {
return;
}
jobPaused.set(false);
Scheduler scheduler = getComponent().getScheduler();
if (scheduler != null) {
log.info("Resuming trigger {}", triggerKey);
scheduler.resumeTrigger(triggerKey);
}
}
public void onConsumerStart(QuartzConsumer quartzConsumer) throws Exception {
this.processor = quartzConsumer.getAsyncProcessor();
if (!jobAdded.get()) {
addJobInScheduler();
} else {
resumeTrigger();
}
}
public void onConsumerStop(QuartzConsumer quartzConsumer) throws Exception {
if (jobAdded.get()) {
pauseTrigger();
}
this.processor = null;
}
AsyncProcessor getProcessor() {
return this.processor;
}
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.experiment.catalog.model;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.*;
import java.sql.Timestamp;
import java.util.Collection;
/**
 * JPA entity mapped to the {@code PROCESS} table of the experiment catalog.
 * <p>
 * A process belongs to one {@link Experiment} and aggregates its inputs, outputs,
 * statuses, errors, tasks and resource schedule. Column mappings are declared on the
 * getters (property access), so accessor names must stay stable for the ORM mapping.
 */
@Entity
@Table(name = "PROCESS")
public class Process {
    private final static Logger logger = LoggerFactory.getLogger(Process.class);

    // Scalar columns of the PROCESS table.
    private String processId;
    private String experimentId;
    private Timestamp creationTime;
    private Timestamp lastUpdateTime;
    private String processDetail;
    private String applicationInterfaceId;
    private String taskDag;
    private String applicationDeploymentId;
    private String computeResourceId;
    private String gatewayExecutionId;
    private boolean enableEmailNotification;
    private String emailAddresses;
    private String storageId;
    private String experimentDataDir;
    private String userName;

    // Relationships to the owning experiment and to dependent catalog entities.
    private Experiment experiment;
    private Collection<ProcessError> processErrors;
    private Collection<ProcessInput> processInputs;
    private Collection<ProcessOutput> processOutputs;
    private ProcessResourceSchedule processResourceSchedule;
    private Collection<ProcessStatus> processStatuses;
    private Collection<Task> tasks;

    // Security / credential related columns.
    private String userDn;
    private boolean generateCert;
    private boolean useUserCRPref;

    @Id
    @Column(name = "PROCESS_ID")
    public String getProcessId() {
        return processId;
    }

    public void setProcessId(String processId) {
        this.processId = processId;
    }

    @Column(name = "EXPERIMENT_ID")
    public String getExperimentId() {
        return experimentId;
    }

    public void setExperimentId(String experimentId) {
        this.experimentId = experimentId;
    }

    @Column(name = "CREATION_TIME")
    public Timestamp getCreationTime() {
        return creationTime;
    }

    public void setCreationTime(Timestamp creationTime) {
        this.creationTime = creationTime;
    }

    @Column(name = "LAST_UPDATE_TIME")
    public Timestamp getLastUpdateTime() {
        return lastUpdateTime;
    }

    public void setLastUpdateTime(Timestamp lastUpdateTime) {
        this.lastUpdateTime = lastUpdateTime;
    }

    @Lob
    @Column(name = "PROCESS_DETAIL")
    public String getProcessDetail() {
        return processDetail;
    }

    public void setProcessDetail(String processDetail) {
        this.processDetail = processDetail;
    }

    @Column(name = "APPLICATION_INTERFACE_ID")
    public String getApplicationInterfaceId() {
        return applicationInterfaceId;
    }

    public void setApplicationInterfaceId(String applicationInterfaceId) {
        this.applicationInterfaceId = applicationInterfaceId;
    }

    @Column(name = "USERNAME")
    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    @Column(name = "STORAGE_RESOURCE_ID")
    public String getStorageId() {
        return storageId;
    }

    public void setStorageId(String storageId) {
        this.storageId = storageId;
    }

    @Lob
    @Column(name = "TASK_DAG")
    public String getTaskDag() {
        return taskDag;
    }

    public void setTaskDag(String taskDag) {
        this.taskDag = taskDag;
    }

    @Column(name = "APPLICATION_DEPLOYMENT_ID")
    public String getApplicationDeploymentId() {
        return applicationDeploymentId;
    }

    public void setApplicationDeploymentId(String applicationDeploymentId) {
        this.applicationDeploymentId = applicationDeploymentId;
    }

    @Column(name = "COMPUTE_RESOURCE_ID")
    public String getComputeResourceId() {
        return computeResourceId;
    }

    public void setComputeResourceId(String computeResourceId) {
        this.computeResourceId = computeResourceId;
    }

    @Column(name = "GATEWAY_EXECUTION_ID")
    public String getGatewayExecutionId() {
        return gatewayExecutionId;
    }

    public void setGatewayExecutionId(String gatewayExecutionId) {
        this.gatewayExecutionId = gatewayExecutionId;
    }

    @Column(name = "ENABLE_EMAIL_NOTIFICATION")
    public boolean getEnableEmailNotification() {
        return enableEmailNotification;
    }

    public void setEnableEmailNotification(boolean enableEmailNotification) {
        this.enableEmailNotification = enableEmailNotification;
    }

    @Lob
    @Column(name = "EMAIL_ADDRESSES")
    public String getEmailAddresses() {
        return emailAddresses;
    }

    public void setEmailAddresses(String emailAddresses) {
        this.emailAddresses = emailAddresses;
    }

    @Column(name = "USER_DN")
    public String getUserDn() {
        return userDn;
    }

    public void setUserDn(String userDn) {
        this.userDn = userDn;
    }

    @Column(name = "GENERATE_CERT")
    public boolean getGenerateCert() {
        return generateCert;
    }

    public void setGenerateCert(boolean generateCert) {
        this.generateCert = generateCert;
    }

    @Column(name = "EXPERIMENT_DATA_DIR")
    public String getExperimentDataDir() {
        return experimentDataDir;
    }

    public void setExperimentDataDir(String experimentDataDir) {
        this.experimentDataDir = experimentDataDir;
    }

    @ManyToOne
    @JoinColumn(name = "EXPERIMENT_ID", referencedColumnName = "EXPERIMENT_ID")
    public Experiment getExperiment() {
        return experiment;
    }

    public void setExperiment(Experiment experimentByExperimentId) {
        this.experiment = experimentByExperimentId;
    }

    @OneToMany(mappedBy = "process")
    public Collection<ProcessError> getProcessErrors() {
        return processErrors;
    }

    public void setProcessErrors(Collection<ProcessError> processErrorsByProcessId) {
        this.processErrors = processErrorsByProcessId;
    }

    @OneToMany(mappedBy = "process")
    public Collection<ProcessInput> getProcessInputs() {
        return processInputs;
    }

    public void setProcessInputs(Collection<ProcessInput> processInputsByProcessId) {
        this.processInputs = processInputsByProcessId;
    }

    @OneToMany(mappedBy = "process")
    public Collection<ProcessOutput> getProcessOutputs() {
        return processOutputs;
    }

    public void setProcessOutputs(Collection<ProcessOutput> processOutputsByProcessId) {
        this.processOutputs = processOutputsByProcessId;
    }

    @OneToOne(mappedBy = "process")
    public ProcessResourceSchedule getProcessResourceSchedule() {
        return processResourceSchedule;
    }

    public void setProcessResourceSchedule(ProcessResourceSchedule processResourceSchedulesByProcessId) {
        this.processResourceSchedule = processResourceSchedulesByProcessId;
    }

    @OneToMany(mappedBy = "process")
    public Collection<ProcessStatus> getProcessStatuses() {
        return processStatuses;
    }

    public void setProcessStatuses(Collection<ProcessStatus> processStatusesByProcessId) {
        this.processStatuses = processStatusesByProcessId;
    }

    @OneToMany(mappedBy = "process")
    public Collection<Task> getTasks() {
        return tasks;
    }

    public void setTasks(Collection<Task> taskByProcessId) {
        this.tasks = taskByProcessId;
    }

    @Column(name = "USE_USER_CR_PREF")
    public boolean isUseUserCRPref() {
        return useUserCRPref;
    }

    public void setUseUserCRPref(boolean useUserCRPref) {
        this.useUserCRPref = useUserCRPref;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.platform.cache;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import javax.cache.Cache;
import javax.cache.integration.CompletionListener;
import javax.cache.processor.EntryProcessorException;
import javax.cache.processor.EntryProcessorResult;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.binary.BinaryRawReader;
import org.apache.ignite.binary.BinaryRawWriter;
import org.apache.ignite.cache.CacheEntryProcessor;
import org.apache.ignite.cache.CacheMetrics;
import org.apache.ignite.cache.CachePartialUpdateException;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.query.Query;
import org.apache.ignite.cache.query.QueryMetrics;
import org.apache.ignite.cache.query.ScanQuery;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.SqlQuery;
import org.apache.ignite.cache.query.TextQuery;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.internal.binary.BinaryRawReaderEx;
import org.apache.ignite.internal.binary.BinaryRawWriterEx;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.CacheOperationContext;
import org.apache.ignite.internal.processors.cache.CachePartialUpdateCheckedException;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionTopology;
import org.apache.ignite.internal.processors.cache.query.QueryCursorEx;
import org.apache.ignite.internal.processors.platform.PlatformAbstractTarget;
import org.apache.ignite.internal.processors.platform.PlatformContext;
import org.apache.ignite.internal.processors.platform.PlatformNativeException;
import org.apache.ignite.internal.processors.platform.PlatformTarget;
import org.apache.ignite.internal.processors.platform.cache.expiry.PlatformExpiryPolicy;
import org.apache.ignite.internal.processors.platform.cache.query.PlatformContinuousQuery;
import org.apache.ignite.internal.processors.platform.cache.query.PlatformContinuousQueryProxy;
import org.apache.ignite.internal.processors.platform.cache.query.PlatformFieldsQueryCursor;
import org.apache.ignite.internal.processors.platform.cache.query.PlatformQueryCursor;
import org.apache.ignite.internal.processors.platform.memory.PlatformMemory;
import org.apache.ignite.internal.processors.platform.memory.PlatformOutputStream;
import org.apache.ignite.internal.processors.platform.utils.PlatformConfigurationUtils;
import org.apache.ignite.internal.processors.platform.utils.PlatformFutureUtils;
import org.apache.ignite.internal.processors.platform.utils.PlatformListenable;
import org.apache.ignite.internal.processors.platform.utils.PlatformUtils;
import org.apache.ignite.internal.processors.platform.utils.PlatformWriterClosure;
import org.apache.ignite.internal.processors.query.QueryUtils;
import org.apache.ignite.internal.util.GridConcurrentFactory;
import org.apache.ignite.internal.util.typedef.C1;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.lang.IgniteBiInClosure;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.transactions.TransactionDeadlockException;
import org.apache.ignite.transactions.TransactionTimeoutException;
import org.jetbrains.annotations.Nullable;
/**
* Native cache wrapper implementation.
*/
@SuppressWarnings({"unchecked", "WeakerAccess", "rawtypes"})
public class PlatformCache extends PlatformAbstractTarget {
/** */
public static final int OP_CLEAR = 1;
/** */
public static final int OP_CLEAR_ALL = 2;
/** */
public static final int OP_CONTAINS_KEY = 3;
/** */
public static final int OP_CONTAINS_KEYS = 4;
/** */
public static final int OP_GET = 5;
/** */
public static final int OP_GET_ALL = 6;
/** */
public static final int OP_GET_AND_PUT = 7;
/** */
public static final int OP_GET_AND_PUT_IF_ABSENT = 8;
/** */
public static final int OP_GET_AND_REMOVE = 9;
/** */
public static final int OP_GET_AND_REPLACE = 10;
/** */
public static final int OP_GET_NAME = 11;
/** */
public static final int OP_INVOKE = 12;
/** */
public static final int OP_INVOKE_ALL = 13;
/** */
public static final int OP_IS_LOCAL_LOCKED = 14;
/** */
public static final int OP_LOAD_CACHE = 15;
/** */
public static final int OP_LOC_EVICT = 16;
/** */
public static final int OP_LOC_LOAD_CACHE = 17;
/** */
public static final int OP_LOCAL_CLEAR = 20;
/** */
public static final int OP_LOCAL_CLEAR_ALL = 21;
/** */
public static final int OP_LOCK = 22;
/** */
public static final int OP_LOCK_ALL = 23;
/** */
public static final int OP_LOCAL_METRICS = 24;
/** */
private static final int OP_PEEK = 25;
/** */
private static final int OP_PUT = 26;
/** */
private static final int OP_PUT_ALL = 27;
/** */
public static final int OP_PUT_IF_ABSENT = 28;
/** */
public static final int OP_QRY_CONTINUOUS = 29;
/** */
/** Scan query. */
public static final int OP_QRY_SCAN = 30;
/** SQL query. */
public static final int OP_QRY_SQL = 31;
/** SQL fields query. */
public static final int OP_QRY_SQL_FIELDS = 32;
/** Full-text query. */
public static final int OP_QRY_TXT = 33;
/** Remove the given set of keys. */
public static final int OP_REMOVE_ALL = 34;
/** Conditional remove (key + expected value), returns boolean. */
public static final int OP_REMOVE_BOOL = 35;
/** Remove by key, returns boolean. */
public static final int OP_REMOVE_OBJ = 36;
/** Unconditional replace (key + value). */
public static final int OP_REPLACE_2 = 37;
/** Conditional replace (key + old value + new value). */
public static final int OP_REPLACE_3 = 38;
/** Get cache configuration. */
public static final int OP_GET_CONFIG = 39;
/** Load all entries for the given keys through the store. */
public static final int OP_LOAD_ALL = 40;
/** Clear the whole cache. */
public static final int OP_CLEAR_CACHE = 41;
/** Create a withPartitionRecover projection. */
public static final int OP_WITH_PARTITION_RECOVER = 42;
/** Remove all entries. */
public static final int OP_REMOVE_ALL2 = 43;
/** Create a withKeepBinary projection. */
public static final int OP_WITH_KEEP_BINARY = 44;
/** Create a withExpiryPolicy projection. */
public static final int OP_WITH_EXPIRY_POLICY = 45;
/** Create a withNoRetries projection. */
public static final int OP_WITH_NO_RETRIES = 46;
/** Create a withSkipStore projection. */
public static final int OP_WITH_SKIP_STORE = 47;
/** Cache size (int). */
public static final int OP_SIZE = 48;
/** Cache iterator. */
public static final int OP_ITERATOR = 49;
/** Local entries iterator. */
public static final int OP_LOC_ITERATOR = 50;
/** Enter (acquire) a registered lock. */
public static final int OP_ENTER_LOCK = 51;
/** Exit (release) a registered lock. */
public static final int OP_EXIT_LOCK = 52;
/** Try to enter a registered lock, optionally with timeout. */
public static final int OP_TRY_ENTER_LOCK = 53;
/** Unregister (close) a lock. */
public static final int OP_CLOSE_LOCK = 54;
/** Rebalance the cache. */
public static final int OP_REBALANCE = 55;
/** Local cache size (int). */
public static final int OP_SIZE_LOC = 56;
/** Async put. */
public static final int OP_PUT_ASYNC = 57;
/** Async clear of the whole cache. */
public static final int OP_CLEAR_CACHE_ASYNC = 58;
/** Async clear of the given keys. */
public static final int OP_CLEAR_ALL_ASYNC = 59;
/** Async remove of all entries. */
public static final int OP_REMOVE_ALL2_ASYNC = 60;
/** Async cache size (int). */
public static final int OP_SIZE_ASYNC = 61;
/** Async clear of a single key. */
public static final int OP_CLEAR_ASYNC = 62;
/** Async loadCache. */
public static final int OP_LOAD_CACHE_ASYNC = 63;
/** Async localLoadCache. */
public static final int OP_LOC_LOAD_CACHE_ASYNC = 64;
/** Async putAll. */
public static final int OP_PUT_ALL_ASYNC = 65;
/** Async removeAll for the given keys. */
public static final int OP_REMOVE_ALL_ASYNC = 66;
/** Async get. */
public static final int OP_GET_ASYNC = 67;
/** Async containsKey. */
public static final int OP_CONTAINS_KEY_ASYNC = 68;
/** Async containsKeys. */
public static final int OP_CONTAINS_KEYS_ASYNC = 69;
/** Async conditional remove (key + expected value). */
public static final int OP_REMOVE_BOOL_ASYNC = 70;
/** Async remove by key. */
public static final int OP_REMOVE_OBJ_ASYNC = 71;
/** Async getAll. */
public static final int OP_GET_ALL_ASYNC = 72;
/** Async getAndPut. */
public static final int OP_GET_AND_PUT_ASYNC = 73;
/** Async getAndPutIfAbsent. */
public static final int OP_GET_AND_PUT_IF_ABSENT_ASYNC = 74;
/** Async getAndRemove. */
public static final int OP_GET_AND_REMOVE_ASYNC = 75;
/** Async getAndReplace. */
public static final int OP_GET_AND_REPLACE_ASYNC = 76;
/** Async unconditional replace. */
public static final int OP_REPLACE_2_ASYNC = 77;
/** Async conditional replace. */
public static final int OP_REPLACE_3_ASYNC = 78;
/** Async entry-processor invoke. */
public static final int OP_INVOKE_ASYNC = 79;
/** Async entry-processor invokeAll. */
public static final int OP_INVOKE_ALL_ASYNC = 80;
/** Async putIfAbsent. */
public static final int OP_PUT_IF_ABSENT_ASYNC = 81;
/** Delegate to a registered platform cache extension. */
public static final int OP_EXTENSION = 82;
/** Cluster-wide cache metrics. */
public static final int OP_GLOBAL_METRICS = 83;
/** Get lost partition ids. */
public static final int OP_GET_LOST_PARTITIONS = 84;
/** Query metrics. */
public static final int OP_QUERY_METRICS = 85;
/** Reset query metrics. */
public static final int OP_RESET_QUERY_METRICS = 86;
/** Preload a partition. */
public static final int OP_PRELOAD_PARTITION = 87;
/** Async preload of a partition. */
public static final int OP_PRELOAD_PARTITION_ASYNC = 88;
/** Preload a partition into local page memory. */
public static final int OP_LOCAL_PRELOAD_PARTITION = 89;
/** Cache size (long), optionally per partition. */
public static final int OP_SIZE_LONG = 90;
/** Async cache size (long), optionally per partition. */
public static final int OP_SIZE_LONG_ASYNC = 91;
/** Local cache size (long), optionally per partition. */
public static final int OP_SIZE_LONG_LOC = 92;
/** Enable or disable cache statistics. */
public static final int OP_ENABLE_STATISTICS = 93;
/** Clear cache statistics. */
public static final int OP_CLEAR_STATISTICS = 94;
/** Put that also updates the platform-side near cache (thread-local flag). */
private static final int OP_PUT_WITH_PLATFORM_CACHE = 95;
/** Reserve a local partition. */
private static final int OP_RESERVE_PARTITION = 96;
/** Release a previously reserved local partition. */
private static final int OP_RELEASE_PARTITION = 97;
/** Underlying JCache in binary mode. */
private final IgniteCacheProxy cache;
/** Initial JCache (not in binary mode). */
private final IgniteCache rawCache;
/** Whether this cache is created with "keepBinary" flag on the other side. */
private final boolean keepBinary;
/** Future result writer for getAll operations: writes the result as a nullable map. */
private static final PlatformFutureUtils.Writer WRITER_GET_ALL = new GetAllWriter();
/** Future result writer for invoke operations: writes either the result or the error. */
private static final PlatformFutureUtils.Writer WRITER_INVOKE = new EntryProcessorInvokeWriter();
/** Future result writer for invokeAll operations. */
private static final PlatformFutureUtils.Writer WRITER_INVOKE_ALL = new EntryProcessorInvokeAllWriter();
/** Map with currently active locks. */
private final ConcurrentMap<Long, Lock> lockMap = GridConcurrentFactory.newMap();
/** Lock ID sequence. */
private static final AtomicLong LOCK_ID_GEN = new AtomicLong();
/** Extensions. */
private final PlatformCacheExtension[] exts;
/**
 * Constructor. Delegates to the main constructor with an empty extensions array.
 *
 * @param platformCtx Context.
 * @param cache Underlying cache.
 * @param keepBinary Keep binary flag.
 */
@SuppressWarnings("ZeroLengthArrayAllocation")
public PlatformCache(PlatformContext platformCtx, IgniteCache cache, boolean keepBinary) {
    this(platformCtx, cache, keepBinary, new PlatformCacheExtension[0]);
}
/**
 * Constructor.
 * <p>
 * Keeps the supplied cache as {@link #rawCache} and stores a keep-binary projection
 * of it as the working {@link #cache}.
 *
 * @param platformCtx Context.
 * @param cache Underlying cache.
 * @param keepBinary Keep binary flag.
 * @param exts Extensions.
 */
public PlatformCache(PlatformContext platformCtx, IgniteCache cache, boolean keepBinary,
    PlatformCacheExtension[] exts) {
    super(platformCtx);
    assert cache != null;
    assert exts != null;
    this.rawCache = cache;
    this.cache = (IgniteCacheProxy)cache.withKeepBinary();
    this.keepBinary = keepBinary;
    this.exts = exts;
}
/**
 * Gets the initial cache as supplied to the constructor (not the keep-binary projection).
 *
 * @return Raw cache.
 */
public IgniteCache rawCache() {
    return rawCache;
}
/**
 * {@inheritDoc}
 * <p>
 * Dispatches stream-in/long-out cache operations. Arguments are read from {@code reader}
 * in the exact order the platform side wrote them, so the read order inside each case is
 * part of the wire protocol and must not be changed. Most cases return {@code TRUE}/{@code FALSE};
 * cases producing a value write it into {@code mem} via {@link #writeResult}.
 */
@Override public long processInStreamOutLong(int type, BinaryRawReaderEx reader, PlatformMemory mem)
    throws IgniteCheckedException {
    try {
        switch (type) {
            case OP_PUT:
                cache.put(reader.readObjectDetached(), reader.readObjectDetached());
                return TRUE;
            case OP_GET:
                return writeResult(mem, cache.get(reader.readObjectDetached()));
            case OP_REMOVE_BOOL:
                return cache.remove(reader.readObjectDetached(), reader.readObjectDetached()) ? TRUE : FALSE;
            case OP_REMOVE_ALL:
                cache.removeAll(PlatformUtils.readSet(reader));
                return TRUE;
            case OP_PUT_ALL:
                cache.putAll(PlatformUtils.readMap(reader));
                return TRUE;
            case OP_LOC_EVICT:
                cache.localEvict(PlatformUtils.readCollection(reader));
                return TRUE;
            case OP_CONTAINS_KEY:
                return cache.containsKey(reader.readObjectDetached()) ? TRUE : FALSE;
            case OP_CONTAINS_KEYS:
                return cache.containsKeys(PlatformUtils.readSet(reader)) ? TRUE : FALSE;
            case OP_REPLACE_3:
                return cache.replace(reader.readObjectDetached(), reader.readObjectDetached(),
                    reader.readObjectDetached()) ? TRUE : FALSE;
            case OP_LOC_LOAD_CACHE:
                loadCache0(reader, true);
                return TRUE;
            case OP_LOAD_CACHE:
                loadCache0(reader, false);
                return TRUE;
            case OP_CLEAR:
                cache.clear(reader.readObjectDetached());
                return TRUE;
            case OP_CLEAR_ALL:
                cache.clearAll(PlatformUtils.readSet(reader));
                return TRUE;
            case OP_LOCAL_CLEAR:
                cache.localClear(reader.readObjectDetached());
                return TRUE;
            case OP_LOCAL_CLEAR_ALL:
                cache.localClearAll(PlatformUtils.readSet(reader));
                return TRUE;
            case OP_PUT_IF_ABSENT:
                return cache.putIfAbsent(reader.readObjectDetached(), reader.readObjectDetached()) ? TRUE : FALSE;
            case OP_REPLACE_2:
                return cache.replace(reader.readObjectDetached(), reader.readObjectDetached()) ? TRUE : FALSE;
            case OP_REMOVE_OBJ:
                return cache.remove(reader.readObjectDetached()) ? TRUE : FALSE;
            case OP_IS_LOCAL_LOCKED:
                return cache.isLocalLocked(reader.readObjectDetached(), reader.readBoolean()) ? TRUE : FALSE;
            case OP_LOAD_ALL: {
                boolean replaceExisting = reader.readBoolean();
                Set<Object> keys = PlatformUtils.readSet(reader);
                long futId = reader.readLong();
                int futTyp = reader.readInt();
                // Completion is delivered to the platform side through the listened future.
                CompletionListenable fut = new CompletionListenable();
                PlatformFutureUtils.listen(platformCtx, fut, futId, futTyp, null, this);
                cache.loadAll(keys, replaceExisting, fut);
                return TRUE;
            }
            case OP_GET_AND_PUT:
                return writeResult(mem, cache.getAndPut(reader.readObjectDetached(), reader.readObjectDetached()));
            case OP_GET_AND_REPLACE:
                return writeResult(mem, cache.getAndReplace(reader.readObjectDetached(), reader.readObjectDetached()));
            case OP_GET_AND_REMOVE:
                return writeResult(mem, cache.getAndRemove(reader.readObjectDetached()));
            case OP_GET_AND_PUT_IF_ABSENT:
                return writeResult(mem, cache.getAndPutIfAbsent(reader.readObjectDetached(), reader.readObjectDetached()));
            case OP_PEEK: {
                Object key = reader.readObjectDetached();
                CachePeekMode[] modes = PlatformUtils.decodeCachePeekModes(reader.readInt());
                return writeResult(mem, cache.localPeek(key, modes));
            }
            case OP_TRY_ENTER_LOCK: {
                try {
                    long id = reader.readLong();
                    long timeout = reader.readLong();
                    // Timeout of -1 means "try without waiting".
                    boolean res = timeout == -1
                        ? lock(id).tryLock()
                        : lock(id).tryLock(timeout, TimeUnit.MILLISECONDS);
                    return res ? TRUE : FALSE;
                }
                catch (InterruptedException e) {
                    throw new IgniteCheckedException(e);
                }
            }
            case OP_GET_ALL: {
                Set keys = PlatformUtils.readSet(reader);
                Map entries = cache.getAll(keys);
                return writeResult(mem, entries, new PlatformWriterClosure<Map>() {
                    @Override public void write(BinaryRawWriterEx writer, Map val) {
                        PlatformUtils.writeNullableMap(writer, val);
                    }
                });
            }
            // Async operations: the key/value arguments are read first (argument evaluation
            // is left-to-right), then readAndListenFuture consumes the future id/type.
            case OP_PUT_ASYNC: {
                readAndListenFuture(reader,
                    cache.putAsync(reader.readObjectDetached(), reader.readObjectDetached()));
                return TRUE;
            }
            case OP_CLEAR_CACHE_ASYNC: {
                readAndListenFuture(reader, cache.clearAsync());
                return TRUE;
            }
            case OP_CLEAR_ALL_ASYNC: {
                readAndListenFuture(reader, cache.clearAllAsync(PlatformUtils.readSet(reader)));
                return TRUE;
            }
            case OP_REMOVE_ALL2_ASYNC: {
                readAndListenFuture(reader, cache.removeAllAsync());
                return TRUE;
            }
            case OP_SIZE_ASYNC: {
                CachePeekMode[] modes = PlatformUtils.decodeCachePeekModes(reader.readInt());
                readAndListenFuture(reader, cache.sizeAsync(modes));
                return TRUE;
            }
            case OP_SIZE_LONG_ASYNC: {
                CachePeekMode[] modes = PlatformUtils.decodeCachePeekModes(reader.readInt());
                // Optional partition id is preceded by a presence flag.
                Integer part = reader.readBoolean() ? reader.readInt() : null;
                readAndListenFuture(reader, part != null ? cache.sizeLongAsync(part, modes) :
                    cache.sizeLongAsync(modes));
                return TRUE;
            }
            case OP_CLEAR_ASYNC: {
                readAndListenFuture(reader, cache.clearAsync(reader.readObjectDetached()));
                return TRUE;
            }
            case OP_LOAD_CACHE_ASYNC: {
                readAndListenFuture(reader, loadCacheAsync0(reader, false));
                return TRUE;
            }
            case OP_LOC_LOAD_CACHE_ASYNC: {
                readAndListenFuture(reader, loadCacheAsync0(reader, true));
                return TRUE;
            }
            case OP_PUT_ALL_ASYNC:
                readAndListenFuture(reader, cache.putAllAsync(PlatformUtils.readMap(reader)));
                return TRUE;
            case OP_REMOVE_ALL_ASYNC:
                readAndListenFuture(reader, cache.removeAllAsync(PlatformUtils.readSet(reader)));
                return TRUE;
            case OP_REBALANCE:
                readAndListenFuture(reader, cache.rebalance());
                return TRUE;
            case OP_GET_ASYNC:
                readAndListenFuture(reader, cache.getAsync(reader.readObjectDetached()));
                return TRUE;
            case OP_CONTAINS_KEY_ASYNC:
                readAndListenFuture(reader, cache.containsKeyAsync(reader.readObjectDetached()));
                return TRUE;
            case OP_CONTAINS_KEYS_ASYNC:
                readAndListenFuture(reader, cache.containsKeysAsync(PlatformUtils.readSet(reader)));
                return TRUE;
            case OP_REMOVE_OBJ_ASYNC:
                readAndListenFuture(reader, cache.removeAsync(reader.readObjectDetached()));
                return TRUE;
            case OP_REMOVE_BOOL_ASYNC:
                readAndListenFuture(reader,
                    cache.removeAsync(reader.readObjectDetached(), reader.readObjectDetached()));
                return TRUE;
            case OP_GET_ALL_ASYNC: {
                Set keys = PlatformUtils.readSet(reader);
                readAndListenFuture(reader, cache.getAllAsync(keys), WRITER_GET_ALL);
                return TRUE;
            }
            case OP_GET_AND_PUT_ASYNC:
                readAndListenFuture(reader,
                    cache.getAndPutAsync(reader.readObjectDetached(), reader.readObjectDetached()));
                return TRUE;
            case OP_GET_AND_PUT_IF_ABSENT_ASYNC:
                readAndListenFuture(reader,
                    cache.getAndPutIfAbsentAsync(reader.readObjectDetached(), reader.readObjectDetached()));
                return TRUE;
            case OP_GET_AND_REMOVE_ASYNC:
                readAndListenFuture(reader, cache.getAndRemoveAsync(reader.readObjectDetached()));
                return TRUE;
            case OP_GET_AND_REPLACE_ASYNC:
                readAndListenFuture(reader,
                    cache.getAndReplaceAsync(reader.readObjectDetached(), reader.readObjectDetached()));
                return TRUE;
            case OP_REPLACE_2_ASYNC:
                readAndListenFuture(reader,
                    cache.replaceAsync(reader.readObjectDetached(), reader.readObjectDetached()));
                return TRUE;
            case OP_REPLACE_3_ASYNC:
                readAndListenFuture(reader,
                    cache.replaceAsync(reader.readObjectDetached(), reader.readObjectDetached(),
                        reader.readObjectDetached()));
                return TRUE;
            case OP_INVOKE_ASYNC: {
                Object key = reader.readObjectDetached();
                long ptr = reader.readLong();
                CacheEntryProcessor proc = platformCtx.createCacheEntryProcessor(reader.readObjectDetached(), ptr);
                readAndListenFuture(reader, cache.invokeAsync(key, proc), WRITER_INVOKE);
                return TRUE;
            }
            case OP_INVOKE_ALL_ASYNC: {
                Set<Object> keys = PlatformUtils.readSet(reader);
                long ptr = reader.readLong();
                CacheEntryProcessor proc = platformCtx.createCacheEntryProcessor(reader.readObjectDetached(), ptr);
                readAndListenFuture(reader, cache.invokeAllAsync(keys, proc), WRITER_INVOKE_ALL);
                return TRUE;
            }
            case OP_PUT_IF_ABSENT_ASYNC:
                readAndListenFuture(reader,
                    cache.putIfAbsentAsync(reader.readObjectDetached(), reader.readObjectDetached()));
                return TRUE;
            case OP_INVOKE: {
                Object key = reader.readObjectDetached();
                long ptr = reader.readLong();
                CacheEntryProcessor proc = platformCtx.createCacheEntryProcessor(reader.readObjectDetached(), ptr);
                return writeResult(mem, cache.invoke(key, proc));
            }
            case OP_INVOKE_ALL: {
                Set<Object> keys = PlatformUtils.readSet(reader);
                long ptr = reader.readLong();
                CacheEntryProcessor proc = platformCtx.createCacheEntryProcessor(reader.readObjectDetached(), ptr);
                Map results = cache.invokeAll(keys, proc);
                return writeResult(mem, results, new PlatformWriterClosure<Map>() {
                    @Override public void write(BinaryRawWriterEx writer, Map val) {
                        writeInvokeAllResult(writer, val);
                    }
                });
            }
            case OP_LOCK: {
                // The returned id is used by subsequent OP_ENTER_LOCK / OP_EXIT_LOCK / OP_CLOSE_LOCK.
                long id = registerLock(cache.lock(reader.readObjectDetached()));
                return writeResult(mem, id, new PlatformWriterClosure<Long>() {
                    @Override public void write(BinaryRawWriterEx writer, Long val) {
                        writer.writeLong(val);
                    }
                });
            }
            case OP_LOCK_ALL: {
                long id = registerLock(cache.lockAll(PlatformUtils.readCollection(reader)));
                return writeResult(mem, id, new PlatformWriterClosure<Long>() {
                    @Override public void write(BinaryRawWriterEx writer, Long val) {
                        writer.writeLong(val);
                    }
                });
            }
            case OP_EXTENSION:
                PlatformCacheExtension ext = extension(reader.readInt());
                return ext.processInOutStreamLong(this, reader.readInt(), reader, mem);
            case OP_PRELOAD_PARTITION_ASYNC:
                readAndListenFuture(reader, cache.preloadPartitionAsync(reader.readInt()));
                return TRUE;
            case OP_LOCAL_PRELOAD_PARTITION:
                return cache.localPreloadPartition(reader.readInt()) ? TRUE : FALSE;
            case OP_SIZE_LONG:
            case OP_SIZE_LONG_LOC: {
                CachePeekMode[] modes = PlatformUtils.decodeCachePeekModes(reader.readInt());
                Integer part = reader.readBoolean() ? reader.readInt() : null;
                if (type == OP_SIZE_LONG)
                    return part != null ? cache.sizeLong(part, modes) : cache.sizeLong(modes);
                else
                    return part != null ? cache.localSizeLong(part, modes) : cache.localSizeLong(modes);
            }
            case OP_PUT_WITH_PLATFORM_CACHE:
                // Thread-local flag tells the update pipeline to propagate to the platform near cache.
                platformCtx.enableThreadLocalForPlatformCacheUpdate();
                try {
                    cache.put(reader.readObjectDetached(), reader.readObjectDetached());
                } finally {
                    platformCtx.disableThreadLocalForPlatformCacheUpdate();
                }
                return TRUE;
        }
    }
    catch (Exception e) {
        // Serialize the (converted) error into the exchange memory and signal failure.
        PlatformOutputStream out = mem.output();
        BinaryRawWriterEx writer = platformCtx.writer(out);
        Exception err = convertException(e);
        PlatformUtils.writeError(err, writer);
        PlatformUtils.writeErrorData(err, writer);
        out.synchronize();
        return ERROR;
    }
    // Unrecognized op code: let the superclass handle it.
    return super.processInStreamOutLong(type, reader, mem);
}
/**
 * Writes the result to reused stream, if any.
 *
 * @param mem Exchange memory.
 * @param obj Result object; {@code null} means "no result".
 * @return {@code TRUE} if a result was written, {@code FALSE} if {@code obj} was null.
 */
public long writeResult(PlatformMemory mem, Object obj) {
    return writeResult(mem, obj, null);
}
/**
 * Writes the result to reused stream, if any.
 *
 * @param mem Exchange memory to write into.
 * @param obj Result object; {@code null} means "no result".
 * @param clo Optional closure that performs the actual write; when {@code null},
 *      the object is written as a detached object.
 * @return {@code TRUE} if a result was written, {@code FALSE} if {@code obj} was null.
 */
public long writeResult(PlatformMemory mem, Object obj, PlatformWriterClosure clo) {
    if (obj == null)
        return FALSE;
    PlatformOutputStream out = mem.output();
    BinaryRawWriterEx w = platformCtx.writer(out);
    if (clo != null)
        clo.write(w, obj);
    else
        w.writeObjectDetached(obj);
    out.synchronize();
    return TRUE;
}
/**
 * Loads cache via localLoadCache or loadCache.
 * <p>
 * Reads the optional entry filter first, then the argument array, matching the
 * platform-side write order.
 *
 * @param reader Binary reader.
 * @param loc Local flag: {@code true} for localLoadCache, {@code false} for loadCache.
 */
private void loadCache0(BinaryRawReaderEx reader, boolean loc) {
    PlatformCacheEntryFilter filter = createPlatformCacheEntryFilter(reader);
    Object[] args = readLoadCacheArgs(reader);
    if (loc)
        cache.localLoadCache(filter, args);
    else
        cache.loadCache(filter, args);
}
/**
 * Asynchronously loads cache via localLoadCacheAsync or loadCacheAsync.
 * <p>
 * Reads the optional entry filter first, then the argument array, matching the
 * platform-side write order.
 *
 * @param reader Binary reader.
 * @param loc Local flag: {@code true} for localLoadCacheAsync, {@code false} for loadCacheAsync.
 * @return Cache async operation future.
 */
private IgniteFuture<Void> loadCacheAsync0(BinaryRawReaderEx reader, boolean loc) {
    PlatformCacheEntryFilter filter = createPlatformCacheEntryFilter(reader);
    Object[] args = readLoadCacheArgs(reader);
    return loc ? cache.localLoadCacheAsync(filter, args) : cache.loadCacheAsync(filter, args);
}
/**
 * Reads an optional platform predicate and wraps it into a cache entry filter.
 *
 * @param reader Binary reader.
 * @return Entry filter, or {@code null} when no predicate was supplied.
 */
@Nullable private PlatformCacheEntryFilter createPlatformCacheEntryFilter(BinaryRawReaderEx reader) {
    Object pred = reader.readObjectDetached();
    return pred == null ? null : platformCtx.createCacheEntryFilter(pred, 0);
}
/**
 * Reads the loadCache argument array: a count followed by that many detached objects.
 *
 * @param reader Binary reader.
 * @return Arguments array, or {@code null} when the count is not positive.
 */
@Nullable private Object[] readLoadCacheArgs(BinaryRawReaderEx reader) {
    int argCnt = reader.readInt();
    if (argCnt <= 0)
        return null;
    Object[] args = new Object[argCnt];
    for (int i = 0; i < argCnt; i++)
        args[i] = reader.readObjectDetached();
    return args;
}
/**
 * {@inheritDoc}
 * <p>
 * Handles operations that read arguments from a binary stream and return a platform
 * target (query cursors, projections, iterators). The read order in each case matches
 * the platform-side write order and must not be changed.
 */
@Override public PlatformTarget processInStreamOutObject(int type, BinaryRawReaderEx reader)
    throws IgniteCheckedException {
    switch (type) {
        case OP_QRY_SQL:
            return runQuery(readSqlQuery(reader));
        case OP_QRY_SQL_FIELDS:
            return runFieldsQuery(readFieldsQuery(reader));
        case OP_QRY_TXT:
            return runQuery(readTextQuery(reader));
        case OP_QRY_SCAN:
            return runQuery(readScanQuery(reader));
        case OP_QRY_CONTINUOUS: {
            long ptr = reader.readLong();
            boolean loc = reader.readBoolean();
            boolean hasFilter = reader.readBoolean();
            Object filter = reader.readObjectDetached();
            int bufSize = reader.readInt();
            long timeInterval = reader.readLong();
            boolean autoUnsubscribe = reader.readBoolean();
            Query initQry = readInitialQuery(reader);
            PlatformContinuousQuery qry = platformCtx.createContinuousQuery(ptr, hasFilter, filter);
            qry.start(cache, loc, bufSize, timeInterval, autoUnsubscribe, initQry);
            return new PlatformContinuousQueryProxy(platformCtx, qry);
        }
        case OP_WITH_EXPIRY_POLICY: {
            long create = reader.readLong();
            long update = reader.readLong();
            long access = reader.readLong();
            // Expiry policy is applied to the raw (non keep-binary) cache, then re-wrapped.
            IgniteCache cache0 = rawCache.withExpiryPolicy(new PlatformExpiryPolicy(create, update, access));
            return copy(cache0, keepBinary);
        }
        case OP_LOC_ITERATOR: {
            int peekModes = reader.readInt();
            CachePeekMode[] peekModes0 = PlatformUtils.decodeCachePeekModes(peekModes);
            Iterator<Cache.Entry> iter = cache.localEntries(peekModes0).iterator();
            return new PlatformCacheIterator(platformCtx, iter);
        }
        default:
            return super.processInStreamOutObject(type, reader);
    }
}
/**
 * Read arguments for SQL query: a count followed by that many detached objects.
 *
 * @param reader Reader.
 * @return Arguments array, or {@code null} when the count is not positive.
 */
@Nullable public static Object[] readQueryArgs(BinaryRawReaderEx reader) {
    int cnt = reader.readInt();
    if (cnt <= 0)
        return null;
    Object[] args = new Object[cnt];
    for (int i = 0; i < cnt; i++)
        args[i] = reader.readObjectDetached();
    return args;
}
/**
 * {@inheritDoc}
 * <p>
 * Handles operations that take no input and serialize a result (name, metrics,
 * configuration, lost partitions) into the output stream.
 */
@Override public void processOutStream(int type, BinaryRawWriterEx writer) throws IgniteCheckedException {
    switch (type) {
        case OP_GET_NAME:
            writer.writeObject(cache.getName());
            break;
        case OP_LOCAL_METRICS: {
            CacheMetrics metrics = cache.localMetrics();
            writeCacheMetrics(writer, metrics);
            break;
        }
        case OP_GLOBAL_METRICS: {
            CacheMetrics metrics = cache.metrics();
            writeCacheMetrics(writer, metrics);
            break;
        }
        case OP_GET_CONFIG:
            CacheConfiguration ccfg = ((IgniteCache<Object, Object>)cache).
                getConfiguration(CacheConfiguration.class);
            PlatformConfigurationUtils.writeCacheConfiguration(writer, ccfg);
            break;
        case OP_GET_LOST_PARTITIONS:
            // Written as: count, then each partition id.
            Collection<Integer> parts = cache.lostPartitions();
            writer.writeInt(parts.size());
            for (int p : parts) {
                writer.writeInt(p);
            }
            break;
        case OP_QUERY_METRICS: {
            QueryMetrics metrics = cache.queryMetrics();
            writeQueryMetrics(writer, metrics);
            break;
        }
        default:
            super.processOutStream(type, writer);
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Handles projection-creating operations. Each {@code with*} op returns {@code this}
 * when the requested flag is already in effect, avoiding a redundant wrapper.
 */
@Override public PlatformTarget processOutObject(int type) throws IgniteCheckedException {
    switch (type) {
        case OP_WITH_PARTITION_RECOVER: {
            return copy(rawCache.withPartitionRecover(), keepBinary);
        }
        case OP_WITH_KEEP_BINARY: {
            if (keepBinary)
                return this;
            return copy(rawCache.withKeepBinary(), true);
        }
        case OP_WITH_NO_RETRIES: {
            CacheOperationContext opCtx = cache.context().operationContextPerCall();
            if (opCtx != null && opCtx.noRetries())
                return this;
            return copy(rawCache.withNoRetries(), keepBinary);
        }
        case OP_WITH_SKIP_STORE: {
            CacheOperationContext opCtx = cache.context().operationContextPerCall();
            if (opCtx != null && opCtx.skipStore())
                return this;
            return copy(rawCache.withSkipStore(), keepBinary);
        }
        case OP_ITERATOR: {
            Iterator<Cache.Entry> iter = cache.iterator();
            return new PlatformCacheIterator(platformCtx, iter);
        }
    }
    return super.processOutObject(type);
}
/**
 * {@inheritDoc}
 * <p>
 * Handles operations whose single argument and result both fit in a long
 * (sizes, lock ids, partition ids, boolean flags encoded as TRUE/FALSE).
 */
@Override public long processInLongOutLong(int type, long val) throws IgniteCheckedException {
    switch (type) {
        case OP_SIZE: {
            CachePeekMode[] modes = PlatformUtils.decodeCachePeekModes((int)val);
            return cache.size(modes);
        }
        case OP_SIZE_LOC: {
            CachePeekMode[] modes = PlatformUtils.decodeCachePeekModes((int)val);
            return cache.localSize(modes);
        }
        case OP_ENTER_LOCK: {
            try {
                lock(val).lockInterruptibly();
                return TRUE;
            }
            catch (InterruptedException e) {
                throw new IgniteCheckedException("Failed to enter cache lock.", e);
            }
        }
        case OP_EXIT_LOCK: {
            lock(val).unlock();
            return TRUE;
        }
        case OP_CLOSE_LOCK: {
            // Unregisters the lock id; the Lock object itself needs no explicit close.
            Lock lock = lockMap.remove(val);
            assert lock != null : "Failed to unregister lock: " + val;
            return TRUE;
        }
        case OP_REBALANCE: {
            // val carries the future id; the chained closure discards the rebalance result.
            PlatformFutureUtils.listen(platformCtx, cache.rebalance().chain(new C1<IgniteFuture, Object>() {
                @Override public Object apply(IgniteFuture fut) {
                    return null;
                }
            }), val, PlatformFutureUtils.TYP_OBJ, this);
            return TRUE;
        }
        case OP_CLEAR_CACHE:
            cache.clear();
            return TRUE;
        case OP_REMOVE_ALL2:
            cache.removeAll();
            return TRUE;
        case OP_RESET_QUERY_METRICS:
            cache.resetQueryMetrics();
            return TRUE;
        case OP_PRELOAD_PARTITION:
            cache.preloadPartition((int)val);
            return TRUE;
        case OP_ENABLE_STATISTICS:
            cache.enableStatistics(val == TRUE);
            return TRUE;
        case OP_CLEAR_STATISTICS:
            cache.clearStatistics();
            return TRUE;
        case OP_RESERVE_PARTITION: {
            GridDhtLocalPartition locPart = getLocalPartition((int)val);
            return locPart != null && locPart.reserve() ? TRUE : FALSE;
        }
        case OP_RELEASE_PARTITION: {
            GridDhtLocalPartition locPart = getLocalPartition((int)val);
            if (locPart != null) {
                locPart.release();
                return TRUE;
            }
            return FALSE;
        }
    }
    return super.processInLongOutLong(type, val);
}
/**
 * {@inheritDoc}
 * <p>
 * Maps internal exceptions to platform-friendly ones. The check order matters:
 * partial-update exceptions are handled first (wrapped for the platform side),
 * then entry-processor causes are unwrapped, then transaction deadlock/timeout
 * causes are surfaced directly; everything else falls through to the superclass.
 */
@Override public Exception convertException(Exception e) {
    if (e instanceof CachePartialUpdateException)
        return new PlatformCachePartialUpdateException((CachePartialUpdateCheckedException)e.getCause(),
            platformCtx, keepBinary);
    if (e instanceof CachePartialUpdateCheckedException)
        return new PlatformCachePartialUpdateException((CachePartialUpdateCheckedException)e, platformCtx, keepBinary);
    if (e.getCause() instanceof EntryProcessorException)
        return (Exception)e.getCause();
    TransactionDeadlockException deadlockException = X.cause(e, TransactionDeadlockException.class);
    if (deadlockException != null)
        return deadlockException;
    TransactionTimeoutException timeoutException = X.cause(e, TransactionTimeoutException.class);
    if (timeoutException != null)
        return timeoutException;
    return super.convertException(e);
}
/**
 * Writes the result of InvokeAll cache method.
 * <p>
 * Wire format: -1 for a null map; otherwise the entry count followed by, per entry,
 * the key, a boolean error flag, and either the result value or the serialized error.
 *
 * @param writer Writer.
 * @param results Results.
 */
private static void writeInvokeAllResult(BinaryRawWriterEx writer, Map<Object, EntryProcessorResult> results) {
    if (results == null) {
        writer.writeInt(-1);
        return;
    }
    writer.writeInt(results.size());
    for (Map.Entry<Object, EntryProcessorResult> e : results.entrySet()) {
        writer.writeObjectDetached(e.getKey());
        try {
            // EntryProcessorResult.get() rethrows the processor's exception, if any.
            Object val = e.getValue().get();
            writer.writeBoolean(false); // No exception
            writer.writeObjectDetached(val);
        }
        catch (Exception ex) {
            writer.writeBoolean(true); // Exception
            PlatformUtils.writeError(ex, writer);
        }
    }
}
/**
 * Writes an error to the writer either as a native exception, or as a couple of strings.
 *
 * @param writer Writer.
 * @param ex Exception.
 */
private static void writeError(BinaryRawWriterEx writer, Exception ex) {
    Throwable cause = ex.getCause();
    if (cause instanceof PlatformNativeException)
        writer.writeObjectDetached(((PlatformNativeException)cause).cause());
    else {
        // No native payload: serialize class name, message and full stack trace.
        writer.writeObjectDetached(ex.getClass().getName());
        writer.writeObjectDetached(ex.getMessage());
        writer.writeObjectDetached(X.getFullStackTrace(ex));
    }
}
/**
 * Get lock by id.
 *
 * @param id Id.
 * @return Lock.
 */
private Lock lock(long id) {
    Lock l = lockMap.get(id);
    assert l != null : "Lock not found for ID: " + id;
    return l;
}
/**
 * Registers a lock in a map.
 *
 * @param lock Lock to register.
 * @return Registered lock id.
 */
private long registerLock(Lock lock) {
    long lockId = LOCK_ID_GEN.incrementAndGet();
    lockMap.put(lockId, lock);
    return lockId;
}
/**
 * Runs specified query.
 *
 * @param qry Query.
 * @return Query cursor.
 * @throws IgniteCheckedException On error.
 */
private PlatformQueryCursor runQuery(Query qry) throws IgniteCheckedException {
    try {
        QueryCursorEx cur = (QueryCursorEx)cache.query(qry);
        // Fall back to the default page size when the query does not specify one.
        int pageSize = qry.getPageSize() > 0 ? qry.getPageSize() : Query.DFLT_PAGE_SIZE;
        return new PlatformQueryCursor(platformCtx, cur, pageSize);
    }
    catch (Exception e) {
        throw PlatformUtils.unwrapQueryException(e);
    }
}
/**
 * Runs specified fields query.
 *
 * @param qry Query.
 * @return Query cursor.
 * @throws IgniteCheckedException On error.
 */
private PlatformFieldsQueryCursor runFieldsQuery(Query qry)
    throws IgniteCheckedException {
    try {
        QueryCursorEx cur = (QueryCursorEx)cache.query(qry);
        // Fall back to the default page size when the query does not specify one.
        int pageSize = qry.getPageSize() > 0 ? qry.getPageSize() : Query.DFLT_PAGE_SIZE;
        return new PlatformFieldsQueryCursor(platformCtx, cur, pageSize);
    }
    catch (Exception e) {
        throw PlatformUtils.unwrapQueryException(e);
    }
}
/**
 * Reads the query of specified type.
 *
 * @param reader Binary reader.
 * @return Query, or {@code null} when the type marker is -1 (no initial query).
 * @throws IgniteCheckedException On error.
 */
private Query readInitialQuery(BinaryRawReaderEx reader) throws IgniteCheckedException {
    int qryTyp = reader.readInt();
    if (qryTyp == -1)
        return null;
    switch (qryTyp) {
        case OP_QRY_SCAN:
            return readScanQuery(reader);
        case OP_QRY_SQL:
            return readSqlQuery(reader);
        case OP_QRY_TXT:
            return readTextQuery(reader);
        case OP_QRY_SQL_FIELDS:
            return readFieldsQuery(reader);
    }
    throw new IgniteCheckedException("Unsupported query type: " + qryTyp);
}
/**
 * Reads sql query.
 * <p>
 * The field read order matches the platform-side write order and must not change.
 *
 * @param reader Binary reader.
 * @return Query.
 */
private Query readSqlQuery(BinaryRawReaderEx reader) {
    boolean isLoc = reader.readBoolean();
    String sqlTxt = reader.readString();
    String valTyp = reader.readString();
    int pageSize = reader.readInt();
    Object[] qryArgs = readQueryArgs(reader);
    boolean distrJoins = reader.readBoolean();
    int timeoutMs = reader.readInt();
    boolean replicatedOnly = reader.readBoolean();
    SqlQuery qry = new SqlQuery(valTyp, sqlTxt);
    qry.setPageSize(pageSize);
    qry.setArgs(qryArgs);
    qry.setLocal(isLoc);
    qry.setDistributedJoins(distrJoins);
    qry.setTimeout(timeoutMs, TimeUnit.MILLISECONDS);
    qry.setReplicatedOnly(replicatedOnly);
    return qry;
}
/**
 * Reads fields query.
 * <p>
 * The field read order matches the platform-side write order and must not change.
 *
 * @param reader Binary reader.
 * @return Query.
 */
private Query readFieldsQuery(BinaryRawReaderEx reader) {
    boolean isLoc = reader.readBoolean();
    String sqlTxt = reader.readString();
    int pageSize = reader.readInt();
    Object[] qryArgs = readQueryArgs(reader);
    boolean distrJoins = reader.readBoolean();
    boolean enforceJoinOrder = reader.readBoolean();
    boolean lazy = reader.readBoolean();
    int timeoutMs = reader.readInt();
    boolean replicatedOnly = reader.readBoolean();
    boolean collocated = reader.readBoolean();
    String schema = reader.readString();
    // withQueryTimeout applies the timeout in a version-tolerant way.
    SqlFieldsQuery qry = QueryUtils.withQueryTimeout(new SqlFieldsQuery(sqlTxt), timeoutMs, TimeUnit.MILLISECONDS);
    qry.setPageSize(pageSize);
    qry.setArgs(qryArgs);
    qry.setLocal(isLoc);
    qry.setDistributedJoins(distrJoins);
    qry.setEnforceJoinOrder(enforceJoinOrder);
    qry.setLazy(lazy);
    qry.setReplicatedOnly(replicatedOnly);
    qry.setCollocated(collocated);
    qry.setSchema(schema);
    return qry;
}
/**
 * Reads text query.
 * <p>
 * The field read order matches the platform-side write order and must not change.
 *
 * @param reader Binary reader.
 * @return Query.
 */
private Query readTextQuery(BinaryRawReader reader) {
    boolean isLoc = reader.readBoolean();
    String searchTxt = reader.readString();
    String valTyp = reader.readString();
    int pageSize = reader.readInt();
    //TODO: IGNITE-12266, uncomment when limit parameter is added to Platforms
    //
    // final int limit = reader.readInt();
    // return new TextQuery(typ, txt, limit).setPageSize(pageSize).setLocal(loc);
    return new TextQuery(valTyp, searchTxt).setPageSize(pageSize).setLocal(isLoc);
}
/**
 * Reads scan query.
 * <p>
 * The field read order matches the platform-side write order and must not change.
 *
 * @param reader Binary reader.
 * @return Query.
 */
private Query readScanQuery(BinaryRawReaderEx reader) {
    boolean isLoc = reader.readBoolean();
    int pageSize = reader.readInt();
    // Optional partition id is preceded by a presence flag.
    Integer part = reader.readBoolean() ? reader.readInt() : null;
    ScanQuery qry = new ScanQuery().setPageSize(pageSize);
    qry.setPartition(part);
    Object filterObj = reader.readObjectDetached();
    if (filterObj != null)
        qry.setFilter(platformCtx.createCacheEntryFilter(filterObj, 0));
    qry.setLocal(isLoc);
    return qry;
}
/**
 * Clones this instance over a different cache projection, preserving context and extensions.
 *
 * @param cache Cache.
 * @param keepBinary Keep binary flag.
 * @return Cloned instance.
 */
private PlatformCache copy(IgniteCache cache, boolean keepBinary) {
    return new PlatformCache(platformCtx, cache, keepBinary, exts);
}
/**
 * Get extension by ID.
 *
 * @param id ID (index into the extensions array).
 * @return Extension.
 * @throws IgniteException If no extension is registered under the given id.
 */
private PlatformCacheExtension extension(int id) {
    // Guard both bounds: a negative id (e.g. corrupted stream) must produce the
    // descriptive IgniteException below, not an ArrayIndexOutOfBoundsException.
    if (exts != null && id >= 0 && id < exts.length) {
        PlatformCacheExtension ext = exts[id];
        if (ext != null)
            return ext;
    }
    throw new IgniteException("Platform cache extension is not registered [id=" + id + ']');
}
/**
 * Writes cache metrics.
 * <p>
 * The write sequence is a fixed wire format consumed by the platform side;
 * entries must not be added, removed or reordered without a matching change there.
 *
 * @param writer Writer.
 * @param metrics Metrics.
 */
public static void writeCacheMetrics(BinaryRawWriter writer, CacheMetrics metrics) {
    assert writer != null;
    assert metrics != null;
    // Basic hit/miss/op counters and timings.
    writer.writeLong(metrics.getCacheHits());
    writer.writeFloat(metrics.getCacheHitPercentage());
    writer.writeLong(metrics.getCacheMisses());
    writer.writeFloat(metrics.getCacheMissPercentage());
    writer.writeLong(metrics.getCacheGets());
    writer.writeLong(metrics.getCachePuts());
    writer.writeLong(metrics.getCacheRemovals());
    writer.writeLong(metrics.getCacheEvictions());
    writer.writeFloat(metrics.getAverageGetTime());
    writer.writeFloat(metrics.getAveragePutTime());
    writer.writeFloat(metrics.getAverageRemoveTime());
    writer.writeFloat(metrics.getAverageTxCommitTime());
    writer.writeFloat(metrics.getAverageTxRollbackTime());
    writer.writeLong(metrics.getCacheTxCommits());
    writer.writeLong(metrics.getCacheTxRollbacks());
    writer.writeString(metrics.name());
    // Off-heap statistics.
    writer.writeLong(metrics.getOffHeapGets());
    writer.writeLong(metrics.getOffHeapPuts());
    writer.writeLong(metrics.getOffHeapRemovals());
    writer.writeLong(metrics.getOffHeapEvictions());
    writer.writeLong(metrics.getOffHeapHits());
    writer.writeFloat(metrics.getOffHeapHitPercentage());
    writer.writeLong(metrics.getOffHeapMisses());
    writer.writeFloat(metrics.getOffHeapMissPercentage());
    writer.writeLong(metrics.getOffHeapEntriesCount());
    writer.writeLong(metrics.getOffHeapPrimaryEntriesCount());
    writer.writeLong(metrics.getOffHeapBackupEntriesCount());
    writer.writeLong(metrics.getOffHeapAllocatedSize());
    writer.writeInt(metrics.getSize());
    writer.writeInt(metrics.getKeySize());
    writer.writeBoolean(metrics.isEmpty());
    // Transaction queue/map sizes.
    writer.writeInt(metrics.getDhtEvictQueueCurrentSize());
    writer.writeInt(metrics.getTxThreadMapSize());
    writer.writeInt(metrics.getTxXidMapSize());
    writer.writeInt(metrics.getTxCommitQueueSize());
    writer.writeInt(metrics.getTxPrepareQueueSize());
    writer.writeInt(metrics.getTxStartVersionCountsSize());
    writer.writeInt(metrics.getTxCommittedVersionsSize());
    writer.writeInt(metrics.getTxRolledbackVersionsSize());
    writer.writeInt(metrics.getTxDhtThreadMapSize());
    writer.writeInt(metrics.getTxDhtXidMapSize());
    writer.writeInt(metrics.getTxDhtCommitQueueSize());
    writer.writeInt(metrics.getTxDhtPrepareQueueSize());
    writer.writeInt(metrics.getTxDhtStartVersionCountsSize());
    writer.writeInt(metrics.getTxDhtCommittedVersionsSize());
    writer.writeInt(metrics.getTxDhtRolledbackVersionsSize());
    // Write-behind store statistics.
    writer.writeBoolean(metrics.isWriteBehindEnabled());
    writer.writeInt(metrics.getWriteBehindFlushSize());
    writer.writeInt(metrics.getWriteBehindFlushThreadCount());
    writer.writeLong(metrics.getWriteBehindFlushFrequency());
    writer.writeInt(metrics.getWriteBehindStoreBatchSize());
    writer.writeInt(metrics.getWriteBehindTotalCriticalOverflowCount());
    writer.writeInt(metrics.getWriteBehindCriticalOverflowCount());
    writer.writeInt(metrics.getWriteBehindErrorRetryCount());
    writer.writeInt(metrics.getWriteBehindBufferSize());
    // Configuration flags.
    writer.writeString(metrics.getKeyType());
    writer.writeString(metrics.getValueType());
    writer.writeBoolean(metrics.isStoreByValue());
    writer.writeBoolean(metrics.isStatisticsEnabled());
    writer.writeBoolean(metrics.isManagementEnabled());
    writer.writeBoolean(metrics.isReadThrough());
    writer.writeBoolean(metrics.isWriteThrough());
    writer.writeBoolean(metrics.isValidForReading());
    writer.writeBoolean(metrics.isValidForWriting());
    // Rebalancing statistics.
    writer.writeInt(metrics.getTotalPartitionsCount());
    writer.writeInt(metrics.getRebalancingPartitionsCount());
    writer.writeLong(metrics.getKeysToRebalanceLeft());
    writer.writeLong(metrics.getRebalancingKeysRate());
    writer.writeLong(metrics.getRebalancingBytesRate());
    writer.writeLong(metrics.getHeapEntriesCount());
    writer.writeLong(metrics.getEstimatedRebalancingFinishTime());
    writer.writeLong(metrics.getRebalancingStartTime());
    writer.writeLong(metrics.getRebalanceClearingPartitionsLeft());
    writer.writeLong(metrics.getCacheSize());
    writer.writeLong(metrics.getRebalancedKeys());
    writer.writeLong(metrics.getEstimatedRebalancingKeys());
    // Entry processor statistics.
    writer.writeLong(metrics.getEntryProcessorPuts());
    writer.writeFloat(metrics.getEntryProcessorAverageInvocationTime());
    writer.writeLong(metrics.getEntryProcessorInvocations());
    writer.writeFloat(metrics.getEntryProcessorMaxInvocationTime());
    writer.writeFloat(metrics.getEntryProcessorMinInvocationTime());
    writer.writeLong(metrics.getEntryProcessorReadOnlyInvocations());
    writer.writeFloat(metrics.getEntryProcessorHitPercentage());
    writer.writeLong(metrics.getEntryProcessorHits());
    writer.writeLong(metrics.getEntryProcessorMisses());
    writer.writeFloat(metrics.getEntryProcessorMissPercentage());
    writer.writeLong(metrics.getEntryProcessorRemovals());
}
/**
 * Writes query metrics.
 * <p>
 * The write sequence is a fixed wire format consumed by the platform side;
 * entries must not be added, removed or reordered without a matching change there.
 *
 * @param writer Writer.
 * @param metrics Metrics.
 */
public static void writeQueryMetrics(BinaryRawWriter writer, QueryMetrics metrics) {
    assert writer != null;
    assert metrics != null;
    writer.writeLong(metrics.minimumTime());
    writer.writeLong(metrics.maximumTime());
    writer.writeDouble(metrics.averageTime());
    writer.writeInt(metrics.executions());
    writer.writeInt(metrics.fails());
}
/**
 * Resolves a local partition by its id.
 *
 * @param part Partition id; must be within {@code [0, partitions)}.
 * @return Partition when hosted locally, {@code null} otherwise.
 * @throws IgniteCheckedException If {@code part} is out of range.
 */
private GridDhtLocalPartition getLocalPartition(int part) throws IgniteCheckedException {
    GridCacheContext ctx = cache.context();

    if (part < 0 || part >= ctx.affinity().partitions())
        throw new IgniteCheckedException("Invalid partition number: " + part);

    GridDhtPartitionTopology topology = ctx.topology();

    // Do not create the partition if it is absent locally (create flag = false).
    return topology.localPartition(part, topology.readyTopologyVersion(), false);
}
/**
 * Writes the result of a GetAll operation as a nullable map.
 * (Original javadoc said "Writes error with EntryProcessorException cause" —
 * a copy-paste from the invoke writers below; this writer handles only
 * successful results, see {@code canWrite}.)
 */
private static class GetAllWriter implements PlatformFutureUtils.Writer {
    /** {@inheritDoc} */
    @Override public void write(BinaryRawWriterEx writer, Object obj, Throwable err) {
        assert obj instanceof Map;

        PlatformUtils.writeNullableMap(writer, (Map) obj);
    }

    /** {@inheritDoc} */
    @Override public boolean canWrite(Object obj, Throwable err) {
        // Only successful futures are written by this writer.
        return err == null;
    }
}
/**
 * Writes the outcome of an Invoke call: a failure flag followed by either
 * the detached result object or the serialized error.
 */
private static class EntryProcessorInvokeWriter implements PlatformFutureUtils.Writer {
    /** {@inheritDoc} */
    @Override public void write(BinaryRawWriterEx writer, Object obj, Throwable err) {
        boolean failed = err != null;

        writer.writeBoolean(failed);

        if (failed)
            PlatformUtils.writeError(err, writer);
        else
            writer.writeObjectDetached(obj);
    }

    /** {@inheritDoc} */
    @Override public boolean canWrite(Object obj, Throwable err) {
        // Both success and failure are serialized by this writer.
        return true;
    }
}
/**
 * Writes the results of an InvokeAll call.
 */
private static class EntryProcessorInvokeAllWriter implements PlatformFutureUtils.Writer {
    /** {@inheritDoc} */
    @Override public void write(BinaryRawWriterEx writer, Object obj, Throwable err) {
        Map results = (Map)obj;

        writeInvokeAllResult(writer, results);
    }

    /** {@inheritDoc} */
    @Override public boolean canWrite(Object obj, Throwable err) {
        // Requires a successful future with a non-null result map.
        return err == null && obj != null;
    }
}
/**
 * Listenable around CompletionListener.
 * <p>
 * Forwards completion/failure callbacks to the closure registered via
 * {@link #listen}. Cancellation is not supported.
 */
private static class CompletionListenable implements PlatformListenable, CompletionListener {
    /** Listener closure; must be registered via listen() before callbacks fire. */
    private IgniteBiInClosure<Object, Throwable> lsnr;

    /** {@inheritDoc} */
    @Override public void onCompletion() {
        assert lsnr != null;

        // Success: no result value, no error.
        lsnr.apply(null, null);
    }

    /** {@inheritDoc} */
    @Override public void onException(Exception e) {
        // NOTE(review): unlike onCompletion(), lsnr is not asserted non-null here —
        // presumably the same registration order applies; confirm intent.
        lsnr.apply(null, e);
    }

    /** {@inheritDoc} */
    @Override public void listen(IgniteBiInClosure<Object, Throwable> lsnr) {
        this.lsnr = lsnr;
    }

    /** {@inheritDoc} */
    @Override public boolean cancel() throws IgniteCheckedException {
        // Completion callbacks cannot be cancelled through this adapter.
        return false;
    }

    /** {@inheritDoc} */
    @Override public boolean isCancelled() {
        return false;
    }
}
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.arquillian.test.impl;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import org.jboss.arquillian.core.api.annotation.ApplicationScoped;
import org.jboss.arquillian.core.spi.Manager;
import org.jboss.arquillian.core.spi.ServiceLoader;
import org.jboss.arquillian.core.spi.context.ApplicationContext;
import org.jboss.arquillian.test.spi.LifecycleMethodExecutor;
import org.jboss.arquillian.test.spi.TestMethodExecutor;
import org.jboss.arquillian.test.spi.TestResult;
import org.jboss.arquillian.test.spi.context.ClassContext;
import org.jboss.arquillian.test.spi.context.SuiteContext;
import org.jboss.arquillian.test.spi.context.TestContext;
import org.jboss.arquillian.test.spi.event.suite.After;
import org.jboss.arquillian.test.spi.event.suite.AfterClass;
import org.jboss.arquillian.test.spi.event.suite.AfterSuite;
import org.jboss.arquillian.test.spi.event.suite.Before;
import org.jboss.arquillian.test.spi.event.suite.BeforeClass;
import org.jboss.arquillian.test.spi.event.suite.BeforeSuite;
import org.jboss.arquillian.test.spi.event.suite.BeforeTestLifecycleEvent;
import org.jboss.arquillian.test.spi.execution.ExecutionDecision;
import org.jboss.arquillian.test.spi.execution.TestExecutionDecider;
import org.jboss.arquillian.test.test.AbstractTestTestBase;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
/**
 * Verifies that the {@link EventTestRunnerAdaptor} creates and fires the proper events.
 *
 * @author <a href="mailto:aknutsen@redhat.com">Aslak Knutsen</a>
 * @version $Revision: $
 */
@RunWith(MockitoJUnitRunner.class)
public class EventTestRunnerAdaptorTestCase extends AbstractTestTestBase {

    /** Decider that vetoes execution of every test method it is asked about. */
    private static final TestExecutionDecider NEGATIVE_EXECUTION_DECIDER = new TestExecutionDecider() {
        @Override
        public ExecutionDecision decide(Method testMethod) {
            return ExecutionDecision.dontExecute("Skipping execution of test method: " + testMethod.getName());
        }

        @Override
        public int precedence() {
            return 0;
        }
    };

    @Override
    protected void addExtensions(List<Class<?>> extensions) {
        extensions.add(TestContextHandler.class);
    }

    @Override
    protected void startContexts(Manager manager) {
        // this is a test of the Context activation, don't auto start.
    }

    /**
     * With a negative execution decider plugged in via a mocked ServiceLoader,
     * suite/class lifecycle events still fire, but Before/Test/After (and custom
     * lifecycle events) for the vetoed test method must NOT fire.
     */
    @Test
    public void shouldSkipWhenUsingExecutionDecider() throws Exception {
        List<TestExecutionDecider> deciders = new ArrayList<TestExecutionDecider>();
        deciders.add(NEGATIVE_EXECUTION_DECIDER);

        // Mocked ServiceLoader returns only the negative decider.
        ServiceLoader serviceLoder = Mockito.mock(ServiceLoader.class);
        Mockito.when(serviceLoder.all(TestExecutionDecider.class)).thenReturn(deciders);

        Manager manager = Mockito.spy(getManager());
        Mockito.when(manager.resolve(ServiceLoader.class)).thenReturn(serviceLoder);

        EventTestRunnerAdaptor adaptor = new EventTestRunnerAdaptor(manager);
        Class<?> testClass = getClass();
        Method testMethod = testClass.getMethod("shouldSkipWhenUsingExecutionDecider");
        Object testInstance = this;
        TestMethodExecutor testExecutor = Mockito.mock(TestMethodExecutor.class);
        Mockito.when(testExecutor.getMethod()).thenReturn(testMethod);

        // ApplicationContext is auto started, deactivate to be future proof
        manager.getContext(ApplicationContext.class).deactivate();
        verifyNoActiveContext(manager);

        // Suite level: fires regardless of the decider.
        adaptor.beforeSuite();
        assertEventFired(BeforeSuite.class, 1);
        assertEventFiredInContext(BeforeSuite.class, ApplicationContext.class);
        assertEventFiredInContext(BeforeSuite.class, SuiteContext.class);
        verifyNoActiveContext(manager);

        // Class level: fires regardless of the decider.
        adaptor.beforeClass(testClass, LifecycleMethodExecutor.NO_OP);
        assertEventFired(BeforeClass.class, 1);
        assertEventFiredInContext(BeforeClass.class, ApplicationContext.class);
        assertEventFiredInContext(BeforeClass.class, SuiteContext.class);
        assertEventFiredInContext(BeforeClass.class, ClassContext.class);
        verifyNoActiveContext(manager);

        // Test-method level: suppressed by the decider (count 0, no context).
        adaptor.before(testInstance, testMethod, LifecycleMethodExecutor.NO_OP);
        assertEventFired(Before.class, 0);
        assertEventNotFiredInContext(Before.class, ApplicationContext.class);
        assertEventNotFiredInContext(Before.class, SuiteContext.class);
        assertEventNotFiredInContext(Before.class, ClassContext.class);
        assertEventNotFiredInContext(Before.class, TestContext.class);
        verifyNoActiveContext(manager);

        adaptor.test(testExecutor);
        assertEventFired(org.jboss.arquillian.test.spi.event.suite.Test.class, 0);
        assertEventNotFiredInContext(org.jboss.arquillian.test.spi.event.suite.Test.class, ApplicationContext.class);
        assertEventNotFiredInContext(org.jboss.arquillian.test.spi.event.suite.Test.class, SuiteContext.class);
        assertEventNotFiredInContext(org.jboss.arquillian.test.spi.event.suite.Test.class, ClassContext.class);
        assertEventNotFiredInContext(org.jboss.arquillian.test.spi.event.suite.Test.class, TestContext.class);
        verifyNoActiveContext(manager);

        adaptor.after(testInstance, testMethod, LifecycleMethodExecutor.NO_OP);
        assertEventFired(After.class, 0);
        assertEventNotFiredInContext(After.class, ApplicationContext.class);
        assertEventNotFiredInContext(After.class, SuiteContext.class);
        assertEventNotFiredInContext(After.class, ClassContext.class);
        assertEventNotFiredInContext(After.class, TestContext.class);
        verifyNoActiveContext(manager);

        // Custom lifecycle events for the vetoed method are suppressed as well.
        adaptor.fireCustomLifecycle(
            new BeforeTestLifecycleEvent(testInstance, testMethod, LifecycleMethodExecutor.NO_OP));
        assertEventFired(BeforeTestLifecycleEvent.class, 0);
        assertEventNotFiredInContext(BeforeTestLifecycleEvent.class, ApplicationContext.class);
        assertEventNotFiredInContext(BeforeTestLifecycleEvent.class, SuiteContext.class);
        assertEventNotFiredInContext(BeforeTestLifecycleEvent.class, ClassContext.class);
        assertEventNotFiredInContext(BeforeTestLifecycleEvent.class, TestContext.class);
        verifyNoActiveContext(manager);

        // Tear-down at class and suite level still fires.
        adaptor.afterClass(testClass, LifecycleMethodExecutor.NO_OP);
        assertEventFired(AfterClass.class, 1);
        assertEventFiredInContext(AfterClass.class, ApplicationContext.class);
        assertEventFiredInContext(AfterClass.class, SuiteContext.class);
        assertEventFiredInContext(AfterClass.class, ClassContext.class);
        verifyNoActiveContext(manager);

        adaptor.afterSuite();
        assertEventFired(AfterSuite.class, 1);
        assertEventFiredInContext(AfterSuite.class, ApplicationContext.class);
        assertEventFiredInContext(AfterSuite.class, SuiteContext.class);
        verifyNoActiveContext(manager);
    }

    /**
     * Without any decider, each lifecycle phase fires exactly once in the
     * expected contexts, and no context is left active between phases.
     */
    @Test
    public void shouldHandleLifeCycleEvents() throws Exception {
        Manager manager = getManager();
        manager.bind(ApplicationScoped.class, TestResult.class, TestResult.passed());

        EventTestRunnerAdaptor adaptor = new EventTestRunnerAdaptor(manager);
        Class<?> testClass = getClass();
        Method testMethod = testClass.getMethod("shouldHandleLifeCycleEvents");
        Object testInstance = this;
        TestMethodExecutor testExecutor = Mockito.mock(TestMethodExecutor.class);
        Mockito.when(testExecutor.getInstance()).thenReturn(testInstance);
        Mockito.when(testExecutor.getMethod()).thenReturn(testMethod);

        // ApplicationContext is auto started, deactivate to be future proof
        manager.getContext(ApplicationContext.class).deactivate();
        verifyNoActiveContext(manager);

        adaptor.beforeSuite();
        assertEventFired(BeforeSuite.class, 1);
        assertEventFiredInContext(BeforeSuite.class, ApplicationContext.class);
        assertEventFiredInContext(BeforeSuite.class, SuiteContext.class);
        verifyNoActiveContext(manager);

        adaptor.beforeClass(testClass, LifecycleMethodExecutor.NO_OP);
        assertEventFired(BeforeClass.class, 1);
        assertEventFiredInContext(BeforeClass.class, ApplicationContext.class);
        assertEventFiredInContext(BeforeClass.class, SuiteContext.class);
        assertEventFiredInContext(BeforeClass.class, ClassContext.class);
        verifyNoActiveContext(manager);

        adaptor.before(testInstance, testMethod, LifecycleMethodExecutor.NO_OP);
        assertEventFired(Before.class, 1);
        assertEventFiredInContext(Before.class, ApplicationContext.class);
        assertEventFiredInContext(Before.class, SuiteContext.class);
        assertEventFiredInContext(Before.class, ClassContext.class);
        assertEventFiredInContext(Before.class, TestContext.class);
        verifyNoActiveContext(manager);

        adaptor.test(testExecutor);
        assertEventFired(org.jboss.arquillian.test.spi.event.suite.Test.class, 1);
        assertEventFiredInContext(org.jboss.arquillian.test.spi.event.suite.Test.class, ApplicationContext.class);
        assertEventFiredInContext(org.jboss.arquillian.test.spi.event.suite.Test.class, SuiteContext.class);
        assertEventFiredInContext(org.jboss.arquillian.test.spi.event.suite.Test.class, ClassContext.class);
        assertEventFiredInContext(org.jboss.arquillian.test.spi.event.suite.Test.class, TestContext.class);
        verifyNoActiveContext(manager);

        adaptor.after(testInstance, testMethod, LifecycleMethodExecutor.NO_OP);
        assertEventFired(After.class, 1);
        assertEventFiredInContext(After.class, ApplicationContext.class);
        assertEventFiredInContext(After.class, SuiteContext.class);
        assertEventFiredInContext(After.class, ClassContext.class);
        assertEventFiredInContext(After.class, TestContext.class);
        verifyNoActiveContext(manager);

        adaptor.afterClass(testClass, LifecycleMethodExecutor.NO_OP);
        assertEventFired(AfterClass.class, 1);
        assertEventFiredInContext(AfterClass.class, ApplicationContext.class);
        assertEventFiredInContext(AfterClass.class, SuiteContext.class);
        assertEventFiredInContext(AfterClass.class, ClassContext.class);
        verifyNoActiveContext(manager);

        adaptor.afterSuite();
        assertEventFired(AfterSuite.class, 1);
        assertEventFiredInContext(AfterSuite.class, ApplicationContext.class);
        assertEventFiredInContext(AfterSuite.class, SuiteContext.class);
        verifyNoActiveContext(manager);
    }

    /** Asserts that no Arquillian context is active on the given manager. */
    private void verifyNoActiveContext(Manager manager) {
        verify(false, false, false, false, manager);
    }

    /**
     * Asserts the active/inactive state of each context level.
     *
     * @param application Expected active state of ApplicationContext.
     * @param suite Expected active state of SuiteContext.
     * @param clazz Expected active state of ClassContext.
     * @param test Expected active state of TestContext.
     * @param manager Manager whose contexts are inspected.
     */
    private void verify(boolean application, boolean suite, boolean clazz, boolean test, Manager manager) {
        Assert.assertEquals(
            "ApplicationContext should" + (!application ? " not" : "") + " be active",
            application, manager.getContext(ApplicationContext.class).isActive());

        Assert.assertEquals(
            "SuiteContext should" + (!suite ? " not" : "") + " be active",
            suite, manager.getContext(SuiteContext.class).isActive());

        Assert.assertEquals(
            "ClassContext should" + (!clazz ? " not" : "") + " be active",
            clazz, manager.getContext(ClassContext.class).isActive());

        Assert.assertEquals(
            "TestContext should" + (!test ? " not" : "") + " be active",
            test, manager.getContext(TestContext.class).isActive());
    }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;
/** Interface of <a href="http://schema.org/CityHall">http://schema.org/CityHall</a>. */
public interface CityHall extends GovernmentBuilding {

  /** Builder interface of <a href="http://schema.org/CityHall">http://schema.org/CityHall</a>. */
  public interface Builder extends GovernmentBuilding.Builder {

    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext context);

    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);

    @Override
    Builder setJsonLdId(@Nullable String value);

    @Override
    Builder setJsonLdReverse(String property, Thing obj);

    @Override
    Builder setJsonLdReverse(String property, Thing.Builder builder);

    /** Add a value to property additionalProperty. */
    Builder addAdditionalProperty(PropertyValue value);

    /** Add a value to property additionalProperty. */
    Builder addAdditionalProperty(PropertyValue.Builder value);

    /** Add a value to property additionalProperty. */
    Builder addAdditionalProperty(String value);

    /** Add a value to property additionalType. */
    Builder addAdditionalType(URL value);

    /** Add a value to property additionalType. */
    Builder addAdditionalType(String value);

    /** Add a value to property address. */
    Builder addAddress(PostalAddress value);

    /** Add a value to property address. */
    Builder addAddress(PostalAddress.Builder value);

    /** Add a value to property address. */
    Builder addAddress(Text value);

    /** Add a value to property address. */
    Builder addAddress(String value);

    /** Add a value to property aggregateRating. */
    Builder addAggregateRating(AggregateRating value);

    /** Add a value to property aggregateRating. */
    Builder addAggregateRating(AggregateRating.Builder value);

    /** Add a value to property aggregateRating. */
    Builder addAggregateRating(String value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(Text value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(String value);

    /** Add a value to property branchCode. */
    Builder addBranchCode(Text value);

    /** Add a value to property branchCode. */
    Builder addBranchCode(String value);

    /** Add a value to property containedIn. */
    Builder addContainedIn(Place value);

    /** Add a value to property containedIn. */
    Builder addContainedIn(Place.Builder value);

    /** Add a value to property containedIn. */
    Builder addContainedIn(String value);

    /** Add a value to property containedInPlace. */
    Builder addContainedInPlace(Place value);

    /** Add a value to property containedInPlace. */
    Builder addContainedInPlace(Place.Builder value);

    /** Add a value to property containedInPlace. */
    Builder addContainedInPlace(String value);

    /** Add a value to property containsPlace. */
    Builder addContainsPlace(Place value);

    /** Add a value to property containsPlace. */
    Builder addContainsPlace(Place.Builder value);

    /** Add a value to property containsPlace. */
    Builder addContainsPlace(String value);

    /** Add a value to property description. */
    Builder addDescription(Text value);

    /** Add a value to property description. */
    Builder addDescription(String value);

    /** Add a value to property event. */
    Builder addEvent(Event value);

    /** Add a value to property event. */
    Builder addEvent(Event.Builder value);

    /** Add a value to property event. */
    Builder addEvent(String value);

    /** Add a value to property events. */
    Builder addEvents(Event value);

    /** Add a value to property events. */
    Builder addEvents(Event.Builder value);

    /** Add a value to property events. */
    Builder addEvents(String value);

    /** Add a value to property faxNumber. */
    Builder addFaxNumber(Text value);

    /** Add a value to property faxNumber. */
    Builder addFaxNumber(String value);

    /** Add a value to property geo. */
    Builder addGeo(GeoCoordinates value);

    /** Add a value to property geo. */
    Builder addGeo(GeoCoordinates.Builder value);

    /** Add a value to property geo. */
    Builder addGeo(GeoShape value);

    /** Add a value to property geo. */
    Builder addGeo(GeoShape.Builder value);

    /** Add a value to property geo. */
    Builder addGeo(String value);

    /** Add a value to property globalLocationNumber. */
    Builder addGlobalLocationNumber(Text value);

    /** Add a value to property globalLocationNumber. */
    Builder addGlobalLocationNumber(String value);

    /** Add a value to property hasMap. */
    Builder addHasMap(Map value);

    /** Add a value to property hasMap. */
    Builder addHasMap(Map.Builder value);

    /** Add a value to property hasMap. */
    Builder addHasMap(URL value);

    /** Add a value to property hasMap. */
    Builder addHasMap(String value);

    /** Add a value to property image. */
    Builder addImage(ImageObject value);

    /** Add a value to property image. */
    Builder addImage(ImageObject.Builder value);

    /** Add a value to property image. */
    Builder addImage(URL value);

    /** Add a value to property image. */
    Builder addImage(String value);

    /** Add a value to property isicV4. */
    Builder addIsicV4(Text value);

    /** Add a value to property isicV4. */
    Builder addIsicV4(String value);

    /** Add a value to property logo. */
    Builder addLogo(ImageObject value);

    /** Add a value to property logo. */
    Builder addLogo(ImageObject.Builder value);

    /** Add a value to property logo. */
    Builder addLogo(URL value);

    /** Add a value to property logo. */
    Builder addLogo(String value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(CreativeWork.Builder value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(URL value);

    /** Add a value to property mainEntityOfPage. */
    Builder addMainEntityOfPage(String value);

    /** Add a value to property map. */
    Builder addMap(URL value);

    /** Add a value to property map. */
    Builder addMap(String value);

    /** Add a value to property maps. */
    Builder addMaps(URL value);

    /** Add a value to property maps. */
    Builder addMaps(String value);

    /** Add a value to property name. */
    Builder addName(Text value);

    /** Add a value to property name. */
    Builder addName(String value);

    /** Add a value to property openingHours. */
    Builder addOpeningHours(Text value);

    /** Add a value to property openingHours. */
    Builder addOpeningHours(String value);

    /** Add a value to property openingHoursSpecification. */
    Builder addOpeningHoursSpecification(OpeningHoursSpecification value);

    /** Add a value to property openingHoursSpecification. */
    Builder addOpeningHoursSpecification(OpeningHoursSpecification.Builder value);

    /** Add a value to property openingHoursSpecification. */
    Builder addOpeningHoursSpecification(String value);

    /** Add a value to property photo. */
    Builder addPhoto(ImageObject value);

    /** Add a value to property photo. */
    Builder addPhoto(ImageObject.Builder value);

    /** Add a value to property photo. */
    Builder addPhoto(Photograph value);

    /** Add a value to property photo. */
    Builder addPhoto(Photograph.Builder value);

    /** Add a value to property photo. */
    Builder addPhoto(String value);

    /** Add a value to property photos. */
    Builder addPhotos(ImageObject value);

    /** Add a value to property photos. */
    Builder addPhotos(ImageObject.Builder value);

    /** Add a value to property photos. */
    Builder addPhotos(Photograph value);

    /** Add a value to property photos. */
    Builder addPhotos(Photograph.Builder value);

    /** Add a value to property photos. */
    Builder addPhotos(String value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action.Builder value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(String value);

    /** Add a value to property review. */
    Builder addReview(Review value);

    /** Add a value to property review. */
    Builder addReview(Review.Builder value);

    /** Add a value to property review. */
    Builder addReview(String value);

    /** Add a value to property reviews. */
    Builder addReviews(Review value);

    /** Add a value to property reviews. */
    Builder addReviews(Review.Builder value);

    /** Add a value to property reviews. */
    Builder addReviews(String value);

    /** Add a value to property sameAs. */
    Builder addSameAs(URL value);

    /** Add a value to property sameAs. */
    Builder addSameAs(String value);

    /** Add a value to property telephone. */
    Builder addTelephone(Text value);

    /** Add a value to property telephone. */
    Builder addTelephone(String value);

    /** Add a value to property url. */
    Builder addUrl(URL value);

    /** Add a value to property url. */
    Builder addUrl(String value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(Article.Builder value);

    /** Add a value to property detailedDescription. */
    Builder addDetailedDescription(String value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(PopularityScoreSpecification.Builder value);

    /** Add a value to property popularityScore. */
    Builder addPopularityScore(String value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The value of the property.
     */
    Builder addProperty(String name, SchemaOrgType value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param builder The schema.org object builder for the property value.
     */
    Builder addProperty(String name, Thing.Builder builder);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The string value of the property.
     */
    Builder addProperty(String name, String value);

    /** Build a {@link CityHall} object. */
    CityHall build();
  }
}
| |
/*
* Copyright (c) 2009 WiQuery team
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.odlabs.wiquery.ui.datepicker;
import java.util.Locale;
import org.odlabs.wiquery.core.commons.WiQueryJavaScriptResourceReference;
/**
 * $Id: DatePickerLanguageResourceReference.java 81 2009-05-28 20:05:12Z
 * lionel.armanet $
 * <p>
 * Provides a JavaScript reference to display the date picker labels for a given
 * locale.
 * </p>
 *
 * @author Lionel Armanet
 * @since 0.6
 */
public class DatePickerLanguageResourceReference extends
        WiQueryJavaScriptResourceReference {
    /**
     * Enumeration of possibles alternatives languages for the DatePicker
     *
     * @author Julien Roche
     * @note Hielke Hoeve: ordered languages by order of appearance on the file
     *       system. There are 3 countries whom have had their ISO code changed.
     *       Java has an ugly fix to fix this which we are stuck with... We copied
     *       the resource files to reflect both country codes, while Java only
     *       uses one of them...
     */
    public enum DatePickerLanguages {
        AFRIKAANS ("af"),
        ALGERIAN ("ar", "DZ"),
        ARABIC ("ar"),
        AZERBAIJANI ("az"),
        BULGARIAN ("bg"),
        BOSNIAN ("bs"),
        CATALAN ("ca"),
        CZECH ("cs"),
        DANISH ("da"),
        GERMAN (Locale.GERMAN),
        GREEK ("el"),
        AUSTRALIAN ("en", "AU"),
        UNITED_KINGDOM (Locale.UK),
        NEW_ZEALAND ("en", "NZ"),
        ESPERANTO ("eo"),
        SPANISH ("es"),
        ESTONIAN ("et"),
        EUSKARAKO ("eu"),
        PERSIAN ("fa"),
        FINNISH ("fi"),
        FAROESE ("fo"),
        SWISS ("fr", "CH"),
        FRENCH (Locale.FRENCH),
        GALICIAN ("gl"),
        HEBREW ("he"),
        //HEBREW_OLD ("iw"),
        CROATIAN ("hr"),
        HUNGARIAN ("hu"),
        ARMENIAN ("hy"),
        INDONESIAN ("id"),
        //INDONESIAN_OLD ("in"),
        ICELANDIC ("is"),
        ITALIAN (Locale.ITALIAN),
        JAPANESE (Locale.JAPANESE),
        KOREAN (Locale.KOREAN),
        KAZAKH ("kz"),
        LITHUANIAN ("lt"),
        LATVIAN ("lv"),
        MALAYALAM ("ml"),
        MALAYSIAN ("ms"),
        DUTCH ("nl"),
        NORVEGIAN ("no"),
        POLISH ("pl"),
        BRAZILIAN ("pt", "BR"),
        PORTUGUESE ("pt"),
        ROMANSH ("rm"),
        ROMANIAN ("ro"),
        RUSSIAN ("ru"),
        SLOVAK ("sk"),
        SLOVENIAN ("sl"),
        ALBANIAN ("sq"),
        SERBIA ("sr", "SR"),
        SERBIAN ("sr"),
        SWEDISH ("sv"),
        TAMIL ("ta"),
        THAI ("th"),
        TAJIKISTAN ("tj"),
        TURKISH ("tr"),
        UKRAINIAN ("uk"),
        VIETNAMESE ("vi"),
        SIMPLIFIED_CHINESE (Locale.SIMPLIFIED_CHINESE),
        CHINESE ("zh", "HK"),
        TRADITIONAL_CHINESE (Locale.TRADITIONAL_CHINESE);

        // Properties
        /** Locale backing this enum constant. */
        private final Locale locale;

        /**
         * Constructor
         * @param locale
         */
        DatePickerLanguages(Locale locale) {
            this.locale = locale;
        }

        /**
         * Constructor
         * @param language
         */
        DatePickerLanguages(String language) {
            this.locale = new Locale(language);
        }

        /**
         * Constructor
         * @param language
         * @param country
         */
        DatePickerLanguages(String language, String country) {
            this.locale = new Locale(language, country);
        }

        /**
         * Constructor
         * @param language
         * @param country
         * @param variant
         */
        DatePickerLanguages(String language, String country, String variant) {
            this.locale = new Locale(language, country, variant);
        }

        /**
         * @return the locale
         */
        public Locale getLocale() {
            return locale;
        }

        /**
         * Try to find the most appropriate value in the enumeration.
         * Matching falls back from language-country-variant, to
         * language-country, to language only.
         *
         * @param locale Locale to search
         * @return the value, or {@code null} when no constant matches
         */
        public static DatePickerLanguages getDatePickerLanguages(Locale locale){
            if(locale == null)
                return null;

            Locale tmpLocale = null;
            String language = locale.getLanguage();
            String country = locale.getCountry();
            String variant = locale.getVariant();
            String empty = "";

            // Normalize blank country/variant to null so the fallback tiers below apply.
            country = country == null || country.trim().length() <= 0 ? null : country;
            variant = variant == null || variant.trim().length() <= 0 ? null : variant;

            // Equals on language-country-variant
            if(variant != null){
                for(DatePickerLanguages l : values()){
                    tmpLocale = l.getLocale();

                    if(tmpLocale.getLanguage().equals(language)
                            && tmpLocale.getCountry().equals(country)
                            && tmpLocale.getVariant().equals(variant)){
                        return l;
                    }
                }
            }

            // Equals on language-country
            if(country != null){
                for(DatePickerLanguages l : values()){
                    tmpLocale = l.getLocale();

                    if(tmpLocale.getLanguage().equals(language)
                            && tmpLocale.getCountry().equals(country)
                            && tmpLocale.getVariant().equals(empty)){
                        return l;
                    }
                }
            }

            // Equals on language
            for(DatePickerLanguages l : values()){
                tmpLocale = l.getLocale();

                if(tmpLocale.getLanguage().equals(language)
                        && tmpLocale.getCountry().equals(empty)
                        && tmpLocale.getVariant().equals(empty)){
                    return l;
                }
            }

            return null;
        }

        /**
         * Method calculating the name of the jQuery UI Locale file.
         *
         * @param dpl Language to use
         * @return the filename, or {@code null} when {@code dpl} is null
         */
        public static String getJsFileName(DatePickerLanguages dpl) {
            if(dpl == null){
                return null;
            }

            Locale locale = dpl.getLocale();
            String country = locale.getCountry();
            String variant = locale.getVariant();

            // StringBuilder instead of StringBuffer: local, single-threaded buffer
            // needs no synchronization.
            StringBuilder js = new StringBuilder();
            js.append("i18n/jquery.ui.datepicker-");
            js.append(locale.getLanguage());

            if(country != null && country.trim().length() > 0){
                js.append('-').append(country);

                if(variant != null && variant.trim().length() > 0){
                    js.append('-').append(variant);
                }
            }

            js.append(".js");

            return js.toString();
        }
    }

    // Constants
    /** Constant of serialization */
    private static final long serialVersionUID = 5955164494361831059L;

    /**
     * Constructor.
     *
     * @param locale Locale of the resource; currently unused (the filename is
     *               already resolved by the caller) but kept for signature
     *               compatibility.
     * @param filename Resolved i18n JavaScript filename.
     */
    protected DatePickerLanguageResourceReference(Locale locale, String filename) {
        super(DatePickerLanguageResourceReference.class, filename);
    }

    /**
     * @param locale
     *            providing an unknown locale will return null, because Locale US, EN and EN_US are
     *            already included in the datepicker js file.
     * @return an DatePickerLanguageResourceReference if the locale is known within our DatePickerLanguages class.
     */
    public static DatePickerLanguageResourceReference get(Locale locale)
    {
        DatePickerLanguages dpl = DatePickerLanguages.getDatePickerLanguages(locale);

        if(dpl != null)
            return new DatePickerLanguageResourceReference(locale, DatePickerLanguages.getJsFileName(dpl));

        return null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.optimizer.rules;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.lang.aql.util.FunctionUtils;
import org.apache.asterix.om.base.ADouble;
import org.apache.asterix.om.base.AFloat;
import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.IAObject;
import org.apache.asterix.om.constants.AsterixConstantValue;
import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
/**
 * Looks for a select operator containing a condition of the form:
 * similarity-function GE/GT/LE/LT constant (or a variable bound to such a call).
 * Rewrites the select condition (and possibly the corresponding assign
 * expression) with the equivalent similarity-check function, whose boolean
 * result is accessed via get-item. This lets the runtime short-circuit the
 * similarity computation once the threshold can no longer be met.
 */
public class SimilarityCheckRule implements IAlgebraicRewriteRule {

    @Override
    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
        // Only select operators are rewritten by this rule.
        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
            return false;
        }
        SelectOperator select = (SelectOperator) op;
        Mutable<ILogicalExpression> condExpr = select.getCondition();
        // Gather the assigns below this select; they may bind variables used in the condition.
        List<AssignOperator> assigns = new ArrayList<AssignOperator>();
        AbstractLogicalOperator childOp = (AbstractLogicalOperator) select.getInputs().get(0).getValue();
        // Skip selects.
        while (childOp.getOperatorTag() == LogicalOperatorTag.SELECT) {
            childOp = (AbstractLogicalOperator) childOp.getInputs().get(0).getValue();
        }
        while (childOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
            assigns.add((AssignOperator) childOp);
            childOp = (AbstractLogicalOperator) childOp.getInputs().get(0).getValue();
        }
        return replaceSelectConditionExprs(condExpr, assigns, context);
    }

    /**
     * Recursively inspects the select condition and replaces optimizable
     * similarity comparisons with similarity-check calls.
     *
     * @return true iff a replacement was performed (for AND: iff all conjuncts were replaced)
     */
    private boolean replaceSelectConditionExprs(Mutable<ILogicalExpression> expRef, List<AssignOperator> assigns,
            IOptimizationContext context) throws AlgebricksException {
        ILogicalExpression expr = expRef.getValue();
        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
            return false;
        }
        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
        // Recursively traverse conjuncts.
        // TODO: Ignore disjuncts for now, because some replacements may be invalid.
        // For example, if the result of the similarity function is used somewhere upstream,
        // then we may still need the true similarity value even if the GE/GT/LE/LT comparison returns false.
        if (funcIdent == AlgebricksBuiltinFunctions.AND) {
            boolean found = true;
            for (int i = 0; i < funcExpr.getArguments().size(); ++i) {
                // NOTE(review): '&&' short-circuits, so once one conjunct fails to be
                // rewritten the remaining conjuncts are not attempted. Kept as-is to
                // preserve the existing plan behavior — confirm whether all conjuncts
                // should be attempted regardless.
                found = found && replaceSelectConditionExprs(funcExpr.getArguments().get(i), assigns, context);
            }
            return found;
        }
        // Look for GE/GT/LE/LT.
        if (funcIdent != AlgebricksBuiltinFunctions.GE && funcIdent != AlgebricksBuiltinFunctions.GT
                && funcIdent != AlgebricksBuiltinFunctions.LE && funcIdent != AlgebricksBuiltinFunctions.LT) {
            return false;
        }
        // One arg should be a function call or a variable, the other a constant.
        AsterixConstantValue constVal = null;
        ILogicalExpression nonConstExpr = null;
        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
        // Normalized GE/GT/LE/LT as if constant was on the right hand side.
        FunctionIdentifier normFuncIdent = null;
        // One of the args must be a constant.
        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
            ConstantExpression constExpr = (ConstantExpression) arg1;
            constVal = (AsterixConstantValue) constExpr.getValue();
            nonConstExpr = arg2;
            // Get func ident as if swapping lhs and rhs.
            normFuncIdent = getLhsAndRhsSwappedFuncIdent(funcIdent);
        } else if (arg2.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
            ConstantExpression constExpr = (ConstantExpression) arg2;
            constVal = (AsterixConstantValue) constExpr.getValue();
            nonConstExpr = arg1;
            // Constant is already on rhs, so nothing to be done for normalizedFuncIdent.
            normFuncIdent = funcIdent;
        } else {
            return false;
        }
        // The other arg is a function call. We can directly replace the select condition
        // with an equivalent similarity-check expression.
        if (nonConstExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
            return replaceWithFunctionCallArg(expRef, normFuncIdent, constVal,
                    (AbstractFunctionCallExpression) nonConstExpr);
        }
        // The other arg is a variable. We may have to introduce an assign operator
        // that assigns the result of a similarity-check function to a variable.
        if (nonConstExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
            return replaceWithVariableArg(expRef, normFuncIdent, constVal, (VariableReferenceExpression) nonConstExpr,
                    assigns, context);
        }
        return false;
    }

    /**
     * Handles the case where the comparison's non-constant side is a variable.
     * If the variable is bound (in one of the gathered assigns) to an optimizable
     * similarity function, a new assign computing the similarity-check is
     * introduced, and both the select condition and the original assign
     * expression are rewritten as get-item accesses on the new variable.
     */
    private boolean replaceWithVariableArg(Mutable<ILogicalExpression> expRef, FunctionIdentifier normFuncIdent,
            AsterixConstantValue constVal, VariableReferenceExpression varRefExpr, List<AssignOperator> assigns,
            IOptimizationContext context) throws AlgebricksException {
        // Find variable in assigns to determine its originating function.
        LogicalVariable var = varRefExpr.getVariableReference();
        Mutable<ILogicalExpression> simFuncExprRef = null;
        ScalarFunctionCallExpression simCheckFuncExpr = null;
        AssignOperator matchingAssign = null;
        for (int i = 0; i < assigns.size(); i++) {
            AssignOperator assign = assigns.get(i);
            for (int j = 0; j < assign.getVariables().size(); j++) {
                // Check if variables match.
                if (var != assign.getVariables().get(j)) {
                    continue;
                }
                // Check if corresponding expr is a function call.
                if (assign.getExpressions().get(j).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
                    continue;
                }
                simFuncExprRef = assign.getExpressions().get(j);
                // Analyze function expression and get equivalent similarity check function.
                simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal,
                        (AbstractFunctionCallExpression) simFuncExprRef.getValue());
                matchingAssign = assign;
                break;
            }
            if (simCheckFuncExpr != null) {
                break;
            }
        }
        // Only non-null if we found that varRefExpr refers to an optimizable similarity function call.
        if (simCheckFuncExpr != null) {
            // Create a new assign under matchingAssign which assigns the result of our
            // similarity-check function to a variable.
            LogicalVariable newVar = context.newVar();
            AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(
                    simCheckFuncExpr));
            // Hook up inputs.
            newAssign.getInputs()
                    .add(new MutableObject<ILogicalOperator>(matchingAssign.getInputs().get(0).getValue()));
            matchingAssign.getInputs().get(0).setValue(newAssign);
            // Replace select condition with a get-item on newVar.
            List<Mutable<ILogicalExpression>> selectGetItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
            // First arg is a variable reference expr on newVar.
            selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newVar)));
            // Second arg is the item index to be accessed, here 0 (the boolean check result).
            selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
                    new AsterixConstantValue(new AInt32(0)))));
            ILogicalExpression selectGetItemExpr = new ScalarFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), selectGetItemArgs);
            // Replace the old similarity function call with the new getItemExpr.
            expRef.setValue(selectGetItemExpr);
            // Replace expr corresponding to original variable in the original assign
            // with a get-item on newVar.
            List<Mutable<ILogicalExpression>> assignGetItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
            // First arg is a variable reference expr on newVar.
            assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newVar)));
            // Second arg is the item index to be accessed, here 1 (the similarity value).
            assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
                    new AsterixConstantValue(new AInt32(1)))));
            ILogicalExpression assignGetItemExpr = new ScalarFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), assignGetItemArgs);
            // Replace the original assign expr with the get-item expr.
            simFuncExprRef.setValue(assignGetItemExpr);
            context.computeAndSetTypeEnvironmentForOperator(newAssign);
            context.computeAndSetTypeEnvironmentForOperator(matchingAssign);
            return true;
        }
        return false;
    }

    /**
     * Handles the case where the comparison's non-constant side is itself a
     * function call: directly replaces the select condition with a get-item
     * on the equivalent similarity-check expression.
     */
    private boolean replaceWithFunctionCallArg(Mutable<ILogicalExpression> expRef, FunctionIdentifier normFuncIdent,
            AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) throws AlgebricksException {
        // Analyze func expr to see if it is an optimizable similarity function.
        ScalarFunctionCallExpression simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal, funcExpr);
        // Replace the expr in the select condition.
        if (simCheckFuncExpr != null) {
            // Get item 0 from var.
            List<Mutable<ILogicalExpression>> getItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
            // First arg is the similarity-check function call.
            getItemArgs.add(new MutableObject<ILogicalExpression>(simCheckFuncExpr));
            // Second arg is the item index to be accessed.
            getItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
                    new AInt32(0)))));
            ILogicalExpression getItemExpr = new ScalarFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), getItemArgs);
            // Replace the old similarity function call with the new getItemExpr.
            expRef.setValue(getItemExpr);
            return true;
        }
        return false;
    }

    /**
     * Builds the similarity-check expression equivalent to
     * {@code funcExpr normFuncIdent constVal}, or returns null when the
     * function/comparator combination is not optimizable.
     * Supported: similarity-jaccard with GE/GT, edit-distance with LE/LT.
     */
    private ScalarFunctionCallExpression getSimilarityCheckExpr(FunctionIdentifier normFuncIdent,
            AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) throws AlgebricksException {
        // Remember args from original similarity function to add them to the similarity-check function later.
        ArrayList<Mutable<ILogicalExpression>> similarityArgs = null;
        ScalarFunctionCallExpression simCheckFuncExpr = null;
        // Look for jaccard function call, and GE or GT.
        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.SIMILARITY_JACCARD) {
            IAObject jaccThresh;
            if (normFuncIdent == AlgebricksBuiltinFunctions.GE) {
                if (constVal.getObject() instanceof AFloat) {
                    jaccThresh = constVal.getObject();
                } else {
                    jaccThresh = new AFloat((float) ((ADouble) constVal.getObject()).getDoubleValue());
                }
            } else if (normFuncIdent == AlgebricksBuiltinFunctions.GT) {
                float threshVal = 0.0f;
                if (constVal.getObject() instanceof AFloat) {
                    threshVal = ((AFloat) constVal.getObject()).getFloatValue();
                } else {
                    threshVal = (float) ((ADouble) constVal.getObject()).getDoubleValue();
                }
                // Strict 'greater than' becomes 'greater or equal' to the next representable
                // float above the threshold. BUGFIX: the old code added Float.MIN_VALUE
                // (the smallest subnormal, ~1.4e-45), which is lost to rounding for any
                // normal-range threshold and silently turned GT into GE. Math.nextUp
                // yields the correct successor value.
                float f = Math.nextUp(threshVal);
                if (f > 1.0f) {
                    f = 1.0f;
                }
                jaccThresh = new AFloat(f);
            } else {
                return null;
            }
            similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
            similarityArgs.addAll(funcExpr.getArguments());
            similarityArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
                    jaccThresh))));
            simCheckFuncExpr = new ScalarFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK), similarityArgs);
        }
        // Look for edit-distance function call, and LE or LT.
        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE) {
            AInt32 aInt = new AInt32(0);
            try {
                aInt = (AInt32) ATypeHierarchy.convertNumericTypeObject(constVal.getObject(), ATypeTag.INT32);
            } catch (AsterixException e) {
                throw new AlgebricksException(e);
            }
            AInt32 edThresh;
            if (normFuncIdent == AlgebricksBuiltinFunctions.LE) {
                edThresh = aInt;
            } else if (normFuncIdent == AlgebricksBuiltinFunctions.LT) {
                // Strict 'less than' an integer distance is 'less or equal' to distance - 1,
                // clamped at 0.
                int ed = aInt.getIntegerValue() - 1;
                if (ed < 0) {
                    ed = 0;
                }
                edThresh = new AInt32(ed);
            } else {
                return null;
            }
            similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
            similarityArgs.addAll(funcExpr.getArguments());
            similarityArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
                    edThresh))));
            simCheckFuncExpr = new ScalarFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK), similarityArgs);
        }
        // Preserve all annotations.
        if (simCheckFuncExpr != null) {
            simCheckFuncExpr.getAnnotations().putAll(funcExpr.getAnnotations());
        }
        return simCheckFuncExpr;
    }

    /**
     * Returns the comparison identifier equivalent to {@code oldFuncIdent}
     * after swapping its left- and right-hand sides (GE<->LE, GT<->LT).
     *
     * @throws IllegalStateException if the identifier is not GE/GT/LE/LT
     */
    private FunctionIdentifier getLhsAndRhsSwappedFuncIdent(FunctionIdentifier oldFuncIdent) {
        if (oldFuncIdent == AlgebricksBuiltinFunctions.GE) {
            return AlgebricksBuiltinFunctions.LE;
        }
        if (oldFuncIdent == AlgebricksBuiltinFunctions.GT) {
            return AlgebricksBuiltinFunctions.LT;
        }
        if (oldFuncIdent == AlgebricksBuiltinFunctions.LE) {
            return AlgebricksBuiltinFunctions.GE;
        }
        if (oldFuncIdent == AlgebricksBuiltinFunctions.LT) {
            return AlgebricksBuiltinFunctions.GT;
        }
        throw new IllegalStateException();
    }

    @Override
    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
        // This rule only fires on the post-order pass.
        return false;
    }
}
| |
package org.dainst.chronontology.handler.model;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.dainst.chronontology.TestConstants;
import org.dainst.chronontology.util.JsonUtils;
import org.json.JSONException;
import org.testng.annotations.Test;
import java.io.IOException;
import static org.dainst.chronontology.JsonTestUtils.jsonAssertEquals;
import static org.dainst.chronontology.util.JsonUtils.json;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.fail;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
/**
 * Tests for {@code Document}: resource id/type handling, filtering of
 * unsupported top level fields, merge semantics (created date preservation,
 * modified date merging, version counting, user attribution) and
 * reconstruction from previously stored JSON via {@code Document.from}.
 *
 * @author Daniel de Oliveira
 */
public class DocumentTest {

    public static final String ADMIN = "admin";

    /**
     * Produces a node whose modified dates array
     * is a merge of both the arguments nodes created dates.
     * <p>
     * NOTE(review): only {@code niew}'s created date is copied into the array;
     * the {@code old} parameter is currently unused — confirm whether the old
     * created date should also be part of the expected merge.
     *
     * @param old the previously stored document's json (currently unused)
     * @param niew the incoming document's json
     * @return a node holding the expected modified dates array
     */
    private JsonNode nodeWithModifiedDates(JsonNode old, JsonNode niew) {
        ObjectNode example = (ObjectNode) json();
        ArrayNode a = example.putArray(Document.MODIFIED);
        a.add(niew.get(Document.CREATED));
        return example;
    }

    /** Builds a bare json node carrying only the given version number. */
    private JsonNode makeNodeWithVersion(int version) {
        JsonNode example = json();
        ((ObjectNode) example).put(Document.VERSION, version);
        return example;
    }

    /** Standard document under test: id "1", test type, empty resource, admin user. */
    private Document exampleDoc() {
        return new Document("1", TestConstants.TEST_TYPE, json(), ADMIN);
    }

    /**
     * This is like a resource already stored successfully.
     *
     * @return json with id and type fields set
     */
    private JsonNode oldJson() {
        return json("{\"id\":\"1\",\"type\":\"" + TestConstants.TEST_TYPE + "\"}");
    }

    /** Builds a single-entry json object {"k":"v"}. */
    private JsonNode single(String k, String v) {
        return json("{\"" + k + "\":\"" + v + "\"}");
    }

    @Test
    public void setResourceId() {
        Document doc = exampleDoc();
        assertEquals(doc.j().get(Document.RESOURCE).get(Document.ID).toString(), "\"1\"");
        assertEquals(doc.getId(), "1");
    }

    @Test
    public void setResourceType() {
        Document doc = exampleDoc();
        assertEquals(doc.j().get(Document.RESOURCE).get(Document.TYPE).toString(), "\"" + TestConstants.TEST_TYPE + "\"");
        assertEquals(doc.getType(), TestConstants.TEST_TYPE);
    }

    @Test
    public void createdDateStaysSame() throws IOException, InterruptedException {
        Document old = exampleDoc();
        // Sleep so the two documents get distinguishable creation timestamps.
        Thread.sleep(10);
        Document dm = exampleDoc();
        jsonAssertEquals(
                dm.merge(old).j().get(Document.CREATED),
                old.j().get(Document.CREATED));
    }

    @Test
    public void modifiedDatesMerge() throws IOException, InterruptedException, JSONException {
        Document old = exampleDoc();
        // Sleep so the two documents get distinguishable creation timestamps.
        Thread.sleep(10);
        Document dm = exampleDoc();
        JsonNode nodeWithDates = nodeWithModifiedDates(old.j(), dm.j());
        jsonAssertEquals(
                dm.merge(old).j(),
                nodeWithDates);
    }

    @Test
    public void mergeTakesIdAndTypeFromOldDoc() {
        Document oldOne = exampleDoc();
        // BUGFIX(style): removed a stray empty statement (double semicolon).
        Document newOne = new Document("2", "other", json(), ADMIN);
        newOne.merge(oldOne);
        assertEquals(newOne.getId(), "1");
        assertEquals(newOne.getType(), TestConstants.TEST_TYPE);
    }

    @Test
    public void filterUnsupported() {
        JsonNode n = JsonUtils.json();
        ((ObjectNode) n).put("a", "a"); // unwanted
        ((ObjectNode) n).put(Document.ID, "1"); // unwanted
        ((ObjectNode) n).put(Document.RESOURCE, json());
        ((ObjectNode) n).put(Document.DATASET, "c");
        Document dm =
                new Document("1", TestConstants.TEST_TYPE, n, ADMIN);
        assertNotNull(dm.j().get(Document.RESOURCE));
        assertNotNull(dm.j().get(Document.DATASET));
        assertNull(dm.j().get("a"));
        assertNull(dm.j().get(Document.ID));
    }

    @Test
    public void setVersionOnCreate() throws IOException, InterruptedException, JSONException {
        jsonAssertEquals(
                new Document("1", TestConstants.TEST_TYPE, json(), ADMIN).j(),
                makeNodeWithVersion(1));
    }

    @Test
    public void setCreateUserOnCreate() throws IOException {
        jsonAssertEquals(exampleDoc().j()
                        .get(Document.CREATED),
                single("user", ADMIN));
    }

    @Test
    public void doNotSetModifiedUserOnCreate() throws IOException {
        jsonAssertEquals(
                new Document("1", TestConstants.TEST_TYPE, json(), ADMIN).j().
                        get(Document.MODIFIED),
                json("[]"));
    }

    @Test
    public void differentUserOnModify() throws IOException {
        Document old = new Document("1", TestConstants.TEST_TYPE, json(), ADMIN);
        Document dm =
                new Document("1", TestConstants.TEST_TYPE, json(), "ove");
        jsonAssertEquals(
                dm.merge(old).j().
                        get(Document.MODIFIED).get(0),
                single("user", "ove"));
    }

    @Test
    public void countVersions() throws IOException, InterruptedException, JSONException {
        Document old = exampleDoc();
        Document dm = exampleDoc();
        jsonAssertEquals(dm.merge(old).j(), makeNodeWithVersion(2));
    }

    @Test
    public void createFromOld() {
        JsonNode n = json();
        ((ObjectNode) n).put(Document.RESOURCE, oldJson());
        ((ObjectNode) n).put(Document.CREATED, single("date", "today"));
        Document dm = Document.from(n);
        assertEquals(dm.getId(), "1");
        assertEquals(dm.getType(), TestConstants.TEST_TYPE);
        assertEquals(dm.j().get(Document.CREATED), single("date", "today"));
    }

    @Test
    public void createFromOldNoType() {
        JsonNode n = json();
        ((ObjectNode) n).put(Document.RESOURCE, single(Document.ID, "1"));
        try {
            Document.from(n);
            fail();
        } catch (Exception expected) {
            // Missing type is expected to be rejected.
        }
    }

    @Test
    public void createFromOldNull() {
        assertEquals(Document.from(null), null);
    }

    @Test
    public void getDataset() {
        JsonNode n = json();
        ((ObjectNode) n).put(Document.RESOURCE, oldJson());
        ((ObjectNode) n).put(Document.DATASET, "1");
        Document dm = Document.from(n);
        assertEquals(dm.getDataset(), "1");
    }

    @Test
    public void noDataset() {
        JsonNode n = json();
        ((ObjectNode) n).put(Document.RESOURCE, oldJson());
        Document dm = Document.from(n);
        assertEquals(dm.getDataset(), null);
    }
}
| |
/*******************************************************************************
* Copyright (c) 2015 - 2017
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*******************************************************************************/
package jsettlers.graphics.debug;
import go.graphics.GLDrawContext;
import go.graphics.area.Area;
import go.graphics.event.GOEvent;
import go.graphics.event.GOKeyEvent;
import go.graphics.region.Region;
import go.graphics.region.RegionContent;
import go.graphics.swing.AreaContainer;
import go.graphics.text.EFontSize;
import go.graphics.text.TextDrawer;
import jsettlers.common.Color;
import jsettlers.common.resources.ResourceManager;
import jsettlers.common.resources.SettlersFolderChecker;
import jsettlers.common.utils.FileUtils;
import jsettlers.common.utils.mutables.Mutable;
import jsettlers.graphics.image.GuiImage;
import jsettlers.graphics.image.Image;
import jsettlers.graphics.image.LandscapeImage;
import jsettlers.graphics.image.SettlerImage;
import jsettlers.graphics.image.SingleImage;
import jsettlers.graphics.image.reader.AdvancedDatFileReader;
import jsettlers.graphics.image.reader.DatFileReader;
import jsettlers.graphics.image.reader.DatFileType;
import jsettlers.graphics.image.sequence.SequenceList;
import jsettlers.graphics.image.sequence.Sequence;
import jsettlers.main.swing.resources.SwingResourceProvider;
import jsettlers.main.swing.settings.SettingsManager;
import javax.imageio.ImageIO;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.WindowConstants;
import java.awt.Dimension;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.Locale;
/**
 * Manual debugging tool that opens a Settlers III .dat graphics file and
 * renders its settler/gui/landscape image sequences in an OpenGL window.
 * Keyboard controls allow navigation and PNG export (see {@code printHelp()}).
 */
public class DatFileTester {
    private static final int DAT_FILE_INDEX = 13;
    private static final DatFileType TYPE = DatFileType.RGB565;
    private static final String FILE_NAME_PATTERN = "siedler3_%02d" + TYPE.getFileSuffix();
    private static final String FILE_NAME = String.format(Locale.ENGLISH, FILE_NAME_PATTERN, DAT_FILE_INDEX);

    // Player colors cycled per image index when drawing.
    private static final Color[] colors = new Color[] { Color.WHITE };

    private final DatFileReader reader;
    private final Region region;

    private DatFileTester() throws IOException {
        ResourceManager.setProvider(new SwingResourceProvider());
        File settlersGfxFolder = getSettlersGfxFolder();
        File file = findFileIgnoringCase(settlersGfxFolder, FILE_NAME);
        reader = new AdvancedDatFileReader(file, TYPE);
        region = new Region(Region.POSITION_CENTER);
        region.setContent(new Content());
    }

    /**
     * Opens the test window for the configured dat file.
     *
     * @param args unused
     * @throws IOException if the settlers folder or dat file cannot be read
     */
    public static void main(String[] args) throws IOException {
        DatFileTester datFileTester = new DatFileTester();

        Area area = new Area();
        area.add(datFileTester.region);
        AreaContainer glCanvas = new AreaContainer(area);

        JFrame frame = new JFrame("Opengl image test: " + DAT_FILE_INDEX);
        frame.getContentPane().add(glCanvas);
        frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        frame.setSize(new Dimension(400, 400));
        frame.setVisible(true);
    }

    /** Region content that handles key events and draws the selected sequence type. */
    private class Content implements RegionContent {
        private static final int SETTLERS = 1;
        private static final int GUI = 2;
        private static final int LANDSCAPE = 3;

        private int offsetY = 400;
        private int offsetX = 200;
        private int mode = SETTLERS;

        public Content() {
            printHelp();
        }

        @Override
        public void handleEvent(GOEvent event) {
            if (event instanceof GOKeyEvent) {
                String keyCode = ((GOKeyEvent) event).getKeyCode();
                if ("UP".equalsIgnoreCase(keyCode)) {
                    offsetY -= 400;
                } else if ("DOWN".equalsIgnoreCase(keyCode)) {
                    offsetY += 400;
                } else if ("LEFT".equalsIgnoreCase(keyCode)) {
                    offsetX += 100;
                } else if ("RIGHT".equalsIgnoreCase(keyCode)) {
                    offsetX -= 100;
                } else if ("L".equalsIgnoreCase(keyCode)) {
                    mode = LANDSCAPE;
                } else if ("S".equalsIgnoreCase(keyCode)) {
                    mode = SETTLERS;
                } else if ("G".equalsIgnoreCase(keyCode)) {
                    mode = GUI;
                } else if ("E".equalsIgnoreCase(keyCode)) {
                    export();
                } else if ("W".equalsIgnoreCase(keyCode)) {
                    try {
                        exportAll();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            region.requestRedraw();
        }

        @Override
        public void drawContent(GLDrawContext gl2, int width, int height) {
            if (mode == SETTLERS) {
                SequenceList<Image> sequences = reader.getSettlers();
                drawSequences(gl2, width, height, sequences);
            } else if (mode == GUI) {
                Sequence<GuiImage> sequences = reader.getGuis();
                drawSequence(gl2, width, height, 0, sequences);
            } else {
                Sequence<LandscapeImage> sequences = reader.getLandscapes();
                drawSequence(gl2, width, height, 0, sequences);
            }
        }

        /** Draws all sequences stacked vertically, each labeled with its index. */
        private <T extends Image> void drawSequences(GLDrawContext gl2, int width, int height, SequenceList<T> sequences) {
            gl2.glTranslatef(offsetX, offsetY, 0);
            int y = 0;
            int seqIndex = 0;
            TextDrawer drawer = gl2.getTextDrawer(EFontSize.NORMAL);
            for (int i = 0; i < sequences.size(); i++) {
                Sequence<T> seq = sequences.get(i);
                int maxheight;
                maxheight = drawSequence(gl2, width, height, y, seq);
                gl2.color(0, 0, 0, 1);
                drawer.drawString(20, y + 20, seqIndex + ":");
                seqIndex++;
                y -= maxheight + 40;
            }
        }

        /**
         * Draws one sequence as a horizontal strip at the given y offset,
         * culling images that are outside the visible area.
         *
         * @return the maximum image height of the sequence
         */
        private <T extends Image> int drawSequence(GLDrawContext gl2, int width, int height, int y, Sequence<T> seq) {
            int maxheight = 0;
            int x = 0;
            for (int index = 0; index < seq.length(); index++) {
                T image = seq.getImage(index);
                maxheight = Math.max(maxheight, image.getHeight());
                // Only draw images near the visible viewport (100px margin).
                if (x > -offsetX - 100 && x < -offsetX + width + 100 && y > -offsetY - 100 && y < -offsetY + height + 100) {
                    drawImage(gl2, y, index, x, (SingleImage) image);
                }
                x += 100;
            }
            return maxheight;
        }

        /** Draws a single image plus red guide lines marking its offset origin. */
        private void drawImage(GLDrawContext gl2, int y, int index, int x, SingleImage image) {
            image.drawAt(gl2, x - image.getOffsetX(), y + image.getHeight() + image.getOffsetY(), colors[index % colors.length]);

            gl2.color(1, 0, 0, 1);
            float[] line = new float[] { x, y, 0, x, y + image.getHeight() + image.getOffsetY(), 0, x - image.getOffsetX(),
                    y + image.getHeight() + image.getOffsetY(), 0 };
            gl2.drawLine(line, false);
            drawPoint(gl2, x, y);
            drawPoint(gl2, x + image.getWidth(), y);
            drawPoint(gl2, x + image.getWidth(), y + image.getHeight());
            drawPoint(gl2, x, y + image.getHeight());
        }

        /** Intentionally a no-op — presumably debug corner markers were disabled; TODO confirm. */
        private void drawPoint(GLDrawContext gl2, int x, int y) {
        }

        /** Prints the key bindings to stdout. */
        private void printHelp() {
            // BUGFIX: the help text now matches the keys actually handled in
            // handleEvent ("L" shows landscapes, "W" exports all files); the old
            // text advertised a non-existent "B shows Background" binding.
            System.out
                    .println("HELP:\nUse arrow keys to navigate.\nS shows settlers.\nG shows gui images.\nL shows landscape images.\nE exports the current file as png.\nW exports all files as png");
        }
    }

    /** Asks for a target directory and exports the current dat file's images as PNGs. */
    protected void export() {
        final JFileChooser fc = new JFileChooser();
        fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);

        int returnVal = fc.showOpenDialog(null);
        if (returnVal == JFileChooser.APPROVE_OPTION) {
            File dir = fc.getSelectedFile();
            exportTo(dir, reader);
        }
    }

    /** Asks for a target directory and exports all dat files (indices 0..99) found in the gfx folder. */
    private static void exportAll() throws IOException {
        File settlersGfxFolder = getSettlersGfxFolder();

        final JFileChooser fc = new JFileChooser();
        fc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);

        int returnVal = fc.showOpenDialog(null);
        if (returnVal == JFileChooser.APPROVE_OPTION) {
            File dir = fc.getSelectedFile();
            for (int i = 0; i <= 99; i++) {
                String fileName = String.format(Locale.ENGLISH, FILE_NAME_PATTERN, i);
                File file = findFileIgnoringCase(settlersGfxFolder, fileName);
                if (file != null && file.exists()) {
                    DatFileReader reader = new AdvancedDatFileReader(file, TYPE);
                    exportTo(new File(dir, "" + i), reader);
                }
            }
        }
    }

    /** Exports settlers, gui and landscape sequences of one reader into subdirectories of dir. */
    private static void exportTo(File dir, DatFileReader reader) {
        export(reader.getSettlers(), new File(dir, "settlers"));
        Sequence<GuiImage> guis = reader.getGuis();
        if (guis.length() > 0) {
            exportSequence(new File(dir, "gui"), 0, guis);
        }
        Sequence<LandscapeImage> landscapes = reader.getLandscapes();
        if (landscapes.length() > 0) {
            exportSequence(new File(dir, "landscape"), 1, landscapes);
        }
    }

    /** Exports every sequence of the list, each into its own numbered subdirectory. */
    private static <T extends Image> void export(SequenceList<T> sequences, File dir) {
        for (int index = 0; index < sequences.size(); index++) {
            Sequence<T> seq = sequences.get(index);
            exportSequence(dir, index, seq);
        }
    }

    /** Exports one sequence's frames (and settler torsos, if present) as numbered PNGs. */
    private static <T extends Image> void exportSequence(File dir, int index, Sequence<T> seq) {
        File seqdir = new File(dir, index + "");
        seqdir.mkdirs();
        for (int j = 0; j < seq.length(); j++) {
            T image = seq.getImage(j);
            export((SingleImage) image, new File(seqdir, j + ".png"));
            if (image instanceof SettlerImage && ((SettlerImage) image).getTorso() != null) {
                export((SingleImage) ((SettlerImage) image).getTorso(), new File(seqdir, j + "_torso.png"));
            }
        }
    }

    /** Renders one image to a BufferedImage and writes it as PNG; silently skips on failure. */
    private static void export(SingleImage image, File file) {
        // does not work if gpu does not support non-power-of-two
        BufferedImage rendered = image.convertToBufferedImage();
        if (rendered == null) {
            return;
        }
        try {
            ImageIO.write(rendered, "png", file);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Resolves the configured settlers gfx folder via the settings manager. */
    private static File getSettlersGfxFolder() throws IOException {
        SettlersFolderChecker.SettlersFolderInfo settlersFolderInfo = SettlersFolderChecker.checkSettlersFolder(SettingsManager.getInstance().getSettlersFolder());
        return settlersFolderInfo.gfxFolder;
    }

    /**
     * Finds a direct child of the given folder whose name matches fileName
     * case-insensitively, or returns null if none exists.
     * FIX: removed the redundant (and default-locale-sensitive) toLowerCase()
     * call — equalsIgnoreCase already performs the case-insensitive compare.
     */
    private static File findFileIgnoringCase(File settlersGfxFolder, String fileName) {
        Mutable<File> graphicsFile = new Mutable<>();
        FileUtils.iterateChildren(settlersGfxFolder, currentFile -> {
            if (currentFile.isFile() && fileName.equalsIgnoreCase(currentFile.getName())) {
                graphicsFile.object = currentFile;
            }
        });
        return graphicsFile.object;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.olingo4.Olingo4Component;
/**
* Communicate with OData 4.0 services using Apache Olingo OData API.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface Olingo4ComponentBuilderFactory {
/**
 * Olingo4 (camel-olingo4)
 * Communicate with OData 4.0 services using Apache Olingo OData API.
 *
 * Category: cloud
 * Since: 2.19
 * Maven coordinates: org.apache.camel:camel-olingo4
 */
static Olingo4ComponentBuilder olingo4() {
return new Olingo4ComponentBuilderImpl();
}
/**
 * Builder for the Olingo4 component.
 */
interface Olingo4ComponentBuilder
extends
ComponentBuilder<Olingo4Component> {
/**
 * To use the shared configuration.
 *
 * The option is a:
 * <code>org.apache.camel.component.olingo4.Olingo4Configuration</code>
 * type.
 *
 * Group: common
 */
default Olingo4ComponentBuilder configuration(
org.apache.camel.component.olingo4.Olingo4Configuration configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
 * HTTP connection creation timeout in milliseconds, defaults to 30,000
 * (30 seconds).
 *
 * The option is a: <code>int</code> type.
 *
 * Default: 30000
 * Group: common
 */
default Olingo4ComponentBuilder connectTimeout(int connectTimeout) {
doSetProperty("connectTimeout", connectTimeout);
return this;
}
/**
 * Content-Type header value can be used to specify JSON or XML message
 * format, defaults to application/json;charset=utf-8.
 *
 * The option is a: <code>java.lang.String</code> type.
 *
 * Default: application/json;charset=utf-8
 * Group: common
 */
default Olingo4ComponentBuilder contentType(java.lang.String contentType) {
doSetProperty("contentType", contentType);
return this;
}
/**
 * Set this to true to filter out results that have already been
 * communicated by this component.
 *
 * The option is a: <code>boolean</code> type.
 *
 * Default: false
 * Group: common
 */
default Olingo4ComponentBuilder filterAlreadySeen(
boolean filterAlreadySeen) {
doSetProperty("filterAlreadySeen", filterAlreadySeen);
return this;
}
/**
 * Custom HTTP headers to inject into every request, this could include
 * OAuth tokens, etc.
 *
 * The option is a: <code>java.util.Map<java.lang.String,
 * java.lang.String></code> type.
 *
 * Group: common
 */
default Olingo4ComponentBuilder httpHeaders(
java.util.Map<java.lang.String, java.lang.String> httpHeaders) {
doSetProperty("httpHeaders", httpHeaders);
return this;
}
/**
 * HTTP proxy server configuration.
 *
 * The option is a: <code>org.apache.http.HttpHost</code> type.
 *
 * Group: common
 */
default Olingo4ComponentBuilder proxy(org.apache.http.HttpHost proxy) {
doSetProperty("proxy", proxy);
return this;
}
/**
 * Target OData service base URI, e.g.
 * http://services.odata.org/OData/OData.svc.
 *
 * The option is a: <code>java.lang.String</code> type.
 *
 * Group: common
 */
default Olingo4ComponentBuilder serviceUri(java.lang.String serviceUri) {
doSetProperty("serviceUri", serviceUri);
return this;
}
/**
 * HTTP request timeout in milliseconds, defaults to 30,000 (30
 * seconds).
 *
 * The option is a: <code>int</code> type.
 *
 * Default: 30000
 * Group: common
 */
default Olingo4ComponentBuilder socketTimeout(int socketTimeout) {
doSetProperty("socketTimeout", socketTimeout);
return this;
}
/**
 * Allows for bridging the consumer to the Camel routing Error Handler,
 * which mean any exceptions occurred while the consumer is trying to
 * pickup incoming messages, or the likes, will now be processed as a
 * message and handled by the routing Error Handler. By default the
 * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
 * with exceptions, that will be logged at WARN or ERROR level and
 * ignored.
 *
 * The option is a: <code>boolean</code> type.
 *
 * Default: false
 * Group: consumer
 */
default Olingo4ComponentBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
 * For endpoints that return an array or collection, a consumer endpoint
 * will map every element to distinct messages, unless splitResult is
 * set to false.
 *
 * The option is a: <code>boolean</code> type.
 *
 * Default: true
 * Group: consumer
 */
default Olingo4ComponentBuilder splitResult(boolean splitResult) {
doSetProperty("splitResult", splitResult);
return this;
}
/**
 * Whether the producer should be started lazy (on the first message).
 * By starting lazy you can use this to allow CamelContext and routes to
 * startup in situations where a producer may otherwise fail during
 * starting and cause the route to fail being started. By deferring this
 * startup to be lazy then the startup failure can be handled during
 * routing messages via Camel's routing error handlers. Beware that when
 * the first message is processed then creating and starting the
 * producer may take a little time and prolong the total processing time
 * of the processing.
 *
 * The option is a: <code>boolean</code> type.
 *
 * Default: false
 * Group: producer
 */
default Olingo4ComponentBuilder lazyStartProducer(
boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
 * Whether the component should use basic property binding (Camel 2.x)
 * or the newer property binding with additional capabilities.
 *
 * The option is a: <code>boolean</code> type.
 *
 * Default: false
 * Group: advanced
 */
default Olingo4ComponentBuilder basicPropertyBinding(
boolean basicPropertyBinding) {
doSetProperty("basicPropertyBinding", basicPropertyBinding);
return this;
}
/**
 * Custom HTTP async client builder for more complex HTTP client
 * configuration, overrides connectionTimeout, socketTimeout, proxy and
 * sslContext. Note that a socketTimeout MUST be specified in the
 * builder, otherwise OData requests could block indefinitely.
 *
 * The option is a:
 * <code>org.apache.http.impl.nio.client.HttpAsyncClientBuilder</code>
 * type.
 *
 * Group: advanced
 */
default Olingo4ComponentBuilder httpAsyncClientBuilder(
org.apache.http.impl.nio.client.HttpAsyncClientBuilder httpAsyncClientBuilder) {
doSetProperty("httpAsyncClientBuilder", httpAsyncClientBuilder);
return this;
}
/**
 * Custom HTTP client builder for more complex HTTP client
 * configuration, overrides connectionTimeout, socketTimeout, proxy and
 * sslContext. Note that a socketTimeout MUST be specified in the
 * builder, otherwise OData requests could block indefinitely.
 *
 * The option is a:
 * <code>org.apache.http.impl.client.HttpClientBuilder</code> type.
 *
 * Group: advanced
 */
default Olingo4ComponentBuilder httpClientBuilder(
org.apache.http.impl.client.HttpClientBuilder httpClientBuilder) {
doSetProperty("httpClientBuilder", httpClientBuilder);
return this;
}
/**
 * To configure security using SSLContextParameters.
 *
 * The option is a:
 * <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
 *
 * Group: security
 */
default Olingo4ComponentBuilder sslContextParameters(
org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
 * Enable usage of global SSL context parameters.
 *
 * The option is a: <code>boolean</code> type.
 *
 * Default: false
 * Group: security
 */
default Olingo4ComponentBuilder useGlobalSslContextParameters(
boolean useGlobalSslContextParameters) {
doSetProperty("useGlobalSslContextParameters", useGlobalSslContextParameters);
return this;
}
}
/**
 * Concrete {@link Olingo4ComponentBuilder}: instantiates the
 * {@link Olingo4Component} and routes builder property names onto the
 * component / configuration setters.
 */
class Olingo4ComponentBuilderImpl
extends
AbstractComponentBuilder<Olingo4Component>
implements
Olingo4ComponentBuilder {
@Override
protected Olingo4Component buildConcreteComponent() {
return new Olingo4Component();
}
// Lazily creates the shared Olingo4Configuration so that per-option setters
// can still be applied when no explicit configuration object was supplied.
private org.apache.camel.component.olingo4.Olingo4Configuration getOrCreateConfiguration(
org.apache.camel.component.olingo4.Olingo4Component component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.olingo4.Olingo4Configuration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
// Generated dispatch table: options in the "common"/advanced groups go to the
// (lazily created) configuration, the remaining ones directly to the component.
// Returns false for unknown option names.
switch (name) {
case "configuration": ((Olingo4Component) component).setConfiguration((org.apache.camel.component.olingo4.Olingo4Configuration) value); return true;
case "connectTimeout": getOrCreateConfiguration((Olingo4Component) component).setConnectTimeout((int) value); return true;
case "contentType": getOrCreateConfiguration((Olingo4Component) component).setContentType((java.lang.String) value); return true;
case "filterAlreadySeen": getOrCreateConfiguration((Olingo4Component) component).setFilterAlreadySeen((boolean) value); return true;
case "httpHeaders": getOrCreateConfiguration((Olingo4Component) component).setHttpHeaders((java.util.Map) value); return true;
case "proxy": getOrCreateConfiguration((Olingo4Component) component).setProxy((org.apache.http.HttpHost) value); return true;
case "serviceUri": getOrCreateConfiguration((Olingo4Component) component).setServiceUri((java.lang.String) value); return true;
case "socketTimeout": getOrCreateConfiguration((Olingo4Component) component).setSocketTimeout((int) value); return true;
case "bridgeErrorHandler": ((Olingo4Component) component).setBridgeErrorHandler((boolean) value); return true;
case "splitResult": getOrCreateConfiguration((Olingo4Component) component).setSplitResult((boolean) value); return true;
case "lazyStartProducer": ((Olingo4Component) component).setLazyStartProducer((boolean) value); return true;
case "basicPropertyBinding": ((Olingo4Component) component).setBasicPropertyBinding((boolean) value); return true;
case "httpAsyncClientBuilder": getOrCreateConfiguration((Olingo4Component) component).setHttpAsyncClientBuilder((org.apache.http.impl.nio.client.HttpAsyncClientBuilder) value); return true;
case "httpClientBuilder": getOrCreateConfiguration((Olingo4Component) component).setHttpClientBuilder((org.apache.http.impl.client.HttpClientBuilder) value); return true;
case "sslContextParameters": getOrCreateConfiguration((Olingo4Component) component).setSslContextParameters((org.apache.camel.support.jsse.SSLContextParameters) value); return true;
case "useGlobalSslContextParameters": ((Olingo4Component) component).setUseGlobalSslContextParameters((boolean) value); return true;
default: return false;
}
}
}
}
| |
/**
* Copyright (C) 2003 Alexander Kout
* Originally from the jFxp project (http://jfxp.sourceforge.net/).
* Copied with permission June 11, 2012 by Femi Omojola (fomojola@ideasynthesis.com).
*/
package org.java_websocket;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;
import javax.net.ssl.SSLEngineResult.HandshakeStatus;
import javax.net.ssl.SSLEngineResult.Status;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLSession;
import java.io.EOFException;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.ByteChannel;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
/**
* Implements the relevant portions of the SocketChannel interface with the SSLEngine wrapper.
*/
public class SSLSocketChannel2 implements ByteChannel, WrappedByteChannel {
/**
 * This object is used to feed the {@link SSLEngine}'s wrap and unwrap methods during the handshake phase.
 **/
protected static ByteBuffer emptybuffer = ByteBuffer.allocate( 0 );
/** executor used to run the SSLEngine's delegated (long-running) handshake tasks */
protected ExecutorService exec;
/** futures of delegated tasks submitted to {@link #exec} that have not yet been observed as done */
protected List<Future<?>> tasks;
/** raw payload incomming */
protected ByteBuffer inData;
/** encrypted data outgoing */
protected ByteBuffer outCrypt;
/** encrypted data incoming */
protected ByteBuffer inCrypt;
/** the underlying channel */
protected SocketChannel socketChannel;
/** used to set interestOP SelectionKey.OP_WRITE for the underlying channel */
protected SelectionKey selectionKey;
protected SSLEngine sslEngine;
/** result of the most recent unwrap call; its status/handshake-status drive read decisions */
protected SSLEngineResult readEngineResult;
/** result of the most recent wrap call; its status/handshake-status drive write decisions */
protected SSLEngineResult writeEngineResult;
/**
 * Should be used to count the buffer allocations.
 * But because of #190 where HandshakeStatus.FINISHED is not properly returned by nio wrap/unwrap this variable is used to check whether {@link #createBuffers(SSLSession)} needs to be called.
 **/
protected int bufferallocations = 0;
/**
 * Wraps the given channel with the given (already configured) SSLEngine and
 * immediately kicks off the SSL handshake.
 *
 * @param channel the plain socket channel to wrap; must not be null
 * @param sslEngine engine performing the encryption; must not be null
 * @param exec executor for the engine's delegated tasks; must not be null
 * @param key optional selection key of the channel; when given, OP_WRITE is added to its interest ops
 * @throws IOException if writing the initial handshake data fails
 **/
public SSLSocketChannel2( SocketChannel channel , SSLEngine sslEngine , ExecutorService exec , SelectionKey key ) throws IOException {
if( channel == null || sslEngine == null || exec == null )
throw new IllegalArgumentException( "parameter must not be null" );
this.socketChannel = channel;
this.sslEngine = sslEngine;
this.exec = exec;
readEngineResult = writeEngineResult = new SSLEngineResult( Status.BUFFER_UNDERFLOW, sslEngine.getHandshakeStatus(), 0, 0 ); // init to prevent NPEs
tasks = new ArrayList<Future<?>>( 3 );
if( key != null ) {
key.interestOps( key.interestOps() | SelectionKey.OP_WRITE );
this.selectionKey = key;
}
createBuffers( sslEngine.getSession() );
// kick off handshake
socketChannel.write( wrap( emptybuffer ) );// initializes res
processHandshake();
}
/**
 * Blocks until the given future completes, preserving the thread's interrupt status.
 * An {@link ExecutionException} is rethrown as an unchecked RuntimeException.
 **/
private void consumeFutureUninterruptible( Future<?> f ) {
try {
boolean interrupted = false;
while ( true ) {
try {
f.get();
break;
} catch ( InterruptedException e ) {
interrupted = true;
}
}
if( interrupted )
Thread.currentThread().interrupt();
} catch ( ExecutionException e ) {
throw new RuntimeException( e );
}
}
/**
 * This method will do whatever necessary to process the sslengine handshake.
 * Thats why it's called both from the {@link #read(ByteBuffer)} and {@link #write(ByteBuffer)}
 **/
private synchronized void processHandshake() throws IOException {
if( sslEngine.getHandshakeStatus() == HandshakeStatus.NOT_HANDSHAKING )
return; // since this may be called either from a reading or a writing thread and because this method is synchronized it is necessary to double check if we are still handshaking.
if( !tasks.isEmpty() ) {
// reap finished delegated tasks; if any is still pending we either wait for it
// (blocking mode) or bail out and retry on a later call (non-blocking mode)
Iterator<Future<?>> it = tasks.iterator();
while ( it.hasNext() ) {
Future<?> f = it.next();
if( f.isDone() ) {
it.remove();
} else {
if( isBlocking() )
consumeFutureUninterruptible( f );
return;
}
}
}
if( sslEngine.getHandshakeStatus() == SSLEngineResult.HandshakeStatus.NEED_UNWRAP ) {
if( !isBlocking() || readEngineResult.getStatus() == Status.BUFFER_UNDERFLOW ) {
inCrypt.compact();
int read = socketChannel.read( inCrypt );
if( read == -1 ) {
throw new IOException( "connection closed unexpectedly by peer" );
}
inCrypt.flip();
}
inData.compact();
unwrap();
if( readEngineResult.getHandshakeStatus() == HandshakeStatus.FINISHED ) {
createBuffers( sslEngine.getSession() );
return;
}
}
consumeDelegatedTasks();
if( tasks.isEmpty() || sslEngine.getHandshakeStatus() == SSLEngineResult.HandshakeStatus.NEED_WRAP ) {
socketChannel.write( wrap( emptybuffer ) );
if( writeEngineResult.getHandshakeStatus() == HandshakeStatus.FINISHED ) {
createBuffers( sslEngine.getSession() );
return;
}
}
assert ( sslEngine.getHandshakeStatus() != HandshakeStatus.NOT_HANDSHAKING );// this function could only leave NOT_HANDSHAKING after createBuffers was called unless #190 occurs which means that nio wrap/unwrap never return HandshakeStatus.FINISHED
bufferallocations = 1; // look at variable declaration why this line exists and #190. Without this line buffers would not be be recreated when #190 AND a rehandshake occur.
}
/**
 * Encrypts the contents of {@code b} into {@link #outCrypt} and returns
 * {@link #outCrypt} flipped, ready to be written to the channel.
 * Also updates {@link #writeEngineResult}.
 **/
private synchronized ByteBuffer wrap( ByteBuffer b ) throws SSLException {
outCrypt.compact();
writeEngineResult = sslEngine.wrap( b, outCrypt );
outCrypt.flip();
return outCrypt;
}
/**
 * performs the unwrap operation by unwrapping from {@link #inCrypt} to {@link #inData}
 **/
private synchronized ByteBuffer unwrap() throws SSLException {
int rem;
// keep unwrapping while the engine makes progress (output grows) or still needs unwrap input
do {
rem = inData.remaining();
readEngineResult = sslEngine.unwrap( inCrypt, inData );
} while ( readEngineResult.getStatus() == SSLEngineResult.Status.OK && ( rem != inData.remaining() || sslEngine.getHandshakeStatus() == HandshakeStatus.NEED_UNWRAP ) );
inData.flip();
return inData;
}
/**
 * Submits every delegated task the engine currently exposes to {@link #exec},
 * remembering the futures in {@link #tasks} for later reaping.
 **/
protected void consumeDelegatedTasks() {
Runnable task;
while ( ( task = sslEngine.getDelegatedTask() ) != null ) {
tasks.add( exec.submit( task ) );
// task.run();
}
}
/**
 * (Re)allocates {@link #inData}, {@link #inCrypt} and {@link #outCrypt} according to the
 * session's packet/application buffer sizes, reusing buffers whose capacity already matches.
 * All buffers are left empty and in "read" mode (position 0, limit 0).
 **/
protected void createBuffers( SSLSession session ) {
int netBufferMax = session.getPacketBufferSize();
int appBufferMax = Math.max(session.getApplicationBufferSize(), netBufferMax);
if( inData == null ) {
inData = ByteBuffer.allocate( appBufferMax );
outCrypt = ByteBuffer.allocate( netBufferMax );
inCrypt = ByteBuffer.allocate( netBufferMax );
} else {
if( inData.capacity() != appBufferMax )
inData = ByteBuffer.allocate( appBufferMax );
if( outCrypt.capacity() != netBufferMax )
outCrypt = ByteBuffer.allocate( netBufferMax );
if( inCrypt.capacity() != netBufferMax )
inCrypt = ByteBuffer.allocate( netBufferMax );
}
inData.rewind();
inData.flip();
inCrypt.rewind();
inCrypt.flip();
outCrypt.rewind();
outCrypt.flip();
bufferallocations++;
}
/**
 * Encrypts and writes {@code src} to the underlying channel. While the handshake is
 * still in progress no payload is written and 0 is returned.
 *
 * @return the number of encrypted bytes written to the channel
 * @throws EOFException if the engine reports the connection as closed
 **/
public int write( ByteBuffer src ) throws IOException {
if( !isHandShakeComplete() ) {
processHandshake();
return 0;
}
// assert ( bufferallocations > 1 ); //see #190
//if( bufferallocations <= 1 ) {
//	createBuffers( sslEngine.getSession() );
//}
int num = socketChannel.write( wrap( src ) );
if (writeEngineResult.getStatus() == SSLEngineResult.Status.CLOSED) {
throw new EOFException("Connection is closed");
}
return num;
}
/**
 * Blocks when in blocking mode until at least one byte has been decoded.<br>
 * When not in blocking mode 0 may be returned.
 *
 * @return the number of bytes read.
 **/
public int read( ByteBuffer dst ) throws IOException {
if( !dst.hasRemaining() )
return 0;
if( !isHandShakeComplete() ) {
if( isBlocking() ) {
while ( !isHandShakeComplete() ) {
processHandshake();
}
} else {
processHandshake();
if( !isHandShakeComplete() ) {
return 0;
}
}
}
// assert ( bufferallocations > 1 ); //see #190
//if( bufferallocations <= 1 ) {
//	createBuffers( sslEngine.getSession() );
//}
/* 1. When "dst" is smaller than "inData" readRemaining will fill "dst" with data decoded in a previous read call.
 * 2. When "inCrypt" contains more data than "inData" has remaining space, unwrap has to be called on more time(readRemaining)
 */
int purged = readRemaining( dst );
if( purged != 0 )
return purged;
/* We only continue when we really need more data from the network.
 * Thats the case if inData is empty or inCrypt holds to less data than necessary for decryption
 */
assert ( inData.position() == 0 );
inData.clear();
if( !inCrypt.hasRemaining() )
inCrypt.clear();
else
inCrypt.compact();
if( isBlocking() || readEngineResult.getStatus() == Status.BUFFER_UNDERFLOW )
if( socketChannel.read( inCrypt ) == -1 ) {
return -1;
}
inCrypt.flip();
unwrap();
int transfered = transfereTo( inData, dst );
if( transfered == 0 && isBlocking() ) {
return read( dst ); // "transfered" may be 0 when not enough bytes were received or during rehandshaking
}
return transfered;
}
/**
 * {@link #read(ByteBuffer)} may not be to leave all buffers(inData, inCrypt)
 **/
private int readRemaining( ByteBuffer dst ) throws SSLException {
if( inData.hasRemaining() ) {
return transfereTo( inData, dst );
}
if( !inData.hasRemaining() )
inData.clear();
// test if some bytes left from last read (e.g. BUFFER_UNDERFLOW)
if( inCrypt.hasRemaining() ) {
unwrap();
int amount = transfereTo( inData, dst );
if (readEngineResult.getStatus() == SSLEngineResult.Status.CLOSED) {
return -1;
}
if( amount > 0 )
return amount;
}
return 0;
}
public boolean isConnected() {
return socketChannel.isConnected();
}
/**
 * Initiates an orderly SSL shutdown, closes the underlying channel and shuts down the executor.
 **/
public void close() throws IOException {
sslEngine.closeOutbound();
sslEngine.getSession().invalidate();
if( socketChannel.isOpen() )
socketChannel.write( wrap( emptybuffer ) );// FIXME what if not all bytes can be written
socketChannel.close();
exec.shutdownNow();
}
/**
 * @return true when the engine is no longer handshaking (FINISHED or NOT_HANDSHAKING)
 **/
private boolean isHandShakeComplete() {
HandshakeStatus status = sslEngine.getHandshakeStatus();
return status == SSLEngineResult.HandshakeStatus.FINISHED || status == SSLEngineResult.HandshakeStatus.NOT_HANDSHAKING;
}
public SelectableChannel configureBlocking( boolean b ) throws IOException {
return socketChannel.configureBlocking( b );
}
public boolean connect( SocketAddress remote ) throws IOException {
return socketChannel.connect( remote );
}
public boolean finishConnect() throws IOException {
return socketChannel.finishConnect();
}
public Socket socket() {
return socketChannel.socket();
}
public boolean isInboundDone() {
return sslEngine.isInboundDone();
}
@Override
public boolean isOpen() {
return socketChannel.isOpen();
}
@Override
public boolean isNeedWrite() {
return outCrypt.hasRemaining() || !isHandShakeComplete(); // FIXME this condition can cause high cpu load during handshaking when network is slow
}
@Override
public void writeMore() throws IOException {
write( outCrypt );
}
@Override
public boolean isNeedRead() {
return inData.hasRemaining() || ( inCrypt.hasRemaining() && readEngineResult.getStatus() != Status.BUFFER_UNDERFLOW && readEngineResult.getStatus() != Status.CLOSED );
}
@Override
public int readMore( ByteBuffer dst ) throws SSLException {
return readRemaining( dst );
}
/**
 * Copies as many bytes as fit from {@code from} into {@code to}.
 *
 * @return the number of bytes actually transferred
 **/
private int transfereTo( ByteBuffer from, ByteBuffer to ) {
int fremain = from.remaining();
int toremain = to.remaining();
if( fremain > toremain ) {
// FIXME there should be a more efficient transfer method
int limit = Math.min( fremain, toremain );
for( int i = 0 ; i < limit ; i++ ) {
to.put( from.get() );
}
return limit;
} else {
to.put( from );
return fremain;
}
}
@Override
public boolean isBlocking() {
return socketChannel.isBlocking();
}
}
| |
/*
* Copyright (C) 2015 Open Reply
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.reply.androidlab.ui.treenav.tree;
import java.util.ArrayList;
import java.util.List;
/**
* This is a general purpose node free to have any number of children.
*
* @author Diego Palomar
* @version 1.0.0
*
* @param <T> Type of the data stored into the node.
*/
public class TreeNode<T>
{
    /**
     * Data stored in the node.
     */
    public T mData;
    /**
     * Contains the list of children
     */
    public List<TreeNode<T>> mChildrenList;
    /**
     * Parent of the node.
     */
    private TreeNode<T> mParent;
    /**
     * Depth level of the node into the tree. For this value to be assigned should
     * first calculated the level traversal of the tree to which it belongs.
     */
    private int mDepthLevel;

    /**
     * Simple constructor used to create a node.
     */
    public TreeNode()
    {
        super();
        mChildrenList = new ArrayList<TreeNode<T>>();
    }

    /**
     * Simple constructor used to create a node.
     *
     * @param data The data to be stored in the node.
     */
    public TreeNode(T data)
    {
        this();
        setData(data);
    }

    public TreeNode<T> getParent()
    {
        return mParent;
    }

    public void setParent(TreeNode<T> parent)
    {
        mParent = parent;
    }

    public int getNumberOfChildren()
    {
        return mChildrenList.size();
    }

    public boolean hasChildren()
    {
        return !mChildrenList.isEmpty();
    }

    /** Returns the internal children list (not a copy). */
    public List<TreeNode<T>> getChildren()
    {
        return mChildrenList;
    }

    public int getChildIndex(TreeNode<T> child)
    {
        return getChildren().indexOf(child);
    }

    /**
     * Replaces the whole children list, re-parenting every child to this node and
     * updating depth levels of the children and their subtrees.
     *
     * @param children The new children of this node.
     */
    public void setChildren(List<TreeNode<T>> children)
    {
        for (TreeNode<T> child : children)
        {
            adopt(child);
        }
        mChildrenList = children;
    }

    /**
     * Appends a child to this node, setting its parent and depth level
     * (and the depth levels of its whole subtree).
     */
    public void addChild(TreeNode<T> child)
    {
        adopt(child);
        mChildrenList.add(child);
    }

    /** Sets parent/depth of the given child and propagates depth to its subtree. */
    private void adopt(TreeNode<T> child)
    {
        child.setParent(this);
        child.setDepthLevel(mDepthLevel + 1);
        if (child.hasChildren())
        {
            updateChildrenDepthLevel(child);
        }
    }

    /** Recursively recomputes the depth level of every descendant of parentNode. */
    private void updateChildrenDepthLevel(TreeNode<T> parentNode)
    {
        for (TreeNode<T> child : parentNode.getChildren())
        {
            child.setDepthLevel(parentNode.getDepthLevel() + 1);
            if (child.hasChildren())
            {
                updateChildrenDepthLevel(child);
            }
        }
    }

    public void addAllChildren(List<? extends TreeNode<T>> children)
    {
        for (TreeNode<T> child : children)
        {
            addChild(child);
        }
    }

    /**
     * Inserts a child at the given position. Unlike the previous implementation this
     * also propagates depth levels into the child's subtree, matching addChild().
     *
     * @throws IndexOutOfBoundsException if the index is out of range.
     */
    public void addChildAt(int index, TreeNode<T> child) throws IndexOutOfBoundsException
    {
        adopt(child);
        mChildrenList.add(index, child);
    }

    public void removeChildren()
    {
        mChildrenList.clear();
    }

    public void removeChildAt(int index) throws IndexOutOfBoundsException
    {
        mChildrenList.remove(index);
    }

    /**
     * Removes all children from the specified index.
     *
     * @param index Index from which the child nodes has to be removed.
     * @throws IndexOutOfBoundsException if index is negative or >= number of children.
     */
    public void removeChildrenFrom(int index)
    {
        int numChildren = mChildrenList.size();
        if (index < 0 || index >= numChildren)
        {
            throw new IndexOutOfBoundsException("Trying to remove children from index " +
                    index + ". Current number of children " + numChildren);
        }
        // subList is a view on the backing list, so clearing it removes the tail in place.
        mChildrenList.subList(index, numChildren).clear();
    }

    public TreeNode<T> getChildAt(int index) throws IndexOutOfBoundsException
    {
        return mChildrenList.get(index);
    }

    public T getData()
    {
        return mData;
    }

    public void setData(T data)
    {
        mData = data;
    }

    public int getDepthLevel()
    {
        return mDepthLevel;
    }

    public void setDepthLevel(int level)
    {
        mDepthLevel = level;
    }

    /** Returns "<data> (L<depth>)"; null data is rendered as "null" instead of throwing. */
    public String toString()
    {
        return String.valueOf(mData) + " (L" + mDepthLevel + ")";
    }

    /**
     * Value equality on the stored data (null-safe).
     * NOTE(review): this OVERLOADS rather than overrides Object.equals, so hash-based
     * collections still use identity semantics — kept as-is for backward compatibility.
     */
    public boolean equals(TreeNode<T> node)
    {
        return mData == null ? node.getData() == null : mData.equals(node.getData());
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.maven.execution;
import com.intellij.debugger.impl.RemoteConnectionBuilder;
import com.intellij.debugger.settings.DebuggerSettings;
import com.intellij.execution.*;
import com.intellij.execution.application.ApplicationConfiguration;
import com.intellij.execution.configurations.*;
import com.intellij.execution.executors.DefaultRunExecutor;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.runners.ExecutionEnvironmentBuilder;
import com.intellij.execution.runners.ProgramRunner;
import com.intellij.execution.util.ExecutionErrorDialog;
import com.intellij.execution.util.JavaParametersUtil;
import com.intellij.execution.util.ProgramParametersUtil;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkTypeId;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.task.ExecuteRunConfigurationTask;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.maven.execution.build.MavenExecutionEnvironmentProvider;
import org.jetbrains.idea.maven.project.MavenProject;
import org.jetbrains.idea.maven.project.MavenProjectsManager;
import java.io.File;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import static com.intellij.openapi.util.io.FileUtil.toSystemDependentName;
import static com.intellij.openapi.util.text.StringUtil.isEmpty;
import static com.intellij.openapi.util.text.StringUtil.isNotEmpty;
import static com.intellij.util.containers.ContainerUtil.indexOf;
/**
* @author ibessonov
*/
public class MavenApplicationConfigurationExecutionEnvironmentProvider implements MavenExecutionEnvironmentProvider {
@Override
public boolean isApplicable(@NotNull ExecuteRunConfigurationTask task) {
    // This provider only knows how to launch plain application run configurations.
    RunProfile profile = task.getRunProfile();
    return profile instanceof ApplicationConfiguration;
}
@Override
@Nullable
// Translates an ApplicationConfiguration into a Maven "exec:exec" run configuration so the
// class is launched through Maven instead of directly. Returns null when the task cannot be
// mapped (no main class, no module, or the module is not a Maven project).
public ExecutionEnvironment createExecutionEnvironment(@NotNull Project project, @NotNull ExecuteRunConfigurationTask task,
@Nullable Executor executor) {
ApplicationConfiguration applicationConfiguration = (ApplicationConfiguration)task.getRunProfile();
ConfigurationFactory configurationFactory = new MavenExecConfigurationFactory(applicationConfiguration);
String mainClassName = applicationConfiguration.getMainClassName();
if (isEmpty(mainClassName)) {
return null;
}
Module module = applicationConfiguration.getConfigurationModule().getModule();
if (module == null) {
return null;
}
MavenProject mavenProject = MavenProjectsManager.getInstance(project).findProject(module);
if (mavenProject == null) {
return null;
}
//todo: Should be merged with MavenRunConfiguration
RunnerAndConfigurationSettings runnerAndConfigurationSettings =
RunManager.getInstance(project).createConfiguration(applicationConfiguration.getName(), configurationFactory);
MyExecRunConfiguration mavenRunConfiguration = (MyExecRunConfiguration)runnerAndConfigurationSettings.getConfiguration();
mavenRunConfiguration.setBeforeRunTasks(applicationConfiguration.getBeforeRunTasks());
copyLogParameters(applicationConfiguration, mavenRunConfiguration);
// Run Maven from the module's pom so exec:exec resolves the right project.
MavenRunnerParameters runnerParameters = mavenRunConfiguration.getRunnerParameters();
runnerParameters.setWorkingDirPath(mavenProject.getDirectory());
runnerParameters.setPomFileName(mavenProject.getFile().getName());
JavaParameters javaParameters = new JavaParameters();
JavaParametersUtil.configureConfiguration(javaParameters, applicationConfiguration);
// Build the argument line passed to the forked JVM via -Dexec.args:
// VM options, then classpath placeholder, then main class, then program args.
ParametersList execArgs = new ParametersList();
execArgs.addAll(javaParameters.getVMParametersList().getList());
execArgs.add("-classpath");
execArgs.add("%classpath");
execArgs.add(mainClassName);
execArgs.addParametersString(applicationConfiguration.getProgramParameters());
String execExecutable = getJdkExecPath(applicationConfiguration);
if (execExecutable == null) {
throw new RuntimeException(ExecutionBundle.message("run.configuration.cannot.find.vm.executable"));
}
String workingDirectory = ProgramParametersUtil.getWorkingDir(applicationConfiguration, project, module);
List<String> goals = runnerParameters.getGoals();
if (isNotEmpty(workingDirectory)) {
goals.add("-Dexec.workingdir=" + workingDirectory);
}
goals.add("-Dexec.args=" + execArgs.getParametersString());
goals.add("-Dexec.executable=" + toSystemDependentName(execExecutable));
goals.add("exec:exec");
if (executor == null) {
executor = DefaultRunExecutor.getRunExecutorInstance();
}
return new ExecutionEnvironmentBuilder(project, executor)
.runProfile(mavenRunConfiguration)
.runnerAndSettings(ProgramRunner.getRunner(executor.getId(), runnerAndConfigurationSettings.getConfiguration()), runnerAndConfigurationSettings)
.build();
}
/**
 * Resolves the path of the JVM executable for the configuration's (possibly alternative) JRE.
 * When the SDK cannot be used, an error dialog is shown and null is returned; a missing or
 * non-Java SDK is reported via an unchecked exception, as before.
 */
private static String getJdkExecPath(@NotNull ApplicationConfiguration applicationConfiguration) {
    Project project = applicationConfiguration.getProject();
    try {
        Sdk jdk = JavaParametersUtil.createProjectJdk(project, applicationConfiguration.getAlternativeJrePath());
        if (jdk == null) {
            throw new RuntimeException(ExecutionBundle.message("run.configuration.error.no.jdk.specified"));
        }
        SdkTypeId sdkType = jdk.getSdkType();
        if (!(sdkType instanceof JavaSdkType)) {
            throw new RuntimeException(ExecutionBundle.message("run.configuration.error.no.jdk.specified"));
        }
        return ((JavaSdkType)sdkType).getVMExecutablePath(jdk);
    }
    catch (CantRunException e) {
        ExecutionErrorDialog.show(e, RunnerBundle.message("dialog.title.cannot.use.specified.jre"), project);
        return null;
    }
}
// Copies all log-related settings (predefined log files, log file patterns and console/output
// options) from the source application configuration onto the generated Maven run configuration,
// so the user sees the same log tabs and output behavior when running through Maven.
private static void copyLogParameters(ApplicationConfiguration applicationConfiguration, MavenRunConfiguration mavenRunConfiguration) {
for (PredefinedLogFile file : applicationConfiguration.getPredefinedLogFiles()) {
mavenRunConfiguration.addPredefinedLogFile(file);
}
for (LogFileOptions op : applicationConfiguration.getLogFiles()) {
mavenRunConfiguration.addLogFile(op.getPathPattern(), op.getName(), op.isEnabled(), op.isSkipContent(), op.isShowAll());
}
mavenRunConfiguration.setFileOutputPath(applicationConfiguration.getOutputFilePath());
mavenRunConfiguration.setSaveOutputToFile(applicationConfiguration.isSaveOutputToFile());
mavenRunConfiguration.setShowConsoleOnStdOut(applicationConfiguration.isShowConsoleOnStdOut());
mavenRunConfiguration.setShowConsoleOnStdErr(applicationConfiguration.isShowConsoleOnStdErr());
}
/**
 * Returns a copy of the given VM parameters where the first parameter containing
 * {@code "suspend=n,server=y"} is removed from its position and re-appended at the
 * end with {@code suspend=y}, so the forked VM waits for the debugger to attach.
 * Only the first matching parameter is patched; the input list is not modified.
 *
 * @param vmParameters the VM parameters to patch
 * @return a new list with the suspend flag flipped on the first matching parameter
 */
public static List<String> patchVmParameters(ParametersList vmParameters) {
  List<String> patched = new ArrayList<>(vmParameters.getList());
  for (int i = 0; i < patched.size(); i++) {
    String parameter = patched.get(i);
    if (parameter.contains("suspend=n,server=y")) {
      // Remove the original and append the patched variant at the end,
      // matching the iterator-remove-then-add behaviour of the previous form.
      patched.remove(i);
      patched.add(StringUtil.replace(parameter, "suspend=n,server=y", "suspend=y,server=y"));
      break;
    }
  }
  return patched;
}
/**
 * A {@link MavenRunConfiguration} that remembers the {@link ApplicationConfiguration} it
 * was derived from, so debugging can reuse that configuration's JRE settings when the
 * application is launched through the Maven exec plugin.
 */
public static class MyExecRunConfiguration extends MavenRunConfiguration {
// Source configuration; consulted for the alternative JRE path when debugging.
private final ApplicationConfiguration myApplicationConfiguration;
MyExecRunConfiguration(Project project, ConfigurationFactory configurationFactory,
ApplicationConfiguration applicationConfiguration) {
super(project, configurationFactory, applicationConfiguration.getName());
myApplicationConfiguration = applicationConfiguration;
}
/**
 * Creates a connection creator that builds a debugger {@link RemoteConnection} and splices
 * the resulting debug VM flags into the {@code -Dexec.args=} program parameter, so the VM
 * forked by the exec plugin starts with the debug agent attached.
 */
@NotNull
@Override
public RemoteConnectionCreator createRemoteConnectionCreator(JavaParameters javaParameters) {
return new RemoteConnectionCreator() {
@Override
public RemoteConnection createRemoteConnection(ExecutionEnvironment environment) {
try {
// Build throwaway JavaParameters against the configuration's JDK so the
// RemoteConnectionBuilder emits agent flags suited to that VM.
JavaParameters parameters = new JavaParameters();
parameters.setJdk(JavaParametersUtil.createProjectJdk(getProject(), myApplicationConfiguration.getAlternativeJrePath()));
RemoteConnection connection = new RemoteConnectionBuilder(false, DebuggerSettings.getInstance().getTransport(), "")
.asyncAgent(true)
.project(environment.getProject())
.memoryAgent(DebuggerSettings.getInstance().ENABLE_MEMORY_AGENT)
.create(parameters);
// Locate the exec plugin's argument-passing parameter ("-Dexec.args=...").
ParametersList programParametersList = javaParameters.getProgramParametersList();
String execArgsPrefix = "-Dexec.args=";
int execArgsIndex = indexOf(programParametersList.getList(), s -> s.startsWith(execArgsPrefix));
String execArgsStr = programParametersList.get(execArgsIndex);
// Prepend the (suspend=y-patched) debug VM flags ahead of the original exec args.
ParametersList execArgs = new ParametersList();
execArgs.addAll(patchVmParameters(parameters.getVMParametersList()));
execArgs.addParametersString(execArgsStr.substring(execArgsPrefix.length()));
// Extend the %classpath placeholder with the agent jars the builder added.
String classPath = toSystemDependentName(parameters.getClassPath().getPathsString());
execArgs.replaceOrPrepend("%classpath", "%classpath" + File.pathSeparator + classPath);
programParametersList.set(execArgsIndex, execArgsPrefix + execArgs.getParametersString());
return connection;
}
catch (ExecutionException e) {
throw new RuntimeException("Cannot create debug connection", e);
}
}
@Override
public boolean isPollConnection() {
// Keep polling until the forked (suspended) VM becomes reachable.
return true;
}
};
}
}
/**
 * Factory producing {@link MyExecRunConfiguration} instances, each bound to the
 * {@link ApplicationConfiguration} this factory was created for.
 */
private static class MavenExecConfigurationFactory extends ConfigurationFactory {
// Application configuration every produced run configuration is derived from.
private final ApplicationConfiguration myApplicationConfiguration;
protected MavenExecConfigurationFactory(ApplicationConfiguration applicationConfiguration) {
super(MavenRunConfigurationType.getInstance());
myApplicationConfiguration = applicationConfiguration;
}
@Override
public @NotNull String getId() {
return "Maven";
}
@NotNull
@Override
public RunConfiguration createTemplateConfiguration(@NotNull Project project) {
return new MyExecRunConfiguration(project, this, myApplicationConfiguration);
}
@NotNull
@Override
public RunConfiguration createConfiguration(@Nullable String name, @NotNull RunConfiguration template) {
// The given name and template settings are intentionally ignored; the bound
// application configuration supplies the name and settings instead.
return new MyExecRunConfiguration(template.getProject(), this, myApplicationConfiguration);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.utils;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.util.Arrays;
import org.apache.cassandra.io.util.FileDataInput;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.commons.lang.ArrayUtils;
/**
* Utility methods to make ByteBuffers less painful
* The following should illustrate the different ways byte buffers can be used
*
 * public void testArrayOffset()
* {
*
* byte[] b = "test_slice_array".getBytes();
* ByteBuffer bb = ByteBuffer.allocate(1024);
*
* assert bb.position() == 0;
* assert bb.limit() == 1024;
* assert bb.capacity() == 1024;
*
* bb.put(b);
*
* assert bb.position() == b.length;
* assert bb.remaining() == bb.limit() - bb.position();
*
* ByteBuffer bb2 = bb.slice();
*
* assert bb2.position() == 0;
*
* //slice should begin at other buffers current position
* assert bb2.arrayOffset() == bb.position();
*
* //to match the position in the underlying array one needs to
* //track arrayOffset
* assert bb2.limit()+bb2.arrayOffset() == bb.limit();
*
*
* assert bb2.remaining() == bb.remaining();
*
* }
*
* }
*
*/
public class ByteBufferUtil
{
public static final ByteBuffer EMPTY_BYTE_BUFFER = ByteBuffer.wrap(ArrayUtils.EMPTY_BYTE_ARRAY);
public static final byte[] EMPTY_BYTES = new byte[0];
private static final Charset UTF_8 = Charset.forName("UTF-8");
public static int compareUnsigned(ByteBuffer o1, ByteBuffer o2)
{
assert o1 != null;
assert o2 != null;
int minLength = Math.min(o1.remaining(), o2.remaining());
for (int x = 0, i = o1.position(), j = o2.position(); x < minLength; x++, i++, j++)
{
if (o1.get(i) == o2.get(j))
continue;
// compare non-equal bytes as unsigned
return (o1.get(i) & 0xFF) < (o2.get(j) & 0xFF) ? -1 : 1;
}
return (o1.remaining() == o2.remaining()) ? 0 : ((o1.remaining() < o2.remaining()) ? -1 : 1);
}
public static int compare(byte[] o1, ByteBuffer o2)
{
return compareUnsigned(ByteBuffer.wrap(o1), o2);
}
public static int compare(ByteBuffer o1, byte[] o2)
{
return compareUnsigned(o1, ByteBuffer.wrap(o2));
}
/**
* Decode a String representation.
* This method assumes that the encoding charset is UTF_8.
*
* @param buffer a byte buffer holding the string representation
* @return the decoded string
*/
public static String string(ByteBuffer buffer) throws CharacterCodingException
{
return string(buffer, UTF_8);
}
/**
* Decode a String representation.
* This method assumes that the encoding charset is UTF_8.
*
* @param buffer a byte buffer holding the string representation
* @param position the starting position in {@code buffer} to start decoding from
* @param length the number of bytes from {@code buffer} to use
* @return the decoded string
*/
public static String string(ByteBuffer buffer, int position, int length) throws CharacterCodingException
{
return string(buffer, position, length, UTF_8);
}
/**
* Decode a String representation.
*
* @param buffer a byte buffer holding the string representation
* @param position the starting position in {@code buffer} to start decoding from
* @param length the number of bytes from {@code buffer} to use
* @param charset the String encoding charset
* @return the decoded string
*/
public static String string(ByteBuffer buffer, int position, int length, Charset charset) throws CharacterCodingException
{
ByteBuffer copy = buffer.duplicate();
copy.position(position);
copy.limit(copy.position() + length);
return string(copy, charset);
}
/**
* Decode a String representation.
*
* @param buffer a byte buffer holding the string representation
* @param charset the String encoding charset
* @return the decoded string
*/
public static String string(ByteBuffer buffer, Charset charset) throws CharacterCodingException
{
return charset.newDecoder().decode(buffer.duplicate()).toString();
}
/**
* You should almost never use this. Instead, use the write* methods to avoid copies.
*/
public static byte[] getArray(ByteBuffer buffer)
{
int length = buffer.remaining();
if (buffer.hasArray())
{
int start = buffer.position();
if (buffer.arrayOffset() == 0 && start == 0 && length == buffer.array().length)
return buffer.array();
else
return Arrays.copyOfRange(buffer.array(), start + buffer.arrayOffset(), start + length + buffer.arrayOffset());
}
// else, DirectByteBuffer.get() is the fastest route
byte[] bytes = new byte[length];
buffer.duplicate().get(bytes);
return bytes;
}
/**
* ByteBuffer adaptation of org.apache.commons.lang.ArrayUtils.lastIndexOf method
*
* @param buffer the array to traverse for looking for the object, may be <code>null</code>
* @param valueToFind the value to find
* @param startIndex the start index (i.e. BB position) to travers backwards from
* @return the last index (i.e. BB position) of the value within the array
* [between buffer.position() and buffer.limit()]; <code>-1</code> if not found.
*/
public static int lastIndexOf(ByteBuffer buffer, byte valueToFind, int startIndex)
{
assert buffer != null;
if (startIndex < buffer.position())
{
return -1;
}
else if (startIndex >= buffer.limit())
{
startIndex = buffer.limit() - 1;
}
for (int i = startIndex; i >= buffer.position(); i--)
{
if (valueToFind == buffer.get(i))
return i;
}
return -1;
}
/**
* Encode a String in a ByteBuffer using UTF_8.
*
* @param s the string to encode
* @return the encoded string
*/
public static ByteBuffer bytes(String s)
{
return ByteBuffer.wrap(s.getBytes(UTF_8));
}
/**
* Encode a String in a ByteBuffer using the provided charset.
*
* @param s the string to encode
* @param charset the String encoding charset to use
* @return the encoded string
*/
public static ByteBuffer bytes(String s, Charset charset)
{
return ByteBuffer.wrap(s.getBytes(charset));
}
/**
* @return a new copy of the data in @param buffer
* USUALLY YOU SHOULD USE ByteBuffer.duplicate() INSTEAD, which creates a new Buffer
* (so you can mutate its position without affecting the original) without copying the underlying array.
*/
public static ByteBuffer clone(ByteBuffer buffer)
{
assert buffer != null;
if (buffer.remaining() == 0)
return EMPTY_BYTE_BUFFER;
ByteBuffer clone = ByteBuffer.allocate(buffer.remaining());
if (buffer.hasArray())
{
System.arraycopy(buffer.array(), buffer.arrayOffset() + buffer.position(), clone.array(), 0, buffer.remaining());
}
else
{
clone.put(buffer.duplicate());
clone.flip();
}
return clone;
}
public static void arrayCopy(ByteBuffer buffer, int position, byte[] bytes, int offset, int length)
{
if (buffer.hasArray())
System.arraycopy(buffer.array(), buffer.arrayOffset() + position, bytes, offset, length);
else
((ByteBuffer) buffer.duplicate().position(position)).get(bytes, offset, length);
}
/**
* Transfer bytes from one ByteBuffer to another.
* This function acts as System.arrayCopy() but for ByteBuffers.
*
* @param src the source ByteBuffer
* @param srcPos starting position in the source ByteBuffer
* @param dst the destination ByteBuffer
* @param dstPos starting position in the destination ByteBuffer
* @param length the number of bytes to copy
*/
public static void arrayCopy(ByteBuffer src, int srcPos, ByteBuffer dst, int dstPos, int length)
{
if (src.hasArray() && dst.hasArray())
{
System.arraycopy(src.array(),
src.arrayOffset() + srcPos,
dst.array(),
dst.arrayOffset() + dstPos,
length);
}
else
{
if (src.limit() - srcPos < length || dst.limit() - dstPos < length)
throw new IndexOutOfBoundsException();
for (int i = 0; i < length; i++)
{
dst.put(dstPos++, src.get(srcPos++));
}
}
}
public static void writeWithLength(ByteBuffer bytes, DataOutput out) throws IOException
{
out.writeInt(bytes.remaining());
write(bytes, out); // writing data bytes to output source
}
public static void write(ByteBuffer buffer, DataOutput out) throws IOException
{
if (buffer.hasArray())
{
out.write(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining());
}
else
{
for (int i = buffer.position(); i < buffer.limit(); i++)
{
out.writeByte(buffer.get(i));
}
}
}
/* @return An unsigned short in an integer. */
private static int readShortLength(DataInput in) throws IOException
{
int length = (in.readByte() & 0xFF) << 8;
return length | (in.readByte() & 0xFF);
}
/**
* Convert a byte buffer to an integer.
* Does not change the byte buffer position.
*
* @param bytes byte buffer to convert to integer
* @return int representation of the byte buffer
*/
public static int toInt(ByteBuffer bytes)
{
return bytes.getInt(bytes.position());
}
public static long toLong(ByteBuffer bytes)
{
return bytes.getLong(bytes.position());
}
public static float toFloat(ByteBuffer bytes)
{
return bytes.getFloat(bytes.position());
}
public static double toDouble(ByteBuffer bytes)
{
return bytes.getDouble(bytes.position());
}
public static ByteBuffer bytes(int i)
{
return ByteBuffer.allocate(4).putInt(0, i);
}
public static ByteBuffer bytes(long n)
{
return ByteBuffer.allocate(8).putLong(0, n);
}
public static ByteBuffer bytes(float f)
{
return ByteBuffer.allocate(4).putFloat(0, f);
}
public static ByteBuffer bytes(double d)
{
return ByteBuffer.allocate(8).putDouble(0, d);
}
public static InputStream inputStream(ByteBuffer bytes)
{
final ByteBuffer copy = bytes.duplicate();
return new InputStream()
{
public int read() throws IOException
{
if (!copy.hasRemaining())
return -1;
return copy.get() & 0xFF;
}
@Override
public int read(byte[] bytes, int off, int len) throws IOException
{
if (!copy.hasRemaining())
return -1;
len = Math.min(len, copy.remaining());
copy.get(bytes, off, len);
return len;
}
@Override
public int available() throws IOException
{
return copy.remaining();
}
};
}
public static ByteBuffer hexToBytes(String str)
{
return ByteBuffer.wrap(FBUtilities.hexToBytes(str));
}
/**
* Compare two ByteBuffer at specified offsets for length.
* Compares the non equal bytes as unsigned.
* @param bytes1 First byte buffer to compare.
* @param offset1 Position to start the comparison at in the first array.
* @param bytes2 Second byte buffer to compare.
* @param offset2 Position to start the comparison at in the second array.
* @param length How many bytes to compare?
* @return -1 if byte1 is less than byte2, 1 if byte2 is less than byte1 or 0 if equal.
*/
public static int compareSubArrays(ByteBuffer bytes1, int offset1, ByteBuffer bytes2, int offset2, int length)
{
if ( null == bytes1 )
{
if ( null == bytes2) return 0;
else return -1;
}
if (null == bytes2 ) return 1;
assert bytes1.limit() >= offset1 + length : "The first byte array isn't long enough for the specified offset and length.";
assert bytes2.limit() >= offset2 + length : "The second byte array isn't long enough for the specified offset and length.";
for ( int i = 0; i < length; i++ )
{
byte byte1 = bytes1.get(offset1 + i);
byte byte2 = bytes2.get(offset2 + i);
if ( byte1 == byte2 )
continue;
// compare non-equal bytes as unsigned
return (byte1 & 0xFF) < (byte2 & 0xFF) ? -1 : 1;
}
return 0;
}
}
| |
package io.ray.test;
import com.google.common.base.Preconditions;
import io.ray.api.ActorHandle;
import io.ray.api.ObjectRef;
import io.ray.api.PyActorHandle;
import io.ray.api.Ray;
import io.ray.api.function.PyActorClass;
import io.ray.api.function.PyActorMethod;
import io.ray.api.function.PyFunction;
import io.ray.runtime.actor.NativeActorHandle;
import io.ray.runtime.exception.CrossLanguageException;
import io.ray.runtime.exception.RayException;
import io.ray.runtime.generated.Common.Language;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@Test(groups = {"cluster"})
public class CrossLanguageInvocationTest extends BaseTest {

  /** Name of the companion Python module copied into the temp dir by {@link #beforeClass()}. */
  private static final String PYTHON_MODULE = "test_cross_language_invocation";

  @BeforeClass
  public void beforeClass() {
    // Delete and re-create the temp dir.
    File tempDir =
        new File(System.getProperty("java.io.tmpdir") + File.separator + "ray_cross_language_test");
    FileUtils.deleteQuietly(tempDir);
    tempDir.mkdirs();
    tempDir.deleteOnExit();

    // Write the test Python file to the temp dir.
    InputStream in =
        CrossLanguageInvocationTest.class.getResourceAsStream("/" + PYTHON_MODULE + ".py");
    File pythonFile = new File(tempDir.getAbsolutePath() + File.separator + PYTHON_MODULE + ".py");
    try {
      // copyInputStreamToFile closes the input stream for us.
      FileUtils.copyInputStreamToFile(in, pythonFile);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }

    // Make both the Java classes and the Python module discoverable by workers.
    System.setProperty(
        "ray.job.code-search-path",
        System.getProperty("java.class.path") + File.pathSeparator + tempDir.getAbsolutePath());
  }

  /** Round-trips each supported value type through a Python echo function. */
  @Test
  public void testCallingPythonFunction() {
    Object[] inputs =
        new Object[] {
          true, // Boolean
          Byte.MAX_VALUE, // Byte
          Short.MAX_VALUE, // Short
          Integer.MAX_VALUE, // Integer
          Long.MAX_VALUE, // Long
          // BigInteger can support max value of 2^64-1, please refer to:
          // https://github.com/msgpack/msgpack/blob/master/spec.md#int-format-family
          // If BigInteger larger than 2^64-1, the value can only be transferred among Java workers.
          BigInteger.valueOf(Long.MAX_VALUE), // BigInteger
          "Hello World!", // String
          1.234f, // Float
          1.234, // Double
          "example binary".getBytes()
        }; // byte[]
    for (Object o : inputs) {
      ObjectRef res =
          Ray.task(PyFunction.of(PYTHON_MODULE, "py_return_input", o.getClass()), o).remote();
      Assert.assertEquals(res.get(), o);
    }
    // null
    {
      Object input = null;
      ObjectRef<Object> res =
          Ray.task(PyFunction.of(PYTHON_MODULE, "py_return_input", Object.class), input).remote();
      Object r = res.get();
      Assert.assertEquals(r, input);
    }
    // array
    {
      int[] input = new int[] {1, 2};
      ObjectRef<int[]> res =
          Ray.task(PyFunction.of(PYTHON_MODULE, "py_return_input", int[].class), input).remote();
      int[] r = res.get();
      Assert.assertEquals(r, input);
    }
    // array of Object
    {
      Object[] input =
          new Object[] {1, 2.3f, 4.56, "789", "10".getBytes(), null, true, new int[] {1, 2}};
      ObjectRef<Object[]> res =
          Ray.task(PyFunction.of(PYTHON_MODULE, "py_return_input", Object[].class), input).remote();
      Object[] r = res.get();
      // If we tell the value type is Object, then all numbers will be Number type.
      Assert.assertEquals(((Number) r[0]).intValue(), input[0]);
      Assert.assertEquals(((Number) r[1]).floatValue(), input[1]);
      Assert.assertEquals(((Number) r[2]).doubleValue(), input[2]);
      // String cast
      Assert.assertEquals((String) r[3], input[3]);
      // binary cast
      Assert.assertEquals((byte[]) r[4], input[4]);
      // null
      Assert.assertEquals(r[5], input[5]);
      // Boolean cast
      Assert.assertEquals((Boolean) r[6], input[6]);
      // array cast
      Object[] r7array = (Object[]) r[7];
      int[] input7array = (int[]) input[7];
      Assert.assertEquals(((Number) r7array[0]).intValue(), input7array[0]);
      Assert.assertEquals(((Number) r7array[1]).intValue(), input7array[1]);
    }
    // Unsupported types, all Java specific types, e.g. List / Map...
    {
      Assert.expectThrows(
          Exception.class,
          () -> {
            List<Integer> input = Arrays.asList(1, 2);
            ObjectRef<List<Integer>> res =
                Ray.task(
                        PyFunction.of(
                            PYTHON_MODULE,
                            "py_return_input",
                            (Class<List<Integer>>) input.getClass()),
                        input)
                    .remote();
            List<Integer> r = res.get();
            Assert.assertEquals(r, input);
          });
    }
  }

  @Test
  public void testPythonCallJavaFunction() {
    ObjectRef<String> res =
        Ray.task(PyFunction.of(PYTHON_MODULE, "py_func_call_java_function", String.class)).remote();
    Assert.assertEquals(res.get(), "success");
  }

  @Test
  public void testCallingPythonActor() {
    PyActorHandle actor =
        Ray.actor(PyActorClass.of(PYTHON_MODULE, "Counter"), "1".getBytes()).remote();
    ObjectRef<byte[]> res =
        actor.task(PyActorMethod.of("increase", byte[].class), "1".getBytes()).remote();
    Assert.assertEquals(res.get(), "2".getBytes());
  }

  @Test
  public void testPythonCallJavaActor() {
    ObjectRef<byte[]> res =
        Ray.task(
                PyFunction.of(PYTHON_MODULE, "py_func_call_java_actor", byte[].class),
                "1".getBytes())
            .remote();
    Assert.assertEquals(res.get(), "Counter1".getBytes());
  }

  @Test
  public void testPassActorHandleFromPythonToJava() {
    // Call a python function which creates a python actor
    // and pass the actor handle to callPythonActorHandle.
    ObjectRef<byte[]> res =
        Ray.task(PyFunction.of(PYTHON_MODULE, "py_func_pass_python_actor_handle", byte[].class))
            .remote();
    Assert.assertEquals(res.get(), "3".getBytes());
  }

  @Test
  public void testPassActorHandleFromJavaToPython() {
    // Create a java actor, and pass actor handle to python.
    ActorHandle<TestActor> javaActor = Ray.actor(TestActor::new, "1".getBytes()).remote();
    Preconditions.checkState(javaActor instanceof NativeActorHandle);
    ObjectRef<byte[]> res =
        Ray.task(
                PyFunction.of(PYTHON_MODULE, "py_func_call_java_actor_from_handle", byte[].class),
                javaActor)
            .remote();
    Assert.assertEquals(res.get(), "12".getBytes());
    // Create a python actor, and pass actor handle to python.
    PyActorHandle pyActor =
        Ray.actor(PyActorClass.of(PYTHON_MODULE, "Counter"), "1".getBytes()).remote();
    Preconditions.checkState(pyActor instanceof NativeActorHandle);
    res =
        Ray.task(
                PyFunction.of(PYTHON_MODULE, "py_func_call_python_actor_from_handle", byte[].class),
                pyActor)
            .remote();
    Assert.assertEquals(res.get(), "3".getBytes());
  }

  /** Checks that a RayException serializes to the protobuf form with the full stack trace. */
  @Test
  public void testExceptionSerialization() throws IOException {
    try {
      throw new RayException("Test Exception");
    } catch (RayException e) {
      String formattedException =
          org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace(e);
      io.ray.runtime.generated.Common.RayException exception =
          io.ray.runtime.generated.Common.RayException.parseFrom(e.toBytes());
      Assert.assertEquals(exception.getFormattedExceptionString(), formattedException);
    }
  }

  @Test
  public void testRaiseExceptionFromPython() {
    ObjectRef<Object> res =
        Ray.task(PyFunction.of(PYTHON_MODULE, "py_func_python_raise_exception", Object.class))
            .remote();
    try {
      res.get();
    } catch (RuntimeException ex) {
      // ex is a Python exception(py_func_python_raise_exception) with no cause.
      Assert.assertTrue(ex instanceof CrossLanguageException);
      CrossLanguageException e = (CrossLanguageException) ex;
      Assert.assertEquals(e.getLanguage(), Language.PYTHON);
      // ex.cause is null.
      Assert.assertNull(ex.getCause());
      Assert.assertTrue(
          ex.getMessage().contains("ZeroDivisionError: division by zero"), ex.getMessage());
      return;
    }
    Assert.fail();
  }

  @Test
  public void testThrowExceptionFromJava() {
    ObjectRef<Object> res =
        Ray.task(PyFunction.of(PYTHON_MODULE, "py_func_java_throw_exception", Object.class))
            .remote();
    try {
      res.get();
    } catch (RuntimeException ex) {
      final String message = ex.getMessage();
      Assert.assertTrue(message.contains("py_func_java_throw_exception"), message);
      Assert.assertTrue(
          message.contains("io.ray.test.CrossLanguageInvocationTest.throwException"), message);
      Assert.assertTrue(message.contains("java.lang.ArithmeticException: / by zero"), message);
      return;
    }
    Assert.fail();
  }

  @Test
  public void testRaiseExceptionFromNestPython() {
    ObjectRef<Object> res =
        Ray.task(PyFunction.of(PYTHON_MODULE, "py_func_nest_python_raise_exception", Object.class))
            .remote();
    try {
      res.get();
    } catch (RuntimeException ex) {
      final String message = ex.getMessage();
      Assert.assertTrue(message.contains("py_func_nest_python_raise_exception"), message);
      Assert.assertTrue(message.contains("io.ray.runtime.task.TaskExecutor.execute"), message);
      Assert.assertTrue(message.contains("py_func_python_raise_exception"), message);
      Assert.assertTrue(message.contains("ZeroDivisionError: division by zero"), message);
      return;
    }
    Assert.fail();
  }

  @Test
  public void testThrowExceptionFromNestJava() {
    ObjectRef<Object> res =
        Ray.task(PyFunction.of(PYTHON_MODULE, "py_func_nest_java_throw_exception", Object.class))
            .remote();
    try {
      res.get();
    } catch (RuntimeException ex) {
      final String message = ex.getMessage();
      Assert.assertTrue(message.contains("py_func_nest_java_throw_exception"), message);
      Assert.assertEquals(
          org.apache.commons.lang3.StringUtils.countMatches(
              message, "io.ray.runtime.exception.RayTaskException"),
          2);
      Assert.assertTrue(message.contains("py_func_java_throw_exception"), message);
      Assert.assertTrue(message.contains("java.lang.ArithmeticException: / by zero"), message);
      return;
    }
    Assert.fail();
  }

  public static Object[] pack(int i, String s, double f, Object[] o) {
    // This function will be called from test_cross_language_invocation.py
    return new Object[] {i, s, f, o};
  }

  public static Object returnInput(Object o) {
    return o;
  }

  public static boolean returnInputBoolean(boolean b) {
    return b;
  }

  public static int returnInputInt(int i) {
    return i;
  }

  public static double returnInputDouble(double d) {
    return d;
  }

  public static String returnInputString(String s) {
    return s;
  }

  public static int[] returnInputIntArray(int[] l) {
    return l;
  }

  public static byte[] callPythonActorHandle(PyActorHandle actor) {
    // This function will be called from test_cross_language_invocation.py
    ObjectRef<byte[]> res =
        actor.task(PyActorMethod.of("increase", byte[].class), "1".getBytes()).remote();
    // Fetch the result once and reuse it; the previous implementation called
    // res.get() a second time for the return value, re-fetching the object.
    byte[] result = res.get();
    Assert.assertEquals(result, "3".getBytes());
    return result;
  }

  @SuppressWarnings("ConstantOverflow")
  public static Object throwException() {
    return 1 / 0;
  }

  public static Object throwJavaException() {
    ObjectRef<Object> res =
        Ray.task(PyFunction.of(PYTHON_MODULE, "py_func_java_throw_exception", Object.class))
            .remote();
    return res.get();
  }

  public static Object raisePythonException() {
    ObjectRef<Object> res =
        Ray.task(PyFunction.of(PYTHON_MODULE, "py_func_python_raise_exception", Object.class))
            .remote();
    return res.get();
  }

  /** Simple stateful actor used by the Python side: holds bytes and concatenates onto them. */
  public static class TestActor {

    public TestActor(byte[] v) {
      value = v;
    }

    public byte[] concat(byte[] v) {
      byte[] c = new byte[value.length + v.length];
      System.arraycopy(value, 0, c, 0, value.length);
      System.arraycopy(v, 0, c, value.length, v.length);
      return c;
    }

    private byte[] value;
  }
}
| |
// This file was generated by Mendix Modeler.
//
// WARNING: Code you write here will be lost the next time you deploy the project.
package cassandra.proxies;
/**
 * Generated Mendix proxy for the {@code Cassandra.Query} entity: wraps an
 * {@link com.mendix.systemwideinterfaces.core.IMendixObject} together with the
 * {@link com.mendix.systemwideinterfaces.core.IContext} used for security checks.
 * NOTE: generated code — manual changes are lost on the next deploy (see file header).
 */
public class Query
{
// Wrapped Mendix object; all attribute reads/writes go through it.
private final com.mendix.systemwideinterfaces.core.IMendixObject queryMendixObject;
// Context captured at construction; used by the context-less get/set/commit/delete overloads.
private final com.mendix.systemwideinterfaces.core.IContext context;
/**
 * Internal name of this entity
 */
public static final java.lang.String entityName = "Cassandra.Query";
/**
 * Enum describing members of this entity
 */
public enum MemberNames
{
Query("Query");
private java.lang.String metaName;
MemberNames(java.lang.String s)
{
metaName = s;
}
@Override
public java.lang.String toString()
{
return metaName;
}
}
public Query(com.mendix.systemwideinterfaces.core.IContext context)
{
this(context, com.mendix.core.Core.instantiate(context, "Cassandra.Query"));
}
protected Query(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject queryMendixObject)
{
// Reject nulls and objects of a different entity type before wrapping.
if (queryMendixObject == null)
throw new java.lang.IllegalArgumentException("The given object cannot be null.");
if (!com.mendix.core.Core.isSubClassOf("Cassandra.Query", queryMendixObject.getType()))
throw new java.lang.IllegalArgumentException("The given object is not a Cassandra.Query");
this.queryMendixObject = queryMendixObject;
this.context = context;
}
/**
 * @deprecated Use 'Query.load(IContext, IMendixIdentifier)' instead.
 */
@Deprecated
public static cassandra.proxies.Query initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
{
return cassandra.proxies.Query.load(context, mendixIdentifier);
}
/**
 * Initialize a proxy using context (recommended). This context will be used for security checking when the get- and set-methods without context parameters are called.
 * The get- and set-methods with context parameter should be used when for instance sudo access is necessary (IContext.createSudoClone() can be used to obtain sudo access).
 */
public static cassandra.proxies.Query initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject mendixObject)
{
return new cassandra.proxies.Query(context, mendixObject);
}
public static cassandra.proxies.Query load(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
{
com.mendix.systemwideinterfaces.core.IMendixObject mendixObject = com.mendix.core.Core.retrieveId(context, mendixIdentifier);
return cassandra.proxies.Query.initialize(context, mendixObject);
}
/**
 * Loads all Cassandra.Query objects matching the given XPath constraint and wraps each in a proxy.
 */
public static java.util.List<cassandra.proxies.Query> load(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String xpathConstraint) throws com.mendix.core.CoreException
{
java.util.List<cassandra.proxies.Query> result = new java.util.ArrayList<cassandra.proxies.Query>();
for (com.mendix.systemwideinterfaces.core.IMendixObject obj : com.mendix.core.Core.retrieveXPathQuery(context, "//Cassandra.Query" + xpathConstraint))
result.add(cassandra.proxies.Query.initialize(context, obj));
return result;
}
/**
 * Commit the changes made on this proxy object.
 */
public final void commit() throws com.mendix.core.CoreException
{
com.mendix.core.Core.commit(context, getMendixObject());
}
/**
 * Commit the changes made on this proxy object using the specified context.
 */
public final void commit(com.mendix.systemwideinterfaces.core.IContext context) throws com.mendix.core.CoreException
{
com.mendix.core.Core.commit(context, getMendixObject());
}
/**
 * Delete the object.
 */
public final void delete()
{
com.mendix.core.Core.delete(context, getMendixObject());
}
/**
 * Delete the object using the specified context.
 */
public final void delete(com.mendix.systemwideinterfaces.core.IContext context)
{
com.mendix.core.Core.delete(context, getMendixObject());
}
/**
 * @return value of Query
 */
public final java.lang.String getQuery()
{
return getQuery(getContext());
}
/**
 * @param context
 * @return value of Query
 */
public final java.lang.String getQuery(com.mendix.systemwideinterfaces.core.IContext context)
{
return (java.lang.String) getMendixObject().getValue(context, MemberNames.Query.toString());
}
/**
 * Set value of Query
 * @param query
 */
public final void setQuery(java.lang.String query)
{
setQuery(getContext(), query);
}
/**
 * Set value of Query
 * @param context
 * @param query
 */
public final void setQuery(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String query)
{
getMendixObject().setValue(context, MemberNames.Query.toString(), query);
}
/**
 * @return the IMendixObject instance of this proxy for use in the Core interface.
 */
public final com.mendix.systemwideinterfaces.core.IMendixObject getMendixObject()
{
return queryMendixObject;
}
/**
 * @return the IContext instance of this proxy, or null if no IContext instance was specified at initialization.
 */
public final com.mendix.systemwideinterfaces.core.IContext getContext()
{
return context;
}
@Override
public boolean equals(Object obj)
{
// Two proxies are equal when they wrap equal Mendix objects.
if (obj == this)
return true;
if (obj != null && getClass().equals(obj.getClass()))
{
final cassandra.proxies.Query that = (cassandra.proxies.Query) obj;
return getMendixObject().equals(that.getMendixObject());
}
return false;
}
@Override
public int hashCode()
{
return getMendixObject().hashCode();
}
/**
 * @return String name of this class
 */
public static java.lang.String getType()
{
return "Cassandra.Query";
}
/**
 * @return String GUID from this object, format: ID_0000000000
 * @deprecated Use getMendixObject().getId().toLong() to get a unique identifier for this object.
 */
@Deprecated
public java.lang.String getGUID()
{
return "ID_" + getMendixObject().getId().toLong();
}
}
| |
/*******************************************************************************
* Copyright (C) 2015 Brocade Communications Systems, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* https://github.com/brocade/vTM-eclipse/LICENSE
* This software is distributed "AS IS".
*
* Contributors:
* Brocade Communications Systems - Main Implementation
******************************************************************************/
package com.zeus.eclipsePlugin.editor.assist;
import java.util.Collection;
import java.util.LinkedList;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.ITextViewer;
import org.eclipse.jface.text.contentassist.CompletionProposal;
import org.eclipse.jface.text.contentassist.ICompletionProposal;
import org.eclipse.jface.text.contentassist.IContentAssistProcessor;
import org.eclipse.jface.text.contentassist.IContextInformation;
import org.eclipse.jface.text.contentassist.IContextInformationValidator;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import com.zeus.eclipsePlugin.ImageManager;
import com.zeus.eclipsePlugin.PreferenceManager;
import com.zeus.eclipsePlugin.ZDebug;
import com.zeus.eclipsePlugin.ZLang;
import com.zeus.eclipsePlugin.ZXTMPlugin;
import com.zeus.eclipsePlugin.codedata.CodePossibility;
import com.zeus.eclipsePlugin.codedata.Function;
import com.zeus.eclipsePlugin.codedata.FunctionGroup;
import com.zeus.eclipsePlugin.codedata.VersionCodeData;
import com.zeus.eclipsePlugin.codedata.CodePossibility.Type;
import com.zeus.eclipsePlugin.consts.ImageFile;
import com.zeus.eclipsePlugin.consts.Partition;
import com.zeus.eclipsePlugin.consts.Preference;
import com.zeus.eclipsePlugin.editor.CodeLine;
import com.zeus.eclipsePlugin.editor.CodeUtil;
import com.zeus.eclipsePlugin.editor.TrafficScriptEditor;
import com.zeus.eclipsePlugin.swt.SWTUtil;
/**
* Controls the display of auto-complete possibilities. Is an interface used by
* the editor's TrafficScriptAssistant.
*/
public class TrafficScriptAssistantProcessor implements IContentAssistProcessor
{
private TrafficScriptEditor editor;
private static Type[] types = null;
/**
* Update the types of possibilities that are suggested.
*/
public static void updateTypes()
{
LinkedList<Type> typeList = new LinkedList<Type>();
if( PreferenceManager.getPreferenceBool( Preference.ASSIST_GROUP ) ) {
typeList.add( Type.GROUP );
}
if( PreferenceManager.getPreferenceBool( Preference.ASSIST_FUNC ) ) {
typeList.add( Type.FUNCTION );
}
if( PreferenceManager.getPreferenceBool( Preference.ASSIST_KEYWORDS ) ) {
typeList.add( Type.KEYWORD );
}
types = typeList.toArray( new Type[typeList.size()] );
}
/**
* Get the types of possibilities we are to suggest.
* @return An array of possibilities types that we care about.
*/
public static Type[] getTypes()
{
if( types == null ) updateTypes();
return types;
}
/**
* Create an assistant processor for a particular editor.
* @param editor The editor this assistant processor is calculating the
* possibilities for.
*/
public TrafficScriptAssistantProcessor( TrafficScriptEditor editor )
{
this.editor = editor;
}
/**
* This works out what word (if any) is before the current offset and
* returns suggestions to complete the word into functions and groups.
*/
/* Override */
public ICompletionProposal[] computeCompletionProposals( ITextViewer viewer,
int offset )
{
ZDebug.print( 3, "computeCompletionProposals( ", viewer, ", ", offset, " )" );
try {
IDocument doc = viewer.getDocument();
ImageManager images = ZXTMPlugin.getDefault().getImageManager();
// Should we show function proposals after selecting a group?
boolean groupShowFuncs = PreferenceManager.getPreferenceBool(
Preference.ASSIST_GROUP_FUNC
);
CodeLine lineInfo = CodeUtil.getLineAreas( doc, offset );
ZDebug.print( 7, lineInfo );
Partition partition = lineInfo.getRegionType( offset );
ZDebug.print( 4, "Offset Partition: ", partition );
if( partition != Partition.CODE ) {
return null;
}
int lastN = Math.max( 0, offset - 50 );
String before = doc.get( lastN, offset - lastN );
int wordStart = before.length() - 1;
while( wordStart >= 0 ) {
char c = before.charAt( wordStart );
if( !Character.isLetterOrDigit( c ) && c != '.' && c != '_' )
{
break;
}
wordStart--;
}
if( wordStart < -1 ) return null;
// The word before the dot, e.g. string. == 'string'
String lastWord = before.substring( wordStart + 1 ).trim();
ZDebug.print( 5, "Last Word: ", lastWord );
VersionCodeData codeVer = editor.getCodeDataVersion();
Collection<CodePossibility> suggestions =
codeVer.getPossiblilities( lastWord, getTypes() );
LinkedList<ICompletionProposal> list = new LinkedList<ICompletionProposal>();
for( CodePossibility suggestion : suggestions ) {
switch( suggestion.getType() ) {
case KEYWORD: {
String keyword = suggestion.getName();
list.add( new ZCompletionProposal(
keyword,
offset - suggestion.getCurrentText().length(),
suggestion.getCurrentText().length(),
keyword.length(),
null,
keyword,
null,
Type.KEYWORD
) );
break;
}
case GROUP: {
FunctionGroup group = suggestion.getFunctionGroup();
String toInsert = group.getName();
if( groupShowFuncs ) {
toInsert += ".";
}
list.add( new ZCompletionProposal(
toInsert,
offset - suggestion.getCurrentText().length(),
suggestion.getCurrentText().length(),
toInsert.length(),
images.getImage( ImageFile.GROUP ),
group.getName(),
group.getDescription(),
Type.GROUP
) );
break;
}
case FUNCTION: {
Function func = suggestion.getFunction();
String name = func.getName();
StringBuffer replace = new StringBuffer( name );
StringBuffer option = new StringBuffer( name );
int cursor = 0;
if( func.getParams() == null || func.getParams().length == 0 ) {
replace.append( "()" );
option.append( "()" );
cursor = name.length() + 2;
} else {
replace.append( "( )" );
option.append( "( " );
boolean comma = false;
for( String param : func.getParams() ) {
if( comma ) {
option.append( ", " );
}
option.append( param );
comma = true;
}
option.append( " )" );
cursor = name.length() + 2;
}
list.add( new ZCompletionProposal(
replace.toString(),
offset - suggestion.getCurrentText().length(),
suggestion.getCurrentText().length(),
cursor,
images.getImage( ImageFile.FUNC ),
option.toString(),
func.getFullDescription(),
Type.FUNCTION
) );
break;
}
}
}
return list.toArray( new ICompletionProposal[list.size()] );
} catch( BadLocationException e ) {
ZDebug.printStackTrace( e, "Error whilst working out code completions" );
}
return null;
}
/**
* Context info is done by the PopupManager class, we don't use Eclipse's
* built in context stuff.
*/
/* Override */
public IContextInformation[] computeContextInformation( ITextViewer viewer,
int offset )
{
return null;
}
/**
* Which characters should the suggestions auto appear for.
*/
/* Override */
public char[] getCompletionProposalAutoActivationCharacters()
{
return new char[] { '.' };
}
/** We don't do context stuff using Eclipse's code. */
/* Override */
public char[] getContextInformationAutoActivationCharacters()
{
return null;
}
/** We don't do context stuff using Eclipse's code. */
/* Override */
public IContextInformationValidator getContextInformationValidator()
{
return null;
}
/**
* The error message when we have no completions.
*/
/* Override */
public String getErrorMessage()
{
return ZLang.ZL_NoCompletionsAvailable;
}
/**
* This is our custom Completion Proposal class. It allows us to know when
* the user picks a completion to insert.
*/
private class ZCompletionProposal implements ICompletionProposal
{
private CompletionProposal wrapped;
private Type type;
/**
* Create a completion proposal with normal properties.
*
* @param replacementString The string to replace
* @param replacetmentOffset Where to replace from
* @param replacementLength How long is the replacement
* @param cursorPosition The cursor position after completion
* @param image The icon to display with the proposal
* @param displayString The string to display in the proposal
* @param additionalProposalInfo Detailed info to be displayed in a popup.
* @param type The type of this proposal
*/
public ZCompletionProposal( String replacementString,
int replacetmentOffset, int replacementLength, int cursorPosition,
Image image, String displayString, String additionalProposalInfo, Type type )
{
this.wrapped = new CompletionProposal(
replacementString,
replacetmentOffset,
replacementLength,
cursorPosition,
image,
displayString,
null,
additionalProposalInfo
);
this.type = type;
}
/**
* When the completion is applied to a document, check if we want to go
* straight into another completion.
*/
/* Override */
public void apply( IDocument document )
{
wrapped.apply( document );
// If we just inserted a function group we may want to go straight into
// showing its functions.
if( type == Type.GROUP &&
PreferenceManager.getPreferenceBool( Preference.ASSIST_GROUP_FUNC ) ) {
// We need to wait a bit before showing the new completions.
Runnable run = new Runnable() { public void run() {
try { Thread.sleep( 100 ); } catch( Exception e ) {}
SWTUtil.exec( new Runnable() { public void run() {
editor.getAssistant().showPossibleCompletions();
} } );
} };
Thread thread = new Thread( run );
thread.start();
}
}
/* Override */
public String getAdditionalProposalInfo()
{
return wrapped.getAdditionalProposalInfo();
}
/* Override */
public IContextInformation getContextInformation()
{
return wrapped.getContextInformation();
}
/* Override */
public String getDisplayString()
{
return wrapped.getDisplayString();
}
/* Override */
public Image getImage()
{
return wrapped.getImage();
}
/* Override */
public Point getSelection( IDocument document )
{
return wrapped.getSelection( document );
}
}
}
| |
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.analyzer;
import com.github.packageurl.MalformedPackageURLException;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.io.InputStream;
import org.owasp.dependencycheck.Engine;
import org.owasp.dependencycheck.analyzer.exception.AnalysisException;
import org.owasp.dependencycheck.dependency.Confidence;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.FileFilterBuilder;
import org.owasp.dependencycheck.utils.FileUtils;
import org.owasp.dependencycheck.utils.Settings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.concurrent.ThreadSafe;
import org.apache.commons.lang3.StringUtils;
import org.owasp.dependencycheck.data.nvd.ecosystem.Ecosystem;
import org.owasp.dependencycheck.exception.InitializationException;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.dependency.naming.GenericIdentifier;
import org.owasp.dependencycheck.dependency.naming.PurlIdentifier;
import org.owasp.dependencycheck.processing.GrokAssemblyProcessor;
import org.owasp.dependencycheck.utils.DependencyVersion;
import org.owasp.dependencycheck.utils.DependencyVersionUtil;
import org.owasp.dependencycheck.utils.ExtractionException;
import org.owasp.dependencycheck.utils.ExtractionUtil;
import org.owasp.dependencycheck.utils.processing.ProcessReader;
import org.owasp.dependencycheck.xml.assembly.AssemblyData;
import org.owasp.dependencycheck.xml.assembly.GrokParseException;
/**
* Analyzer for getting company, product, and version information from a .NET
* assembly.
*
* @author colezlaw
*/
@ThreadSafe
public class AssemblyAnalyzer extends AbstractFileTypeAnalyzer {
/**
* Logger
*/
private static final Logger LOGGER = LoggerFactory.getLogger(AssemblyAnalyzer.class);
/**
* The analyzer name
*/
private static final String ANALYZER_NAME = "Assembly Analyzer";
/**
* The analysis phase
*/
private static final AnalysisPhase ANALYSIS_PHASE = AnalysisPhase.INFORMATION_COLLECTION;
/**
* A descriptor for the type of dependencies processed or added by this
* analyzer.
*/
public static final String DEPENDENCY_ECOSYSTEM = Ecosystem.DOTNET;
/**
* The list of supported extensions
*/
private static final String[] SUPPORTED_EXTENSIONS = {"dll", "exe"};
/**
* The File Filter used to filter supported extensions.
*/
private static final FileFilter FILTER = FileFilterBuilder.newInstance().addExtensions(
SUPPORTED_EXTENSIONS).build();
/**
* The file path to `GrokAssembly.dll`.
*/
private File grokAssembly = null;
/**
* The base argument list to call GrokAssembly.
*/
private List<String> baseArgumentList = null;
/**
* Builds the beginnings of a List for ProcessBuilder
*
* @return the list of arguments to begin populating the ProcessBuilder
*/
protected List<String> buildArgumentList() {
// Use file.separator as a wild guess as to whether this is Windows
final List<String> args = new ArrayList<>();
if (!StringUtils.isBlank(getSettings().getString(Settings.KEYS.ANALYZER_ASSEMBLY_DOTNET_PATH))) {
args.add(getSettings().getString(Settings.KEYS.ANALYZER_ASSEMBLY_DOTNET_PATH));
} else if (isDotnetPath()) {
args.add("dotnet");
} else {
return null;
}
args.add(grokAssembly.getPath());
return args;
}
/**
* Performs the analysis on a single Dependency.
*
* @param dependency the dependency to analyze
* @param engine the engine to perform the analysis under
* @throws AnalysisException if anything goes sideways
*/
@Override
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
final File test = new File(dependency.getActualFilePath());
if (!test.isFile()) {
throw new AnalysisException(String.format("%s does not exist and cannot be analyzed by dependency-check",
dependency.getActualFilePath()));
}
if (grokAssembly == null) {
LOGGER.warn("GrokAssembly didn't get deployed");
return;
}
if (baseArgumentList == null) {
LOGGER.warn("Assembly Analyzer was unable to execute");
return;
}
final AssemblyData data;
final List<String> args = new ArrayList<>(baseArgumentList);
args.add(dependency.getActualFilePath());
final ProcessBuilder pb = new ProcessBuilder(args);
try {
final Process proc = pb.start();
try (GrokAssemblyProcessor processor = new GrokAssemblyProcessor();
ProcessReader processReader = new ProcessReader(proc, processor)) {
processReader.readAll();
final String errorOutput = processReader.getError();
if (!StringUtils.isBlank(errorOutput)) {
LOGGER.warn("Error from GrokAssembly: {}", errorOutput);
}
final int exitValue = proc.exitValue();
if (exitValue == 3) {
LOGGER.debug("{} is not a .NET assembly or executable and as such cannot be analyzed by dependency-check",
dependency.getActualFilePath());
return;
} else if (exitValue != 0) {
LOGGER.debug("Return code {} from GrokAssembly; dependency-check is unable to analyze the library: {}",
exitValue, dependency.getActualFilePath());
return;
}
data = processor.getAssemblyData();
}
// First, see if there was an error
final String error = data.getError();
if (error != null && !error.isEmpty()) {
throw new AnalysisException(error);
}
if (data.getWarning() != null) {
LOGGER.debug("Grok Assembly - could not get namespace on dependency `{}` - {}", dependency.getActualFilePath(), data.getWarning());
}
updateDependency(data, dependency);
} catch (GrokParseException saxe) {
LOGGER.error("----------------------------------------------------");
LOGGER.error("Failed to read the Assembly Analyzer results.");
LOGGER.error("----------------------------------------------------");
throw new AnalysisException("Couldn't parse Assembly Analyzer results (GrokAssembly)", saxe);
} catch (IOException ioe) {
throw new AnalysisException(ioe);
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
throw new AnalysisException("GrokAssembly process interrupted", ex);
}
}
/**
* Updates the dependency information with the provided assembly data.
*
* @param data the assembly data
* @param dependency the dependency to update
*/
private void updateDependency(final AssemblyData data, Dependency dependency) {
final StringBuilder sb = new StringBuilder();
if (!StringUtils.isBlank(data.getFileDescription())) {
sb.append(data.getFileDescription());
}
if (!StringUtils.isBlank(data.getComments())) {
if (sb.length() > 0) {
sb.append("\n\n");
}
sb.append(data.getComments());
}
if (!StringUtils.isBlank(data.getLegalCopyright())) {
if (sb.length() > 0) {
sb.append("\n\n");
}
sb.append(data.getLegalCopyright());
}
if (!StringUtils.isBlank(data.getLegalTrademarks())) {
if (sb.length() > 0) {
sb.append("\n");
}
sb.append(data.getLegalTrademarks());
}
final String description = sb.toString();
if (description.length() > 0) {
dependency.setDescription(description);
addMatchingValues(data.getNamespaces(), description, dependency, EvidenceType.VENDOR);
addMatchingValues(data.getNamespaces(), description, dependency, EvidenceType.PRODUCT);
}
if (!StringUtils.isBlank(data.getProductVersion())) {
dependency.addEvidence(EvidenceType.VERSION, "grokassembly", "ProductVersion", data.getProductVersion(), Confidence.HIGHEST);
}
if (!StringUtils.isBlank(data.getFileVersion())) {
dependency.addEvidence(EvidenceType.VERSION, "grokassembly", "FileVersion", data.getFileVersion(), Confidence.HIGH);
}
if (data.getFileVersion() != null && data.getProductVersion() != null) {
final int max = data.getFileVersion().length() > data.getProductVersion().length()
? data.getProductVersion().length() : data.getFileVersion().length();
int pos;
for (pos = 0; pos < max; pos++) {
if (data.getFileVersion().charAt(pos) != data.getProductVersion().charAt(pos)) {
break;
}
}
final DependencyVersion fileVersion = DependencyVersionUtil.parseVersion(data.getFileVersion(), true);
final DependencyVersion productVersion = DependencyVersionUtil.parseVersion(data.getProductVersion(), true);
if (pos > 0) {
final DependencyVersion matchingVersion = DependencyVersionUtil.parseVersion(data.getFileVersion().substring(0, pos), true);
if (fileVersion != null && data.getFileVersion() != null
&& fileVersion.toString().length() == data.getFileVersion().length()) {
if (matchingVersion != null && matchingVersion.getVersionParts().size() > 2) {
dependency.addEvidence(EvidenceType.VERSION, "AssemblyAnalyzer", "FilteredVersion",
matchingVersion.toString(), Confidence.HIGHEST);
dependency.setVersion(matchingVersion.toString());
}
}
}
if (dependency.getVersion() == null) {
if (data.getFileVersion() != null && data.getProductVersion() != null
&& data.getFileVersion().length() >= data.getProductVersion().length()) {
if (fileVersion != null && fileVersion.toString().length() == data.getFileVersion().length()) {
dependency.setVersion(fileVersion.toString());
} else if (productVersion != null && productVersion.toString().length() == data.getProductVersion().length()) {
dependency.setVersion(productVersion.toString());
}
} else {
if (productVersion != null && productVersion.toString().length() == data.getProductVersion().length()) {
dependency.setVersion(productVersion.toString());
} else if (fileVersion != null && fileVersion.toString().length() == data.getFileVersion().length()) {
dependency.setVersion(fileVersion.toString());
}
}
}
}
if (dependency.getVersion() == null && data.getFileVersion() != null) {
final DependencyVersion version = DependencyVersionUtil.parseVersion(data.getFileVersion(), true);
if (version != null) {
dependency.setVersion(version.toString());
}
}
if (dependency.getVersion() == null && data.getProductVersion() != null) {
final DependencyVersion version = DependencyVersionUtil.parseVersion(data.getProductVersion(), true);
if (version != null) {
dependency.setVersion(version.toString());
}
}
if (!StringUtils.isBlank(data.getCompanyName())) {
dependency.addEvidence(EvidenceType.VENDOR, "grokassembly", "CompanyName", data.getCompanyName(), Confidence.HIGHEST);
addMatchingValues(data.getNamespaces(), data.getCompanyName(), dependency, EvidenceType.VENDOR);
}
if (!StringUtils.isBlank(data.getProductName())) {
dependency.addEvidence(EvidenceType.PRODUCT, "grokassembly", "ProductName", data.getProductName(), Confidence.HIGHEST);
addMatchingValues(data.getNamespaces(), data.getProductName(), dependency, EvidenceType.PRODUCT);
}
if (!StringUtils.isBlank(data.getFileDescription())) {
dependency.addEvidence(EvidenceType.PRODUCT, "grokassembly", "FileDescription", data.getFileDescription(), Confidence.HIGH);
addMatchingValues(data.getNamespaces(), data.getFileDescription(), dependency, EvidenceType.PRODUCT);
}
final String internalName = data.getInternalName();
if (!StringUtils.isBlank(internalName)) {
dependency.addEvidence(EvidenceType.PRODUCT, "grokassembly", "InternalName", internalName, Confidence.MEDIUM);
addMatchingValues(data.getNamespaces(), internalName, dependency, EvidenceType.PRODUCT);
addMatchingValues(data.getNamespaces(), internalName, dependency, EvidenceType.VENDOR);
if (dependency.getName() == null && StringUtils.containsIgnoreCase(dependency.getActualFile().getName(), internalName)) {
final String ext = FileUtils.getFileExtension(internalName);
if (ext != null) {
dependency.setName(internalName.substring(0, internalName.length() - ext.length() - 1));
} else {
dependency.setName(internalName);
}
}
}
final String originalFilename = data.getOriginalFilename();
if (!StringUtils.isBlank(originalFilename)) {
dependency.addEvidence(EvidenceType.PRODUCT, "grokassembly", "OriginalFilename", originalFilename, Confidence.MEDIUM);
addMatchingValues(data.getNamespaces(), originalFilename, dependency, EvidenceType.PRODUCT);
if (dependency.getName() == null && StringUtils.containsIgnoreCase(dependency.getActualFile().getName(), originalFilename)) {
final String ext = FileUtils.getFileExtension(originalFilename);
if (ext != null) {
dependency.setName(originalFilename.substring(0, originalFilename.length() - ext.length() - 1));
} else {
dependency.setName(originalFilename);
}
}
}
if (dependency.getName() != null && dependency.getVersion() != null) {
try {
dependency.addSoftwareIdentifier(new PurlIdentifier("generic", dependency.getName(), dependency.getVersion(), Confidence.MEDIUM));
} catch (MalformedPackageURLException ex) {
LOGGER.debug("Unable to create Package URL Identifier for " + dependency.getName(), ex);
dependency.addSoftwareIdentifier(new GenericIdentifier(
String.format("%s@%s", dependency.getName(), dependency.getVersion()),
Confidence.MEDIUM));
}
}
dependency.setEcosystem(DEPENDENCY_ECOSYSTEM);
}
/**
* Initialize the analyzer. In this case, extract GrokAssembly.dll to a
* temporary location.
*
* @param engine a reference to the dependency-check engine
* @throws InitializationException thrown if anything goes wrong
*/
@Override
public void prepareFileTypeAnalyzer(Engine engine) throws InitializationException {
grokAssembly = extractGrokAssembly();
baseArgumentList = buildArgumentList();
if (baseArgumentList == null) {
setEnabled(false);
LOGGER.error("----------------------------------------------------");
LOGGER.error(".NET Assembly Analyzer could not be initialized and at least one "
+ "'exe' or 'dll' was scanned. The 'dotnet' executable could not be found on "
+ "the path; either disable the Assembly Analyzer or add the path to dotnet "
+ "core in the configuration.");
LOGGER.error("----------------------------------------------------");
return;
}
try {
final ProcessBuilder pb = new ProcessBuilder(baseArgumentList);
final Process p = pb.start();
try (ProcessReader processReader = new ProcessReader(p)) {
processReader.readAll();
final String error = processReader.getError();
if (p.exitValue() != 1 || !StringUtils.isBlank(error)) {
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer, please see the log for more details.");
LOGGER.debug("GrokAssembly.dll is not working properly");
grokAssembly = null;
setEnabled(false);
throw new InitializationException("Could not execute .NET AssemblyAnalyzer");
}
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer;\n"
+ "dependency-check requires dotnet 6.0 core to be installed to analyze assemblies;\n"
+ "this can be ignored unless you are scanning .NET DLLs. Please see the log for more details.");
LOGGER.debug("Could not execute GrokAssembly {}", e.getMessage());
setEnabled(false);
throw new InitializationException("An error occurred with the .NET AssemblyAnalyzer", e);
} catch (IOException e) {
LOGGER.warn("An error occurred with the .NET AssemblyAnalyzer;\n"
+ "dependency-check requires dotnet 6.0 core to be installed to analyze assemblies;\n"
+ "this can be ignored unless you are scanning .NET DLLs. Please see the log for more details.");
LOGGER.debug("Could not execute GrokAssembly {}", e.getMessage());
setEnabled(false);
throw new InitializationException("An error occurred with the .NET AssemblyAnalyzer", e);
}
}
/**
* Extracts the GrokAssembly executable.
*
* @return the path to the extracted executable
* @throws InitializationException thrown if the executable could not be
* extracted
*/
private File extractGrokAssembly() throws InitializationException {
final File location;
try (InputStream in = FileUtils.getResourceAsStream("GrokAssembly.zip")) {
if (in == null) {
throw new InitializationException("Unable to extract GrokAssembly.dll - file not found");
}
location = FileUtils.createTempDirectory(getSettings().getTempDirectory());
ExtractionUtil.extractFiles(in, location);
} catch (ExtractionException ex) {
throw new InitializationException("Unable to extract GrokAssembly.dll", ex);
} catch (IOException ex) {
throw new InitializationException("Unable to create temp directory for GrokAssembly", ex);
}
return new File(location, "GrokAssembly.dll");
}
/**
* Removes resources used from the local file system.
*
* @throws Exception thrown if there is a problem closing the analyzer
*/
@Override
public void closeAnalyzer() throws Exception {
FileUtils.delete(grokAssembly.getParentFile());
}
@Override
protected FileFilter getFileFilter() {
return FILTER;
}
/**
* Gets this analyzer's name.
*
* @return the analyzer name
*/
@Override
public String getName() {
return ANALYZER_NAME;
}
/**
* Returns the phase this analyzer runs under.
*
* @return the phase this runs under
*/
@Override
public AnalysisPhase getAnalysisPhase() {
return ANALYSIS_PHASE;
}
/**
* Returns the key used in the properties file to reference the analyzer's
* enabled property.
*
* @return the analyzer's enabled property setting key
*/
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED;
}
/**
* Tests to see if a file is in the system path.
*
* @return <code>true</code> if dotnet could be found in the path; otherwise
* <code>false</code>
*/
private boolean isDotnetPath() {
final String[] args = new String[2];
args[0] = "dotnet";
args[1] = "--version";
final ProcessBuilder pb = new ProcessBuilder(args);
try {
final Process proc = pb.start();
try (ProcessReader processReader = new ProcessReader(proc)) {
processReader.readAll();
final int exitValue = proc.exitValue();
if (exitValue == 0) {
return true;
}
final String output = processReader.getOutput();
if (output.length() > 0) {
return true;
}
}
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
LOGGER.debug("Path search failed for dotnet", ex);
} catch (IOException ex) {
LOGGER.debug("Path search failed for dotnet", ex);
}
return false;
}
/**
* Cycles through the collection of class name information to see if parts
* of the package names are contained in the provided value. If found, it
* will be added as the HIGHEST confidence evidence because we have more
* then one source corroborating the value.
*
* @param packages a collection of class name information
* @param value the value to check to see if it contains a package name
* @param dep the dependency to add new entries too
* @param type the type of evidence (vendor, product, or version)
*/
protected static void addMatchingValues(List<String> packages, String value, Dependency dep, EvidenceType type) {
if (value == null || value.isEmpty() || packages == null || packages.isEmpty()) {
return;
}
for (String key : packages) {
final int pos = StringUtils.indexOfIgnoreCase(value, key);
if ((pos == 0 && (key.length() == value.length() || (key.length() < value.length()
&& !Character.isLetterOrDigit(value.charAt(key.length())))))
|| (pos > 0 && !Character.isLetterOrDigit(value.charAt(pos - 1))
&& (pos + key.length() == value.length() || (key.length() < value.length()
&& !Character.isLetterOrDigit(value.charAt(pos + key.length())))))) {
dep.addEvidence(type, "dll", "namespace", key, Confidence.HIGHEST);
}
}
}
/**
* Used in testing only - this simply returns the path to the extracted
* GrokAssembly.dll.
*
* @return the path to the extracted GrokAssembly.dll
*/
File getGrokAssemblyPath() {
return grokAssembly;
}
}
| |
package it.finsiel.siged.mvc.presentation.action;
import it.finsiel.siged.constant.Constants;
import it.finsiel.siged.constant.ReturnValues;
import it.finsiel.siged.exception.AuthenticationException;
import it.finsiel.siged.exception.DataException;
import it.finsiel.siged.model.organizzazione.Organizzazione;
import it.finsiel.siged.model.organizzazione.Utente;
import it.finsiel.siged.mvc.bo.OrganizzazioneBO;
import it.finsiel.siged.mvc.bo.RegistroBO;
import it.finsiel.siged.mvc.bo.UfficioBO;
import it.finsiel.siged.mvc.business.OrganizzazioneDelegate;
import it.finsiel.siged.mvc.business.RegistroDelegate;
import it.finsiel.siged.mvc.business.RegistroEmergenzeDelegate;
import it.finsiel.siged.mvc.business.UtenteDelegate;
import it.finsiel.siged.mvc.presentation.actionform.LogonForm;
import it.finsiel.siged.mvc.vo.lookup.ParametriLdapVO;
import it.finsiel.siged.mvc.vo.organizzazione.UtenteVO;
import it.finsiel.siged.servlet.SessionTimeoutNotifier;
import it.finsiel.siged.util.ServletUtil;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import org.apache.struts.action.Action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;
/**
 * Implementation of <strong>Action</strong> that validates a user logon and,
 * through the same mapping ("/logoff" path), handles logoff.
 *
 * @author Almaviva sud.
 */
public final class LogonAction extends Action {

    // ----------------------------------------------------- Instance Variables

    /** The <code>Log</code> instance for this application. */
    static Logger logger = Logger.getLogger(LogonAction.class.getName());

    // --------------------------------------------------------- Public Methods

    /**
     * Process the specified HTTP request, and create the corresponding HTTP
     * response (or forward to another web component that will create it).
     * Return an <code>ActionForward</code> instance describing where and how
     * control should be forwarded, or <code>null</code> if the response has
     * already been completed.
     *
     * @param mapping the ActionMapping used to select this instance
     * @param form the optional ActionForm bean for this request (if any)
     * @param request the HTTP request we are processing
     * @param response the HTTP response we are creating
     * @return the forward describing the next view
     * @exception Exception if business logic throws an exception
     */
    public ActionForward execute(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        ServletContext context = getServlet().getServletContext();
        LogonForm lForm = (LogonForm) form;
        HttpSession session = request.getSession();
        Utente utente = null;

        // Logoff: unregister the user and invalidate the whole session.
        if ("/logoff".equals(mapping.getPath())) {
            utente = (Utente) session.getAttribute(Constants.UTENTE_KEY);
            disconnectUtente(utente);
            logger.info("Invalidate Session ID:" + session.getId());
            session.invalidate();
            session = request.getSession(true); // fresh, anonymous session
            return (mapping.findForward("logon"));
        }

        if ("".equals(lForm.getLogin())) {
            return (mapping.findForward("input"));
        }

        // Validate the request parameters specified by the user.
        ActionMessages errors = new ActionMessages();
        // Form validation guarantees username and password are not null.
        String username = lForm.getUsername();
        String password = lForm.getPassword();
        Organizzazione organizzazione = Organizzazione.getInstance();
        utente = organizzazione.getUtente(username);

        // FIX: constant-first comparison so a missing LDAP flag means
        // "LDAP disabled" instead of a NullPointerException.
        boolean useLdap = "1".equals(organizzazione.getValueObject().getFlagLdap());

        if (utente == null && !useLdap) {
            // Unknown user and no LDAP fallback available.
            errors.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage(
                    "error.authentication.failed"));
        } else if (utente != null && utente.getSessionId() != null) {
            // The user already has another live session.
            if (Boolean.TRUE.equals(lForm.getForzatura())) {
                // The user already chose to force the login over the old session.
                if ("".equals(password) || "".equals(username)) {
                    errors.add(ActionMessages.GLOBAL_MESSAGE,
                            new ActionMessage(
                                    "error.authentication.failed.passins"));
                    if (!errors.isEmpty()) {
                        request.setAttribute("mostra_forzatura", Boolean.TRUE);
                        saveErrors(request, errors);
                        return (mapping.findForward("input"));
                    }
                } else if (!password.equals(utente.getValueObject()
                        .getPassword())
                        || !username.equals(utente.getValueObject()
                                .getUsername())) {
                    errors.add(ActionMessages.GLOBAL_MESSAGE,
                            new ActionMessage("error.authentication.failed"));
                    if (!errors.isEmpty()) {
                        request.setAttribute("mostra_forzatura", Boolean.TRUE);
                        saveErrors(request, errors);
                        return (mapping.findForward("input"));
                    }
                } else {
                    try {
                        // Drop the previous session id from the application context...
                        Organizzazione.getInstance().removeSessionIdUtente(
                                utente.getSessionId());
                        // ...destroy the old session in the servlet container...
                        session.invalidate();
                        // ...and ask the container for a brand new one.
                        session = request.getSession(true);
                        logger.info("Session just created:" + session.getId());
                        connectUtente(session, context, utente);
                    } catch (DataException de) {
                        logger.error("connectUtente", de);
                        errors.add(ActionMessages.GLOBAL_MESSAGE,
                                new ActionMessage(
                                        "error.authentication.connect"));
                    }
                }
            } else {
                // Otherwise offer the user the option to force the login.
                request.setAttribute("mostra_forzatura", Boolean.TRUE);
                errors.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage(
                        "error.authentication.already_connected"));
            }
        } else {
            // First access by this user: authenticate.
            // SECURITY FIX: never write the clear-text password to the log.
            logger.info("Authenticating user:" + username);
            UtenteDelegate utenteDelegate = UtenteDelegate.getInstance();
            UtenteVO uteVO = null;
            if (utenteDelegate == null) {
                errors.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage(
                        "error.delegate.missing"));
            } else {
                ParametriLdapVO ldapSettings = organizzazione.getValueObject()
                        .getParametriLdap();
                if (useLdap) {
                    try {
                        uteVO = utenteDelegate.getUtente(username, password,
                                ldapSettings);
                    } catch (AuthenticationException e) {
                        errors.add(ActionMessages.GLOBAL_MESSAGE,
                                new ActionMessage(
                                        "error.authentication.failed"));
                    }
                } else {
                    uteVO = utenteDelegate.getUtente(username, password);
                }
                if (uteVO == null
                        || uteVO.getReturnValue() != ReturnValues.FOUND) {
                    // Wrong password (or user not found).
                    errors.add(ActionMessages.GLOBAL_MESSAGE,
                            new ActionMessage("error.authentication.failed"));
                } else if (!uteVO.isAbilitato()) {
                    // Account exists but is disabled.
                    errors.add(ActionMessages.GLOBAL_MESSAGE,
                            new ActionMessage(
                                    "error.authentication.non_abilitato"));
                } else {
                    try {
                        session = request.getSession(true);
                        // NOTE(review): this passes the 'utente' looked up earlier,
                        // which may still be null when authentication succeeded via
                        // LDAP only - it probably should be built from 'uteVO'.
                        // Left unchanged; confirm with the team before fixing.
                        connectUtente(session, context, utente);
                        // caricaConfigurazioneUtente(uteVO.getId().intValue());
                    } catch (DataException de) {
                        logger.error("connectUtente", de);
                        errors.add(ActionMessages.GLOBAL_MESSAGE,
                                new ActionMessage(
                                        "error.authentication.connect"));
                    }
                }
            }
        }

        // Report any errors we have discovered back to the original form.
        if (!errors.isEmpty()) {
            saveErrors(request, errors);
            return (mapping.findForward("input"));
        }

        // Remove the obsolete form bean.
        if (mapping.getAttribute() != null) {
            if ("request".equals(mapping.getScope())) {
                request.removeAttribute(mapping.getAttribute());
            } else {
                request.getSession().removeAttribute(mapping.getAttribute());
            }
        }

        if (utente == null || utente.getRegistri().size() == 0
                || utente.getUffici().size() == 0) {
            // Error page: no office or register available for this user.
            errors.add("registro", new ActionMessage(
                    "error.authentication.non_abilitato"));
            saveErrors(request, errors);
            disconnectUtente(utente);
            return (mapping.findForward("input"));
        } else if (utente.getRegistri().size() > 1
                || utente.getUffici().size() > 1) {
            // Several registers/offices: let the user pick one.
            return (mapping.findForward("scelta_registro"));
        } else {
            utente.setRegistroInUso((RegistroBO.getUnicoRegistro(utente
                    .getRegistri())).getId().intValue());
            utente.setUfficioInUso((UfficioBO.getUnicoUfficio(utente
                    .getUffici())).getId().intValue());
            // Set the working register, then forward to the user's default page.
            int numProtocolliRegistroEmergenza = RegistroEmergenzeDelegate
                    .getInstance().getNumeroProtocolliPrenotati(
                            utente.getRegistroInUso());
            request.getSession().setAttribute(
                    "PROTOCOLLI_EMERGENZA",
                    (numProtocolliRegistroEmergenza > 0
                            ? Integer.valueOf(numProtocolliRegistroEmergenza)
                            : null));
            return (mapping.findForward("pagina_predefinita"));
        }
    }

    // ------------------------------------------------------ Private Methods

    /**
     * Binds the user to the freshly created session: records the session id,
     * loads the user's registers and offices, initializes the per-session
     * temp area and registers the user as connected in the shared
     * {@link Organizzazione} singleton.
     * TODO: move into the OrganizzazioneBO class?
     *
     * @param session the new HTTP session
     * @param context the servlet context (currently unused, kept for callers)
     * @param utente the user to connect
     */
    private void connectUtente(HttpSession session, ServletContext context,
            Utente utente) throws ServletException, IOException, DataException {
        utente.setSessionId(session.getId());
        RegistroDelegate registroDelegate = RegistroDelegate.getInstance();
        OrganizzazioneDelegate organizzazioneDelegate = OrganizzazioneDelegate
                .getInstance();
        UtenteVO utenteVO = utente.getValueObject();
        Map registri = registroDelegate.getRegistriUtente(utenteVO.getId()
                .intValue());
        utente.setRegistri(registri);
        utente.setRegistroUfficialeId(RegistroBO
                .getRegistroUfficialeId(registri.values()));
        Organizzazione organizzazione = Organizzazione.getInstance();
        HashMap uffici = OrganizzazioneBO
                .getUfficiUtente(organizzazioneDelegate
                        .getIdentificativiUffici(utente.getValueObject()
                                .getId().intValue()));
        utente.setUffici(uffici);
        initializeUserSession(session, utente);
        organizzazione.aggiungiUtenteConnesso(utente);
        session.setAttribute(Constants.UTENTE_KEY, utente);
    }

    /**
     * Creates the per-session temp-file area, used to save temp files in a
     * unique way on the server so concurrent requests cannot overwrite each
     * other's files, and attaches a {@code SessionTimeoutNotifier} to the
     * session for cleanup.
     *
     * @throws IOException if the temp area cannot be created
     */
    public void initializeUserSession(HttpSession session, Utente utente)
            throws ServletException, IOException {
        SessionTimeoutNotifier stn = new SessionTimeoutNotifier(ServletUtil
                .getContextPath(session)
                + session.getServletContext().getInitParameter(
                        Constants.TEMP_FILE_PATH), utente.getValueObject()
                .getUsername(), session);
        utente.getValueObject().setTempFolder(stn.getTempPath());
        session.setAttribute(Constants.SESSION_NOTIFIER, stn);
    }

    /**
     * Unregisters the user from the shared organization state (no-op for a
     * null user).
     * TODO: move this method into a LogoutAction.
     */
    public void disconnectUtente(Utente utente) {
        if (utente != null) {
            Organizzazione organizzazione = Organizzazione.getInstance();
            organizzazione.disconnettiUtente(utente);
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.actions;
import com.intellij.execution.ExecutionManager;
import com.intellij.execution.KillableProcess;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.execution.ui.RunContentManager;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.TaskInfo;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListItemDescriptor;
import com.intellij.openapi.ui.popup.PopupChooserBuilder;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.openapi.wm.ex.StatusBarEx;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.ui.components.JBList;
import com.intellij.ui.popup.list.GroupedItemsListRenderer;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Stops a running process. Inside a run/debug tool window it stops (or, when the
 * process is already terminating and killable, force-kills) the process of that
 * content; invoked from the main menu it shows a popup listing every active run
 * content and cancellable background task so the user can pick what to stop.
 */
public class StopAction extends DumbAwareAction implements AnAction.TransparentUpdate {
  @Override
  public void update(final AnActionEvent e) {
    boolean enable = false;
    // Defaults from the template presentation; replaced by the "kill" variants
    // below when only a forced kill remains possible.
    Icon icon = getTemplatePresentation().getIcon();
    String description = getTemplatePresentation().getDescription();
    final Presentation presentation = e.getPresentation();
    if (ActionPlaces.MAIN_MENU.equals(e.getPlace())) {
      // Main menu: enabled whenever anything can be stopped or cancelled.
      enable = !getCancellableProcesses(e.getProject()).isEmpty() || !getActiveDescriptors(e.getDataContext()).isEmpty();
    }
    else {
      final ProcessHandler processHandler = getHandler(e.getDataContext());
      if (processHandler != null && !processHandler.isProcessTerminated()) {
        if (!processHandler.isProcessTerminating()) {
          // Still running normally: a plain "stop" is available.
          enable = true;
        }
        // Already terminating: offer a forced kill if the handler supports it.
        else if (processHandler instanceof KillableProcess && ((KillableProcess)processHandler).canKillProcess()) {
          enable = true;
          icon = AllIcons.Debugger.KillProcess;
          description = "Kill process";
        }
      }
    }
    presentation.setEnabled(enable);
    presentation.setIcon(icon);
    presentation.setDescription(description);
  }

  @Override
  public void actionPerformed(final AnActionEvent e) {
    final DataContext dataContext = e.getDataContext();
    ProcessHandler activeProcessHandler = getHandler(dataContext);
    List<Pair<TaskInfo, ProgressIndicator>> backgroundTasks = getCancellableProcesses(e.getProject());
    if (ActionPlaces.MAIN_MENU.equals(e.getPlace())) {
      // Fast path: a single unambiguous target (the active process, no
      // background tasks) is stopped directly, without the chooser popup.
      if (activeProcessHandler != null && !activeProcessHandler.isProcessTerminating() && !activeProcessHandler.isProcessTerminated()
          && backgroundTasks.isEmpty()) {
        stopProcess(activeProcessHandler);
        return;
      }
      // first = all stoppable items; second = item to pre-select (may be null).
      Pair<List<HandlerItem>, HandlerItem>
        handlerItems = getItemsList(backgroundTasks, getActiveDescriptors(dataContext), activeProcessHandler);
      if (handlerItems.first.isEmpty()) return;
      final JBList list = new JBList(handlerItems.first);
      if (handlerItems.second != null) list.setSelectedValue(handlerItems.second, true);
      // Renders each HandlerItem with its display name, its icon, and an
      // optional separator between the run-content and background-task groups.
      list.setCellRenderer(new GroupedItemsListRenderer(new ListItemDescriptor() {
        @Nullable
        @Override
        public String getTextFor(Object value) {
          return value instanceof HandlerItem ? ((HandlerItem)value).displayName : null;
        }

        @Nullable
        @Override
        public String getTooltipFor(Object value) {
          return null;
        }

        @Nullable
        @Override
        public Icon getIconFor(Object value) {
          return value instanceof HandlerItem ? ((HandlerItem)value).icon : null;
        }

        @Override
        public boolean hasSeparatorAboveOf(Object value) {
          return value instanceof HandlerItem && ((HandlerItem)value).hasSeparator;
        }

        @Nullable
        @Override
        public String getCaptionAboveOf(Object value) {
          return null;
        }
      }));
      final PopupChooserBuilder builder = JBPopupFactory.getInstance().createListPopupBuilder(list);
      final JBPopup popup = builder
        .setMovable(true)
        .setTitle(handlerItems.first.size() == 1 ? "Confirm process stop" : "Stop process")
        .setFilteringEnabled(new Function<Object, String>() {
          @Override
          public String fun(Object o) {
            // Speed-search matches against the item's display name.
            return ((HandlerItem)o).displayName;
          }
        })
        .setItemChoosenCallback(new Runnable() {
          @Override
          public void run() {
            HandlerItem item = (HandlerItem)list.getSelectedValue();
            if (item != null) item.stop();
          }
        }).setRequestFocus(true).createPopup();
      popup.showCenteredInCurrentWindow(e.getProject());
    }
    else {
      // Tool window / editor context: stop the surrounding content's process.
      if (activeProcessHandler != null) {
        stopProcess(activeProcessHandler);
      }
    }
  }

  /**
   * Collects the cancellable, not-yet-cancelled background tasks shown in the
   * status bar of the frame belonging to {@code project}; empty when no
   * frame or status bar is available.
   */
  private static List<Pair<TaskInfo, ProgressIndicator>> getCancellableProcesses(Project project) {
    IdeFrame frame = ((WindowManagerEx)WindowManager.getInstance()).findFrameFor(project);
    StatusBarEx statusBar = frame == null ? null : (StatusBarEx)frame.getStatusBar();
    if (statusBar == null) return Collections.emptyList();

    return ContainerUtil.findAll(statusBar.getBackgroundProcesses(),
                                 new Condition<Pair<TaskInfo, ProgressIndicator>>() {
                                   @Override
                                   public boolean value(Pair<TaskInfo, ProgressIndicator> pair) {
                                     return pair.first.isCancellable() && !pair.second.isCanceled();
                                   }
                                 });
  }

  /**
   * Builds the popup model: one item per run content (listed first), then one
   * item per background task. The returned pair's second element is the item
   * whose handler equals {@code activeProcessHandler}, used as the initial
   * selection (may be null).
   */
  private static Pair<List<HandlerItem>, HandlerItem> getItemsList(List<Pair<TaskInfo, ProgressIndicator>> tasks,
                                                                   List<RunContentDescriptor> descriptors,
                                                                   ProcessHandler activeProcessHandler) {
    if (tasks.isEmpty() && descriptors.isEmpty()) return Pair.create(Collections.<HandlerItem>emptyList(), null);

    ArrayList<HandlerItem> items = new ArrayList<HandlerItem>(tasks.size() + descriptors.size());
    HandlerItem selected = null;
    for (RunContentDescriptor descriptor : descriptors) {
      final ProcessHandler handler = descriptor.getProcessHandler();
      if (handler != null) {
        HandlerItem item = new HandlerItem(descriptor.getDisplayName(), descriptor.getIcon(), false) {
          @Override
          void stop() {
            stopProcess(handler);
          }
        };
        items.add(item);
        if (handler == activeProcessHandler) selected = item;
      }
    }

    // Only the first background task carries the group separator.
    // NOTE(review): when there are no run-content items, the very first row
    // still gets the separator flag - presumably the renderer suppresses a
    // separator above row 0; confirm before changing.
    boolean hasSeparator = true;
    for (final Pair<TaskInfo, ProgressIndicator> eachPair : tasks) {
      items.add(new HandlerItem(eachPair.first.getTitle(), AllIcons.Process.Step_passive, hasSeparator) {
        @Override
        void stop() {
          eachPair.second.cancel();
        }
      });
      hasSeparator = false;
    }
    return Pair.<List<HandlerItem>, HandlerItem>create(items, selected);
  }

  /**
   * Stops {@code processHandler}: force-kills it when it is already
   * terminating and killable, otherwise detaches or destroys it according to
   * the handler's default.
   */
  private static void stopProcess(ProcessHandler processHandler) {
    if (processHandler instanceof KillableProcess && processHandler.isProcessTerminating()) {
      // A stop request on an already-terminating process escalates to a kill.
      ((KillableProcess)processHandler).killProcess();
      return;
    }

    if (processHandler.detachIsDefault()) {
      processHandler.detachProcess();
    }
    else {
      processHandler.destroyProcess();
    }
  }

  /**
   * Resolves the process handler for the current context: the surrounding run
   * content when invoked from a tool window, otherwise the selected content of
   * the project's run content manager (main menu / toolbar case).
   */
  @Nullable
  private static ProcessHandler getHandler(final DataContext dataContext) {
    final RunContentDescriptor contentDescriptor = RunContentManager.RUN_CONTENT_DESCRIPTOR.getData(dataContext);
    final ProcessHandler processHandler;
    if (contentDescriptor != null) {
      // toolwindow case
      processHandler = contentDescriptor.getProcessHandler();
    }
    else {
      // main menu toolbar
      final Project project = PlatformDataKeys.PROJECT.getData(dataContext);
      final RunContentDescriptor selectedContent =
        project == null ? null : ExecutionManager.getInstance(project).getContentManager().getSelectedContent();
      processHandler = selectedContent == null ? null : selectedContent.getProcessHandler();
    }
    return processHandler;
  }

  /**
   * Returns the run contents whose process is still alive (neither terminating
   * nor terminated); empty when the project is unknown or nothing is running.
   */
  @NotNull
  private static List<RunContentDescriptor> getActiveDescriptors(final DataContext dataContext) {
    final Project project = PlatformDataKeys.PROJECT.getData(dataContext);
    if (project == null) {
      return Collections.emptyList();
    }
    final List<RunContentDescriptor> runningProcesses = ExecutionManager.getInstance(project).getContentManager().getAllDescriptors();
    if (runningProcesses.isEmpty()) {
      return Collections.emptyList();
    }
    final List<RunContentDescriptor> activeDescriptors = new ArrayList<RunContentDescriptor>();
    for (RunContentDescriptor descriptor : runningProcesses) {
      final ProcessHandler processHandler = descriptor.getProcessHandler();
      if (processHandler != null && !processHandler.isProcessTerminating() && !processHandler.isProcessTerminated()) {
        activeDescriptors.add(descriptor);
      }
    }
    return activeDescriptors;
  }

  /** One row of the "stop what?" popup: a named, stoppable entity. */
  private abstract static class HandlerItem {
    final String displayName;
    final Icon icon;
    final boolean hasSeparator; // draw a separator line above this row

    private HandlerItem(String displayName, Icon icon, boolean hasSeparator) {
      this.displayName = displayName;
      this.icon = icon;
      this.hasSeparator = hasSeparator;
    }

    public String toString() {
      return displayName;
    }

    /** Stops/cancels the underlying process or task. */
    abstract void stop();
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Contains the details of an Amazon RDS DB subnet group.
 * <p>
 * This data type is used as a response element in the
 * <code>DescribeDBSubnetGroups</code> action.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/DBSubnetGroup" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DBSubnetGroup implements Serializable, Cloneable {

    /** The name of the DB subnet group. */
    private String dBSubnetGroupName;
    /** The description of the DB subnet group. */
    private String dBSubnetGroupDescription;
    /** The VpcId of the DB subnet group. */
    private String vpcId;
    /** The status of the DB subnet group. */
    private String subnetGroupStatus;
    /** The <code>Subnet</code> elements of the group; lazily created by the getter. */
    private com.amazonaws.internal.SdkInternalList<Subnet> subnets;
    /** The Amazon Resource Name (ARN) for the DB subnet group. */
    private String dBSubnetGroupArn;

    /** @param dBSubnetGroupName the name of the DB subnet group. */
    public void setDBSubnetGroupName(String dBSubnetGroupName) {
        this.dBSubnetGroupName = dBSubnetGroupName;
    }

    /** @return the name of the DB subnet group. */
    public String getDBSubnetGroupName() {
        return dBSubnetGroupName;
    }

    /**
     * Fluent variant of {@link #setDBSubnetGroupName(String)}.
     *
     * @return this object, so calls can be chained.
     */
    public DBSubnetGroup withDBSubnetGroupName(String dBSubnetGroupName) {
        this.dBSubnetGroupName = dBSubnetGroupName;
        return this;
    }

    /** @param dBSubnetGroupDescription the description of the DB subnet group. */
    public void setDBSubnetGroupDescription(String dBSubnetGroupDescription) {
        this.dBSubnetGroupDescription = dBSubnetGroupDescription;
    }

    /** @return the description of the DB subnet group. */
    public String getDBSubnetGroupDescription() {
        return dBSubnetGroupDescription;
    }

    /**
     * Fluent variant of {@link #setDBSubnetGroupDescription(String)}.
     *
     * @return this object, so calls can be chained.
     */
    public DBSubnetGroup withDBSubnetGroupDescription(String dBSubnetGroupDescription) {
        this.dBSubnetGroupDescription = dBSubnetGroupDescription;
        return this;
    }

    /** @param vpcId the VpcId of the DB subnet group. */
    public void setVpcId(String vpcId) {
        this.vpcId = vpcId;
    }

    /** @return the VpcId of the DB subnet group. */
    public String getVpcId() {
        return vpcId;
    }

    /**
     * Fluent variant of {@link #setVpcId(String)}.
     *
     * @return this object, so calls can be chained.
     */
    public DBSubnetGroup withVpcId(String vpcId) {
        this.vpcId = vpcId;
        return this;
    }

    /** @param subnetGroupStatus the status of the DB subnet group. */
    public void setSubnetGroupStatus(String subnetGroupStatus) {
        this.subnetGroupStatus = subnetGroupStatus;
    }

    /** @return the status of the DB subnet group. */
    public String getSubnetGroupStatus() {
        return subnetGroupStatus;
    }

    /**
     * Fluent variant of {@link #setSubnetGroupStatus(String)}.
     *
     * @return this object, so calls can be chained.
     */
    public DBSubnetGroup withSubnetGroupStatus(String subnetGroupStatus) {
        this.subnetGroupStatus = subnetGroupStatus;
        return this;
    }

    /**
     * @return the list of <code>Subnet</code> elements; never null (an empty
     *         backing list is created on first access).
     */
    public java.util.List<Subnet> getSubnets() {
        if (subnets == null) {
            subnets = new com.amazonaws.internal.SdkInternalList<Subnet>();
        }
        return subnets;
    }

    /**
     * Replaces the list of <code>Subnet</code> elements with a copy of
     * {@code subnets} (or clears it when {@code subnets} is null).
     */
    public void setSubnets(java.util.Collection<Subnet> subnets) {
        this.subnets = (subnets == null) ? null : new com.amazonaws.internal.SdkInternalList<Subnet>(subnets);
    }

    /**
     * Appends the given <code>Subnet</code> elements to the existing list (if
     * any). Use {@link #setSubnets(java.util.Collection)} or
     * {@link #withSubnets(java.util.Collection)} to override the existing
     * values instead.
     *
     * @return this object, so calls can be chained.
     */
    public DBSubnetGroup withSubnets(Subnet... subnets) {
        if (this.subnets == null) {
            this.subnets = new com.amazonaws.internal.SdkInternalList<Subnet>(subnets.length);
        }
        java.util.Collections.addAll(this.subnets, subnets);
        return this;
    }

    /**
     * Fluent variant of {@link #setSubnets(java.util.Collection)}.
     *
     * @return this object, so calls can be chained.
     */
    public DBSubnetGroup withSubnets(java.util.Collection<Subnet> subnets) {
        setSubnets(subnets);
        return this;
    }

    /** @param dBSubnetGroupArn the Amazon Resource Name (ARN) for the DB subnet group. */
    public void setDBSubnetGroupArn(String dBSubnetGroupArn) {
        this.dBSubnetGroupArn = dBSubnetGroupArn;
    }

    /** @return the Amazon Resource Name (ARN) for the DB subnet group. */
    public String getDBSubnetGroupArn() {
        return dBSubnetGroupArn;
    }

    /**
     * Fluent variant of {@link #setDBSubnetGroupArn(String)}.
     *
     * @return this object, so calls can be chained.
     */
    public DBSubnetGroup withDBSubnetGroupArn(String dBSubnetGroupArn) {
        this.dBSubnetGroupArn = dBSubnetGroupArn;
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and
     * debugging. Only the fields that are set are included.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getDBSubnetGroupName() != null) {
            sb.append("DBSubnetGroupName: " + getDBSubnetGroupName() + ",");
        }
        if (getDBSubnetGroupDescription() != null) {
            sb.append("DBSubnetGroupDescription: " + getDBSubnetGroupDescription() + ",");
        }
        if (getVpcId() != null) {
            sb.append("VpcId: " + getVpcId() + ",");
        }
        if (getSubnetGroupStatus() != null) {
            sb.append("SubnetGroupStatus: " + getSubnetGroupStatus() + ",");
        }
        if (getSubnets() != null) {
            sb.append("Subnets: " + getSubnets() + ",");
        }
        if (getDBSubnetGroupArn() != null) {
            sb.append("DBSubnetGroupArn: " + getDBSubnetGroupArn());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DBSubnetGroup)) {
            return false;
        }
        DBSubnetGroup that = (DBSubnetGroup) obj;
        return eq(that.getDBSubnetGroupName(), getDBSubnetGroupName())
                && eq(that.getDBSubnetGroupDescription(), getDBSubnetGroupDescription())
                && eq(that.getVpcId(), getVpcId())
                && eq(that.getSubnetGroupStatus(), getSubnetGroupStatus())
                && eq(that.getSubnets(), getSubnets())
                && eq(that.getDBSubnetGroupArn(), getDBSubnetGroupArn());
    }

    /** Null-tolerant equality helper used by {@link #equals(Object)}. */
    private static boolean eq(Object a, Object b) {
        return a == b || (a != null && a.equals(b));
    }

    @Override
    public int hashCode() {
        // Standard 31-based chain over the fields, in declaration order.
        int hashCode = 1;
        for (Object field : new Object[] { getDBSubnetGroupName(), getDBSubnetGroupDescription(), getVpcId(),
                getSubnetGroupStatus(), getSubnets(), getDBSubnetGroupArn() }) {
            hashCode = 31 * hashCode + (field == null ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public DBSubnetGroup clone() {
        try {
            return (DBSubnetGroup) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kendra.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Provides information that you can use to highlight a search result so that your users can quickly identify terms in
* the response.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kendra-2019-02-03/Highlight" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Highlight implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The zero-based location in the response string where the highlight starts.
* </p>
*/
private Integer beginOffset;
/**
* <p>
* The zero-based location in the response string where the highlight ends.
* </p>
*/
private Integer endOffset;
/**
* <p>
* Indicates whether the response is the best response. True if this is the best response; otherwise, false.
* </p>
*/
private Boolean topAnswer;
/**
* <p>
* The highlight type.
* </p>
*/
private String type;
/**
* <p>
* The zero-based location in the response string where the highlight starts.
* </p>
*
* @param beginOffset
* The zero-based location in the response string where the highlight starts.
*/
public void setBeginOffset(Integer beginOffset) {
this.beginOffset = beginOffset;
}
/**
* <p>
* The zero-based location in the response string where the highlight starts.
* </p>
*
* @return The zero-based location in the response string where the highlight starts.
*/
public Integer getBeginOffset() {
return this.beginOffset;
}
/**
* <p>
* The zero-based location in the response string where the highlight starts.
* </p>
*
* @param beginOffset
* The zero-based location in the response string where the highlight starts.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Highlight withBeginOffset(Integer beginOffset) {
setBeginOffset(beginOffset);
return this;
}
/**
* <p>
* The zero-based location in the response string where the highlight ends.
* </p>
*
* @param endOffset
* The zero-based location in the response string where the highlight ends.
*/
public void setEndOffset(Integer endOffset) {
this.endOffset = endOffset;
}
/**
* <p>
* The zero-based location in the response string where the highlight ends.
* </p>
*
* @return The zero-based location in the response string where the highlight ends.
*/
public Integer getEndOffset() {
return this.endOffset;
}
/**
* <p>
* The zero-based location in the response string where the highlight ends.
* </p>
*
* @param endOffset
* The zero-based location in the response string where the highlight ends.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Highlight withEndOffset(Integer endOffset) {
setEndOffset(endOffset);
return this;
}
/**
* <p>
* Indicates whether the response is the best response. True if this is the best response; otherwise, false.
* </p>
*
* @param topAnswer
* Indicates whether the response is the best response. True if this is the best response; otherwise, false.
*/
public void setTopAnswer(Boolean topAnswer) {
this.topAnswer = topAnswer;
}
/**
* <p>
* Indicates whether the response is the best response. True if this is the best response; otherwise, false.
* </p>
*
* @return Indicates whether the response is the best response. True if this is the best response; otherwise, false.
*/
public Boolean getTopAnswer() {
return this.topAnswer;
}
/**
* <p>
* Indicates whether the response is the best response. True if this is the best response; otherwise, false.
* </p>
*
* @param topAnswer
* Indicates whether the response is the best response. True if this is the best response; otherwise, false.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Highlight withTopAnswer(Boolean topAnswer) {
setTopAnswer(topAnswer);
return this;
}
/**
 * <p>
 * Indicates whether the response is the best response. True if this is the best response; otherwise, false.
 * </p>
 *
 * @return Indicates whether the response is the best response. True if this is the best response; otherwise, false.
 */
public Boolean isTopAnswer() {
    return topAnswer;
}
/**
 * <p>
 * The highlight type.
 * </p>
 *
 * @param type
 *        The highlight type.
 * @see HighlightType
 */
public void setType(String type) {
this.type = type;
}
/**
 * <p>
 * The highlight type.
 * </p>
 *
 * @return The highlight type.
 * @see HighlightType
 */
public String getType() {
    return type;
}
/**
 * <p>
 * The highlight type.
 * </p>
 *
 * @param type
 *        The highlight type.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see HighlightType
 */
public Highlight withType(String type) {
    this.setType(type);
    return this;
}
/**
 * <p>
 * The highlight type.
 * </p>
 *
 * @param type
 *        The highlight type.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see HighlightType
 */
public Highlight withType(HighlightType type) {
    // Store the enum's wire value directly; a null argument throws
    // NullPointerException here, same as the original direct toString() call.
    String typeValue = type.toString();
    this.type = typeValue;
    return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Only non-null fields are rendered; every field but the last is followed by
    // a comma, exactly as the generated SDK formatter emits it.
    final StringBuilder buf = new StringBuilder("{");
    if (getBeginOffset() != null) {
        buf.append("BeginOffset: ").append(getBeginOffset()).append(",");
    }
    if (getEndOffset() != null) {
        buf.append("EndOffset: ").append(getEndOffset()).append(",");
    }
    if (getTopAnswer() != null) {
        buf.append("TopAnswer: ").append(getTopAnswer()).append(",");
    }
    if (getType() != null) {
        buf.append("Type: ").append(getType());
    }
    return buf.append("}").toString();
}
/**
 * Field-by-field equality against another {@code Highlight}; null-safe on every
 * attribute. Behaviour is identical to the generated XOR/negated-equals chain.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, covering the original explicit null check.
    if (!(obj instanceof Highlight)) {
        return false;
    }
    Highlight that = (Highlight) obj;
    // Objects.equals(a, b) == (a == b || (a != null && a.equals(b))), which is
    // exactly the null-XOR + equals pattern it replaces.
    return java.util.Objects.equals(that.getBeginOffset(), this.getBeginOffset())
            && java.util.Objects.equals(that.getEndOffset(), this.getEndOffset())
            && java.util.Objects.equals(that.getTopAnswer(), this.getTopAnswer())
            && java.util.Objects.equals(that.getType(), this.getType());
}
/**
 * Hash code consistent with {@link #equals(Object)}.
 */
@Override
public int hashCode() {
    // Objects.hash uses the same accumulation as the manual loop it replaces:
    // seed 1, multiplier 31, null contributes 0 — so values are bit-identical.
    return java.util.Objects.hash(getBeginOffset(), getEndOffset(), getTopAnswer(), getType());
}
/**
 * Returns a shallow copy of this object via {@link Object#clone()}.
 */
@Override
public Highlight clone() {
try {
return (Highlight) super.clone();
} catch (CloneNotSupportedException e) {
// why: the class is Cloneable (see message), so this path is unreachable in
// practice; the checked exception is converted to an unchecked one.
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
/**
 * Marshals this object's state into the given {@code ProtocolMarshaller} by
 * delegating to the generated {@code HighlightMarshaller} singleton.
 * SDK-internal API; not intended for direct use by callers.
 */
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.kendra.model.transform.HighlightMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| |
import java.io.*;
import java.util.*;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.tartarus.snowball.ext.porterStemmer;
public class LaplaceSmoothingStemmed {

    /**
     * Entry point: runs Laplace (add-one) smoothed language-model retrieval over
     * stemmed query terms. For each query it stems the terms, collects the
     * per-document term frequencies from the on-disk inverted index, scores every
     * candidate document with {@link #laplacePerTerm}, and writes the top-100
     * ranked documents per query to "Laplace-1.txt" in TREC result format.
     *
     * @param args unused command-line arguments
     * @throws IOException declared for compatibility; I/O failures are caught and logged
     */
    public static void main(String[] args) throws IOException {
        try {
            String type = "stemAndStop";
            Utility u = new Utility(type);
            String folder = u.returnFolderName();
            Double avg_doc_length = u.returnAvgLength();
            Map<String, DocBean> docCatBean = u.getDocCat();
            Map<String, TokenCatalogBean> tokenCatBean = u.getTokenCat();
            System.out.println("Avg Doc Length:: " + avg_doc_length);
            System.out.println("DocCatBean Size:: " + docCatBean.size());
            System.out.println("Token Cat Bean:: " + tokenCatBean.size());
            System.out.println("Doc Bean:::: " + u.getDocBean().size());

            long vocabSize = tokenCatBean.size();
            System.out.println("Vocab Size ::" + vocabSize);
            System.out.println("Avg Doc Length:::: " + avg_doc_length);

            /* Query file location; TODO: read this from a config file instead of hard-coding. */
            String query_file_path = "C:/Users/Nitin/NEU/Summer Sem/IR/Data/Assign 1/AP89_DATA/AP_DATA/query_desc.51-100.short.txt";
            GetFinalQueries qu = new GetFinalQueries();
            List<String> queries = qu.readQueryFile(query_file_path);

            /* Stopwords from the stoplist file plus common query-boilerplate words. */
            List<String> stop_words_final = qu.getStopWords();

            /* Each entry is one query: element 0 is the query number, the rest are its terms. */
            List<List<String>> final_query = qu.getFinalQueryList(queries, stop_words_final);

            List<List<String>> resultOkapi = new ArrayList<List<String>>();
            for (List<String> query : final_query) {
                /* docKey -> space-separated raw term frequencies of the matched query terms */
                Map<String, String> queryTFList = new HashMap<String, String>();
                Map<String, Double> rankTerm = new HashMap<String, Double>();
                System.out.println("Query Minus stop words");
                System.out.println("=======================");
                String querynum = query.get(0).replace(".", "");
                System.out.println("Query Size::" + query.size());

                /* Gather term frequencies for every (stemmed) query term, skipping element 0. */
                for (int i = 1; i < query.size(); i++) {
                    /* Stem the term so it matches the stemmed index vocabulary;
                       fall back to the lower-cased raw term when stemming fails. */
                    porterStemmer es = new porterStemmer();
                    es.setCurrent(query.get(i).trim().toLowerCase());
                    String w;
                    if (es.stem()) {
                        w = es.getCurrent();
                    } else {
                        w = query.get(i).trim().toLowerCase();
                    }
                    String cleaned = w.replaceAll("[,\"()]", "");
                    Map<String, Integer> tfMap = laplaceSmoothing(cleaned, avg_doc_length,
                            tokenCatBean, docCatBean, u, vocabSize, folder);
                    System.out.println("Size of TF Results:: " + tfMap.size() + "for :" + cleaned);
                    for (Map.Entry<String, Integer> term : tfMap.entrySet()) {
                        String prev = queryTFList.get(term.getKey());
                        if (prev == null) {
                            queryTFList.put(term.getKey(), term.getValue().toString());
                        } else {
                            queryTFList.put(term.getKey(), prev + " " + term.getValue().toString());
                        }
                    }
                }
                System.out.println("Final DOc List Size::: " + queryTFList.size());
                System.out.println("Calculating Laplace Smoothing Score for each ::::::: ");
                for (Map.Entry<String, String> d : queryTFList.entrySet()) {
                    double docLen = getDocLength(d.getKey(), docCatBean);
                    rankTerm.put(d.getKey(), laplacePerTerm(d.getValue(), docLen,
                            avg_doc_length, vocabSize, query.size()));
                }

                /* Sort by descending score and keep the 100 best documents. */
                SortMap sm = new SortMap();
                LinkedHashMap<String, Double> sortedRanks =
                        (LinkedHashMap<String, Double>) sm.getSortedRankMap(rankTerm);
                List<String> queryResults = new ArrayList<String>();
                int rank = 1;
                for (Entry<String, Double> term : sortedRanks.entrySet()) {
                    if (rank > 100) {
                        break;
                    }
                    queryResults.add(querynum + " " + "Q0" + " " + term.getKey() + " " + rank
                            + " " + term.getValue() + " " + "EXP");
                    rank++;
                }
                resultOkapi.add(queryResults);
            }
            WriteFile w = new WriteFile();
            w.writeToFile(resultOkapi, "Laplace-1.txt", type);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Looks up the stored length of a document in the catalog.
     *
     * @return the document length, or 0.0 when the key is unknown
     */
    private static double getDocLength(String key, Map<String, DocBean> docCatBean) {
        DocBean bean = docCatBean.get(key);
        return (bean == null) ? 0.0 : bean.getDocLength();
    }

    /**
     * Fetches the per-document term frequencies for {@code word} when it exists
     * in the token catalog; returns an empty map otherwise.
     *
     * @param word           query term (already stemmed)
     * @param avg_doc_length unused here; kept for signature compatibility
     * @param folder         index folder name under the fixed base path
     * @return docKey -> term frequency of {@code word} in that document
     * @throws IOException if the index file cannot be read
     */
    public static Map<String, Integer> laplaceSmoothing(String word,
            Double avg_doc_length, Map<String, TokenCatalogBean> tokenCatBean,
            Map<String, DocBean> docCatBean, Utility u, long vocabSize, String folder)
            throws IOException {
        Map<String, Integer> results = new HashMap<String, Integer>();
        if (tokenCatBean.containsKey(word.toLowerCase().trim())) {
            results = queryTF(word, tokenCatBean, docCatBean, u, folder);
        }
        return results;
    }

    /**
     * Returns a copy of {@code rankTerm} ordered by descending numeric value.
     * Values are compared after parsing their string form as doubles, preserving
     * the original behaviour for boxed numeric value types.
     */
    public static <K, V extends Comparable<? super V>> Map<K, V> getSortedMap(
            Map<K, V> rankTerm) {
        System.out.println("Started Sorting..." + "@ " + new Date());
        List<Map.Entry<K, V>> list = new LinkedList<Map.Entry<K, V>>(rankTerm.entrySet());
        Collections.sort(list, new Comparator<Map.Entry<K, V>>() {
            public int compare(Map.Entry<K, V> o1, Map.Entry<K, V> o2) {
                /* Descending order; Double.compare replaces the hand-rolled
                   ternary chain (which also violated the comparator contract for NaN). */
                return Double.compare(Double.parseDouble(o2.getValue().toString()),
                        Double.parseDouble(o1.getValue().toString()));
            }
        });
        Map<K, V> result = new LinkedHashMap<K, V>();
        for (Map.Entry<K, V> entry : list) {
            result.put(entry.getKey(), entry.getValue());
        }
        System.out.println("Stopped Sorting..." + "@ " + new Date());
        return result;
    }

    /**
     * Loads the stoplist file and appends query-boilerplate words. Not called by
     * {@link #main} (which delegates to GetFinalQueries) but kept for
     * compatibility. The reader is now closed via try-with-resources (the
     * original leaked it).
     */
    private static List<String> getStopWords() {
        File stop_words = new File(
                "C:/Users/Nitin/NEU/Summer Sem/IR/Data/Assign 1/AP89_DATA/AP_DATA/stoplist.txt");
        List<String> stop_words_final = new ArrayList<String>();
        try {
            if (stop_words.isFile()) {
                try (BufferedReader br = new BufferedReader(new InputStreamReader(
                        new FileInputStream(stop_words)))) {
                    String stop;
                    while ((stop = br.readLine()) != null) {
                        stop_words_final.add(stop.trim());
                    }
                }
                /* Common words that appear in query descriptions but carry no signal. */
                stop_words_final.add("discuss");
                stop_words_final.add("identify");
                stop_words_final.add("report");
                stop_words_final.add("include");
                stop_words_final.add("predict");
                stop_words_final.add("cite");
                stop_words_final.add("describe");
                stop_words_final.add("Document");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return stop_words_final;
    }

    /**
     * Reads the query file, trimming each line and dropping everything after the
     * last '.' (the trailing stop). Not called by {@link #main}; kept for
     * compatibility. Fixes two defects of the original: the reader leaked, and a
     * line without any '.' threw StringIndexOutOfBoundsException — such lines are
     * now kept whole.
     */
    private static List<String> readQueryFile(String query_file_path)
            throws IOException {
        List<String> query_list = new ArrayList<String>();
        try (BufferedReader br = new BufferedReader(new InputStreamReader(
                new FileInputStream(new File(query_file_path))))) {
            String str;
            while ((str = br.readLine()) != null) {
                String q = str.trim();
                if (q.length() > 0) {
                    int endIndexofStop = q.lastIndexOf(".");
                    if (endIndexofStop < 0) {
                        /* Guard: no terminator — keep the whole trimmed line. */
                        query_list.add(q);
                    } else {
                        query_list.add(q.substring(0, endIndexofStop).trim());
                    }
                }
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        return query_list;
    }

    /**
     * Okapi TF for a single term: tf / (tf + 0.5 + 1.5 * (docLen / avgLen)).
     */
    public static double okapiTFPerTerm(Integer termFreq, double docLen,
            double avgLen) {
        double lenRatio = docLen / avgLen;
        return termFreq / (termFreq + 0.5 + (1.5 * lenRatio));
    }

    /**
     * Sums log((tf + 1) / (docLen + vocabSize)) over the document's matched query
     * terms, plus the add-one penalty log(1 / (docLen + vocabSize)) for each
     * query word with no match in this document.
     *
     * NOTE(review): queryWords is passed as query.size(), which includes the
     * query-number element at index 0, so the missing-term penalty counts one
     * extra slot. Preserved as-is so scores stay identical — confirm intent.
     *
     * @param termFreq space-separated term frequencies for one document
     */
    public static double laplacePerTerm(String termFreq, double docLen,
            double avgLen, long vocabSize, int queryWords) {
        String[] termFs = termFreq.split(" ");
        double log_p_lap = 0.0;
        if (termFs.length < queryWords) {
            log_p_lap += Math.log(1.0 / (docLen + vocabSize))
                    * (queryWords - termFs.length);
        }
        for (int i = 0; i < termFs.length; i++) {
            log_p_lap += Math.log((Integer.parseInt(termFs[i]) + 1)
                    / (docLen + vocabSize));
        }
        return log_p_lap;
    }

    /**
     * Reads the inverted-index entry for {@code word}, located via its start/end
     * byte offsets in the token catalog, and returns docKey -> term frequency.
     * Fixes two defects of the original: the file handle leaked when parsing
     * threw, and the return value of read() was ignored (a partial read would
     * silently corrupt the entry) — readFully is used instead.
     *
     * @throws IOException if the index file cannot be opened or fully read
     */
    public static Map<String, Integer> queryTF(String word,
            Map<String, TokenCatalogBean> tokenCatBean,
            Map<String, DocBean> docCatBean, Utility u, String folder) throws IOException {
        Map<String, Integer> results = new HashMap<>();
        word = word.toLowerCase().trim();
        if (!tokenCatBean.containsKey(word)) {
            return results;
        }
        long startOffset = tokenCatBean.get(word).getStartOffset();
        long endOffset = tokenCatBean.get(word).getEndOffset();
        try (RandomAccessFile raf = new RandomAccessFile(
                "C:\\Users\\Nitin\\Assign2\\" + folder + "\\TermsHash84.txt", "r")) {
            raf.seek(startOffset);
            byte[] termLine = new byte[(int) (endOffset - startOffset)];
            raf.readFully(termLine);
            String term = new String(termLine);
            /* Entry tokens look like "<docId>:<tf>-..."; the first and last
               space-separated tokens are skipped, matching the original parser —
               presumably the leading word and a trailing fragment; confirm against
               the index writer. */
            String[] termOutput = term.split(" ");
            for (int i = 1; i < termOutput.length - 1; i++) {
                String[] docDetail = termOutput[i].split(":");
                int docId = Integer.parseInt(docDetail[0]);
                int endIndex = docDetail[1].indexOf("-");
                int tF = Integer.parseInt(docDetail[1].substring(0, endIndex));
                results.put(u.getDocKey(docId), tF);
            }
        }
        return results;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.