gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/spanner/v1/spanner.proto
package com.google.spanner.v1;
/**
* <pre>
* The request for [CreateSession][google.spanner.v1.Spanner.CreateSession].
* </pre>
*
* Protobuf type {@code google.spanner.v1.CreateSessionRequest}
*/
public final class CreateSessionRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.spanner.v1.CreateSessionRequest)
CreateSessionRequestOrBuilder {
// Use CreateSessionRequest.newBuilder() to construct.
private CreateSessionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: proto3 string fields default to "".
private CreateSessionRequest() {
database_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
// NOTE: always returns the empty set. This (older protoc) generated class
// discards unknown fields read off the wire instead of preserving them.
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
// Wire-format parsing constructor, invoked via PARSER.parsePartialFrom().
private CreateSessionRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of input.
done = true;
break;
// NOTE(review): the `default` label appears before `case 10`. Java switch
// dispatch is label-based, not positional, so behavior is unaffected, but
// stock protoc output lists `case 10` first — confirm against a fresh
// regeneration of this file.
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (`database`), wire type 2; proto3 requires valid UTF-8.
java.lang.String s = input.readStringRequireUtf8();
database_ = s;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a single parse-exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.spanner.v1.SpannerProto.internal_static_google_spanner_v1_CreateSessionRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.spanner.v1.SpannerProto.internal_static_google_spanner_v1_CreateSessionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.spanner.v1.CreateSessionRequest.class, com.google.spanner.v1.CreateSessionRequest.Builder.class);
}
public static final int DATABASE_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily converted and cached below.
private volatile java.lang.Object database_;
/**
* <pre>
* Required. The database in which the new session is created.
* </pre>
*
* <code>optional string database = 1;</code>
*/
public java.lang.String getDatabase() {
java.lang.Object ref = database_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First access after parsing: decode the ByteString once and cache the String.
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
database_ = s;
return s;
}
}
/**
* <pre>
* Required. The database in which the new session is created.
* </pre>
*
* <code>optional string database = 1;</code>
*/
public com.google.protobuf.ByteString
getDatabaseBytes() {
java.lang.Object ref = database_;
if (ref instanceof java.lang.String) {
// Cache the UTF-8 encoding for subsequent serialization calls.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
database_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
// No required (proto2-style) fields, so every instance is initialized;
// the memoized flag short-circuits repeated checks.
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// proto3: default-valued (empty) fields are omitted from the wire format.
if (!getDatabaseBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, database_);
}
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getDatabaseBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, database_);
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.spanner.v1.CreateSessionRequest)) {
return super.equals(obj);
}
com.google.spanner.v1.CreateSessionRequest other = (com.google.spanner.v1.CreateSessionRequest) obj;
boolean result = true;
result = result && getDatabase()
.equals(other.getDatabase());
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (37 * hash) + DATABASE_FIELD_NUMBER;
hash = (53 * hash) + getDatabase().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.spanner.v1.CreateSessionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.spanner.v1.CreateSessionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.spanner.v1.CreateSessionRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.spanner.v1.CreateSessionRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.spanner.v1.CreateSessionRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.spanner.v1.CreateSessionRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.spanner.v1.CreateSessionRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.spanner.v1.CreateSessionRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.spanner.v1.CreateSessionRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.spanner.v1.CreateSessionRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.spanner.v1.CreateSessionRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
// Avoid a needless mergeFrom() when converting the default instance.
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* The request for [CreateSession][google.spanner.v1.Spanner.CreateSession].
* </pre>
*
* Protobuf type {@code google.spanner.v1.CreateSessionRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.spanner.v1.CreateSessionRequest)
com.google.spanner.v1.CreateSessionRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.spanner.v1.SpannerProto.internal_static_google_spanner_v1_CreateSessionRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.spanner.v1.SpannerProto.internal_static_google_spanner_v1_CreateSessionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.spanner.v1.CreateSessionRequest.class, com.google.spanner.v1.CreateSessionRequest.Builder.class);
}
// Construct using com.google.spanner.v1.CreateSessionRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// No message-typed fields, so there are no nested field builders to force.
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
database_ = "";
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.spanner.v1.SpannerProto.internal_static_google_spanner_v1_CreateSessionRequest_descriptor;
}
public com.google.spanner.v1.CreateSessionRequest getDefaultInstanceForType() {
return com.google.spanner.v1.CreateSessionRequest.getDefaultInstance();
}
public com.google.spanner.v1.CreateSessionRequest build() {
com.google.spanner.v1.CreateSessionRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.spanner.v1.CreateSessionRequest buildPartial() {
com.google.spanner.v1.CreateSessionRequest result = new com.google.spanner.v1.CreateSessionRequest(this);
result.database_ = database_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.spanner.v1.CreateSessionRequest) {
return mergeFrom((com.google.spanner.v1.CreateSessionRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.spanner.v1.CreateSessionRequest other) {
if (other == com.google.spanner.v1.CreateSessionRequest.getDefaultInstance()) return this;
if (!other.getDatabase().isEmpty()) {
database_ = other.database_;
onChanged();
}
// NOTE(review): this second onChanged() fires even when nothing was merged;
// harmless (it only re-marks the builder dirty) but redundant with the call above.
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.spanner.v1.CreateSessionRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.spanner.v1.CreateSessionRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
// Merge whatever was parsed before a failure, per Message.Builder contract.
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Same String/ByteString dual representation as the message class.
private java.lang.Object database_ = "";
/**
* <pre>
* Required. The database in which the new session is created.
* </pre>
*
* <code>optional string database = 1;</code>
*/
public java.lang.String getDatabase() {
java.lang.Object ref = database_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
database_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The database in which the new session is created.
* </pre>
*
* <code>optional string database = 1;</code>
*/
public com.google.protobuf.ByteString
getDatabaseBytes() {
java.lang.Object ref = database_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
database_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The database in which the new session is created.
* </pre>
*
* <code>optional string database = 1;</code>
*/
public Builder setDatabase(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
database_ = value;
onChanged();
return this;
}
/**
* <pre>
* Required. The database in which the new session is created.
* </pre>
*
* <code>optional string database = 1;</code>
*/
public Builder clearDatabase() {
database_ = getDefaultInstance().getDatabase();
onChanged();
return this;
}
/**
* <pre>
* Required. The database in which the new session is created.
* </pre>
*
* <code>optional string database = 1;</code>
*/
public Builder setDatabaseBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
database_ = value;
onChanged();
return this;
}
// Intentional no-op: this proto3-era generated class does not retain unknown fields.
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// Intentional no-op: see setUnknownFields above.
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// Resource-name convenience setter (veneer addition); null clears the field.
public final Builder setDatabaseWithDatabaseName(com.google.spanner.v1.DatabaseName value) {
if (value == null) {
return setDatabase("");
}
return setDatabase(value.toString());
}
// Resource-name convenience getter; returns null when the field is unset/empty.
public final com.google.spanner.v1.DatabaseName getDatabaseAsDatabaseName() {
java.lang.String str = getDatabase();
if (str.isEmpty()) {
return null;
}
return com.google.spanner.v1.DatabaseName.parse(str);
}
// @@protoc_insertion_point(builder_scope:google.spanner.v1.CreateSessionRequest)
}
// Resource-name convenience getter on the message; returns null when unset/empty.
public final com.google.spanner.v1.DatabaseName getDatabaseAsDatabaseName() {
java.lang.String str = getDatabase();
if (str.isEmpty()) {
return null;
}
return com.google.spanner.v1.DatabaseName.parse(str);
}
// @@protoc_insertion_point(class_scope:google.spanner.v1.CreateSessionRequest)
private static final com.google.spanner.v1.CreateSessionRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.spanner.v1.CreateSessionRequest();
}
public static com.google.spanner.v1.CreateSessionRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateSessionRequest>
PARSER = new com.google.protobuf.AbstractParser<CreateSessionRequest>() {
public CreateSessionRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
// Delegates to the wire-format parsing constructor above.
return new CreateSessionRequest(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<CreateSessionRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateSessionRequest> getParserForType() {
return PARSER;
}
public com.google.spanner.v1.CreateSessionRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
package dk.dtu.imm.sensible.timespiral;
import hirondelle.date4j.DateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
import android.content.Intent;
import android.location.Geocoder;
import android.text.SpannableString;
import android.text.SpannableStringBuilder;
import android.text.StaticLayout;
import android.text.style.ForegroundColorSpan;
import android.text.style.RelativeSizeSpan;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.googlecode.androidannotations.annotations.AfterViews;
import com.googlecode.androidannotations.annotations.Background;
import com.googlecode.androidannotations.annotations.EFragment;
import com.googlecode.androidannotations.annotations.UiThread;
import com.googlecode.androidannotations.annotations.ViewById;
import com.nostra13.universalimageloader.core.DisplayImageOptions;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.ImageLoaderConfiguration;
import dk.dtu.imm.sensible.Constants;
import dk.dtu.imm.sensible.components.CustomFragment;
import dk.dtu.imm.sensible.components.HorizontalListView;
import dk.dtu.imm.sensible.components.TutorialActivity;
import dk.dtu.imm.sensible.components.UILogger;
import dk.dtu.imm.sensible.movement.LocationProcessor;
import dk.dtu.imm.sensible.rest.LocationResponseEntity;
import dk.dtu.imm.sensible.rest.RestClientV2;
import dk.dtu.imm.sensible.stats.StatsCalculator;
import dk.dtu.imm.sensible.stats.TimeAtLocation;
import dk.dtu.imm.sensible.utils.DBScan;
import dk.dtu.imm.sensible.utils.DateTimeUtils;
import dk.dtu.imm.sensiblejournal.R;
@EFragment(R.layout.timespiral_layout)
public class TimeSpiralFragment extends CustomFragment {

    @ViewById(R.id.surface_spiral) SpiralSurfaceView surfaceSpiral;
    @ViewById(R.id.timeline) HorizontalListView timeline;
    @ViewById(R.id.textview_locs) TextView textviewLocs;
    @ViewById(R.id.progress_timespiral) View progress;

    /** Number of days of location data fetched per backend request. */
    private static final int REQUEST_SIZE = 7;

    private Geocoder geocoder;
    // Written on the UI thread (tab callbacks), read on the @Background loader thread.
    private volatile boolean tabSelected;
    private volatile DateTime startDate;
    // NOTE(review): allStaticLocs is swapped on the background thread and read by the
    // UI adapter; the reference swap keeps the adapter from seeing a half-built list,
    // but element visibility is not formally guaranteed — confirm threading model.
    private List<TimeAtLocation> allStaticLocs;
    private ColorGenerator colorGenerator;
    private TimelineAdapter timelineAdapter;
    private int selected;
    private ArrayList<TimeAtLocation> clusters;

    /**
     * Wires up the views after AndroidAnnotations injection: fresh state, spiral
     * renderer, timeline adapter with click-to-select, then kicks off loading.
     */
    @AfterViews
    public void afterViews() {
        allStaticLocs = new ArrayList<TimeAtLocation>();
        geocoder = new Geocoder(getActivity().getBaseContext());
        startDate = DateTime.now(TimeZone.getDefault()).getStartOfDay();
        colorGenerator = new ColorGenerator();
        surfaceSpiral.setColorGenerator(colorGenerator);
        timelineAdapter = new TimelineAdapter();
        timeline.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
                // Highlight the tapped static location in the spiral.
                selected = arg2;
                surfaceSpiral.render(allStaticLocs, selected);
            }
        });
        timeline.setAdapter(timelineAdapter);
        loadData();
    }

    /** Adapter rendering one timeline row (date range, description, static-map thumb). */
    private class TimelineAdapter extends BaseAdapter {
        public int getCount() {
            return allStaticLocs.size();
        }
        public View getView(int position, View convertView, ViewGroup parent) {
            View itemView = convertView;
            if (itemView == null) {
                LayoutInflater inflater = getActivity().getLayoutInflater();
                itemView = inflater.inflate(R.layout.timeline_item_layout, null, true);
            }
            TimeAtLocation t = allStaticLocs.get(position);
            TextView textviewDateStart = (TextView) itemView.findViewById(R.id.tw_date);
            DateTime from = DateTimeUtils.timestampToDateTime(t.start);
            DateTime to = DateTimeUtils.timestampToDateTime(t.start + t.duration);
            // Compact form when the stay begins and ends on the same day.
            String dateTxt = "";
            if (from.isSameDayAs(to)) {
                dateTxt = from.format("WWW D MMM", Locale.US) + ", from " + from.format("hh:mm") + " to " + to.format("hh:mm");
            } else {
                dateTxt = "from " + from.format("WWW D MMM hh:mm", Locale.US) + " to " + to.format("WWW D MMM hh:mm", Locale.US);
            }
            textviewDateStart.setText(dateTxt);
            textviewDateStart.setTextColor(colorGenerator.get(t.clusterId));
            TextView textviewEvent = (TextView) itemView.findViewById(R.id.tw_event);
            textviewEvent.setText(t.description);
            textviewEvent.setTextColor(colorGenerator.get(t.clusterId));
            // Static-map marker: cluster colour (ARGB hex minus alpha) and letter label
            // A, B, C... derived from clusterId (assumes clusterIds start at 1 —
            // TODO confirm DBScan never yields id 0 here).
            String colStr = Integer.toHexString(colorGenerator.get(t.clusterId)).substring(2);
            String letter = Character.toString((char) ('A' + t.clusterId - 1));
            // SECURITY NOTE(review): API key is hard-coded in source and shipped in the
            // APK; move it to a resource/build config and restrict it in the console.
            String url = "http://maps.googleapis.com/maps/api/staticmap?key=AIzaSyCA9d5S16ROB3mAMclBgxGuMxRvzGV-z9c&" +
                    "scale=4&sensor=true&zoom=12&size=250x90&maptype=roadmap&markers=color:0x" + colStr + "%7Clabel:" + letter + "%7C";
            String latlon = String.format("%f,%f", t.lat, t.lon);
            ImageView img = (ImageView) itemView.findViewById(R.id.img_event);
            ImageLoader.getInstance().displayImage(url + latlon, img);
            return itemView;
        }
        @Override
        public Object getItem(int arg0) {
            // Items are addressed by position only; id/object lookups are unused.
            return null;
        }
        @Override
        public long getItemId(int arg0) {
            return 0;
        }
    }

    /**
     * Background loader: walks backwards in time in REQUEST_SIZE-day windows while
     * this tab stays selected. Each window's static locations are appended (all but
     * the oldest, which is held back and merged with the next window if the two are
     * within 300 m), re-clustered with DBSCAN, and pushed to the UI.
     */
    @Background
    public void loadData() {
        TimeAtLocation lastStatic = null;
        while (startDate.gt(Constants.EARLIEST_DATE) && tabSelected) {
            showProgress();
            try {
                // Fetch locs, find static locs, add to UI all but the last,
                // repeat while merging the held-back static loc across windows.
                List<LocationResponseEntity> locs = RestClientV2.instance(getActivity().getApplicationContext()).getLocations(
                        startDate.minusDays(REQUEST_SIZE - 1), startDate);
                List<TimeAtLocation> newStaticLocs = StatsCalculator.findStaticLocs(locs);
                if (lastStatic != null && newStaticLocs.size() > 0) {
                    // If the newest static loc of this window sits within 300 m of the
                    // held-back one, fuse them (summed duration, midpoint position).
                    TimeAtLocation tal = newStaticLocs.get(newStaticLocs.size() - 1);
                    if (LocationProcessor.haversiveDist(
                            new LocationResponseEntity(0, (float) lastStatic.lat, (float) lastStatic.lon),
                            new LocationResponseEntity(0, (float) tal.lat, (float) tal.lon)) < 300) {
                        TimeAtLocation newTal = new TimeAtLocation(tal.start, tal.duration + lastStatic.duration,
                                (lastStatic.lat + tal.lat) / 2, (lastStatic.lon + tal.lon) / 2);
                        newStaticLocs.set(newStaticLocs.size() - 1, newTal);
                    }
                }
                // Work on a copy so the UI never observes a partially updated list.
                List<TimeAtLocation> cloned = new ArrayList<TimeAtLocation>();
                for (TimeAtLocation t : allStaticLocs) {
                    cloned.add(new TimeAtLocation(t));
                }
                // Append newest-first, deliberately skipping index 0: that oldest entry
                // is carried into the next (earlier) window via lastStatic below.
                for (int i = newStaticLocs.size() - 1; i > 0; i--) {
                    newStaticLocs.get(i).geocode(geocoder);
                    cloned.add(newStaticLocs.get(i));
                }
                DBScan.assignCluster(cloned, 300, 1);
                allStaticLocs = cloned;
                clusters = getClusteredTimeAtLocs();
                colorGenerator.buildColorMap(clusters);
                surfaceSpiral.render(allStaticLocs, selected);
                updateUI();
                lastStatic = newStaticLocs.size() > 0 ? newStaticLocs.get(0) : null;
            } catch (Exception e) {
                // Best-effort: a failed window is logged and skipped, loading continues.
                Log.e(Constants.APP_NAME, e.toString());
            }
            startDate = startDate.minusDays(REQUEST_SIZE);
        }
        Log.d(Constants.APP_NAME, "done");
        removeProgress();
    }

    @UiThread
    protected void showProgress() {
        progress.setVisibility(View.VISIBLE);
    }

    @UiThread
    protected void removeProgress() {
        progress.setVisibility(View.GONE);
    }

    /** Refreshes the timeline list and the tag-cloud of clustered locations. */
    @UiThread
    public void updateUI() {
        timelineAdapter.notifyDataSetChanged();
        SpannableStringBuilder builder = new SpannableStringBuilder();
        for (TimeAtLocation t : clusters) {
            SpannableString str = new SpannableString(" " + t.description + " ");
            str.setSpan(new ForegroundColorSpan(colorGenerator.get(t.clusterId)), 0, str.length(), 0);
            // Tag-cloud sizing: log ratio against the largest cluster (clusters are
            // sorted descending by duration, so index 0 is the maximum).
            // NOTE(review): durations <= 1 would make this NaN/undefined — presumably
            // durations are milliseconds and always large; confirm.
            float scale = (float) (Math.log(t.duration) / Math.log(clusters.get(0).duration));
            str.setSpan(new RelativeSizeSpan(scale), 0, str.length(), 0);
            builder.append(str);
        }
        textviewLocs.setText(builder);
    }

    /**
     * Aggregates allStaticLocs per clusterId (summing durations; keeping the first
     * seen description) and returns the aggregates sorted by duration, descending.
     */
    private ArrayList<TimeAtLocation> getClusteredTimeAtLocs() {
        Map<Integer, TimeAtLocation> clusters = new HashMap<Integer, TimeAtLocation>();
        for (TimeAtLocation t : allStaticLocs) {
            if (!clusters.containsKey(t.clusterId)) {
                TimeAtLocation t2 = new TimeAtLocation(0, t.duration, 0, 0);
                t2.clusterId = t.clusterId;
                t2.description = t.description;
                clusters.put(t.clusterId, t2);
            } else {
                clusters.get(t.clusterId).duration += t.duration;
            }
        }
        ArrayList<TimeAtLocation> tal = new ArrayList<TimeAtLocation>(clusters.values());
        Collections.sort(tal, new Comparator<TimeAtLocation>() {
            @Override
            public int compare(TimeAtLocation lhs, TimeAtLocation rhs) {
                // Descending by duration. Double.compare avoids the int-cast
                // subtraction of the original, which overflows for large durations.
                return Double.compare(rhs.duration, lhs.duration);
            }
        });
        return tal;
    }

    @Override
    public void onTabSelected() {
        tabSelected = true;
        if (getActivity() != null) {
            UILogger.instance(getActivity().getApplicationContext()).logEvent("TimeSpiral");
            if (!TutorialActivity.wasShowFor(getActivity(), getTabTitle())) {
                Intent intent = TutorialActivity.getIntent(getActivity().getBaseContext(),
                        new int[] { R.drawable.tut_spiral_1, R.drawable.tut_spiral_2 });
                startActivity(intent);
            } else {
                // Re-runs the full init (and restarts loading) on every re-selection.
                afterViews();
            }
        }
    }

    @Override
    public void onTabUnselected() {
        // Stops the loadData() loop at its next iteration check.
        tabSelected = false;
    }

    @Override
    public String getTabTitle() {
        return "Timeline";
    }
}
| |
package com.zhongyun.zxing.journeyapps.barcodescanner;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.Rect;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import android.view.WindowManager;
import java.util.ArrayList;
import java.util.List;
import com.zhongyun.viewer.R;
import com.zhongyun.zxing.journeyapps.barcodescanner.camera.CameraInstance;
import com.zhongyun.zxing.journeyapps.barcodescanner.camera.CameraSettings;
import com.zhongyun.zxing.journeyapps.barcodescanner.camera.DisplayConfiguration;
/**
* CameraPreview is a view that handles displaying of a camera preview on a SurfaceView. It is
* intended to be used as a base for realtime processing of camera images, e.g. barcode decoding
* or OCR, although none of this happens in CameraPreview itself.
*
* The camera is managed on a separate thread, using CameraInstance.
*
* Two methods MUST be called on CameraPreview to manage its state:
* 1. resume() - initialize the camera and start the preview. Call from the Activity's onResume().
* 2. pause() - stop the preview and release any resources. Call from the Activity's onPause().
*
* Startup sequence:
*
* 1. Create SurfaceView.
* 2. open camera.
* 2. layout this container, to get size
* 3. set display config, according to the container size
* 4. configure()
* 5. wait for preview size to be ready
* 6. set surface size according to preview size
* 7. set surface and start preview
*/
public class CameraPreview extends ViewGroup {
/**
 * Observer interface for preview lifecycle events and camera errors.
 * Callbacks are dispatched via the fireState delegate to every registered listener.
 */
public interface StateListener {
/**
* Preview and frame sizes are determined.
*/
void previewSized();
/**
* Preview has started.
*/
void previewStarted();
/**
* Preview has stopped.
*/
void previewStopped();
/**
* The camera has errored, and cannot display a preview.
*
* @param error the error
*/
void cameraError(Exception error);
}
private static final String TAG = CameraPreview.class.getSimpleName();
private CameraInstance cameraInstance;
private WindowManager windowManager;
private Handler stateHandler;
private SurfaceView surfaceView;
private boolean previewActive = false;
private RotationListener rotationListener;
private List<StateListener> stateListeners = new ArrayList<StateListener>();
private DisplayConfiguration displayConfiguration;
private CameraSettings cameraSettings = new CameraSettings();
// Size of this container, non-null after layout is performed
private Size containerSize;
// Size of the preview resolution
private Size previewSize;
// Rect placing the preview surface
private Rect surfaceRect;
// Size of the current surface. non-null if the surface is ready
private Size currentSurfaceSize;
// Framing rectangle relative to this view
private Rect framingRect = null;
// Framing rectangle relative to the preview resolution
private Rect previewFramingRect = null;
// Size of the framing rectangle. If null, defaults to using a margin percentage.
private Size framingRectSize = null;
// Fraction of the width / heigth to use as a margin. This fraction is used on each size, so
// must be smaller than 0.5;
private double marginFraction = 0.1d;
// Tracks the SurfaceView's surface lifecycle; records the current surface size
// and attempts to start the preview once all prerequisites are ready.
private final SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
// Nothing to do yet — preview starts from surfaceChanged once the size is known.
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// Null size marks the surface as unavailable for startPreviewIfReady().
currentSurfaceSize = null;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if (holder == null) {
Log.e(TAG, "*** WARNING *** surfaceChanged() gave us a null surface!");
return;
}
currentSurfaceSize = new Size(width, height);
startPreviewIfReady();
}
};
// Receives messages from the camera thread on the main thread: preview size
// announcements and camera errors. (The "prewiew" id name is a typo baked into
// the library's R resources and must match as-is.)
private final Handler.Callback stateCallback = new Handler.Callback() {
@Override
public boolean handleMessage(Message message) {
if (message.what == R.id.zxing_prewiew_size_ready) {
previewSized((Size) message.obj);
return true;
} else if (message.what == R.id.zxing_camera_error) {
Exception error = (Exception) message.obj;
if (isActive()) {
// This check prevents multiple errors from being passed through.
pause();
fireState.cameraError(error);
}
}
return false;
}
};
// Reacts to device rotation by restarting the preview with the new orientation.
private RotationCallback rotationCallback = new RotationCallback() {
@Override
public void onRotationChanged(int rotation) {
// Make sure this is run on the main thread.
stateHandler.post(new Runnable() {
@Override
public void run() {
rotationChanged();
}
});
}
};
/** Programmatic constructor (no XML attributes). */
public CameraPreview(Context context) {
super(context);
initialize(context, null, 0, 0);
}
/** Constructor used when inflated from XML. */
public CameraPreview(Context context, AttributeSet attrs) {
super(context, attrs);
initialize(context, attrs, 0, 0);
}
/** Constructor used when inflated from XML with a style attribute. */
public CameraPreview(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
initialize(context, attrs, defStyleAttr, 0);
}
/**
 * Shared constructor body: reads the optional framing-rect size from XML
 * attributes, sets a default background, and prepares the surface and
 * rotation plumbing. {@code defStyleRes} is accepted for API symmetry but
 * currently unused.
 */
private void initialize(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
    if (getBackground() == null) {
        // Default to SurfaceView colour, so that there are less changes.
        setBackgroundColor(Color.BLACK);
    }
    // Recycle the TypedArray even if attribute parsing throws (original leaked it
    // on exception).
    TypedArray attributes = context.obtainStyledAttributes(attrs, R.styleable.zxing_camera_preview);
    int framingRectWidth;
    int framingRectHeight;
    try {
        framingRectWidth = (int) attributes.getDimension(R.styleable.zxing_camera_preview_zxing_framing_rect_width, -1);
        framingRectHeight = (int) attributes.getDimension(R.styleable.zxing_camera_preview_zxing_framing_rect_height, -1);
    } finally {
        attributes.recycle();
    }
    // A fixed framing rect is only honoured when both dimensions are positive;
    // otherwise the margin-fraction default applies.
    if (framingRectWidth > 0 && framingRectHeight > 0) {
        this.framingRectSize = new Size(framingRectWidth, framingRectHeight);
    }
    windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
    stateHandler = new Handler(stateCallback);
    setupSurfaceView();
    rotationListener = new RotationListener();
}
// Restart the whole preview pipeline so the new display rotation is picked up.
private void rotationChanged() {
pause();
resume();
}
// Creates the child SurfaceView and registers for its surface lifecycle events.
private void setupSurfaceView() {
surfaceView = new SurfaceView(getContext());
if (Build.VERSION.SDK_INT < 11) {
// Pre-Honeycomb requires the (since-deprecated) push-buffers surface type
// for camera preview; on API 11+ it is set automatically.
surfaceView.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
surfaceView.getHolder().addCallback(surfaceCallback);
addView(surfaceView);
}
/**
 * Add a listener to be notified of changes to the preview state, as well as camera errors.
 * Call from the main thread; the backing list is not synchronized.
 *
 * @param listener the listener
 */
public void addStateListener(StateListener listener) {
stateListeners.add(listener);
}
// Broadcast delegate: fans each StateListener event out to all registered listeners.
private final StateListener fireState = new StateListener() {
@Override
public void previewSized() {
for (StateListener listener : stateListeners) {
listener.previewSized();
}
}
@Override
public void previewStarted() {
for (StateListener listener : stateListeners) {
listener.previewStarted();
}
}
@Override
public void previewStopped() {
for (StateListener listener : stateListeners) {
listener.previewStopped();
}
}
@Override
public void cameraError(Exception error) {
for (StateListener listener : stateListeners) {
listener.cameraError(error);
}
}
};
/**
 * Derives surfaceRect, framingRect (view coordinates) and previewFramingRect
 * (preview-resolution coordinates) from the container size, preview size and
 * display configuration. Fires previewSized() on success; clears all three
 * rects and throws if called before the prerequisites are set.
 */
private void calculateFrames() {
if (containerSize == null || previewSize == null || displayConfiguration == null) {
// Reset derived state before failing so no stale rects survive.
previewFramingRect = null;
framingRect = null;
surfaceRect = null;
throw new IllegalStateException("containerSize or previewSize is not set yet");
}
int previewWidth = previewSize.width;
int previewHeight = previewSize.height;
int width = containerSize.width;
int height = containerSize.height;
surfaceRect = displayConfiguration.scalePreview(previewSize);
Rect container = new Rect(0, 0, width, height);
framingRect = calculateFramingRect(container, surfaceRect);
// Translate the framing rect into the surface's coordinate space, then scale
// it to the preview resolution.
Rect frameInPreview = new Rect(framingRect);
frameInPreview.offset(-surfaceRect.left, -surfaceRect.top);
previewFramingRect = new Rect(frameInPreview.left * previewWidth / surfaceRect.width(),
frameInPreview.top * previewHeight / surfaceRect.height(),
frameInPreview.right * previewWidth / surfaceRect.width(),
frameInPreview.bottom * previewHeight / surfaceRect.height());
if (previewFramingRect.width() <= 0 || previewFramingRect.height() <= 0) {
// Degenerate frame: disable framing rather than decode from an empty region.
previewFramingRect = null;
framingRect = null;
Log.w(TAG, "Preview frame is too small");
} else {
fireState.previewSized();
}
}
/**
 * Call this on the main thread, while the preview is running.
 *
 * A no-op when the camera is not open (cameraInstance == null).
 *
 * @param on true to turn on the torch
 */
public void setTorch(boolean on) {
    if (cameraInstance != null) {
        cameraInstance.setTorch(on);
    }
}
/**
 * Records the size of this view (called from onLayout) and, if the camera is
 * open but not yet configured, creates its DisplayConfiguration and configures it.
 */
private void containerSized(Size containerSize) {
    this.containerSize = containerSize;
    if (cameraInstance != null) {
        // Only configure once per camera open; later layout passes keep the
        // existing configuration.
        if (cameraInstance.getDisplayConfiguration() == null) {
            displayConfiguration = new DisplayConfiguration(getDisplayRotation(), containerSize);
            cameraInstance.setDisplayConfiguration(displayConfiguration);
            cameraInstance.configureCamera();
        }
    }
}
/**
 * Records the camera preview resolution and, once the container size is also
 * known, recomputes the framing rects, re-lays-out the surface, and starts the
 * preview if the surface is ready.
 */
private void previewSized(Size size) {
    this.previewSize = size;
    if (containerSize != null) {
        calculateFrames();
        requestLayout();
        startPreviewIfReady();
    }
}
/**
 * Starts the camera preview, but only once the surface exists, the preview
 * resolution is known, and the surface has reached its final (scaled) size.
 */
private void startPreviewIfReady() {
    if (currentSurfaceSize == null || previewSize == null || surfaceRect == null) {
        return; // Not enough information yet.
    }
    Size expectedSize = new Size(surfaceRect.width(), surfaceRect.height());
    if (currentSurfaceSize.equals(expectedSize)) {
        startCameraPreview(surfaceView.getHolder());
    }
    // Otherwise the surface has not been resized to match surfaceRect yet;
    // wait for the next surfaceChanged() callback.
}
@SuppressLint("DrawAllocation")
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    // Tell the camera pipeline the container size; this may (re)configure the camera.
    containerSized(new Size(r - l, b - t));
    if (surfaceRect == null) {
        // Match the container, to reduce the risk of issues. The preview should never be drawn
        // while the surface has this size.
        surfaceView.layout(0, 0, getWidth(), getHeight());
    } else {
        // Lay the surface out exactly where the scaled preview should appear.
        surfaceView.layout(surfaceRect.left, surfaceRect.top, surfaceRect.right, surfaceRect.bottom);
    }
}
/**
 * The framing rectangle, relative to this view. Use to draw the rectangle.
 *
 * Will never be null while the preview is active.
 *
 * @return the framing rect, or null if it has not been calculated yet
 * @see #isPreviewActive()
 */
public Rect getFramingRect() {
    return framingRect;
}
/**
 * The framing rect, relative to the camera preview resolution.
 *
 * Will never be null while the preview is active.
 *
 * @return the preview rect, or null if it has not been calculated yet
 * @see #isPreviewActive()
 */
public Rect getPreviewFramingRect() {
    return previewFramingRect;
}
/**
 * @return the CameraSettings currently in use (applied the next time the
 *         camera is opened)
 */
public CameraSettings getCameraSettings() {
    return cameraSettings;
}
/**
 * Set the CameraSettings. Use this to select a different camera, change exposure and torch
 * settings, and some other options.
 *
 * This has no effect if the camera is already open.
 *
 * @param cameraSettings the new settings
 */
public void setCameraSettings(CameraSettings cameraSettings) {
    this.cameraSettings = cameraSettings;
}
/**
 * Start the camera preview and decoding. Typically this should be called from the Activity's
 * onResume() method.
 *
 * Call from UI thread only. Safe to call multiple times.
 */
public void resume() {
    // This must be safe to call multiple times
    Util.validateMainThread();
    Log.d(TAG, "resume()");
    // initCamera() does nothing if called twice, but does log a warning
    initCamera();
    if (currentSurfaceSize != null) {
        // The activity was paused but not stopped, so the surface still exists. Therefore
        // surfaceCreated() won't be called, so init the camera here.
        startPreviewIfReady();
    } else {
        // Install the callback and wait for surfaceCreated() to init the camera.
        surfaceView.getHolder().addCallback(surfaceCallback);
    }
    // To trigger surfaceSized again
    requestLayout();
    // Restart the preview if the device is rotated.
    rotationListener.listen(getContext(), rotationCallback);
}
/**
 * Pause scanning and the camera preview. Typically this should be called from the Activity's
 * onPause() method.
 *
 * Call from UI thread only. Safe to call multiple times.
 */
public void pause() {
    // This must be safe to call multiple times.
    Util.validateMainThread();
    Log.d(TAG, "pause()");
    if (cameraInstance != null) {
        cameraInstance.close();
        cameraInstance = null;
        previewActive = false;
    }
    // If the surface was never created, stop listening for it; otherwise the
    // callback is kept so surfaceDestroyed() can clear currentSurfaceSize.
    if (currentSurfaceSize == null) {
        SurfaceHolder surfaceHolder = surfaceView.getHolder();
        surfaceHolder.removeCallback(surfaceCallback);
    }
    // Discard layout-dependent state; it is recomputed on the next resume/layout.
    this.containerSize = null;
    this.previewSize = null;
    this.previewFramingRect = null;
    rotationListener.stop();
    fireState.previewStopped();
}
/**
 * @return the exact framing-rect size set via {@link #setFramingRectSize(Size)}, or null
 */
public Size getFramingRectSize() {
    return framingRectSize;
}
/**
 * Set an exact size for the framing rectangle. It will be centered in the view.
 *
 * When set, this overrides the margin-fraction based sizing.
 *
 * @param framingRectSize the size
 */
public void setFramingRectSize(Size framingRectSize) {
    this.framingRectSize = framingRectSize;
}
/**
 * @return the fraction of the view's width/height used as framing-rect margin
 */
public double getMarginFraction() {
    return marginFraction;
}
/**
 * The fraction of the width/height of view to be used as a margin for the framing rect.
 * This is ignored if framingRectSize is specified.
 *
 * @param marginFraction the fraction, must be less than 0.5
 * @throws IllegalArgumentException if marginFraction is 0.5 or more
 */
public void setMarginFraction(double marginFraction) {
    if(marginFraction >= 0.5d) {
        throw new IllegalArgumentException("The margin fraction must be less than 0.5");
    }
    this.marginFraction = marginFraction;
}
/**
 * Considered active if between resume() and pause().
 *
 * @return true if active, i.e. a CameraInstance currently exists
 */
protected boolean isActive() {
    return cameraInstance != null;
}
// Current display rotation (Surface.ROTATION_* constant) from the window manager.
private int getDisplayRotation() {
    return windowManager.getDefaultDisplay().getRotation();
}
/**
 * Creates and opens a new CameraInstance with the current settings. A no-op
 * (with a warning) if the camera is already open.
 */
private void initCamera() {
    if (cameraInstance != null) {
        Log.w(TAG, "initCamera called twice");
        return;
    }
    cameraInstance = new CameraInstance(getContext());
    cameraInstance.setCameraSettings(cameraSettings);
    // The camera reports readiness/errors back through this view's handler.
    cameraInstance.setReadyHandler(stateHandler);
    cameraInstance.open();
}
/**
 * Attaches the surface to the open camera and starts the preview, exactly
 * once; subsequent calls while the preview is active do nothing.
 */
private void startCameraPreview(SurfaceHolder holder) {
    if (previewActive) {
        return; // Already running.
    }
    Log.i(TAG, "Starting preview");
    cameraInstance.setSurfaceHolder(holder);
    cameraInstance.startPreview();
    previewActive = true;
    // Let the subclass hook run before external listeners are notified.
    previewStarted();
    fireState.previewStarted();
}
/**
 * Called when the preview is started. Override this to start decoding work.
 * The default implementation does nothing.
 */
protected void previewStarted() {
}
/**
 * Get the current CameraInstance. This may be null, and may change when
 * pausing / resuming the preview.
 *
 * While the preview is active, getCameraInstance() will never be null.
 *
 * @return the current CameraInstance, or null when the camera is closed
 * @see #isPreviewActive()
 */
public CameraInstance getCameraInstance() {
    return cameraInstance;
}
/**
 * The preview typically starts being active a while after calling resume(), and stops
 * when calling pause().
 *
 * @return true if the preview is active
 */
public boolean isPreviewActive() {
    return previewActive;
}
/**
 * Calculate framing rectangle, relative to the preview frame.
 *
 * Note that the SurfaceView may be larger than the container.
 *
 * Override this for more control over the framing rect calculations.
 *
 * @param container this container, with left = top = 0
 * @param surface the SurfaceView, relative to this container
 * @return the framing rect, relative to this container
 */
protected Rect calculateFramingRect(Rect container, Rect surface) {
    // intersection is the part of the container that is used for the preview.
    // Rect.intersect() mutates the receiver; when the rects do not intersect it
    // returns false and leaves the receiver unchanged, so the full container is
    // used in that case. (The previously captured, unused return value was removed.)
    Rect intersection = new Rect(container);
    intersection.intersect(surface);
    if (framingRectSize != null) {
        // Specific size is specified. Make sure it's not larger than the container or surface.
        int horizontalMargin = Math.max(0, (intersection.width() - framingRectSize.width) / 2);
        int verticalMargin = Math.max(0, (intersection.height() - framingRectSize.height) / 2);
        intersection.inset(horizontalMargin, verticalMargin);
        return intersection;
    }
    // margin as 10% (default) of the smaller of width, height
    int margin = (int) Math.min(intersection.width() * marginFraction, intersection.height() * marginFraction);
    intersection.inset(margin, margin);
    if (intersection.height() > intersection.width()) {
        // We don't want a frame that is taller than wide.
        intersection.inset(0, (intersection.height() - intersection.width()) / 2);
    }
    return intersection;
}
}
| |
package src.usi.testcase.inputdata;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.input.SAXBuilder;
import src.usi.configuration.PathsManager;
import com.google.common.collect.Lists;
/**
 * Loads and serves test input data (valid/invalid values, both free-form and
 * itemized) from the XML data-set file referenced by PathsManager.
 *
 * Values are keyed by a lower-cased "metadata" descriptor. Free-form lookups
 * first try an exact descriptor match and otherwise concatenate the values of
 * each individual descriptor word; itemized lookups require an exact match.
 */
public class DataManager {

    private static DataManager instance = null;
    private final HashMap<String, List<String>> validDataMap = new HashMap<>();
    private final HashMap<String, List<String>> invalidDataMap = new HashMap<>();
    private final HashMap<String, List<Integer>> validItemizedDataMap = new HashMap<>();
    private final HashMap<String, List<Integer>> invalidItemizedDataMap = new HashMap<>();
    // Stop-words removed from descriptors before word-based matching.
    private final List<String> discardedWords = Lists.newArrayList("the", "in", "these", "this",
            "that", "of", "an", "and");
    // Punctuation treated as word separators in descriptors.
    private final List<String> specialCharacters = Lists.newArrayList(":", ";", ".");

    /**
     * Returns the lazily-created singleton. Synchronized so concurrent first
     * calls cannot create two instances (the original lazy init was not thread-safe).
     */
    public static synchronized DataManager getInstance() throws Exception {
        if (instance == null) {
            instance = new DataManager();
        }
        return instance;
    }

    // Loads the data set eagerly; construction fails if the file is missing or malformed.
    private DataManager() throws Exception {
        this.loadDataFromXMLFile();
    }

    /**
     * Returns the valid values for the given descriptor (never null; possibly empty).
     *
     * @throws Exception if descriptor is null
     */
    public List<String> getValidData(final String descriptor) throws Exception {
        if (descriptor == null) {
            throw new Exception("DataManager - getValidData: null input.");
        }
        return this.collectData(this.validDataMap, descriptor);
    }

    /**
     * Returns the invalid values for the given descriptor (never null; possibly empty).
     *
     * @throws Exception if descriptor is null
     */
    public List<String> getInvalidData(final String descriptor) throws Exception {
        if (descriptor == null) {
            throw new Exception("DataManager - getInvalidData: null input.");
        }
        return this.collectData(this.invalidDataMap, descriptor);
    }

    // Shared lookup for getValidData/getInvalidData: exact match first, then
    // per-word fallback over the split descriptor.
    private List<String> collectData(final HashMap<String, List<String>> map,
            final String descriptor) {
        final String desc = descriptor.trim().toLowerCase();
        if (map.containsKey(desc)) {
            return new ArrayList<>(map.get(desc));
        }
        final List<String> out = new ArrayList<>();
        for (final String s : this.splitDescriptor(descriptor)) {
            final List<String> values = map.get(s);
            if (values != null && values.size() > 0) {
                out.addAll(values);
            }
        }
        return out;
    }

    /**
     * Returns the valid itemized (index) values for the descriptor. Requires a
     * perfect metadata match; no word-based fallback.
     *
     * @throws Exception if descriptor is null
     */
    public List<Integer> getValidItemizedData(final String descriptor) throws Exception {
        if (descriptor == null) {
            throw new Exception("DataManager - getValidItemizedData: null input.");
        }
        final String desc = descriptor.trim().toLowerCase();
        // for itemized data there must be a perfect match with the metadata
        if (this.validItemizedDataMap.containsKey(desc)) {
            return new ArrayList<>(this.validItemizedDataMap.get(desc));
        }
        return new ArrayList<>();
    }

    /**
     * Returns the invalid itemized (index) values for the descriptor. Requires
     * a perfect metadata match; no word-based fallback.
     *
     * @throws Exception if descriptor is null
     */
    public List<Integer> getInvalidItemizedData(final String descriptor) throws Exception {
        if (descriptor == null) {
            throw new Exception("DataManager - getInvalidItemizedData: null input.");
        }
        final String desc = descriptor.trim().toLowerCase();
        // for itemized data there must be a perfect match with the metadata
        if (this.invalidItemizedDataMap.containsKey(desc)) {
            return new ArrayList<>(this.invalidItemizedDataMap.get(desc));
        }
        return new ArrayList<>();
    }

    /** Returns the values stored under the reserved "generic-input-data" key (never null). */
    public List<String> getGenericData() {
        final List<String> out = new ArrayList<>();
        final List<String> values = this.validDataMap.get("generic-input-data");
        if (values != null && values.size() > 0) {
            out.addAll(values);
        }
        return out;
    }

    /**
     * Parses the data-set XML (root DATASET, children DATA) into the four maps.
     * DATA elements with type="itemized" go to the itemized maps (Integer
     * values); everything else goes to the free-form String maps.
     *
     * @throws Exception if the file is missing, the root element is wrong, or
     *         a value element is neither "valid" nor "invalid"
     */
    private void loadDataFromXMLFile() throws Exception {
        final File dataSet = new File(PathsManager.getInputdataFilePath());
        if (dataSet.exists()) {
            final SAXBuilder builder = new SAXBuilder();
            final Document document = builder.build(dataSet);
            final Element rootElement = document.getRootElement();
            if (!rootElement.getName().equals("DATASET")) {
                throw new Exception("DataManager - loadDataFromXMLFile: wrong root.");
            }
            final List<Element> children = rootElement.getChildren();
            for (final Element nodo : children) {
                assert (nodo.getName().equals("DATA"));
                if (nodo.getAttribute("type") != null
                        && nodo.getAttribute("type").getValue().equals("itemized")) {
                    // Itemized data: values are integer item indexes.
                    final String metadata = nodo.getChildText("metadata").toLowerCase().trim();
                    // Each metadata key must be unique across the data set.
                    assert (!this.invalidItemizedDataMap.containsKey(metadata) && !this.validItemizedDataMap
                            .containsKey(metadata));
                    final List<Integer> valid = new ArrayList<>();
                    final List<Integer> invalid = new ArrayList<>();
                    this.validItemizedDataMap.put(metadata, valid);
                    this.invalidItemizedDataMap.put(metadata, invalid);
                    // Collect the values.
                    if ((nodo.getChild("values") != null)
                            && (nodo.getChild("values").getChildren() != null)) {
                        final List<Element> values = nodo.getChild("values").getChildren();
                        for (final Element valueNode : values) {
                            switch (valueNode.getName()) {
                            case "valid":
                                valid.add(Integer.valueOf(valueNode.getTextTrim()));
                                break;
                            case "invalid":
                                invalid.add(Integer.valueOf(valueNode.getTextTrim()));
                                break;
                            default:
                                throw new Exception(
                                        "DataManager - loadDataFromXMLFile: value type not found.");
                            }
                        }
                    }
                } else {
                    // Free-form data: values are plain strings.
                    final String metadata = nodo.getChildText("metadata").toLowerCase().trim();
                    assert (!this.invalidDataMap.containsKey(metadata) && !this.validDataMap
                            .containsKey(metadata));
                    final List<String> valid = new ArrayList<>();
                    final List<String> invalid = new ArrayList<>();
                    this.validDataMap.put(metadata, valid);
                    this.invalidDataMap.put(metadata, invalid);
                    // Collect the values.
                    if ((nodo.getChild("values") != null)
                            && (nodo.getChild("values").getChildren() != null)) {
                        final List<Element> values = nodo.getChild("values").getChildren();
                        for (final Element valueNode : values) {
                            switch (valueNode.getName()) {
                            case "valid":
                                valid.add(valueNode.getTextTrim());
                                break;
                            case "invalid":
                                invalid.add(valueNode.getTextTrim());
                                break;
                            default:
                                throw new Exception(
                                        "DataManager - loadDataFromXMLFile: value type not found.");
                            }
                        }
                    }
                }
            }
        } else {
            throw new Exception("DataManager - loadDataFromXMLFile: file not found.");
        }
    }

    /**
     * Splits a descriptor into lower-cased, non-empty words, after replacing
     * special characters with spaces and removing stop-words.
     */
    private List<String> splitDescriptor(final String descriptor) {
        String d = descriptor;
        for (final String s : this.specialCharacters) {
            d = d.replace(s, " ");
        }
        // NOTE(review): this only removes stop-words surrounded by spaces, so a
        // stop-word at the very start or end of the descriptor is kept.
        for (final String s : this.discardedWords) {
            d = d.replace(" " + s + " ", " ");
        }
        // BUG FIX: split the processed string 'd', not the raw descriptor —
        // previously the replacements above were computed and then discarded.
        final String[] descs = d.split(" ");
        final List<String> out = new ArrayList<>();
        for (final String desc : descs) {
            if (desc.trim().length() > 0) {
                out.add(desc.trim().toLowerCase());
            }
        }
        return out;
    }
}
| |
package roslab;
import java.awt.Point;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Random;
import java.util.ResourceBundle;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Group;
import javafx.scene.Scene;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.MenuItem;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;
import javafx.scene.control.TreeCell;
import javafx.scene.control.TreeView;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.GridPane;
import javafx.scene.shape.Line;
import javafx.scene.shape.Rectangle;
import javafx.stage.FileChooser;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.util.Callback;
import org.apache.log4j.BasicConfigurator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import roslab.model.electronics.Circuit;
import roslab.model.general.Configuration;
import roslab.model.general.Endpoint;
import roslab.model.general.Library;
import roslab.model.general.Link;
import roslab.model.general.Node;
import roslab.model.mechanics.HWBlock;
import roslab.model.software.ROSMsgType;
import roslab.model.software.ROSNode;
import roslab.model.software.ROSPort;
import roslab.model.software.ROSTopic;
import roslab.model.ui.UIEndpoint;
import roslab.model.ui.UILink;
import roslab.model.ui.UINode;
import roslab.processors.general.ConfigurationParser;
import roslab.processors.general.LibraryParser;
import roslab.ui.general.NewLinkDialog;
import roslab.ui.general.NewNodeDialog;
import roslab.ui.general.ROSLabTree;
import roslab.ui.software.EditRateDialog;
import roslab.ui.software.LoadLibraryDialog;
import roslab.ui.software.NewCustomControllerDialog;
import roslab.ui.software.NewCustomPortDialog;
import roslab.ui.software.NewCustomTopicDialog;
public class ROSLabController implements Initializable {
static Logger logger = LoggerFactory.getLogger(ROSLabController.class);
// TODO Use this class for each tab contents (sw, hw, ee)
/** Bundles the per-tab state: tree, library, configuration, context menu and UI group. */
class ModeContents {
    ROSLabTree tree;
    Library lib;
    Configuration config;
    ContextMenu menu;
    Group uiGroup;
}
@FXML
TabPane mainTabPane;
// --- Software (ROS) tab ---
@FXML
Tab swTab;
@FXML
TreeView<String> swTreeView;
@FXML
AnchorPane swPane;
ROSLabTree swTree;
Library swLibrary = new Library();
Configuration swConfig;
ContextMenu addSWNodeMenu;
Group swUIObjects = new Group();
// --- Mechanical (hardware) tab ---
@FXML
Tab hwTab;
@FXML
TreeView<String> hwTreeView;
@FXML
AnchorPane hwPane;
ROSLabTree hwTree;
Library hwLibrary = new Library();
Configuration hwConfig;
ContextMenu addHWNodeMenu;
Group hwUIObjects = new Group();
// --- Electrical tab ---
@FXML
Tab eeTab;
@FXML
TreeView<String> eeTreeView;
@FXML
AnchorPane eePane;
ROSLabTree eeTree;
Library eeLibrary = new Library();
Configuration eeConfig;
ContextMenu addEENodeMenu;
Group eeUIObjects = new Group();
@FXML
ScrollPane swScroll;
// Used to scatter newly added config nodes at random positions.
Random r = new Random();
// Drag-selection state (selection rectangle currently disabled, see commented code below).
Rectangle selectionRectangle;
double selectionX;
double selectionY;
// In-progress "add port" line drawing state.
Line addPortLine;
Point portLineStart = new Point();
private Stage primaryStage;
@Override
public void initialize(URL location, ResourceBundle resources) {
    // FXML injection sanity checks (run with -ea to enforce).
    assert swTreeView != null : "fx:id\"swTreeView\" was not injected";
    assert hwTreeView != null : "fx:id\"hwTreeView\" was not injected";
    assert eeTreeView != null : "fx:id\"eeTreeView\" was not injected";
    BasicConfigurator.configure();
    // TODO: get highlighting and selection of Nodes based on selection
    // rectangle
    // enableSelectionRectangle(swPane);
    // enableSelectionRectangle(hwPane);
    // enableSelectionRectangle(eePane);
    // PythonLibraryHelper p = new PythonLibraryHelper();
    // Software tab: empty library, demo configuration.
    swConfig = new Configuration("Demo", swLibrary);
    swTree = new ROSLabTree(swLibrary, swConfig, this);
    swPane.getChildren().add(swUIObjects);
    swTreeView.setRoot(swTree);
    swTreeView.setShowRoot(false);
    swTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return swTree.new TreeCellImpl();
        }
    });
    // Electronics tab: pre-populated with the base electronics library.
    eeLibrary = Library.loadBaseElectronicsLibrary();
    eeConfig = new Configuration("Demo", eeLibrary);
    eeTree = new ROSLabTree(eeLibrary, eeConfig, this);
    eePane.getChildren().add(eeUIObjects);
    eeTreeView.setRoot(eeTree);
    eeTreeView.setShowRoot(false);
    eeTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return eeTree.new TreeCellImpl();
        }
    });
    // Mechanical tab: empty library, demo configuration.
    hwConfig = new Configuration("Demo", hwLibrary);
    hwTree = new ROSLabTree(hwLibrary, hwConfig, this);
    hwPane.getChildren().add(hwUIObjects);
    hwTreeView.setRoot(hwTree);
    hwTreeView.setShowRoot(false);
    hwTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return hwTree.new TreeCellImpl();
        }
    });
    // Right-click menus for every canvas.
    createCanvasContextMenu(swLibrary, swConfig);
    createCanvasContextMenu(eeLibrary, eeConfig);
    createCanvasContextMenu(hwLibrary, hwConfig);
    // addDragDrop(swPane);
}
/**
 * Installs the right-click "Add Node" (and, for hardware/electronics tabs,
 * "Add Link") context menu on the canvas pane corresponding to the given
 * library, and wires mouse handlers to show/hide it.
 *
 * @param lib    the library whose pane gets the menu (sw, hw or ee); unknown
 *               libraries are ignored
 * @param config the configuration passed to the "Add Link" dialog
 */
public void createCanvasContextMenu(final Library lib, final Configuration config) {
    final AnchorPane pane;
    final Group uiObjects;
    final ContextMenu menu;
    if (swLibrary.equals(lib)) {
        pane = swPane;
        uiObjects = swUIObjects;
        addSWNodeMenu = new ContextMenu();
        menu = addSWNodeMenu;
    }
    else if (hwLibrary.equals(lib)) {
        pane = hwPane;
        uiObjects = hwUIObjects;
        addHWNodeMenu = new ContextMenu();
        menu = addHWNodeMenu;
    }
    else if (eeLibrary.equals(lib)) {
        pane = eePane;
        uiObjects = eeUIObjects;
        addEENodeMenu = new ContextMenu();
        menu = addEENodeMenu;
    }
    else {
        return; // Unknown library: nothing to attach.
    }
    MenuItem addNodeItem = new MenuItem("Add Node");
    addNodeItem.setOnAction(new EventHandler<ActionEvent>() {
        @Override
        public void handle(ActionEvent event) {
            showNewNodeDialog(lib);
        }
    });
    menu.getItems().add(addNodeItem);
    // Links are only added via the dialog on the hw/ee tabs.
    if (!lib.equals(swLibrary)) {
        MenuItem addLinkItem = new MenuItem("Add Link");
        addLinkItem.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                showNewLinkDialog(config);
            }
        });
        menu.getItems().add(addLinkItem);
    }
    pane.setOnMouseClicked(new EventHandler<MouseEvent>() {
        @Override
        public void handle(MouseEvent mouseEvent) {
            // Only show the menu for right-clicks on empty canvas, not on nodes/links.
            if (mouseEvent.getButton() == MouseButton.SECONDARY && !uiObjects.getChildren().contains(mouseEvent.getTarget())) { // TODO
                // test
                // BUG FIX: anchor the menu to this tab's own pane; it was
                // previously always anchored to swPane, so on the hw/ee tabs
                // the menu was shown relative to the wrong (hidden) pane.
                menu.show(pane, mouseEvent.getScreenX(), mouseEvent.getScreenY());
            }
            else if (mouseEvent.getButton() == MouseButton.PRIMARY) {
                menu.hide();
            }
        }
    });
}
// private void enableSelectionRectangle(final Pane p) {
// p.setOnMousePressed(new EventHandler<MouseEvent>() {
// @Override
// public void handle(MouseEvent mouseEvent) {
// selectionX = mouseEvent.getX();
// selectionY = mouseEvent.getY();
// selectionRectangle = new Rectangle(selectionX, selectionY, 0, 0);
// selectionRectangle.getStyleClass().add("SelectionRectangle");
// p.getChildren().add(selectionRectangle);
// }
// });
// p.setOnMouseReleased(new EventHandler<MouseEvent>() {
// @Override
// public void handle(MouseEvent mouseEvent) {
// p.getChildren().remove(selectionRectangle);
// }
// });
//
// p.setOnMouseDragged(new EventHandler<MouseEvent>() {
// @Override
// public void handle(MouseEvent mouseEvent) {
// double x = mouseEvent.getX();
// double y = mouseEvent.getY();
// if (x < selectionX) {
// selectionRectangle.setX(x);
// selectionRectangle.setWidth(selectionX - x);
// }
// else {
// selectionRectangle.setWidth(x - selectionRectangle.getX());
// }
// if (y < selectionY) {
// selectionRectangle.setY(y);
// selectionRectangle.setHeight(selectionY - y);
// }
// else {
// selectionRectangle.setHeight(y - selectionRectangle.getY());
// }
// }
// });
// }
/** Adds a ROS node to the software library and its tree view. */
public void addLibraryNode(ROSNode n) {
    swLibrary.addNode(n);
    swTree.addLibraryNode(n);
}
/** Removes a ROS node from the software library and its tree view. */
public void removeLibraryNode(ROSNode n) {
    swLibrary.removeNode(n);
    swTree.removeLibraryNode(n);
}
/** Re-registers a ROS node (remove + add) so the library and tree pick up changes. */
public void updateLibraryNode(ROSNode n) {
    removeLibraryNode(n);
    addLibraryNode(n);
}
/** Adds a circuit to the electronics library and its tree view. */
public void addLibraryNode(Circuit n) {
    eeLibrary.addNode(n);
    eeTree.addLibraryNode(n);
}
/** Removes a circuit from the electronics library and its tree view. */
public void removeLibraryNode(Circuit n) {
    eeLibrary.removeNode(n);
    eeTree.removeLibraryNode(n);
}
/** Re-registers a circuit (remove + add) so the library and tree pick up changes. */
public void updateLibraryNode(Circuit n) {
    removeLibraryNode(n);
    addLibraryNode(n);
}
/** Adds a hardware block to the mechanical library and its tree view. */
public void addLibraryNode(HWBlock n) {
    hwLibrary.addNode(n);
    hwTree.addLibraryNode(n);
}
/** Removes a hardware block from the mechanical library and its tree view. */
public void removeLibraryNode(HWBlock n) {
    hwLibrary.removeNode(n);
    hwTree.removeLibraryNode(n);
}
/** Re-registers a hardware block (remove + add) so the library and tree pick up changes. */
public void updateLibraryNode(HWBlock n) {
    removeLibraryNode(n);
    addLibraryNode(n);
}
/**
 * Adds a node to the configuration of the tab matching its concrete type
 * (ROSNode/HWBlock/Circuit), creating a UI node at a random position if it
 * does not have one yet, and re-orders the canvas (nodes in front, links behind).
 */
public void addConfigNode(Node n) {
    UINode uin = null;
    if (n.getUINode() == null) {
        // No UI representation yet: place it at a random spot in a 400x400 area.
        uin = new UINode(n, r.nextInt(400), r.nextInt(400));
    }
    else {
        uin = n.getUINode();
    }
    Group grp = null;
    // NOTE(review): an unrecognized node type leaves grp null and would NPE at
    // uin.addToGroup below — confirm all Node subclasses are covered here.
    switch (uin.getNode().getClass().getSimpleName()) {
        case "ROSNode":
            swConfig.addNode(n);
            swTree.addConfigNode(n);
            refreshSWConfigLinks();
            grp = swUIObjects;
            uin.setScrollPane(swScroll);
            break;
        case "HWBlock":
            hwConfig.addNode(n);
            hwTree.addConfigNode(n);
            grp = hwUIObjects;
            break;
        case "Circuit":
            eeConfig.addNode(n);
            eeTree.addConfigNode(n);
            grp = eeUIObjects;
            break;
    }
    uin.addToGroup(grp, this);
    // Order all of the UI objects
    for (Object nn : grp.getChildren().toArray()) {
        if (nn instanceof UINode) {
            ((UINode) nn).toTheFront();
        }
        if (nn instanceof UILink) {
            ((UILink) nn).toBack();
        }
    }
}
/**
 * Adds a link to the configuration of the tab matching the type of its source
 * endpoint's parent, creates its UILink, and re-orders the canvas (nodes in
 * front, links behind).
 */
public void addConfigLink(Link l) {
    UILink uil = new UILink(l);
    l.setUILink(uil);
    Group grp = null;
    // NOTE(review): an unrecognized parent type leaves grp null and would NPE
    // below — confirm all endpoint parent types are covered here.
    switch (l.getSrc().getParent().getClass().getSimpleName()) {
        case "ROSNode":
            swConfig.addLink(l);
            swTree.addConfigLink(l);
            grp = swUIObjects;
            break;
        case "HWBlock":
            hwConfig.addLink(l);
            hwTree.addConfigLink(l);
            grp = hwUIObjects;
            break;
        case "Circuit":
            eeConfig.addLink(l);
            eeTree.addConfigLink(l);
            grp = eeUIObjects;
            break;
    }
    grp.getChildren().add(l.getUILink());
    // Order all of the UI objects
    for (Object nn : grp.getChildren().toArray()) {
        if (nn instanceof UINode) {
            ((UINode) nn).toTheFront();
        }
        if (nn instanceof UILink) {
            ((UILink) nn).toBack();
        }
    }
}
/**
 * Removes a node from its tab's configuration, tree and canvas. For ROS nodes
 * this also removes every link attached to the node's UI endpoints and, for
 * custom controllers, drops controller ports that no longer have any links.
 * Finally any remaining model-level links on the node's endpoints are removed.
 */
public void removeConfigNode(Node n) {
    switch (n.getClass().getSimpleName()) {
        case "ROSNode":
            for (UIEndpoint e : n.getUINode().getUIEndpoints()) {
                for (UILink l : e.getUILinks()) {
                    swConfig.removeLink(l.getLink());
                    swTree.removeConfigLink(l.getLink());
                    // Detach the link from the endpoint at its other end; if that
                    // end is a custom controller whose port is now unused, remove
                    // the port as well.
                    if (e.equals(l.getSrc())) {
                        l.getDest().removeUILink(l);
                        if ("controller".equals(l.getDest().getParentNode().getAnnotation("custom-type")) && l.getDest().getUILinks().size() == 0) {
                            removeConfigPort(l.getDest().getParentNode(), l.getDest().getName());
                        }
                    }
                    else {
                        l.getSrc().removeUILink(l);
                        if ("controller".equals(l.getSrc().getParentNode().getAnnotation("custom-type")) && l.getSrc().getUILinks().size() == 0) {
                            removeConfigPort(l.getSrc().getParentNode(), l.getSrc().getName());
                        }
                    }
                    swUIObjects.getChildren().remove(l);
                }
                e.getUILinks().clear();
                e.removeFromGroup(swUIObjects);
            }
            swConfig.removeNode(n);
            swTree.removeConfigNode(n);
            n.getUINode().removeFromGroup(swUIObjects);
            break;
        case "HWBlock":
            hwConfig.removeNode(n);
            hwTree.removeConfigNode(n);
            n.getUINode().removeFromGroup(hwUIObjects);
            break;
        case "Circuit":
            eeConfig.removeNode(n);
            eeTree.removeConfigNode(n);
            n.getUINode().removeFromGroup(eeUIObjects);
            break;
    }
    // Remove any links associated with this node
    for (Endpoint e : n.getEndpoints()) {
        // Iterator.remove avoids ConcurrentModificationException while
        // removeConfigLink tears the link down.
        Iterator<? extends Link> linkit = e.getLinks().iterator();
        while (linkit.hasNext()) {
            Link l = linkit.next();
            linkit.remove();
            removeConfigLink(l);
        }
    }
    n = null;
}
/**
 * Removes every configuration node whose spec matches the given library node.
 * If the library node is a custom topic, its port is also stripped from every
 * custom-controller node in the configuration.
 */
public void removeMatchingConfigNodes(Node node, Configuration config) {
    ArrayList<Node> toRemove = new ArrayList<Node>();
    boolean isCustomTopic = "topic".equals(node.getAnnotation("custom-type"));
    for (Node n : config.getNodes()) {
        if (node.getName().equals(n.getSpec().getName())) {
            // Collect first; removing while iterating config.getNodes() would
            // invalidate the iteration.
            toRemove.add(n);
        }
        if (isCustomTopic && "controller".equals(n.getSpec().getAnnotation("custom-type"))) {
            String topicName = ((ROSNode) node).getPorts().keySet().iterator().next();
            removeConfigPort(n, topicName);
        }
    }
    for (Node doomed : toRemove) {
        removeConfigNode(doomed);
    }
}
/**
 * Removes a link from the configuration, tree and canvas of the tab implied by
 * the type of its source endpoint, then destroys the link.
 */
public void removeConfigLink(Link l) {
    String srcType = l.getSrc().getClass().getSimpleName();
    if ("ROSPort".equals(srcType)) {
        swConfig.removeLink(l);
        swTree.removeConfigLink(l);
        l.getUILink().removeFromGroup(swUIObjects);
    }
    else if ("Joint".equals(srcType)) {
        hwConfig.removeLink(l);
        hwTree.removeConfigLink(l);
        l.getUILink().removeFromGroup(hwUIObjects);
    }
    else if ("Circuit".equals(srcType)) {
        eeConfig.removeLink(l);
        eeTree.removeConfigLink(l);
        l.getUILink().removeFromGroup(eeUIObjects);
    }
    l.destroy();
}
/** Loads a software library from the given YAML path and rebuilds the SW tree and canvas. */
public void loadSWLibrary(Path swLib) {
    swLibrary.loadLibrary(swLib);
    swConfig.setLibrary(swLibrary);
    swTree = new ROSLabTree(swLibrary, swConfig, this);
    swUIObjects.getChildren().clear();
    swTreeView.setRoot(swTree);
    swTreeView.setShowRoot(false);
    swTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return swTree.new TreeCellImpl();
        }
    });
}
/**
 * @return the software (ROS) library
 */
public Library getSWLibrary() {
    return swLibrary;
}
/** Loads a software configuration from a YAML path against the given library and refreshes the UI. */
public void loadSWConfig(Path swCon, Library swLib) {
    swLibrary = swLib;
    swConfig = ConfigurationParser.parseConfigurationYAML(swCon, swLibrary);
    swTree = new ROSLabTree(swLibrary, swConfig, this);
    swUIObjects.getChildren().clear();
    swTreeView.setRoot(swTree);
    swTreeView.setShowRoot(false);
    swTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return swTree.new TreeCellImpl();
        }
    });
    refreshUINodes();
}
/** Same as {@code loadSWConfig(Path, Library)} but parsing the configuration from a YAML string. */
public void loadSWConfig(String swCon, Library swLib) {
    swLibrary = swLib;
    swConfig = ConfigurationParser.parseConfigurationYAML(swCon, swLibrary);
    swTree = new ROSLabTree(swLibrary, swConfig, this);
    swUIObjects.getChildren().clear();
    swTreeView.setRoot(swTree);
    swTreeView.setShowRoot(false);
    swTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return swTree.new TreeCellImpl();
        }
    });
    refreshUINodes();
}
/**
 * @return the software configuration
 */
public Configuration getSWConfig() {
    return swConfig;
}
/** Loads a mechanical library from the given YAML path and rebuilds the HW tree and canvas. */
public void loadHWLibrary(Path hwLib) {
    hwLibrary.loadLibrary(hwLib);
    hwConfig.setLibrary(hwLibrary);
    hwTree = new ROSLabTree(hwLibrary, hwConfig, this);
    hwUIObjects.getChildren().clear();
    hwTreeView.setRoot(hwTree);
    hwTreeView.setShowRoot(false);
    hwTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return hwTree.new TreeCellImpl();
        }
    });
}
/**
 * @return the mechanical (hardware) library
 */
public Library getHWLibrary() {
    return hwLibrary;
}
/** Loads a mechanical configuration from a YAML path against the given library and refreshes the UI. */
public void loadHWConfig(Path hwCon, Library hwLib) {
    hwLibrary = hwLib;
    hwConfig = ConfigurationParser.parseConfigurationYAML(hwCon, hwLibrary);
    hwTree = new ROSLabTree(hwLibrary, hwConfig, this);
    hwUIObjects.getChildren().clear();
    hwTreeView.setRoot(hwTree);
    hwTreeView.setShowRoot(false);
    hwTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return hwTree.new TreeCellImpl();
        }
    });
    refreshUINodes();
}
/** Same as {@code loadHWConfig(Path, Library)} but parsing the configuration from a YAML string. */
public void loadHWConfig(String hwCon, Library hwLib) {
    hwLibrary = hwLib;
    hwConfig = ConfigurationParser.parseConfigurationYAML(hwCon, hwLibrary);
    hwTree = new ROSLabTree(hwLibrary, hwConfig, this);
    hwUIObjects.getChildren().clear();
    hwTreeView.setRoot(hwTree);
    hwTreeView.setShowRoot(false);
    hwTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return hwTree.new TreeCellImpl();
        }
    });
    refreshUINodes();
}
/**
 * @return the mechanical (hardware) configuration
 */
public Configuration getHWConfig() {
    return hwConfig;
}
/** Loads an electronics library from the given YAML path and rebuilds the EE tree and canvas. */
public void loadEELibrary(Path eeLib) {
    eeLibrary.loadLibrary(eeLib);
    eeConfig.setLibrary(eeLibrary);
    eeTree = new ROSLabTree(eeLibrary, eeConfig, this);
    eeUIObjects.getChildren().clear();
    eeTreeView.setRoot(eeTree);
    eeTreeView.setShowRoot(false);
    eeTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return eeTree.new TreeCellImpl();
        }
    });
}
/**
 * @return the electronics library
 */
public Library getEELibrary() {
    return eeLibrary;
}
/** Loads an electronics configuration from a YAML path against the given library and refreshes the UI. */
public void loadEEConfig(Path eeCon, Library eeLib) {
    eeLibrary = eeLib;
    eeConfig = ConfigurationParser.parseConfigurationYAML(eeCon, eeLibrary);
    eeTree = new ROSLabTree(eeLibrary, eeConfig, this);
    eeUIObjects.getChildren().clear();
    eeTreeView.setRoot(eeTree);
    eeTreeView.setShowRoot(false);
    eeTreeView.setCellFactory(new Callback<TreeView<String>, TreeCell<String>>() {
        @Override
        public TreeCell<String> call(TreeView<String> p) {
            return eeTree.new TreeCellImpl();
        }
    });
    refreshUINodes();
}
/**
 * Rebuilds the electrical view from a YAML configuration string.
 *
 * @param eeCon YAML text of the electrical configuration
 * @param eeLib library the configuration's nodes are resolved against
 */
public void loadEEConfig(String eeCon, Library eeLib) {
    eeLibrary = eeLib;
    eeConfig = ConfigurationParser.parseConfigurationYAML(eeCon, eeLibrary);
    eeTree = new ROSLabTree(eeLibrary, eeConfig, this);
    // Drop any previously rendered electrical UI objects before rebuilding.
    eeUIObjects.getChildren().clear();
    eeTreeView.setRoot(eeTree);
    eeTreeView.setShowRoot(false);
    eeTreeView.setCellFactory(treeView -> eeTree.new TreeCellImpl());
    refreshUINodes();
}
/**
 * @return the current electrical configuration
 */
public Configuration getEEConfig() {
return eeConfig;
}
/**
 * Prompts the user for a library YAML file and loads it into whichever
 * domain tab (Software / Electrical / Mechanical) is currently selected.
 */
@FXML
private void openLibrary() {
    FileChooser chooser = new FileChooser();
    // Only offer YAML library files.
    chooser.getExtensionFilters().add(
            new FileChooser.ExtensionFilter("YAML files (*.yaml)", "*.yaml"));
    File openFile = chooser.showOpenDialog(primaryStage);
    if (openFile == null) {
        return; // User cancelled the dialog.
    }
    String selectedTab = mainTabPane.getSelectionModel().getSelectedItem().getText();
    switch (selectedTab) {
    case "Software":
        loadSWLibrary(openFile.toPath());
        break;
    case "Electrical":
        loadEELibrary(openFile.toPath());
        break;
    case "Mechanical":
        loadHWLibrary(openFile.toPath());
        break;
    }
}
/**
 * Prompts the user for a destination and saves the currently selected
 * domain's library (Software / Electrical / Mechanical) as YAML.
 */
@FXML
private void saveLibrary() {
    FileChooser fileChooser = new FileChooser();
    // Set extension filter
    fileChooser.getExtensionFilters().add(
            new FileChooser.ExtensionFilter("YAML files (*.yaml)", "*.yaml"));
    // Determine the target domain once, before the dialog, so the suggested
    // filename and the library actually written can never disagree (the
    // original queried the selected tab twice in two duplicated switches).
    String selectedTab = mainTabPane.getSelectionModel().getSelectedItem().getText();
    Library library = null;
    switch (selectedTab) {
    case "Software":
        fileChooser.setInitialFileName("swLibrary.yaml");
        library = swLibrary;
        break;
    case "Electrical":
        fileChooser.setInitialFileName("eeLibrary.yaml");
        library = eeLibrary;
        break;
    case "Mechanical":
        fileChooser.setInitialFileName("hwLibrary.yaml");
        library = hwLibrary;
        break;
    }
    // Show save file dialog
    File saveFile = fileChooser.showSaveDialog(primaryStage);
    if (saveFile != null && library != null) {
        LibraryParser.emitLibraryYAML(library, saveFile);
    }
}
/**
 * Prompts the user for a .lab package (a zip of YAML documents) and loads
 * the libraries and configurations it contains. The archive is scanned in
 * two passes: libraries first, because the configurations reference them.
 */
@FXML
private void openConfiguration() {
    FileChooser fileChooser = new FileChooser();
    // Set extension filter
    fileChooser.getExtensionFilters().add(
            new FileChooser.ExtensionFilter("LAB files (*.lab)", "*.lab"));
    // Show open file dialog
    File openFile = fileChooser.showOpenDialog(primaryStage);
    if (openFile == null) {
        return; // User cancelled the dialog.
    }
    // Pass 1: read the Library YAML files. try-with-resources replaces the
    // original's manual null-checked finally blocks and guarantees the
    // stream chain is closed on every path.
    try (ZipInputStream zipIs =
            new ZipInputStream(new BufferedInputStream(new FileInputStream(openFile)))) {
        ZipEntry zEntry;
        while ((zEntry = zipIs.getNextEntry()) != null) {
            try {
                String yaml = readEntryContent(zipIs);
                switch (zEntry.getName()) {
                case "swLibrary.yaml":
                    swLibrary = LibraryParser.parseLibraryYAML(yaml);
                    break;
                case "eeLibrary.yaml":
                    eeLibrary = LibraryParser.parseLibraryYAML(yaml);
                    break;
                case "hwLibrary.yaml":
                    hwLibrary = LibraryParser.parseLibraryYAML(yaml);
                    break;
                }
            }
            catch (Exception e) {
                // Best effort: a corrupt entry should not abort the others.
                e.printStackTrace();
            }
        }
    }
    catch (IOException e) {
        // TODO surface the failure to the user instead of just logging it
        e.printStackTrace();
    }
    // Pass 2: read the Configuration YAML files against the loaded libraries.
    try (ZipInputStream zipIs =
            new ZipInputStream(new BufferedInputStream(new FileInputStream(openFile)))) {
        ZipEntry zEntry;
        while ((zEntry = zipIs.getNextEntry()) != null) {
            try {
                String yaml = readEntryContent(zipIs);
                logger.debug(yaml);
                switch (zEntry.getName()) {
                case "swConfig.yaml":
                    loadSWConfig(yaml, swLibrary);
                    break;
                case "eeConfig.yaml":
                    loadEEConfig(yaml, eeLibrary);
                    break;
                case "hwConfig.yaml":
                    loadHWConfig(yaml, hwLibrary);
                    break;
                }
            }
            catch (Exception e) {
                // Best effort: a corrupt entry should not abort the others.
                e.printStackTrace();
            }
        }
    }
    catch (IOException e) {
        // TODO surface the failure to the user instead of just logging it
        e.printStackTrace();
    }
}
/**
 * Reads the current zip entry fully and returns its bytes as a
 * platform-default-charset string (matching the original behavior).
 */
private static String readEntryContent(ZipInputStream zipIs) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    byte[] buf = new byte[4 * 1024];
    int size;
    while ((size = zipIs.read(buf)) != -1) {
        bos.write(buf, 0, size);
    }
    return bos.toString();
}
/**
 * Prompts the user for a destination and saves the entire workspace — all
 * three configurations and their libraries — as a zipped .lab package.
 */
@FXML
private void saveConfiguration() {
    FileChooser fileChooser = new FileChooser();
    // Set extension filter
    fileChooser.getExtensionFilters().add(
            new FileChooser.ExtensionFilter("LAB files (*.lab)", "*.lab"));
    // Set initial filename
    fileChooser.setInitialFileName("MyConfig_"
            + LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE) + ".lab");
    // Show save file dialog
    File saveFile = fileChooser.showSaveDialog(primaryStage);
    if (saveFile == null) {
        return; // User cancelled the dialog.
    }
    // try-with-resources closes the full stream chain on every path. The
    // original's finally closed only the raw FileOutputStream, so a failure
    // mid-write could leave an unflushed, truncated archive behind.
    try (ZipOutputStream zipOut =
            new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(saveFile)))) {
        writeZipEntry(zipOut, "swConfig.yaml", ConfigurationParser.emitConfigurationYAML(swConfig));
        writeZipEntry(zipOut, "swLibrary.yaml", LibraryParser.emitLibraryYAML(swLibrary));
        writeZipEntry(zipOut, "eeConfig.yaml", ConfigurationParser.emitConfigurationYAML(eeConfig));
        writeZipEntry(zipOut, "eeLibrary.yaml", LibraryParser.emitLibraryYAML(eeLibrary));
        writeZipEntry(zipOut, "hwConfig.yaml", ConfigurationParser.emitConfigurationYAML(hwConfig));
        writeZipEntry(zipOut, "hwLibrary.yaml", LibraryParser.emitLibraryYAML(hwLibrary));
    }
    catch (IOException e) {
        // TODO surface the failure to the user instead of just logging it
        e.printStackTrace();
    }
}
/**
 * Writes a single named YAML document into the open zip stream, using the
 * platform default charset (matching the original behavior).
 */
private static void writeZipEntry(ZipOutputStream zipOut, String name, String content)
        throws IOException {
    zipOut.putNextEntry(new ZipEntry(name));
    zipOut.write(content.getBytes());
    zipOut.closeEntry();
}
/** FXML handler invoked when the selected main tab changes; currently a no-op. */
@FXML
private void tabChanged() {
// TODO Nothing yet...
}
/**
 * Opens a modal dialog for creating a new link between endpoints of the
 * given configuration. Blocks until the user closes the dialog.
 *
 * @param config
 *            the configuration whose endpoints may be linked
 * @return true if the user clicked OK, false otherwise (including when the
 *         dialog FXML fails to load)
 */
public boolean showNewLinkDialog(Configuration config) {
try {
// Load the fxml file and create a new stage for the popup dialog.
FXMLLoader loader = new FXMLLoader();
loader.setLocation(getClass().getResource("ui/general/NewLinkDialog.fxml"));
GridPane page = (GridPane) loader.load();
// Create the dialog Stage.
Stage dialogStage = new Stage();
dialogStage.setTitle("New Link");
dialogStage.initModality(Modality.WINDOW_MODAL);
dialogStage.initOwner(primaryStage);
Scene scene = new Scene(page);
dialogStage.setScene(scene);
// Hand the dialog its stage, the linkable endpoints, and this controller.
NewLinkDialog controller = loader.getController();
controller.setDialogStage(dialogStage);
controller.setEndpoints(config.getEndpoints());
controller.setRLController(this);
// Show the dialog and wait until the user closes it
dialogStage.showAndWait();
return controller.isOkClicked();
}
catch (IOException e) {
e.printStackTrace();
return false;
}
}
/**
 * Opens a modal dialog for adding a new node instance from the given
 * library to its corresponding configuration. Blocks until the dialog is
 * closed.
 *
 * @param library
 *            the library (software, electrical, or hardware) whose node
 *            types may be instantiated
 * @return true if the user clicked Add, false otherwise (including when
 *         the dialog FXML fails to load)
 */
public boolean showNewNodeDialog(Library library) {
    try {
        // Load the fxml file and create a new stage for the popup dialog.
        FXMLLoader loader = new FXMLLoader();
        loader.setLocation(getClass().getResource("ui/general/NewNodeDialog.fxml"));
        GridPane page = (GridPane) loader.load();
        // Create the dialog Stage.
        Stage dialogStage = new Stage();
        dialogStage.setTitle("New Node");
        dialogStage.initModality(Modality.WINDOW_MODAL);
        dialogStage.initOwner(primaryStage);
        Scene scene = new Scene(page);
        dialogStage.setScene(scene);
        NewNodeDialog controller = loader.getController();
        controller.setDialogStage(dialogStage);
        ArrayList<Node> nodeTypes = new ArrayList<Node>(library.getNodes());
        ArrayList<String> configNodeNames = new ArrayList<String>();
        Configuration config = null;
        if (library == swLibrary) {
            // A custom "controller" node may only be instantiated once:
            // remove its spec from the offered types if the software config
            // already contains an instance of it.
            for (Node configNode : swConfig.getNodes()) {
                if ("controller".equals(configNode.getAnnotation("custom-type"))) {
                    Node toRemove = null;
                    for (Node n : nodeTypes) {
                        if (configNode.getSpec().getName().equals(n.getName())) {
                            toRemove = n;
                        }
                    }
                    nodeTypes.remove(toRemove);
                }
            }
            config = swConfig;
        }
        else if (library == hwLibrary) {
            config = hwConfig;
        }
        else if (library == eeLibrary) {
            config = eeConfig;
        }
        // Collect existing node names exactly once. The original also added
        // the software names inside the loop above, producing duplicates,
        // and would NPE here if the library matched none of the three.
        if (config != null) {
            for (Node configNode : config.getNodes()) {
                configNodeNames.add(configNode.getName());
            }
        }
        controller.setConfigNodeNames(configNodeNames);
        controller.setNodes(nodeTypes);
        controller.setRLController(this);
        // Show the dialog and wait until the user closes it
        dialogStage.showAndWait();
        return controller.isAddClicked();
    }
    catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
/**
 * Opens a modal dialog for creating a custom software node of the given
 * kind. Blocks until the user closes the dialog.
 *
 * @param nodeType
 *            dialog variant to load; "Controller" selects the custom
 *            controller dialog, any other value the custom topic dialog
 * @return true if the user clicked Add, false otherwise (including when
 *         the dialog FXML fails to load)
 */
public boolean showCustomNodeDialog(String nodeType) {
try {
// Load the fxml file and create a new stage for the popup dialog.
FXMLLoader loader = new FXMLLoader();
loader.setLocation(getClass().getResource("ui/software/NewCustom" + nodeType + "Dialog.fxml"));
GridPane page = (GridPane) loader.load();
// Create the dialog Stage.
Stage dialogStage = new Stage();
dialogStage.setTitle("New Custom " + nodeType + " Node");
dialogStage.initModality(Modality.WINDOW_MODAL);
dialogStage.initOwner(primaryStage);
Scene scene = new Scene(page);
dialogStage.setScene(scene);
// Wire the controller type that matches the loaded FXML variant.
if ("Controller".equals(nodeType)) {
NewCustomControllerDialog controller = loader.getController();
controller.setDialogStage(dialogStage);
controller.setRLController(this);
// Show the dialog and wait until the user closes it
dialogStage.showAndWait();
return controller.isAddClicked();
}
else {
NewCustomTopicDialog controller = loader.getController();
controller.setDialogStage(dialogStage);
controller.setRLController(this);
// Show the dialog and wait until the user closes it
dialogStage.showAndWait();
return controller.isAddClicked();
}
}
catch (IOException e) {
e.printStackTrace();
return false;
}
}
/**
 * Opens a modal dialog for editing the rate of a custom controller node.
 * Only ROS nodes annotated with custom-type "controller" are editable.
 *
 * @param node the node whose rate should be edited
 * @return true if the user clicked Add; false for ineligible nodes or when
 *         the dialog FXML fails to load
 */
public boolean showEditRateDialog(Node node) {
    boolean editable = node instanceof ROSNode
            && "controller".equals(node.getAnnotation("custom-type"));
    if (!editable) {
        return false;
    }
    try {
        // Load the dialog layout from FXML.
        FXMLLoader fxmlLoader = new FXMLLoader();
        fxmlLoader.setLocation(getClass().getResource("ui/software/EditRateDialog.fxml"));
        GridPane content = (GridPane) fxmlLoader.load();
        // Build a modal stage owned by the main window.
        Stage stage = new Stage();
        stage.setTitle("Edit Rate");
        stage.initModality(Modality.WINDOW_MODAL);
        stage.initOwner(primaryStage);
        stage.setScene(new Scene(content));
        // Hand the dialog its node, stage, and this controller.
        EditRateDialog dialog = fxmlLoader.getController();
        dialog.setNode((ROSNode) node);
        dialog.setDialogStage(stage);
        dialog.setRLController(this);
        // Block until the user dismisses the dialog.
        stage.showAndWait();
        return dialog.isAddClicked();
    }
    catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
/**
 * Opens a modal dialog for loading a software library.
 *
 * @return true if the user clicked Add, false otherwise (including when
 *         the dialog FXML fails to load)
 */
public boolean showLoadLibraryDialog() {
    try {
        // Load the dialog layout from FXML.
        FXMLLoader fxmlLoader = new FXMLLoader();
        fxmlLoader.setLocation(getClass().getResource("ui/software/LoadLibraryDialog.fxml"));
        GridPane content = (GridPane) fxmlLoader.load();
        // Build a modal stage owned by the main window.
        Stage stage = new Stage();
        stage.setTitle("Load Library");
        stage.initModality(Modality.WINDOW_MODAL);
        stage.initOwner(primaryStage);
        stage.setScene(new Scene(content));
        // Hand the dialog its stage and this controller.
        LoadLibraryDialog dialog = fxmlLoader.getController();
        dialog.setDialogStage(stage);
        dialog.setRLController(this);
        // Block until the user dismisses the dialog.
        stage.showAndWait();
        return dialog.isAddClicked();
    }
    catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
/**
 * Opens a modal dialog for adding a custom port to the given node. Blocks
 * until the user closes the dialog.
 *
 * @param node
 *            the node the new port will be attached to
 * @return true if the user clicked Add, false otherwise (including when
 *         the dialog FXML fails to load)
 */
public boolean showNewCustomPortDialog(Node node) {
try {
// Load the fxml file and create a new stage for the popup dialog.
FXMLLoader loader = new FXMLLoader();
loader.setLocation(getClass().getResource("ui/software/NewCustomPortDialog.fxml"));
GridPane page = (GridPane) loader.load();
// Create the dialog Stage.
Stage dialogStage = new Stage();
dialogStage.setTitle("New Port");
dialogStage.initModality(Modality.WINDOW_MODAL);
dialogStage.initOwner(primaryStage);
Scene scene = new Scene(page);
dialogStage.setScene(scene);
// Hand the dialog its stage, the target node, and this controller.
NewCustomPortDialog controller = loader.getController();
controller.setDialogStage(dialogStage);
controller.setNode(node);
controller.setRLController(this);
// Show the dialog and wait until the user closes it
dialogStage.showAndWait();
return controller.isAddClicked();
}
catch (IOException e) {
e.printStackTrace();
return false;
}
}
/**
 * Adds a new ROS port to a configuration node and mirrors the change onto
 * the node's library spec, then refreshes ports and software links.
 *
 * @param node  the configuration node receiving the port (must be a ROSNode)
 * @param pName port (and topic) name
 * @param pType ROS message type name for the topic
 * @param isSub true if the port subscribes to the topic, false if it publishes
 */
public void addConfigPort(Node node, String pName, String pType, boolean isSub) {
    ROSNode configNode = (ROSNode) node;
    ROSNode specNode = (ROSNode) node.getSpec();
    ROSTopic topic = new ROSTopic(pName, new ROSMsgType(pType), isSub);
    ROSPort toAdd = new ROSPort(pName, configNode, topic, false, false);
    configNode.addPort(toAdd);   // Add to this node's features
    specNode.addPort(toAdd);     // Add to library node's features
    updateLibraryNode(specNode); // Update library with library node
    refreshConfigPorts();
    refreshSWConfigLinks();
}
/**
 * Removes a port from a configuration node and mirrors the removal onto
 * the node's library spec, then refreshes ports and software links.
 *
 * @param node  the configuration node losing the port (must be a ROSNode)
 * @param pName name of the port to remove
 */
public void removeConfigPort(Node node, String pName) {
    ROSNode configNode = (ROSNode) node;
    ROSNode specNode = (ROSNode) node.getSpec();
    configNode.removePort(pName); // Remove from this node's features
    specNode.removePort(pName);   // Remove from library node's features
    updateLibraryNode(specNode);  // Update library with library node
    refreshConfigPorts();
    refreshSWConfigLinks();
}
/**
 * Rebuilds the UI endpoints of every software configuration node: detaches
 * the current endpoint widgets, recomputes them, and re-attaches them to
 * the software UI group.
 */
public void refreshConfigPorts() {
    for (Node node : swConfig.getNodes()) {
        UINode uiNode = node.getUINode();
        for (UIEndpoint endpoint : uiNode.getUIEndpoints()) {
            endpoint.removeFromGroup(swUIObjects);
        }
        uiNode.resetEndpoints(this);
        for (UIEndpoint endpoint : uiNode.getUIEndpoints()) {
            endpoint.addToGroup(swUIObjects);
        }
    }
}
/**
 * Recomputes all software configuration links from scratch: removes the
 * current links, then reconnects every compatible pair of endpoints where
 * the target endpoint is a subscribing ROS port.
 */
public void refreshSWConfigLinks() {
    for (Link link : swConfig.getLinks()) {
        removeConfigLink(link);
    }
    swConfig.getLinks().clear();
    for (Node nodeA : swConfig.getNodes()) {
        for (Node nodeB : swConfig.getNodes()) {
            for (Endpoint endA : nodeA.getEndpoints()) {
                for (Endpoint endB : nodeB.getEndpoints()) {
                    // Debug output via logger (resolves the original TODO
                    // about replacing System.out calls).
                    logger.debug("Matching " + endB.getParent().getName() + " " + endA.getParent().getName());
                    if (endA.equals(endB)) {
                        continue; // An endpoint cannot link to itself.
                    }
                    // Only wire pairs where endA is a subscribing ROS port;
                    // the other endpoint (endB) is the link source.
                    if (endA.canConnect(endB) && endA instanceof ROSPort && ((ROSPort) endA).isSubscriber()) {
                        addConfigLink(endB.connect(endA));
                        logger.debug("Adding");
                    }
                }
            }
        }
    }
    logger.debug("Config link count: " + swConfig.getLinks().size());
    for (Link l : swConfig.getLinks()) {
        logger.debug(l.getSrc().getParent().getName() + " -> " + l.getDest().getParent().getName());
    }
}
/**
 * Ensures every configuration node (software, electrical, mechanical) has
 * a UINode attached to its domain's UI group, then re-orders each group so
 * nodes render in front of links.
 */
public void refreshUINodes() {
    ArrayList<Node> nodes = new ArrayList<Node>();
    nodes.addAll(swConfig.getNodes());
    nodes.addAll(eeConfig.getNodes());
    nodes.addAll(hwConfig.getNodes());
    for (Node n : nodes) {
        if (n.getUINode() == null) {
            n.setUINode(new UINode(n, 400, 400));
        }
        // Map the node's concrete type to its domain's UI group.
        Group grp = null;
        switch (n.getClass().getSimpleName()) {
        case "ROSNode":
            grp = swUIObjects;
            break;
        case "HWBlock":
            grp = hwUIObjects;
            break;
        case "Circuit":
            grp = eeUIObjects;
            break;
        }
        if (grp == null) {
            // Unknown node type: nothing to render (the original would NPE
            // on the contains() call below).
            continue;
        }
        if (!grp.getChildren().contains(n.getUINode())) {
            n.getUINode().addToGroup(grp, this);
        }
        // Order all of the UI objects: nodes in front, links behind.
        for (Object nn : grp.getChildren().toArray()) {
            if (nn instanceof UINode) {
                ((UINode) nn).toTheFront();
            }
            if (nn instanceof UILink) {
                ((UILink) nn).toBack();
            }
        }
    }
}
/** Stores the application's primary stage, used as owner for modal dialogs. */
public void setStage(Stage primaryStage) {
this.primaryStage = primaryStage;
}
/** @return the application's primary stage */
public Stage getStage() {
return this.primaryStage;
}
// TODO Update method for use with all types of configs, not just SW
/**
 * Cancels the draw task of every UI endpoint in the software configuration
 * (currently software only; see TODO above).
 */
public void killDrawTasks() {
for (Node n : swConfig.getNodes()) {
for (Endpoint e : n.getEndpoints()) {
e.getUIEndpoint().killDrawTask();
}
}
}
}
| |
/* Copyright (c) restSQL Project Contributors. Licensed under MIT. */
package org.restsql.core.impl.serial;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.restsql.core.ColumnMetaData;
import org.restsql.core.ResponseSerializer;
import org.restsql.core.ResponseValue;
import org.restsql.core.SqlResource;
import org.restsql.core.WriteResponse;
/**
 * Converts read/write results to a JSON string.
 *
 * @author Mark Sawers
 */
public class JsonResponseSerializer implements ResponseSerializer {
    @Override
    public String getSupportedMediaType() {
        return "application/json";
    }

    /**
     * Converts flat select results to a JSON array.
     *
     * @param sqlResource SQL resource
     * @param resultSet results
     * @return JSON string
     */
    @Override
    public String serializeReadFlat(final SqlResource sqlResource, final ResultSet resultSet)
            throws SQLException {
        final StringBuilder body = new StringBuilder(1000);
        // Column metadata is per-resource, not per-row: fetch it once outside
        // the result-set loop (resolves the TODO in the original).
        final List<ColumnMetaData> columns = sqlResource.getMetaData().getAllReadColumns();
        int rowCount = 0;
        while (resultSet.next()) {
            if (rowCount > 0) {
                body.append(",");
            }
            rowCount++;
            body.append("\n\t\t{ ");
            boolean firstPair = true;
            for (ColumnMetaData column : columns) {
                if (!column.isNonqueriedForeignKey()) {
                    Object value = column.getResultByNumber(resultSet);
                    addAttribute(firstPair, body, column.getColumnLabel(), value);
                    if (value != null) {
                        firstPair = false;
                    }
                }
            }
            body.append(" }");
        }
        return completeDoc(DocType.Read, sqlResource, null, body);
    }

    /**
     * Converts hierarchical select results to a JSON array.
     *
     * @param sqlResource SQL resource
     * @param results results
     * @return JSON string
     */
    @Override
    public String serializeReadHierarchical(final SqlResource sqlResource,
            final List<Map<String, Object>> results) {
        final StringBuilder body = new StringBuilder(results.size() * 100);
        serializeReadRowsHierarchical(sqlResource, results, body, 1);
        return completeDoc(DocType.Read, sqlResource, null, body);
    }

    /**
     * Converts write results to a JSON object.
     *
     * @param response response
     * @return JSON string
     */
    @Override
    public String serializeWrite(final SqlResource sqlResource, final WriteResponse response) {
        StringBuilder body = null;
        if (response.getRows() != null) {
            body = new StringBuilder(response.getRows().size() * 100);
            serializeWriteRowsHierarchical(sqlResource, response.getRows(), body, 1);
        }
        return completeDoc(DocType.Write, sqlResource,
                new Object[] { "rowsAffected", response.getRowsAffected() }, body);
    }

    // Package level utils (for testability)

    /**
     * Appends {@code "name": value} to the builder, preceded by a comma
     * unless this is the first attribute. Null values are skipped entirely;
     * numbers and booleans are emitted bare, everything else quoted.
     */
    void addAttribute(final boolean firstAttribute, final StringBuilder string, final String name,
            final Object value) {
        if (value != null) {
            if (!firstAttribute) {
                string.append(", ");
            }
            string.append(JsonUtil.quote(name));
            string.append(": ");
            if (value instanceof Number || value instanceof Boolean) {
                string.append(value);
            } else {
                string.append(JsonUtil.quote(value.toString()));
            }
        }
    }

    // Private utils

    /** Wraps the serialized row body with the document opening/closing syntax. */
    private String completeDoc(final DocType docType, final SqlResource sqlResource,
            final Object[] attributes, final StringBuilder body) {
        int docLength = (body != null) ? body.length() + 250 : 250;
        StringBuilder doc = new StringBuilder(docLength);
        // Init doc
        if (docType == DocType.Read) {
            doc.append("{ \"");
            doc.append(sqlResource.getMetaData().getParent().getRowSetAlias());
            doc.append("\": [");
        } else { // DocType.Write
            doc.append("{ ");
        }
        // Add opening attributes (flattened name/value pairs)
        if (attributes != null) {
            for (int i = 0; i < attributes.length; i += 2) {
                addAttribute(true, doc, String.valueOf(attributes[i]), attributes[i + 1]);
            }
        }
        // Close doc and insert the body, if non-empty
        if (body != null && body.length() > 0) {
            if (docType == DocType.Write) {
                if (attributes != null) {
                    doc.append(",\n\t");
                }
                doc.append("\"");
                doc.append(sqlResource.getMetaData().getParent().getRowSetAlias());
                doc.append("\": [");
            }
            doc.append(body);
            doc.append("\n\t]\n}");
        } else {
            if (docType == DocType.Read) {
                doc.append("] }");
            } else { // DocType.Write
                doc.append(" }");
            }
        }
        return doc.toString();
    }

    /** One-level recursive method to serialize hierarchical read results. */
    @SuppressWarnings("unchecked")
    private void serializeReadRowsHierarchical(final SqlResource sqlResource,
            final List<Map<String, Object>> rows, final StringBuilder body, final int level) {
        final int rowSize = rows.size();
        for (int i = 0; i < rowSize; i++) {
            final boolean lastRow = i == rowSize - 1;
            List<Map<String, Object>> childRows = null;
            final Map<String, Object> row = rows.get(i);
            if (level == 1) {
                body.append("\n\t\t{ ");
            } else {
                body.append("\n\t\t\t\t{ ");
            }
            // Do attribute columns; a List-valued entry marks the child rows
            boolean firstPair = true;
            for (final String columnLabel : row.keySet()) {
                final Object value = row.get(columnLabel);
                if (!(value instanceof List<?>)) {
                    addAttribute(firstPair, body, columnLabel, value);
                    if (value != null) {
                        firstPair = false;
                    }
                } else {
                    childRows = (List<Map<String, Object>>) value;
                }
            }
            // Do embedded child object columns. Null-check childRows: a row
            // with no list-valued column previously threw an NPE here (the
            // write-side twin of this method already guarded against null).
            final boolean hasChildren = level == 1 && childRows != null && childRows.size() > 0;
            if (hasChildren) {
                body.append(",\n\t\t\t\"");
                body.append(sqlResource.getMetaData().getChild().getRowSetAlias());
                body.append("\": [");
                serializeReadRowsHierarchical(sqlResource, childRows, body, 2);
                body.append("\n\t\t\t]");
            }
            // Add line ending
            if (hasChildren) {
                body.append("\n\t\t}");
            } else {
                body.append(" }");
            }
            if (!lastRow) {
                body.append(",");
            }
        }
    }

    /** One-level recursive method to serialize hierarchical write results. */
    @SuppressWarnings("unchecked")
    private void serializeWriteRowsHierarchical(final SqlResource sqlResource,
            final List<Set<ResponseValue>> rows, final StringBuilder body, final int level) {
        final int rowSize = rows.size();
        for (int i = 0; i < rowSize; i++) {
            final boolean lastRow = i == rowSize - 1;
            List<Set<ResponseValue>> childRows = null;
            final Set<ResponseValue> row = rows.get(i);
            if (level == 1) {
                body.append("\n\t\t{ ");
            } else {
                body.append("\n\t\t\t\t{ ");
            }
            // Do attribute columns; a List-valued entry marks the child rows
            boolean firstPair = true;
            for (final ResponseValue value : row) {
                if (!(value.getValue() instanceof List<?>)) {
                    addAttribute(firstPair, body, value.getName(), value.getValue());
                    if (value.getValue() != null) {
                        firstPair = false;
                    }
                } else {
                    childRows = (List<Set<ResponseValue>>) value.getValue();
                }
            }
            // Do embedded child object columns
            final boolean hasChildren = level == 1 && childRows != null && childRows.size() > 0;
            if (hasChildren) {
                body.append(",\n\t\t\t\"");
                body.append(sqlResource.getMetaData().getChild().getRowSetAlias());
                body.append("\": [");
                serializeWriteRowsHierarchical(sqlResource, childRows, body, 2);
                body.append("\n\t\t\t]");
            }
            // Add line ending
            if (hasChildren) {
                body.append("\n\t\t}");
            } else {
                body.append(" }");
            }
            if (!lastRow) {
                body.append(",");
            }
        }
    }

    /** Distinguishes read documents (top-level row array) from write documents. */
    static enum DocType {
        Read, Write;
    }
}
| |
package com.example.phross.grouponcat;
import android.app.ActionBar;
import android.content.Context;
import android.content.Intent;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.FrameLayout;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.example.phross.grouponcat.data.Deal;
import com.example.phross.grouponcat.data.SettingValues;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class MainActivity extends AppCompatActivity implements AdapterView.OnItemClickListener, LocationListener {
private static final String TAG = "Main Activity";
private LocationManager locationManager;
private ProgressBar progressBar;
private DealAdapter dealAdapter;
private ListView listView;
private TextView noDealsFoundView;
private TextView lookingForDealsView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
locationManager = (LocationManager) this.getSystemService(Context.LOCATION_SERVICE);
locationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 1000* SettingValues.refreshRate, 35, this);
progressBar = new ProgressBar(this);
setMainScreen();
dealAdapter = new DealAdapter(this);
listView.setAdapter(dealAdapter);
listView.setOnItemClickListener(this);
// test();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onItemClick(AdapterView<?> parent, View view, int position,
long id) {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.groupon.com/dispatch/us/deal/" + dealAdapter.getItem(position).id));
startActivity(intent);
}
@Override
public void onLocationChanged(final Location location) {
AsyncTask<Void, Void, List<Deal>> getDeals = new AsyncTask<Void, Void, List<Deal>>() {
@Override
protected void onPreExecute() {
Log.d(TAG, "in pre execute");
setLoading();
}
@Override
protected List<Deal> doInBackground(Void... params) {
List<Deal> deals = new ArrayList<Deal>();
// try {
// Thread.sleep(5000);
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
deals = GrouponDeals.getDeals(location);
// for (String id : ids) {
// deals.add(GrouponDeals.getDeal(id, location));
// }
Log.d(TAG, "got all the deals");
return deals;
}
@Override
protected void onPostExecute(List<Deal> result) {
Log.d(TAG, "in post execute");
Collections.sort(result, new Comparator<Deal>() {
@Override
public int compare(Deal lhs, Deal rhs) {
return (int) (lhs.distance - rhs.distance);
}
});
setAfterQuery(result.size() != 0);
dealAdapter.setData(result);
}
};
getDeals.execute();
Log.d(TAG, "done");
}
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
Log.d(TAG, "status changed");
}
@Override
public void onProviderEnabled(String provider) {
Log.d(TAG, "provider enabled");
setMainScreen();
}
@Override
public void onProviderDisabled(String provider) {
Log.d(TAG, "provider disabled");
setNoGPSScreen();
}
public void test() {
// final Location location = locationClient.getLastLocation();
final Location location = new Location("us");
location.setLatitude(41.897607);
location.setLongitude(-87.624062);
final String[] ids = {"ba984176-47db-fd4b-693a-5b7a507676a0",
"daf41a19-2d44-b30e-a5ed-dca6eab6c818"};
}
public void setMainScreen() {
setContentView(R.layout.activity_main);
addContentView(progressBar, new FrameLayout.LayoutParams(ActionBar.LayoutParams.WRAP_CONTENT,
ActionBar.LayoutParams.WRAP_CONTENT, Gravity.CENTER));
listView = (ListView) findViewById(R.id.ListView1);
noDealsFoundView = (TextView) findViewById(R.id.noDealsView);
lookingForDealsView = (TextView) findViewById(R.id.lookingForDealsView);
noDealsFoundView.setVisibility(TextView.INVISIBLE);
lookingForDealsView.setVisibility(TextView.INVISIBLE);
listView.setVisibility(ListView.INVISIBLE);
}
public void setNoGPSScreen() {
setContentView(R.layout.gps_off);
}
public void setLoading() {
progressBar.setVisibility(ProgressBar.VISIBLE);
listView.setVisibility(ListView.INVISIBLE);
noDealsFoundView.setVisibility(TextView.INVISIBLE);
lookingForDealsView.setVisibility(TextView.VISIBLE);
}
public void setAfterQuery(boolean foundDeals) {
progressBar.setVisibility(ProgressBar.INVISIBLE);
lookingForDealsView.setVisibility(TextView.INVISIBLE);
if (foundDeals) {
listView.setVisibility(ListView.VISIBLE);
} else {
noDealsFoundView.setVisibility(TextView.VISIBLE);
}
}
}
| |
package de.saumya.mojo.proxy;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import de.saumya.mojo.proxy.Controller.FileLocation.Type;
import de.saumya.mojo.ruby.GemScriptingContainer;
/**
 * Maps Maven-repository style request paths onto rubygems.org artifacts.
 * <p>
 * Only the {@code rubygems} groupId is served. POM and sha1 files are generated
 * on demand (via the {@code create_pom.rb} script run inside a
 * {@link GemScriptingContainer}) and cached in {@code localStorage}; requests for
 * {@code .gem} files are answered with a redirect to the rubygems S3 bucket.
 */
public class Controller {

    /** File suffix used for sha1 checksum files. */
    private static final String SHA1 = ".sha1";

    /** Base URL used to download gems when generating POMs. */
    private static final String RUBYGEMS_URL = "https://rubygems.org/gems";

    /** Base URL that {@code .gem} requests are redirected to. */
    private static final String RUBYGEMS_S3_URL = "http://s3.amazonaws.com/production.s3.rubygems.org/gems";

    // Platform suffixes probed in order when resolving a concrete gem file name;
    // the trailing "" entry matches plain, platform-less gems.
    public static final String[] PLATFORMS = { "-universal-java-1.5",
        "-universal-java-1.6",
        "-universal-java-1.7",
        "-universal-java-1.8",
        "-universal-java",
        "-universal-jruby-1.2",
        "-jruby",
        "-java",
        "-universal-ruby-1.8.7",
        "-universal-ruby-1.9.2",
        "-universal-ruby-1.9.3",
        "-universal-ruby",
        "" };

    // gem name -> set of versions that are known broken and must be skipped
    // when building version listings and maven-metadata.xml
    static final Map<String, Set<String>> BROKEN_GEMS = new HashMap<String, Set<String>>();
    static {
        // activeresource-2.0.0 does not exist !!! (so rails-2.0.0 is unusable)
        Set<String> rails = new TreeSet<String>();
        rails.add("2.0.0");
        BROKEN_GEMS.put("rails", rails);
        // jruby-openssl-0.7.6 can not open gem with jruby-1.6.8
        Set<String> openssl = new TreeSet<String>();
        openssl.add("0.7.6");
        BROKEN_GEMS.put("jruby-openssl", openssl);
    }

    /** Directory where generated pom/sha1 files are cached. */
    private final File localStorage;

    /** Embedded JRuby container used to run the pom-generating script. */
    private final GemScriptingContainer script = new GemScriptingContainer();

    /**
     * Describes how a request should be answered: inline content, a local file,
     * a redirect, or an error/unavailable status. Exactly one of the fields
     * {@code localFile}, {@code remoteUrl}, {@code content} is set (or none for
     * a plain directory redirect); {@code type} tells the servlet layer how to
     * deliver it.
     */
    public static class FileLocation {

        // Delivery mode for this location.
        enum Type {
            XML_CONTENT,
            HTML_CONTENT,
            ASCII_FILE,
            XML_FILE,
            REDIRECT,
            NOT_FOUND ,
            ASCII_CONTENT,
            REDIRECT_TO_DIRECTORY,
            TEMP_UNAVAILABLE }

        /** Redirect to the same path with a trailing slash (directory redirect). */
        public FileLocation() {
            this(null, null, null, Type.REDIRECT_TO_DIRECTORY);
        }

        /** Not-found response with the given message as body. */
        public FileLocation(String message) {
            this(null, null, message, Type.NOT_FOUND);
        }

        /** Inline content delivered as the given type. */
        public FileLocation(String content, Type type) {
            this(null, null, content, type);
        }

        /** Serve the given local file as the given type. */
        public FileLocation(File local, Type type) {
            this(local, null, null, type);
        }

        /** Redirect to the given remote URL. */
        public FileLocation(URL remote) {
            this(null, remote, null, Type.REDIRECT);
        }

        private FileLocation(File localFile, URL remoteFile, String content, Type type) {
            this.content = content;
            this.remoteUrl = remoteFile;
            this.localFile = localFile;
            this.type = type;
        }

        final File localFile;
        final URL remoteUrl;
        final String content;
        final Type type;
    }

    /** Ruby object returned by create_pom.rb; used to generate POMs from gem files. */
    private final Object createPom;

    // assume there will be only one instance of this class per servlet container;
    // holds the gem names currently being generated so concurrent requests back off
    private final Set<String> fileLocks = new HashSet<String>();

    /**
     * @param storage directory for generated files; created if missing
     * @throws IOException if the pom-generating script cannot be loaded
     */
    public Controller(File storage) throws IOException{
        this.localStorage = storage;
        this.localStorage.mkdirs();
        this.createPom = script.runScriptletFromClassloader("create_pom.rb");
    }

    /**
     * Resolves a request path to a {@link FileLocation}. The recognized path
     * layouts are listed below; anything else yields a not-found location.
     *
     * @param path request path relative to the repository root
     */
    public FileLocation locate(String path) throws IOException{
        // release/rubygems/name/version
        // release/rubygems/name/version/
        // release/rubygems/name/version/name-version.gem
        // release/rubygems/name/version/name-version.gem.md5
        // release/rubygems/name/version/name-version.pom
        // release/rubygems/name/version/name-version.pom.md5
        // release/rubygems/name
        // release/rubygems/name/
        // release/rubygems/name/maven-metadata.xml
        path = path.replaceAll("/+", "/"); // collapse duplicate slashes
        if(path.endsWith("/")){
            path += "index.html"; // directory request -> synthesized listing
        }
        String[] parts = path.split("/");
        if(parts.length == 0){
            // TODO make listing with two directories 'releases', 'prereleases'
            return new FileLocation("for maven", Type.ASCII_CONTENT);
        }
        else {
            // the first path element decides release vs prerelease handling
            boolean prereleases = parts[0].contains("pre");
            if(parts.length > 1 && !"rubygems".equals(parts[1])){
                return notFound("Only rubygems/ groupId is supported through this proxy.");
            }
            switch(parts.length){
            case 1:
            case 2:
                // TODO make listing with one directory 'rubygems'
                return notFound("directory listing not implemented");
            case 3:
                if("index.html".equals(parts[2])) {
                    return notFound("directory listing not implemented");
                }
                else {
                    // bare gem-name path: redirect to the trailing-slash form
                    return new FileLocation();
                }
            case 4:
                if("maven-metadata.xml".equals(parts[3])){
                    return metadata(parts[2], prereleases);
                }
                else if(("maven-metadata.xml" + SHA1).equals(parts[3])){
                    return metadataSha1(parts[2], prereleases);
                }
                else if("index.html".equals(parts[3])){
                    return versionListDirectory(parts[2], path, prereleases);
                }
                else {
                    return notFound("not found");
                }
            case 5:
                String filename = parts[4].replace("-SNAPSHOT", "");
                if("index.html".equals(filename)){
                    return directory(parts[2], parts[3], path);
                }
                if(filename.endsWith(".gem")){
                    // keep it backward compatible
                    filename = filename.replace("-java.gem", ".gem");
                    // ensure pom/sha generation has run for this gem before redirecting
                    File local = new File(localStorage, filename.replace(".gem", ".pom"));
                    if(!local.exists()){
                        try {
                            if (!createFiles(parts[2], parts[3])){
                                // another request holds the generation lock right now
                                return new FileLocation(filename + " is being generated", Type.TEMP_UNAVAILABLE);
                            }
                        } catch (FileNotFoundException e) {
                            return notFound("not found");
                        }
                    }
                    // probe S3 for the first platform variant that exists
                    String url = null;
                    for( String platform : PLATFORMS )
                    {
                        url = RUBYGEMS_S3_URL + "/" + filename.replace(".gem", platform + ".gem");
                        if ( exists( url ) ) {
                            break;
                        }
                    }
                    // NOTE(review): if no variant exists this still redirects to the
                    // last probed URL (the "" platform) — confirm that is intended
                    return new FileLocation( new URL( url ) );
                }
                if(filename.endsWith(SHA1) || filename.endsWith(".pom")){
                    File local = new File(localStorage, filename);
                    if(!local.exists()){
                        try {
                            if (!createFiles(parts[2], parts[3])){
                                return new FileLocation(filename + " is being generated", Type.TEMP_UNAVAILABLE);
                            }
                        } catch (FileNotFoundException e) {
                            return notFound("not found");
                        }
                    }
                    // sha1 files are served as plain text, poms as XML
                    return new FileLocation(local, filename.endsWith(SHA1)? Type.ASCII_FILE: Type.XML_FILE);
                }
                return notFound("not found");
            default:
                return notFound("Completely unhandleable request!");
            }
        }
    }

    /**
     * HEAD-checks whether the given URL answers with HTTP 200.
     * NOTE(review): the connection is never disconnect()ed — relies on
     * keep-alive pooling / GC to release it.
     */
    public boolean exists(String url){
        try {
            HttpURLConnection con = (HttpURLConnection) new URL(url).openConnection();
            con.setRequestMethod("HEAD");
            return con.getResponseCode() == HttpURLConnection.HTTP_OK;
        }
        catch (FileNotFoundException e) {
            //e.printStackTrace();
            return false;
        }
        catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /** Builds the HTML listing for a gem/version directory (pom, gem and sha1 links). */
    private FileLocation directory(String gemname, String version, String path) throws IOException {
        HtmlDirectoryBuilder builder = new HtmlDirectoryBuilder();
        builder.buildHeader(path);
        String basename = gemname + "-" + version;
        String pomfile = basename + ".pom";
        String gemfile = basename + ".gem";
        builder.buildFileLink(pomfile);
        builder.buildFileLink(pomfile + SHA1);
        builder.buildFileLink(gemfile);
        builder.buildFileLink(gemfile + SHA1);
        builder.buildFooter();
        return new FileLocation(builder.toHTML(), Type.HTML_CONTENT);
    }

    /**
     * Generates the pom and sha1 files for the given gem/version if missing.
     *
     * @return false when another thread currently holds the generation lock for
     *         this gem (the caller should answer "temporarily unavailable")
     * @throws FileNotFoundException propagated when the gem cannot be found upstream
     */
    private boolean createFiles(String name, String version) throws IOException {
        String gemname = name + "-" + version.replace( "-SNAPSHOT", "" );
        try {
            // per-gem lock: the first caller wins, concurrent callers return false
            synchronized (fileLocks) {
                if (fileLocks.contains(gemname)) {
                    return false;
                }
                else {
                    fileLocks.add(gemname);
                }
            }
            File gemfile = new File(this.localStorage, gemname + ".gem");
            File gemfileSha = new File(this.localStorage, gemname + ".gem" + SHA1);
            File pomfile = new File(this.localStorage, gemname + ".pom");
            File pomfileSha = new File(this.localStorage, gemname + ".pom" + SHA1);
            if (!(gemfileSha.exists() && pomfile.exists() && pomfileSha.exists())) {
                // try all platform variants until one downloads successfully
                String url = null;
                for( String platform : PLATFORMS )
                {
                    url = RUBYGEMS_URL + "/" + gemname + platform + ".gem";
                    try {
                        downloadGemfile(gemfile, new URL(url));
                        break;
                    }
                    catch (FileNotFoundException ignore) {
                        // this variant does not exist upstream - try the next one
                    }
                }
                // NOTE(review): if every variant 404s, gemfile was never written and
                // createPom() runs against a missing file — confirm how create_pom.rb
                // behaves in that case
                String pom = createPom(gemfile);
                writeUTF8(pomfile, pom);
                writeUTF8(pomfileSha, sha1(pom));
            }
            // we do not keep the gemfile on disc
            gemfile.delete();
            return true;
        }
        finally {
            synchronized (fileLocks) {
                fileLocks.remove(gemname);
            }
        }
    }

    /** Runs create_pom.rb on the downloaded gem and returns the POM XML. */
    private String createPom(File gemfile) {
        // protect the script container
        synchronized (script) {
            // NOTE(review): replaceAll("&", "&") is a no-op and the second call
            // decodes "&amp;" to "&" — this looks like escaping logic mangled by
            // an HTML-entity pass; verify against the project history
            return script.callMethod(createPom, "create", gemfile.getAbsolutePath(), String.class)
                .replaceAll("&", "&").replaceAll("&amp;", "&");
        }
    }

    /**
     * Streams the gem at {@code url} into {@code gemfile}, writing a sha1
     * checksum file alongside once the copy finishes.
     */
    private void downloadGemfile(File gemfile, URL url) throws IOException {
        InputStream input = null;
        OutputStream output = null;
        MessageDigest sha = newSha1Digest();
        try {
            input = new BufferedInputStream(url.openStream());
            output = new BufferedOutputStream(new FileOutputStream(gemfile));
            // copy byte-wise, feeding the digest as we go
            int b = input.read();
            while(b != -1){
                output.write(b);
                sha.update((byte) b);
                b = input.read();
            }
        }
        finally {
            if( input != null){
                input.close();
            }
            if( output != null){
                output.close();
                // the checksum is only written when the output file was opened
                writeSha(new File(gemfile.getAbsolutePath() + SHA1), sha);
            }
        }
    }

    /** Writes the hex digest of {@code sha} into {@code file}. */
    private void writeSha(File file, MessageDigest sha) throws IOException {
        writeUTF8(file, toHex(sha.digest()));
    }

    /** Writes {@code content} to {@code file} using UTF-8. */
    private void writeUTF8(File file, String content) throws IOException {
        PrintWriter writer = null;
        try {
            writer = new PrintWriter(new OutputStreamWriter(new FileOutputStream(file),
                Charset.forName("UTF-8")));
            writer.print(content);
        }
        finally {
            if(writer != null){
                writer.close();
            }
        }
    }

    /** HTML listing of all (non-broken) versions of a gem plus the metadata files. */
    private FileLocation versionListDirectory(String name, String path, boolean prereleases) throws IOException {
        HtmlDirectoryBuilder html = new HtmlDirectoryBuilder();
        html.buildHeader(path);
        VersionDirectoryBuilder builder = new VersionDirectoryBuilder(name, prereleases, html, BROKEN_GEMS.get(name));
        builder.build();
        html.buildFileLink("maven-metadata.xml");
        html.buildFileLink("maven-metadata.xml" + SHA1);
        html.buildFooter();
        return new FileLocation(html.toHTML(), Type.HTML_CONTENT);
    }

    /** Shorthand for a not-found {@link FileLocation}. */
    private FileLocation notFound(String message) {
        return new FileLocation(message);
    }

    /** Generates the maven-metadata.xml content for the given gem. */
    private FileLocation metadata(String name, boolean prereleases) throws IOException {
        MavenMetadataBuilder builder = new MavenMetadataBuilder(name, prereleases, BROKEN_GEMS.get(name));
        builder.build();
        return new FileLocation(builder.toXML(), Type.XML_CONTENT);
    }

    /** sha1 checksum of the maven-metadata.xml content (the metadata is rebuilt). */
    private FileLocation metadataSha1(String name, boolean prereleases) throws IOException {
        MavenMetadataBuilder builder = new MavenMetadataBuilder(name, prereleases, BROKEN_GEMS.get(name));
        builder.build();
        return new FileLocation(sha1(builder.toXML()), Type.ASCII_CONTENT);
    }

    /** Hex-encoded SHA-1 of the UTF-8 bytes of {@code text}. */
    private String sha1(String text) {
        MessageDigest md = newSha1Digest();
        try {
            md.update(text.getBytes("UTF-8"));
        }
        catch (UnsupportedEncodingException e) {
            throw new RuntimeException("should not happen", e);
        }
        return toHex(md.digest());
    }

    /** @return a fresh SHA-1 digest (every JRE is required to provide SHA-1) */
    private MessageDigest newSha1Digest() {
        MessageDigest md;
        try {
            md = MessageDigest.getInstance("SHA-1");
        }
        catch (NoSuchAlgorithmException e) {
            throw new RuntimeException("error getting sha1 instance", e);
        }
        return md;
    }

    /** Lower-case hex encoding with zero-padded bytes. */
    private String toHex(byte[] data) {
        StringBuilder buf = new StringBuilder();// could presize to data.length * 2
        for (byte b: data) {
            if(b < 0){
                // map negative bytes back to 0x80..0xff
                buf.append(Integer.toHexString(256 + b));
            }
            else if(b < 16) {
                buf.append('0').append(Integer.toHexString(b));
            }
            else {
                buf.append(Integer.toHexString(b));
            }
        }
        return buf.toString();
    }
}
| |
/*
* Copyright (c) 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package grails.plugins.quartz;
import org.quartz.CalendarIntervalTrigger;
import org.quartz.CronExpression;
import org.quartz.CronScheduleBuilder;
import org.quartz.DateBuilder;
import org.quartz.JobDetail;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.JobDetailImpl;
import org.quartz.impl.triggers.CalendarIntervalTriggerImpl;
import org.quartz.impl.triggers.CoreTrigger;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.PropertyAccessorFactory;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import static org.quartz.CronScheduleBuilder.*;
import static org.quartz.DateBuilder.*;
import static org.quartz.TriggerBuilder.*;
import java.beans.PropertyEditorSupport;
import java.text.ParseException;
import java.util.Date;
import java.util.Map;
/**
* TODO: write javadoc
*
* @author Sergey Nebolsin (nebolsin@gmail.com)
*/
/**
 * Spring {@link FactoryBean} that assembles a Quartz {@link Trigger} from a map
 * of trigger attributes supplied by the Grails Quartz plugin.
 * <p>
 * The trigger starts immediately unless a {@code START_DELAY} attribute is
 * present, is associated either with an injected {@link JobDetailImpl} or with
 * the job name from the attributes, and gets a cron schedule only when a
 * {@code CRON_EXPRESSION} attribute is configured. All remaining attributes are
 * applied to the built trigger as bean properties.
 *
 * @author Sergey Nebolsin (nebolsin@gmail.com)
 */
public class CustomTriggerFactoryBean implements FactoryBean, InitializingBean {
    private Class<Trigger> triggerClass;
    private Trigger customTrigger;
    private JobDetailImpl jobDetail;
    private Map triggerAttributes;

    /**
     * Builds the trigger. The previous implementation duplicated the whole
     * builder chain across four nearly identical branches (delay x jobDetail);
     * the only varying pieces — start time, forJob target and optional cron
     * schedule — are factored out here.
     */
    public void afterPropertiesSet() throws ParseException {
        // Start immediately unless an explicit START_DELAY attribute is present.
        long startTime = System.currentTimeMillis();
        if (triggerAttributes.containsKey(GrailsJobClassConstants.START_DELAY)) {
            // Remove the delay so it is not re-applied via setPropertyValues() below.
            Number startDelay = (Number) triggerAttributes.remove(GrailsJobClassConstants.START_DELAY);
            startTime += startDelay.longValue();
        }

        String name = (String) triggerAttributes.get("name");
        String group = (String) triggerAttributes.get("group");

        TriggerBuilder<Trigger> builder = newTrigger()
                .withIdentity(name, group)
                .startAt(new Date(startTime));

        // Associate with the job: by JobDetail when one was injected, otherwise by name.
        if (jobDetail != null) {
            builder = builder.forJob(jobDetail);
        } else {
            builder = builder.forJob(name);
        }

        // Attach a cron schedule only when a cron expression was configured.
        if (triggerAttributes.containsKey(GrailsJobClassConstants.CRON_EXPRESSION)) {
            customTrigger = builder.withSchedule(buildCronSchedule()).build();
        } else {
            customTrigger = builder.build();
        }

        // Apply the remaining attributes as bean properties, converting GStrings
        // (and any other objects) to plain Strings via StringEditor.
        BeanWrapper customTriggerWrapper = PropertyAccessorFactory.forBeanPropertyAccess(customTrigger);
        customTriggerWrapper.registerCustomEditor(String.class, new StringEditor());
        customTriggerWrapper.setPropertyValues(triggerAttributes);
    }

    /**
     * Allow the cronExpression to be passed as either a string or instance of CronExpression
     *
     * @return an instance of CronScheduleBuilder
     */
    private CronScheduleBuilder buildCronSchedule() {
        if (triggerAttributes.get(GrailsJobClassConstants.CRON_EXPRESSION) instanceof CronExpression) {
            return cronSchedule((CronExpression) triggerAttributes.get(GrailsJobClassConstants.CRON_EXPRESSION));
        }
        return cronSchedule((String) triggerAttributes.get(GrailsJobClassConstants.CRON_EXPRESSION));
    }

    /**
     * {@inheritDoc}
     *
     * @see org.springframework.beans.factory.FactoryBean#getObject()
     */
    public Object getObject() throws Exception {
        return customTrigger;
    }

    /**
     * {@inheritDoc}
     *
     * @see org.springframework.beans.factory.FactoryBean#getObjectType()
     */
    public Class getObjectType() {
        return triggerClass;
    }

    /**
     * {@inheritDoc}
     *
     * @see org.springframework.beans.factory.FactoryBean#isSingleton()
     */
    public boolean isSingleton() {
        return true;
    }

    /** Optional job detail; when set, the trigger is bound to it directly. */
    public void setJobDetail(JobDetailImpl jobDetail) {
        this.jobDetail = jobDetail;
    }

    /** Declared trigger type, reported through {@link #getObjectType()}. */
    public void setTriggerClass(Class<Trigger> triggerClass) {
        this.triggerClass = triggerClass;
    }

    /** Attribute map driving trigger construction (name, group, delays, cron, ...). */
    public void setTriggerAttributes(Map triggerAttributes) {
        this.triggerAttributes = triggerAttributes;
    }
}
// Property editor that coerces any incoming value (e.g. a Groovy GString) into a
// plain java.lang.String before it is applied to a trigger property.
class StringEditor extends PropertyEditorSupport {
    @Override
    public void setValue(Object value) {
        if (value == null) {
            super.setValue(null);
        } else {
            super.setValue(value.toString());
        }
    }

    @Override
    public void setAsText(String text) throws IllegalArgumentException {
        setValue(text);
    }
}
| |
package org.jgroups.protocols;
import org.jgroups.*;
import org.jgroups.annotations.LocalAddress;
import org.jgroups.annotations.MBean;
import org.jgroups.annotations.Property;
import org.jgroups.conf.PropertyConverters;
import org.jgroups.stack.IpAddress;
import org.jgroups.stack.Protocol;
import org.jgroups.util.Util;
import java.io.*;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.util.*;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;
/**
* Catches SUSPECT events traveling up the stack. Verifies that the suspected member is really dead. If yes,
* passes SUSPECT event up the stack, otherwise discards it. Has to be placed somewhere above the FD layer and
* below the GMS layer (receiver of the SUSPECT event). Note that SUSPECT events may be reordered by this protocol.
* @author Bela Ban
*/
@MBean(description="Double-checks suspicions reports")
public class VERIFY_SUSPECT extends Protocol implements Runnable {
/* ------------------------------------------ Properties ------------------------------------------ */
@Property(description="Number of millisecs to wait for a response from a suspected member")
protected long timeout=2000;
@Property(description="Number of verify heartbeats sent to a suspected member")
protected int num_msgs=1;
@Property(description="Use InetAddress.isReachable() to verify suspected member instead of regular messages")
protected boolean use_icmp=false;
@Property(description="Send the I_AM_NOT_DEAD message back as a multicast rather than as multiple unicasts " +
"(default is false)")
protected boolean use_mcast_rsps=false;
@LocalAddress
@Property(description="Interface for ICMP pings. Used if use_icmp is true " +
"The following special values are also recognized: GLOBAL, SITE_LOCAL, LINK_LOCAL and NON_LOOPBACK",
systemProperty={Global.BIND_ADDR})
protected InetAddress bind_addr; // interface for ICMP pings
@Property(name="bind_interface", converter=PropertyConverters.BindInterface.class,
description="The interface (NIC) which should be used by this transport", dependsUpon="bind_addr")
protected String bind_interface_str=null;
/* --------------------------------------------- Fields ------------------------------------------------ */
/** network interface to be used to send the ICMP packets */
protected NetworkInterface intf=null;
protected Address local_addr=null;
// a list of suspects, ordered by time when a SUSPECT event needs to be sent up
protected final DelayQueue<Entry> suspects=new DelayQueue<Entry>();
protected Thread timer=null;
public VERIFY_SUSPECT() {
}
public Object down(Event evt) {
switch(evt.getType()) {
case Event.SET_LOCAL_ADDRESS:
local_addr=(Address)evt.getArg();
break;
case Event.VIEW_CHANGE:
View v=(View)evt.getArg();
adjustSuspectedMembers(v.getMembers());
break;
}
return down_prot.down(evt);
}
public Object up(Event evt) {
switch(evt.getType()) {
case Event.SUSPECT: // it all starts here ...
Address suspected_mbr=(Address)evt.getArg();
if(suspected_mbr == null) {
if(log.isErrorEnabled()) log.error("suspected member is null");
return null;
}
if(local_addr != null && local_addr.equals(suspected_mbr)) {
if(log.isTraceEnabled())
log.trace("I was suspected; ignoring SUSPECT message");
return null;
}
if(!use_icmp)
verifySuspect(suspected_mbr);
else
verifySuspectWithICMP(suspected_mbr);
return null; // don't pass up; we will decide later (after verification) whether to pass it up
case Event.MSG:
Message msg=(Message)evt.getArg();
VerifyHeader hdr=(VerifyHeader)msg.getHeader(this.id);
if(hdr == null)
break;
switch(hdr.type) {
case VerifyHeader.ARE_YOU_DEAD:
if(hdr.from == null) {
if(log.isErrorEnabled()) log.error("ARE_YOU_DEAD: hdr.from is null");
}
else {
Message rsp;
Address target=use_mcast_rsps? null : hdr.from;
for(int i=0; i < num_msgs; i++) {
rsp=new Message(target).setFlag(Message.Flag.INTERNAL)
.putHeader(this.id, new VerifyHeader(VerifyHeader.I_AM_NOT_DEAD, local_addr));
down_prot.down(new Event(Event.MSG, rsp));
}
}
return null;
case VerifyHeader.I_AM_NOT_DEAD:
if(hdr.from == null) {
if(log.isErrorEnabled()) log.error("I_AM_NOT_DEAD: hdr.from is null");
return null;
}
unsuspect(hdr.from);
return null;
}
return null;
case Event.CONFIG:
if(bind_addr == null) {
Map<String,Object> config=(Map<String,Object>)evt.getArg();
bind_addr=(InetAddress)config.get("bind_addr");
}
}
return up_prot.up(evt);
}
/**
* Removes all elements from suspects that are <em>not</em> in the new membership
*/
protected void adjustSuspectedMembers(List<Address> new_mbrship) {
synchronized(suspects) {
for(Iterator<Entry> it=suspects.iterator(); it.hasNext();) {
Entry entry=it.next();
if(!new_mbrship.contains(entry.suspect))
it.remove();
}
}
}
/**
* Started when a suspected member is added to suspects. Iterates over the queue as long as there are suspects in
* it and removes a suspect when the timeout for it has elapsed. Sends up a SUSPECT event for every removed suspect.
* When a suspected member is un-suspected, the member is removed from the queue.
*/
public void run() {
while(!suspects.isEmpty() && timer != null) {
try {
Entry entry=suspects.poll(timeout * 2,TimeUnit.MILLISECONDS);
if(entry != null) {
if(log.isTraceEnabled())
log.trace(entry.suspect + " is dead (passing up SUSPECT event)");
up_prot.up(new Event(Event.SUSPECT, entry.suspect));
}
}
catch(InterruptedException e) {
}
}
}
/* --------------------------------- Private Methods ----------------------------------- */
/**
* Sends ARE_YOU_DEAD message to suspected_mbr, wait for return or timeout
*/
void verifySuspect(Address mbr) {
Message msg;
if(mbr == null) return;
addSuspect(mbr);
startTimer(); // start timer before we send out are you dead messages
// moved out of synchronized statement (bela): http://jira.jboss.com/jira/browse/JGRP-302
if(log.isTraceEnabled()) log.trace("verifying that " + mbr + " is dead");
for(int i=0; i < num_msgs; i++) {
msg=new Message(mbr).setFlag(Message.Flag.INTERNAL)
.putHeader(this.id, new VerifyHeader(VerifyHeader.ARE_YOU_DEAD, local_addr));
down_prot.down(new Event(Event.MSG, msg));
}
}
void verifySuspectWithICMP(Address suspected_mbr) {
InetAddress host=suspected_mbr instanceof IpAddress? ((IpAddress)suspected_mbr).getIpAddress() : null;
if(host == null)
throw new IllegalArgumentException("suspected_mbr is not of type IpAddress - FD_ICMP only works with these");
try {
if(log.isTraceEnabled())
log.trace("pinging host " + suspected_mbr + " using interface " + intf);
long start=System.currentTimeMillis(), stop;
boolean rc=host.isReachable(intf, 0, (int)timeout);
stop=System.currentTimeMillis();
if(rc) { // success
if(log.isTraceEnabled())
log.trace("successfully received response from " + host + " (after " + (stop-start) + "ms)");
}
else { // failure
if(log.isTraceEnabled())
log.debug("could not ping " + suspected_mbr + " after " + (stop-start) + "ms; " +
"passing up SUSPECT event");
removeSuspect(suspected_mbr);
up_prot.up(new Event(Event.SUSPECT, suspected_mbr));
}
}
catch(Exception ex) {
if(log.isErrorEnabled())
log.error("failed pinging " + suspected_mbr, ex);
}
}
protected boolean addSuspect(Address suspect) {
if(suspect == null)
return false;
synchronized(suspects) {
for(Entry entry: suspects) // check for duplicates
if(entry.suspect.equals(suspect))
return false;
suspects.add(new Entry(suspect, System.currentTimeMillis() + timeout));
return true;
}
}
protected boolean removeSuspect(Address suspect) {
if(suspect == null)
return false;
boolean retval=false;
synchronized(suspects) {
for(Iterator<Entry> it=suspects.iterator(); it.hasNext();) {
Entry entry=it.next();
if(entry.suspect.equals(suspect)) {
it.remove();
retval=true; // don't break, possibly remove more (2nd line of defense)
}
}
}
return retval;
}
public void unsuspect(Address mbr) {
boolean removed=mbr != null && removeSuspect(mbr);
if(removed) {
if(log.isTraceEnabled()) log.trace("member " + mbr + " was unsuspected");
down_prot.down(new Event(Event.UNSUSPECT, mbr));
up_prot.up(new Event(Event.UNSUSPECT, mbr));
}
}
protected synchronized void startTimer() {
if(timer == null || !timer.isAlive()) {
timer=getThreadFactory().newThread(this,"VERIFY_SUSPECT.TimerThread");
timer.setDaemon(true);
timer.start();
}
}
public void init() throws Exception {
super.init();
if(bind_addr != null)
intf=NetworkInterface.getByInetAddress(bind_addr);
}
public synchronized void stop() {
Thread tmp;
if(timer != null && timer.isAlive()) {
tmp=timer;
timer=null;
tmp.interrupt();
tmp=null;
}
timer=null;
}
/* ----------------------------- End of Private Methods -------------------------------- */
protected class Entry implements Delayed {
protected final Address suspect;
protected final long target_time;
public Entry(Address suspect, long target_time) {
this.suspect=suspect;
this.target_time=target_time;
}
public int compareTo(Delayed o) {
Entry other=(Entry)o;
long my_delay=getDelay(TimeUnit.MILLISECONDS), other_delay=other.getDelay(TimeUnit.MILLISECONDS);
return my_delay < other_delay ? -1 : my_delay > other_delay? 1 : 0;
}
public long getDelay(TimeUnit unit) {
long delay=target_time - System.currentTimeMillis();
return unit.convert(delay, TimeUnit.MILLISECONDS);
}
public String toString() {
return suspect + ": " + target_time;
}
}
public static class VerifyHeader extends Header {
static final short ARE_YOU_DEAD=1; // 'from' is sender of verify msg
static final short I_AM_NOT_DEAD=2; // 'from' is suspected member
short type=ARE_YOU_DEAD;
Address from=null; // member who wants to verify that suspected_mbr is dead
public VerifyHeader() {
} // used for externalization
VerifyHeader(short type) {
this.type=type;
}
VerifyHeader(short type, Address from) {
this(type);
this.from=from;
}
public String toString() {
switch(type) {
case ARE_YOU_DEAD:
return "[VERIFY_SUSPECT: ARE_YOU_DEAD]";
case I_AM_NOT_DEAD:
return "[VERIFY_SUSPECT: I_AM_NOT_DEAD]";
default:
return "[VERIFY_SUSPECT: unknown type (" + type + ")]";
}
}
public void writeTo(DataOutput out) throws Exception {
out.writeShort(type);
Util.writeAddress(from, out);
}
public void readFrom(DataInput in) throws Exception {
type=in.readShort();
from=Util.readAddress(in);
}
public int size() {
return Global.SHORT_SIZE + Util.size(from);
}
}
}
| |
/*
* Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.security.cert;
import java.io.InputStream;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.security.Provider;
import java.security.Security;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import sun.security.jca.*;
import sun.security.jca.GetInstance.Instance;
/**
* This class defines the functionality of a certificate factory, which is
* used to generate certificate, certification path ({@code CertPath})
* and certificate revocation list (CRL) objects from their encodings.
*
* <p>For encodings consisting of multiple certificates, use
* {@code generateCertificates} when you want to
* parse a collection of possibly unrelated certificates. Otherwise,
* use {@code generateCertPath} when you want to generate
* a {@code CertPath} (a certificate chain) and subsequently
* validate it with a {@code CertPathValidator}.
*
* <p>A certificate factory for X.509 must return certificates that are an
* instance of {@code java.security.cert.X509Certificate}, and CRLs
* that are an instance of {@code java.security.cert.X509CRL}.
*
* <p>The following example reads a file with Base64 encoded certificates,
* which are each bounded at the beginning by -----BEGIN CERTIFICATE-----, and
* bounded at the end by -----END CERTIFICATE-----. We convert the
* {@code FileInputStream} (which does not support {@code mark}
* and {@code reset}) to a {@code BufferedInputStream} (which
* supports those methods), so that each call to
* {@code generateCertificate} consumes only one certificate, and the
* read position of the input stream is positioned to the next certificate in
* the file:
*
* <pre>{@code
* FileInputStream fis = new FileInputStream(filename);
* BufferedInputStream bis = new BufferedInputStream(fis);
*
* CertificateFactory cf = CertificateFactory.getInstance("X.509");
*
* while (bis.available() > 0) {
* Certificate cert = cf.generateCertificate(bis);
* System.out.println(cert.toString());
* }
* }</pre>
*
* <p>The following example parses a PKCS#7-formatted certificate reply stored
* in a file and extracts all the certificates from it:
*
* <pre>
* FileInputStream fis = new FileInputStream(filename);
* CertificateFactory cf = CertificateFactory.getInstance("X.509");
* Collection c = cf.generateCertificates(fis);
* Iterator i = c.iterator();
* while (i.hasNext()) {
* Certificate cert = (Certificate)i.next();
* System.out.println(cert);
* }
* </pre>
*
* <p> Every implementation of the Java platform is required to support the
* following standard {@code CertificateFactory} type:
* <ul>
* <li>{@code X.509}</li>
* </ul>
* and the following standard {@code CertPath} encodings:
* <ul>
* <li>{@code PKCS7}</li>
* <li>{@code PkiPath}</li>
* </ul>
* The type and encodings are described in the <a href=
* "{@docRoot}/../technotes/guides/security/StandardNames.html#CertificateFactory">
* CertificateFactory section</a> and the <a href=
* "{@docRoot}/../technotes/guides/security/StandardNames.html#CertPathEncodings">
* CertPath Encodings section</a> of the
* Java Cryptography Architecture Standard Algorithm Name Documentation.
* Consult the release documentation for your implementation to see if any
* other types or encodings are supported.
*
* @author Hemma Prafullchandra
* @author Jan Luehe
* @author Sean Mullan
*
* @see Certificate
* @see X509Certificate
* @see CertPath
* @see CRL
* @see X509CRL
*
* @since 1.2
*/
public class CertificateFactory {

    // The certificate type (e.g. "X.509"); fixed at construction time.
    private final String type;

    // The provider that supplied the SPI implementation.
    private final Provider provider;

    // The provider implementation (SPI object) to which all operations delegate.
    private final CertificateFactorySpi certFacSpi;

    /**
     * Creates a CertificateFactory object of the given type, and encapsulates
     * the given provider implementation (SPI object) in it.
     *
     * @param certFacSpi the provider implementation.
     * @param provider the provider.
     * @param type the certificate type.
     */
    protected CertificateFactory(CertificateFactorySpi certFacSpi,
                                 Provider provider, String type)
    {
        this.certFacSpi = certFacSpi;
        this.provider = provider;
        this.type = type;
    }

    /**
     * Wraps a located provider implementation in a {@code CertificateFactory}.
     * Shared by all {@code getInstance} overloads so the cast-and-construct
     * logic lives in exactly one place.
     *
     * @param instance the resolved SPI implementation and its provider.
     * @param type the requested certificate type.
     * @return a certificate factory delegating to the given implementation.
     */
    private static CertificateFactory toFactory(Instance instance, String type) {
        return new CertificateFactory((CertificateFactorySpi)instance.impl,
            instance.provider, type);
    }

    /**
     * Returns a certificate factory object that implements the
     * specified certificate type.
     *
     * <p> This method traverses the list of registered security Providers,
     * starting with the most preferred Provider.
     * A new CertificateFactory object encapsulating the
     * CertificateFactorySpi implementation from the first
     * Provider that supports the specified type is returned.
     *
     * <p> Note that the list of registered providers may be retrieved via
     * the {@link Security#getProviders() Security.getProviders()} method.
     *
     * @param type the name of the requested certificate type.
     * See the CertificateFactory section in the <a href=
     * "{@docRoot}/../technotes/guides/security/StandardNames.html#CertificateFactory">
     * Java Cryptography Architecture Standard Algorithm Name Documentation</a>
     * for information about standard certificate types.
     *
     * @return a certificate factory object for the specified type.
     *
     * @exception CertificateException if no Provider supports a
     *          CertificateFactorySpi implementation for the
     *          specified type.
     *
     * @see Provider
     */
    public static final CertificateFactory getInstance(String type)
            throws CertificateException {
        try {
            Instance instance = GetInstance.getInstance("CertificateFactory",
                CertificateFactorySpi.class, type);
            return toFactory(instance, type);
        } catch (NoSuchAlgorithmException e) {
            // Surface the lookup failure under the exception type callers expect.
            throw new CertificateException(type + " not found", e);
        }
    }

    /**
     * Returns a certificate factory object for the specified
     * certificate type.
     *
     * <p> A new CertificateFactory object encapsulating the
     * CertificateFactorySpi implementation from the specified provider
     * is returned.  The specified provider must be registered
     * in the security provider list.
     *
     * <p> Note that the list of registered providers may be retrieved via
     * the {@link Security#getProviders() Security.getProviders()} method.
     *
     * @param type the certificate type.
     * See the CertificateFactory section in the <a href=
     * "{@docRoot}/../technotes/guides/security/StandardNames.html#CertificateFactory">
     * Java Cryptography Architecture Standard Algorithm Name Documentation</a>
     * for information about standard certificate types.
     *
     * @param provider the name of the provider.
     *
     * @return a certificate factory object for the specified type.
     *
     * @exception CertificateException if a CertificateFactorySpi
     *          implementation for the specified algorithm is not
     *          available from the specified provider.
     *
     * @exception NoSuchProviderException if the specified provider is not
     *          registered in the security provider list.
     *
     * @exception IllegalArgumentException if the provider name is null
     *          or empty.
     *
     * @see Provider
     */
    public static final CertificateFactory getInstance(String type,
            String provider) throws CertificateException,
            NoSuchProviderException {
        try {
            Instance instance = GetInstance.getInstance("CertificateFactory",
                CertificateFactorySpi.class, type, provider);
            return toFactory(instance, type);
        } catch (NoSuchAlgorithmException e) {
            throw new CertificateException(type + " not found", e);
        }
    }

    /**
     * Returns a certificate factory object for the specified
     * certificate type.
     *
     * <p> A new CertificateFactory object encapsulating the
     * CertificateFactorySpi implementation from the specified Provider
     * object is returned.  Note that the specified Provider object
     * does not have to be registered in the provider list.
     *
     * @param type the certificate type.
     * See the CertificateFactory section in the <a href=
     * "{@docRoot}/../technotes/guides/security/StandardNames.html#CertificateFactory">
     * Java Cryptography Architecture Standard Algorithm Name Documentation</a>
     * for information about standard certificate types.
     * @param provider the provider.
     *
     * @return a certificate factory object for the specified type.
     *
     * @exception CertificateException if a CertificateFactorySpi
     *          implementation for the specified algorithm is not available
     *          from the specified Provider object.
     *
     * @exception IllegalArgumentException if the {@code provider} is
     *          null.
     *
     * @see Provider
     *
     * @since 1.4
     */
    public static final CertificateFactory getInstance(String type,
            Provider provider) throws CertificateException {
        try {
            Instance instance = GetInstance.getInstance("CertificateFactory",
                CertificateFactorySpi.class, type, provider);
            return toFactory(instance, type);
        } catch (NoSuchAlgorithmException e) {
            throw new CertificateException(type + " not found", e);
        }
    }

    /**
     * Returns the provider of this certificate factory.
     *
     * @return the provider of this certificate factory.
     */
    public final Provider getProvider() {
        return this.provider;
    }

    /**
     * Returns the name of the certificate type associated with this
     * certificate factory.
     *
     * @return the name of the certificate type associated with this
     * certificate factory.
     */
    public final String getType() {
        return this.type;
    }

    /**
     * Generates a certificate object and initializes it with
     * the data read from the input stream {@code inStream}.
     *
     * <p>In order to take advantage of the specialized certificate format
     * supported by this certificate factory,
     * the returned certificate object can be typecast to the corresponding
     * certificate class. For example, if this certificate
     * factory implements X.509 certificates, the returned certificate object
     * can be typecast to the {@code X509Certificate} class.
     *
     * <p>In the case of a certificate factory for X.509 certificates, the
     * certificate provided in {@code inStream} must be DER-encoded and
     * may be supplied in binary or printable (Base64) encoding. If the
     * certificate is provided in Base64 encoding, it must be bounded at
     * the beginning by -----BEGIN CERTIFICATE-----, and must be bounded at
     * the end by -----END CERTIFICATE-----.
     *
     * <p>Note that if the given input stream does not support
     * {@link InputStream#mark(int) mark} and
     * {@link InputStream#reset() reset}, this method will
     * consume the entire input stream. Otherwise, each call to this
     * method consumes one certificate and the read position of the
     * input stream is positioned to the next available byte after
     * the inherent end-of-certificate marker. If the data in the input stream
     * does not contain an inherent end-of-certificate marker (other
     * than EOF) and there is trailing data after the certificate is parsed, a
     * {@code CertificateException} is thrown.
     *
     * @param inStream an input stream with the certificate data.
     *
     * @return a certificate object initialized with the data
     * from the input stream.
     *
     * @exception CertificateException on parsing errors.
     */
    public final Certificate generateCertificate(InputStream inStream)
        throws CertificateException
    {
        return certFacSpi.engineGenerateCertificate(inStream);
    }

    /**
     * Returns an iteration of the {@code CertPath} encodings supported
     * by this certificate factory, with the default encoding first. See
     * the CertPath Encodings section in the <a href=
     * "{@docRoot}/../technotes/guides/security/StandardNames.html#CertPathEncodings">
     * Java Cryptography Architecture Standard Algorithm Name Documentation</a>
     * for information about standard encoding names and their formats.
     * <p>
     * Attempts to modify the returned {@code Iterator} via its
     * {@code remove} method result in an
     * {@code UnsupportedOperationException}.
     *
     * @return an {@code Iterator} over the names of the supported
     *         {@code CertPath} encodings (as {@code String}s)
     * @since 1.4
     */
    public final Iterator<String> getCertPathEncodings() {
        return certFacSpi.engineGetCertPathEncodings();
    }

    /**
     * Generates a {@code CertPath} object and initializes it with
     * the data read from the {@code InputStream} inStream. The data
     * is assumed to be in the default encoding. The name of the default
     * encoding is the first element of the {@code Iterator} returned by
     * the {@link #getCertPathEncodings getCertPathEncodings} method.
     *
     * @param inStream an {@code InputStream} containing the data
     * @return a {@code CertPath} initialized with the data from the
     *   {@code InputStream}
     * @exception CertificateException if an exception occurs while decoding
     * @since 1.4
     */
    public final CertPath generateCertPath(InputStream inStream)
        throws CertificateException
    {
        return certFacSpi.engineGenerateCertPath(inStream);
    }

    /**
     * Generates a {@code CertPath} object and initializes it with
     * the data read from the {@code InputStream} inStream. The data
     * is assumed to be in the specified encoding. See
     * the CertPath Encodings section in the <a href=
     * "{@docRoot}/../technotes/guides/security/StandardNames.html#CertPathEncodings">
     * Java Cryptography Architecture Standard Algorithm Name Documentation</a>
     * for information about standard encoding names and their formats.
     *
     * @param inStream an {@code InputStream} containing the data
     * @param encoding the encoding used for the data
     * @return a {@code CertPath} initialized with the data from the
     *   {@code InputStream}
     * @exception CertificateException if an exception occurs while decoding or
     *   the encoding requested is not supported
     * @since 1.4
     */
    public final CertPath generateCertPath(InputStream inStream,
        String encoding) throws CertificateException
    {
        return certFacSpi.engineGenerateCertPath(inStream, encoding);
    }

    /**
     * Generates a {@code CertPath} object and initializes it with
     * a {@code List} of {@code Certificate}s.
     * <p>
     * The certificates supplied must be of a type supported by the
     * {@code CertificateFactory}. They will be copied out of the supplied
     * {@code List} object.
     *
     * @param certificates a {@code List} of {@code Certificate}s
     * @return a {@code CertPath} initialized with the supplied list of
     *   certificates
     * @exception CertificateException if an exception occurs
     * @since 1.4
     */
    public final CertPath
        generateCertPath(List<? extends Certificate> certificates)
        throws CertificateException
    {
        return certFacSpi.engineGenerateCertPath(certificates);
    }

    /**
     * Returns a (possibly empty) collection view of the certificates read
     * from the given input stream {@code inStream}.
     *
     * <p>In order to take advantage of the specialized certificate format
     * supported by this certificate factory, each element in
     * the returned collection view can be typecast to the corresponding
     * certificate class. For example, if this certificate
     * factory implements X.509 certificates, the elements in the returned
     * collection can be typecast to the {@code X509Certificate} class.
     *
     * <p>In the case of a certificate factory for X.509 certificates,
     * {@code inStream} may contain a sequence of DER-encoded certificates
     * in the formats described for
     * {@link #generateCertificate(InputStream) generateCertificate}.
     * In addition, {@code inStream} may contain a PKCS#7 certificate
     * chain. This is a PKCS#7 <i>SignedData</i> object, with the only
     * significant field being <i>certificates</i>. In particular, the
     * signature and the contents are ignored. This format allows multiple
     * certificates to be downloaded at once. If no certificates are present,
     * an empty collection is returned.
     *
     * <p>Note that if the given input stream does not support
     * {@link InputStream#mark(int) mark} and
     * {@link InputStream#reset() reset}, this method will
     * consume the entire input stream.
     *
     * @param inStream the input stream with the certificates.
     *
     * @return a (possibly empty) collection view of
     * java.security.cert.Certificate objects
     * initialized with the data from the input stream.
     *
     * @exception CertificateException on parsing errors.
     */
    public final Collection<? extends Certificate> generateCertificates
            (InputStream inStream) throws CertificateException {
        return certFacSpi.engineGenerateCertificates(inStream);
    }

    /**
     * Generates a certificate revocation list (CRL) object and initializes it
     * with the data read from the input stream {@code inStream}.
     *
     * <p>In order to take advantage of the specialized CRL format
     * supported by this certificate factory,
     * the returned CRL object can be typecast to the corresponding
     * CRL class. For example, if this certificate
     * factory implements X.509 CRLs, the returned CRL object
     * can be typecast to the {@code X509CRL} class.
     *
     * <p>Note that if the given input stream does not support
     * {@link InputStream#mark(int) mark} and
     * {@link InputStream#reset() reset}, this method will
     * consume the entire input stream. Otherwise, each call to this
     * method consumes one CRL and the read position of the input stream
     * is positioned to the next available byte after the inherent
     * end-of-CRL marker. If the data in the
     * input stream does not contain an inherent end-of-CRL marker (other
     * than EOF) and there is trailing data after the CRL is parsed, a
     * {@code CRLException} is thrown.
     *
     * @param inStream an input stream with the CRL data.
     *
     * @return a CRL object initialized with the data
     * from the input stream.
     *
     * @exception CRLException on parsing errors.
     */
    public final CRL generateCRL(InputStream inStream)
        throws CRLException
    {
        return certFacSpi.engineGenerateCRL(inStream);
    }

    /**
     * Returns a (possibly empty) collection view of the CRLs read
     * from the given input stream {@code inStream}.
     *
     * <p>In order to take advantage of the specialized CRL format
     * supported by this certificate factory, each element in
     * the returned collection view can be typecast to the corresponding
     * CRL class. For example, if this certificate
     * factory implements X.509 CRLs, the elements in the returned
     * collection can be typecast to the {@code X509CRL} class.
     *
     * <p>In the case of a certificate factory for X.509 CRLs,
     * {@code inStream} may contain a sequence of DER-encoded CRLs.
     * In addition, {@code inStream} may contain a PKCS#7 CRL
     * set. This is a PKCS#7 <i>SignedData</i> object, with the only
     * significant field being <i>crls</i>. In particular, the
     * signature and the contents are ignored. This format allows multiple
     * CRLs to be downloaded at once. If no CRLs are present,
     * an empty collection is returned.
     *
     * <p>Note that if the given input stream does not support
     * {@link InputStream#mark(int) mark} and
     * {@link InputStream#reset() reset}, this method will
     * consume the entire input stream.
     *
     * @param inStream the input stream with the CRLs.
     *
     * @return a (possibly empty) collection view of
     * java.security.cert.CRL objects initialized with the data from the input
     * stream.
     *
     * @exception CRLException on parsing errors.
     */
    public final Collection<? extends CRL> generateCRLs(InputStream inStream)
            throws CRLException {
        return certFacSpi.engineGenerateCRLs(inStream);
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/enums/account_budget_proposal_type.proto
package com.google.ads.googleads.v8.enums;
/**
* <pre>
* Message describing AccountBudgetProposal types.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum}
*/
public final class AccountBudgetProposalTypeEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum)
AccountBudgetProposalTypeEnumOrBuilder {
private static final long serialVersionUID = 0L;
// Use AccountBudgetProposalTypeEnum.newBuilder() to construct.
private AccountBudgetProposalTypeEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AccountBudgetProposalTypeEnum() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new AccountBudgetProposalTypeEnum();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private AccountBudgetProposalTypeEnum(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeProto.internal_static_google_ads_googleads_v8_enums_AccountBudgetProposalTypeEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeProto.internal_static_google_ads_googleads_v8_enums_AccountBudgetProposalTypeEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum.class, com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum.Builder.class);
}
/**
* <pre>
* The possible types of an AccountBudgetProposal.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum.AccountBudgetProposalType}
*/
public enum AccountBudgetProposalType
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* Identifies a request to create a new budget.
* </pre>
*
* <code>CREATE = 2;</code>
*/
CREATE(2),
/**
* <pre>
* Identifies a request to edit an existing budget.
* </pre>
*
* <code>UPDATE = 3;</code>
*/
UPDATE(3),
/**
* <pre>
* Identifies a request to end a budget that has already started.
* </pre>
*
* <code>END = 4;</code>
*/
END(4),
/**
* <pre>
* Identifies a request to remove a budget that hasn't started yet.
* </pre>
*
* <code>REMOVE = 5;</code>
*/
REMOVE(5),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* Identifies a request to create a new budget.
* </pre>
*
* <code>CREATE = 2;</code>
*/
public static final int CREATE_VALUE = 2;
/**
* <pre>
* Identifies a request to edit an existing budget.
* </pre>
*
* <code>UPDATE = 3;</code>
*/
public static final int UPDATE_VALUE = 3;
/**
* <pre>
* Identifies a request to end a budget that has already started.
* </pre>
*
* <code>END = 4;</code>
*/
public static final int END_VALUE = 4;
/**
* <pre>
* Identifies a request to remove a budget that hasn't started yet.
* </pre>
*
* <code>REMOVE = 5;</code>
*/
public static final int REMOVE_VALUE = 5;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static AccountBudgetProposalType valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static AccountBudgetProposalType forNumber(int value) {
switch (value) {
case 0: return UNSPECIFIED;
case 1: return UNKNOWN;
case 2: return CREATE;
case 3: return UPDATE;
case 4: return END;
case 5: return REMOVE;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<AccountBudgetProposalType>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
AccountBudgetProposalType> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<AccountBudgetProposalType>() {
public AccountBudgetProposalType findValueByNumber(int number) {
return AccountBudgetProposalType.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum.getDescriptor().getEnumTypes().get(0);
}
private static final AccountBudgetProposalType[] VALUES = values();
public static AccountBudgetProposalType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private AccountBudgetProposalType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum.AccountBudgetProposalType)
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum)) {
return super.equals(obj);
}
com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum other = (com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Message describing AccountBudgetProposal types.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum)
com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnumOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeProto.internal_static_google_ads_googleads_v8_enums_AccountBudgetProposalTypeEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeProto.internal_static_google_ads_googleads_v8_enums_AccountBudgetProposalTypeEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum.class, com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum.Builder.class);
}
// Construct using com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeProto.internal_static_google_ads_googleads_v8_enums_AccountBudgetProposalTypeEnum_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum getDefaultInstanceForType() {
return com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum build() {
com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum buildPartial() {
com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum result = new com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum) {
return mergeFrom((com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum other) {
if (other == com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum)
private static final com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum();
}
public static com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Generated code: singleton parser that delegates to the stream-parsing
  // constructor; parsePartialFrom may return a message with missing required
  // fields (none exist for this type, so partial == complete here).
  private static final com.google.protobuf.Parser<AccountBudgetProposalTypeEnum>
      PARSER = new com.google.protobuf.AbstractParser<AccountBudgetProposalTypeEnum>() {
    @java.lang.Override
    public AccountBudgetProposalTypeEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new AccountBudgetProposalTypeEnum(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<AccountBudgetProposalTypeEnum> parser() {
    return PARSER;
  }
  @java.lang.Override
  // Generated code: instance-level accessor for the shared parser singleton.
  public com.google.protobuf.Parser<AccountBudgetProposalTypeEnum> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  // Generated code: instance-level accessor for the shared default instance.
  public com.google.ads.googleads.v8.enums.AccountBudgetProposalTypeEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Licensed to GraphHopper and Peter Karich under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.routing.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import com.graphhopper.reader.OSMRelation;
import com.graphhopper.reader.OSMWay;
import com.graphhopper.reader.Relation;
import com.graphhopper.reader.Way;
import com.graphhopper.util.BitUtil;
/**
*
* @author Peter Karich
*/
public class EncodingManagerTest
{
    /**
     * Encoders registered in different managers must not compare equal until
     * they have been initialised identically.
     */
    @Test
    public void testCompatibility()
    {
        EncodingManager manager = new EncodingManager("CAR,BIKE,FOOT");
        BikeFlagEncoder bike = (BikeFlagEncoder) manager.getEncoder("BIKE");
        CarFlagEncoder car = (CarFlagEncoder) manager.getEncoder("CAR");
        FootFlagEncoder foot = (FootFlagEncoder) manager.getEncoder("FOOT");
        assertNotEquals(car, bike);
        assertNotEquals(car, foot);
        assertNotEquals(car.hashCode(), bike.hashCode());
        assertNotEquals(car.hashCode(), foot.hashCode());
        FootFlagEncoder foot2 = new FootFlagEncoder();
        // registering the encoder has the side effect of initialising its bit
        // layout — the manager reference itself is intentionally unused
        EncodingManager manager2 = new EncodingManager(foot2);
        assertNotEquals(foot, foot2);
        assertNotEquals(foot.hashCode(), foot2.hashCode());
        FootFlagEncoder foot3 = new FootFlagEncoder();
        EncodingManager manager3 = new EncodingManager(foot3);
        // two encoders initialised the same way compare equal
        assertEquals(foot3, foot2);
        assertEquals(foot3.hashCode(), foot2.hashCode());
    }

    /** supports() must reflect exactly the registered encoders. */
    @Test
    public void testEncoderAcceptNoException()
    {
        EncodingManager manager = new EncodingManager("CAR");
        assertTrue(manager.supports("CAR"));
        assertFalse(manager.supports("FOOT"));
    }

    /** Invalid encoder configurations must fail fast with a clear message. */
    @Test
    public void testWrongEncoders()
    {
        // 1) registering the very same encoder instance twice is an error
        try
        {
            FootFlagEncoder foot = new FootFlagEncoder();
            new EncodingManager(foot, foot);
            assertTrue(false);
        } catch (Exception ex)
        {
            assertEquals("You must not register a FlagEncoder (foot) twice!", ex.getMessage());
        }
        // 2) too many encoders for the 32-bit way-flag budget
        try
        {
            new EncodingManager(new FootFlagEncoder(), new CarFlagEncoder(), new BikeFlagEncoder(), new MountainBikeFlagEncoder(), new RacingBikeFlagEncoder());
            assertTrue(false);
        } catch (Exception ex)
        {
            assertTrue(ex.getMessage(), ex.getMessage().startsWith("Encoders are requesting more than 32 bits of way flags. Decrease the"));
        }
    }

    /**
     * Two bike encoders with different relation-bit layouts must decode their
     * own relation codes independently from the combined flags.
     */
    @Test
    public void testCombineRelations()
    {
        OSMWay osmWay = new OSMWay(1);
        osmWay.setTag("highway", "track");
        OSMRelation osmRel = new OSMRelation(1);
        BikeFlagEncoder defaultBike = new BikeFlagEncoder();
        // a bike encoder that only uses 2 relation bits and whose priority is
        // taken verbatim from the relation code
        BikeFlagEncoder lessRelationCodes = new BikeFlagEncoder()
        {
            @Override
            public int defineRelationBits( int index, int shift )
            {
                relationCodeEncoder = new EncodedValue("RelationCode2", shift, 2, 1, 0, 3);
                return shift + 2;
            }

            @Override
            public long handleRelationTags( Relation relation, long oldRelFlags )
            {
                if (relation.hasTag("route", "bicycle"))
                    return relationCodeEncoder.setValue(0, 2);
                return relationCodeEncoder.setValue(0, 0);
            }

            @Override
            protected int handlePriority( Way way, int priorityFromRelation )
            {
                return priorityFromRelation;
            }

            @Override
            public String toString()
            {
                return "lessRelationsBits";
            }
        };
        EncodingManager manager = new EncodingManager(defaultBike, lessRelationCodes);

        // relation code is PREFER
        osmRel.setTag("route", "bicycle");
        osmRel.setTag("network", "lcn");
        long relFlags = manager.handleRelationTags(osmRel, 0);
        long allow = defaultBike.acceptBit | lessRelationCodes.acceptBit;
        long flags = manager.handleWayTags(osmWay, allow, relFlags);

        // the default bike must rank this way higher than the reduced encoder
        assertTrue(defaultBike.getDouble(flags, PriorityWeighting.KEY)
                > lessRelationCodes.getDouble(flags, PriorityWeighting.KEY));
    }

    /**
     * Different bike types sharing one manager must derive different
     * priorities from the same relation.
     */
    @Test
    public void testMixBikeTypesAndRelationCombination()
    {
        OSMWay osmWay = new OSMWay(1);
        osmWay.setTag("highway", "track");
        osmWay.setTag("tracktype", "grade1");

        OSMRelation osmRel = new OSMRelation(1);

        BikeFlagEncoder bikeEncoder = new BikeFlagEncoder();
        MountainBikeFlagEncoder mtbEncoder = new MountainBikeFlagEncoder();
        EncodingManager manager = new EncodingManager(bikeEncoder, mtbEncoder);

        // relation code for network rcn is VERY_NICE for bike and PREFER for mountainbike
        osmRel.setTag("route", "bicycle");
        osmRel.setTag("network", "rcn");
        long relFlags = manager.handleRelationTags(osmRel, 0);
        long allow = bikeEncoder.acceptBit | mtbEncoder.acceptBit;
        long flags = manager.handleWayTags(osmWay, allow, relFlags);

        // bike: uninfluenced speed for grade but via network => VERY_NICE
        // mtb: uninfluenced speed only PREFER
        assertTrue(bikeEncoder.getDouble(flags, PriorityWeighting.KEY)
                > mtbEncoder.getDouble(flags, PriorityWeighting.KEY));
    }

    /**
     * Node bit masks of the registered encoders must occupy disjoint ranges.
     * <p>
     * FIX(review): this method was missing the {@code @Test} annotation, so
     * JUnit 4 silently never executed it.
     */
    @Test
    public void testFullBitMask()
    {
        BitUtil bitUtil = BitUtil.LITTLE;
        EncodingManager manager = new EncodingManager("CAR,FOOT");
        AbstractFlagEncoder carr = (AbstractFlagEncoder) manager.getEncoder("CAR");
        assertTrue(bitUtil.toBitString(carr.getNodeBitMask()).endsWith("00000000001111111"));

        AbstractFlagEncoder foot = (AbstractFlagEncoder) manager.getEncoder("FOOT");
        assertTrue(bitUtil.toBitString(foot.getNodeBitMask()).endsWith("00011111110000000"));
    }

    /** Semicolon-separated road names are normalised to comma separation. */
    @Test
    public void testFixWayName()
    {
        assertEquals("B8, B12", EncodingManager.fixWayName("B8;B12"));
        assertEquals("B8, B12", EncodingManager.fixWayName("B8; B12"));
    }

    /**
     * A bike encoder must produce the same speed whether it is registered
     * alone or together with other encoders (regression test).
     */
    @Test
    public void testCompatibilityBug()
    {
        EncodingManager manager2 = new EncodingManager("bike2", 8);
        OSMWay osmWay = new OSMWay(1);
        osmWay.setTag("highway", "footway");
        osmWay.setTag("name", "test");

        BikeFlagEncoder singleBikeEnc = (BikeFlagEncoder) manager2.getEncoder("bike2");
        long flags = manager2.handleWayTags(osmWay, singleBikeEnc.acceptBit, 0);
        double singleSpeed = singleBikeEnc.getSpeed(flags);
        assertEquals(4, singleSpeed, 1e-3);
        assertEquals(singleSpeed, singleBikeEnc.getReverseSpeed(flags), 1e-3);

        EncodingManager manager = new EncodingManager("bike2,bike,foot", 8);
        FootFlagEncoder foot = (FootFlagEncoder) manager.getEncoder("foot");
        BikeFlagEncoder bike = (BikeFlagEncoder) manager.getEncoder("bike2");
        long acceptBits = foot.acceptBit | bike.acceptBit;
        flags = manager.handleWayTags(osmWay, acceptBits, 0);
        assertEquals(singleSpeed, bike.getSpeed(flags), 1e-2);
        assertEquals(singleSpeed, bike.getReverseSpeed(flags), 1e-2);
        assertEquals(5, foot.getSpeed(flags), 1e-2);
        assertEquals(5, foot.getReverseSpeed(flags), 1e-2);
    }

    /** blockFords defaults to true and can be switched off per encoder. */
    @Test
    public void testSupportFords()
    {
        // 1) no encoder crossing fords
        String flagEncodersStr = "car,bike,foot";
        EncodingManager manager = new EncodingManager(flagEncodersStr, 8);
        assertTrue(((AbstractFlagEncoder) manager.getEncoder("car")).isBlockFords());
        assertTrue(((AbstractFlagEncoder) manager.getEncoder("bike")).isBlockFords());
        assertTrue(((AbstractFlagEncoder) manager.getEncoder("foot")).isBlockFords());

        // 2) two encoders crossing fords
        flagEncodersStr = "car,bike|blockFords=false,foot|blockFords=false";
        manager = new EncodingManager(flagEncodersStr, 8);
        assertTrue(((AbstractFlagEncoder) manager.getEncoder("car")).isBlockFords());
        assertFalse(((AbstractFlagEncoder) manager.getEncoder("bike")).isBlockFords());
        assertFalse(((AbstractFlagEncoder) manager.getEncoder("foot")).isBlockFords());

        // 3) combined with another per-encoder option
        flagEncodersStr = "car|turnCosts=true|blockFords=true,bike,foot|blockFords=false";
        manager = new EncodingManager(flagEncodersStr, 8);
        assertTrue(((AbstractFlagEncoder) manager.getEncoder("car")).isBlockFords());
        assertTrue(((AbstractFlagEncoder) manager.getEncoder("bike")).isBlockFords());
        assertFalse(((AbstractFlagEncoder) manager.getEncoder("foot")).isBlockFords());
    }
}
| |
package org.bouncycastle.operator.jcajce;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.Signature;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import java.util.Map;
import javax.crypto.Cipher;
import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.DERNull;
import org.bouncycastle.asn1.cryptopro.CryptoProObjectIdentifiers;
import org.bouncycastle.asn1.kisa.KISAObjectIdentifiers;
import org.bouncycastle.asn1.nist.NISTObjectIdentifiers;
import org.bouncycastle.asn1.ntt.NTTObjectIdentifiers;
import org.bouncycastle.asn1.oiw.OIWObjectIdentifiers;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.pkcs.RSASSAPSSparams;
import org.bouncycastle.asn1.teletrust.TeleTrusTObjectIdentifiers;
import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
import org.bouncycastle.asn1.x9.X9ObjectIdentifiers;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.jcajce.JcaJceHelper;
import org.bouncycastle.operator.OperatorCreationException;
//import java.security.spec.PSSParameterSpec;
/**
 * Maps ASN.1 algorithm identifiers onto JCA/JCE algorithm names and creates
 * the corresponding JCA objects (Cipher, MessageDigest, Signature,
 * CertificateFactory) through the configured {@link JcaJceHelper}.
 * <p>
 * The static lookup tables are populated once; lookups that miss fall back to
 * using the dotted OID string directly as the JCA algorithm name.
 */
class OperatorHelper
{
    // OID -> JCA signature/digest algorithm name (reverse mappings)
    private static final Map oids = new HashMap();
    // OID -> asymmetric key-wrapping cipher transformation
    private static final Map asymmetricWrapperAlgNames = new HashMap();
    // OID -> symmetric key-wrapping cipher name
    private static final Map symmetricWrapperAlgNames = new HashMap();
    // OID -> symmetric key algorithm name (for SecretKey construction)
    private static final Map symmetricKeyAlgNames = new HashMap();
    static
    {
        //
        // reverse mappings
        //
        oids.put(new ASN1ObjectIdentifier("1.2.840.113549.1.1.5"), "SHA1WITHRSA");
        oids.put(PKCSObjectIdentifiers.sha224WithRSAEncryption, "SHA224WITHRSA");
        oids.put(PKCSObjectIdentifiers.sha256WithRSAEncryption, "SHA256WITHRSA");
        oids.put(PKCSObjectIdentifiers.sha384WithRSAEncryption, "SHA384WITHRSA");
        oids.put(PKCSObjectIdentifiers.sha512WithRSAEncryption, "SHA512WITHRSA");
        oids.put(CryptoProObjectIdentifiers.gostR3411_94_with_gostR3410_94, "GOST3411WITHGOST3410");
        oids.put(CryptoProObjectIdentifiers.gostR3411_94_with_gostR3410_2001, "GOST3411WITHECGOST3410");
        oids.put(new ASN1ObjectIdentifier("1.2.840.113549.1.1.4"), "MD5WITHRSA");
        oids.put(new ASN1ObjectIdentifier("1.2.840.113549.1.1.2"), "MD2WITHRSA");
        oids.put(new ASN1ObjectIdentifier("1.2.840.10040.4.3"), "SHA1WITHDSA");
        oids.put(X9ObjectIdentifiers.ecdsa_with_SHA1, "SHA1WITHECDSA");
        oids.put(X9ObjectIdentifiers.ecdsa_with_SHA224, "SHA224WITHECDSA");
        oids.put(X9ObjectIdentifiers.ecdsa_with_SHA256, "SHA256WITHECDSA");
        oids.put(X9ObjectIdentifiers.ecdsa_with_SHA384, "SHA384WITHECDSA");
        oids.put(X9ObjectIdentifiers.ecdsa_with_SHA512, "SHA512WITHECDSA");
        oids.put(OIWObjectIdentifiers.sha1WithRSA, "SHA1WITHRSA");
        oids.put(OIWObjectIdentifiers.dsaWithSHA1, "SHA1WITHDSA");
        oids.put(NISTObjectIdentifiers.dsa_with_sha224, "SHA224WITHDSA");
        oids.put(NISTObjectIdentifiers.dsa_with_sha256, "SHA256WITHDSA");
        asymmetricWrapperAlgNames.put(new ASN1ObjectIdentifier(PKCSObjectIdentifiers.rsaEncryption.getId()), "RSA/ECB/PKCS1Padding");
        symmetricWrapperAlgNames.put(PKCSObjectIdentifiers.id_alg_CMS3DESwrap, "DESEDEWrap");
        symmetricWrapperAlgNames.put(PKCSObjectIdentifiers.id_alg_CMSRC2wrap, "RC2Wrap");
        symmetricWrapperAlgNames.put(NISTObjectIdentifiers.id_aes128_wrap, "AESWrap");
        symmetricWrapperAlgNames.put(NISTObjectIdentifiers.id_aes192_wrap, "AESWrap");
        symmetricWrapperAlgNames.put(NISTObjectIdentifiers.id_aes256_wrap, "AESWrap");
        symmetricWrapperAlgNames.put(NTTObjectIdentifiers.id_camellia128_wrap, "CamelliaWrap");
        symmetricWrapperAlgNames.put(NTTObjectIdentifiers.id_camellia192_wrap, "CamelliaWrap");
        symmetricWrapperAlgNames.put(NTTObjectIdentifiers.id_camellia256_wrap, "CamelliaWrap");
        symmetricWrapperAlgNames.put(KISAObjectIdentifiers.id_npki_app_cmsSeed_wrap, "SEEDWrap");
        symmetricWrapperAlgNames.put(PKCSObjectIdentifiers.des_EDE3_CBC, "DESede");
        symmetricKeyAlgNames.put(NISTObjectIdentifiers.aes, "AES");
        symmetricKeyAlgNames.put(NISTObjectIdentifiers.id_aes128_CBC, "AES");
        symmetricKeyAlgNames.put(NISTObjectIdentifiers.id_aes192_CBC, "AES");
        symmetricKeyAlgNames.put(NISTObjectIdentifiers.id_aes256_CBC, "AES");
        symmetricKeyAlgNames.put(PKCSObjectIdentifiers.des_EDE3_CBC, "DESede");
        symmetricKeyAlgNames.put(PKCSObjectIdentifiers.RC2_CBC, "RC2");
    }
    // Factory abstraction over provider selection (named provider, default, ...).
    private JcaJceHelper helper;
    OperatorHelper(JcaJceHelper helper)
    {
        this.helper = helper;
    }
    /**
     * Creates a Cipher for asymmetric key wrapping.
     * Lookup order: caller-supplied extraAlgNames, then the built-in table,
     * then the raw OID string. A known-name miss falls back to an alternate
     * RSA transformation before the OID fallback.
     */
    Cipher createAsymmetricWrapper(ASN1ObjectIdentifier algorithm, Map extraAlgNames)
        throws OperatorCreationException
    {
        try
        {
            try
            {
                String cipherName = null;
                if (!extraAlgNames.isEmpty())
                {
                    cipherName = (String)extraAlgNames.get(algorithm);
                }
                if (cipherName == null)
                {
                    cipherName = (String)asymmetricWrapperAlgNames.get(algorithm);
                }
                if (cipherName != null)
                {
                    try
                    {
                        // this is reversed as the Sun policy files now allow unlimited strength RSA
                        return helper.createCipher(cipherName);
                    }
                    catch (NoSuchAlgorithmException e)
                    {
                        // try alternate for RSA
                        if (cipherName.equals("RSA/ECB/PKCS1Padding"))
                        {
                            try
                            {
                                return helper.createCipher("RSA/NONE/PKCS1Padding");
                            }
                            catch (NoSuchAlgorithmException ex)
                            {
                                // Ignore
                            }
                        }
                        // Ignore
                    }
                }
                // last resort: let the provider resolve the dotted OID itself
                return helper.createCipher(algorithm.getId());
            }
            catch (NoSuchAlgorithmException e)
            {
                throw new OperatorCreationException("cannot create cipher: " + e.getMessage(), e);
            }
            catch (NoSuchProviderException e)
            {
                throw new OperatorCreationException("cannot create cipher: " + e.getMessage(), e);
            }
        }
        catch (GeneralSecurityException e)
        {
            throw new OperatorCreationException("cannot create cipher: " + e.getMessage(), e);
        }
    }
    /**
     * Creates a Cipher for symmetric key wrapping; tries the mapped JCA name
     * first and falls back to the dotted OID string.
     */
    Cipher createSymmetricWrapper(ASN1ObjectIdentifier algorithm)
        throws OperatorCreationException
    {
        try
        {
            String cipherName = (String)symmetricWrapperAlgNames.get(algorithm);
            if (cipherName != null)
            {
                try
                {
                    // this is reversed as the Sun policy files now allow unlimited strength RSA
                    return helper.createCipher(cipherName);
                }
                catch (NoSuchAlgorithmException e)
                {
                    // Ignore - fall through to the OID-based lookup below
                }
            }
            return helper.createCipher(algorithm.getId());
        }
        catch (NoSuchAlgorithmException e)
        {
            throw new OperatorCreationException("cannot create cipher: " + e.getMessage(), e);
        }
        catch (NoSuchProviderException e)
        {
            throw new OperatorCreationException("cannot create cipher: " + e.getMessage(), e);
        }
        catch (GeneralSecurityException e)
        {
            throw new OperatorCreationException("cannot create cipher: " + e.getMessage(), e);
        }
    }
    /**
     * Creates a MessageDigest for the given algorithm identifier, retrying
     * with the reverse-mapped name from the oids table if the first name is
     * unknown to the provider.
     */
    MessageDigest createDigest(AlgorithmIdentifier digAlgId)
        throws GeneralSecurityException
    {
        try
        {
            MessageDigest dig;
            try
            {
                dig = helper.createDigest(getDigestAlgName(digAlgId.getAlgorithm()));
            }
            catch (NoSuchAlgorithmException e)
            {
                //
                // try an alternate
                //
                if (oids.get(digAlgId.getAlgorithm()) != null)
                {
                    String digestAlgorithm = (String)oids.get(digAlgId.getAlgorithm());
                    dig = helper.createDigest(digestAlgorithm);
                }
                else
                {
                    throw e;
                }
            }
            return dig;
        }
        catch (NoSuchProviderException e)
        {
            // NOTE: wraps without a cause - presumably kept for pre-1.5 JDK compatibility
            throw new GeneralSecurityException(e.toString());
        }
        catch (NoSuchAlgorithmException e)
        {
            throw new GeneralSecurityException(e.toString());
        }
    }
    /**
     * Creates a Signature for the given algorithm identifier, retrying with
     * the reverse-mapped name from the oids table on an unknown-name failure.
     */
    Signature createSignature(AlgorithmIdentifier sigAlgId)
        throws GeneralSecurityException
    {
        try
        {
            Signature sig;
            try
            {
                sig = helper.createSignature(getSignatureName(sigAlgId));
            }
            catch (NoSuchAlgorithmException e)
            {
                //
                // try an alternate
                //
                if (oids.get(sigAlgId.getAlgorithm()) != null)
                {
                    String signatureAlgorithm = (String)oids.get(sigAlgId.getAlgorithm());
                    sig = helper.createSignature(signatureAlgorithm);
                }
                else
                {
                    throw e;
                }
            }
            return sig;
        }
        catch (NoSuchProviderException e)
        {
            throw new GeneralSecurityException(e.toString());
        }
        catch (NoSuchAlgorithmException e)
        {
            throw new GeneralSecurityException(e.toString());
        }
    }
    /**
     * Creates a "raw" (NONEwith...) Signature for pre-computed digests.
     * Returns null when the algorithm cannot be resolved - callers must
     * handle the null.
     */
    public Signature createRawSignature(AlgorithmIdentifier algorithm)
    {
        Signature sig;
        try
        {
            String algName = getSignatureName(algorithm);
            // replace the digest part with NONE, e.g. SHA1WITHRSA -> NONEWITHRSA
            algName = "NONE" + algName.substring(algName.indexOf("WITH"));
            sig = helper.createSignature(algName);
            // RFC 4056
            // When the id-RSASSA-PSS algorithm identifier is used for a signature,
            // the AlgorithmIdentifier parameters field MUST contain RSASSA-PSS-params.
            /*
            Can't do this pre-jdk1.4
            if (algorithm.getAlgorithm().equals(PKCSObjectIdentifiers.id_RSASSA_PSS))
            {
                AlgorithmParameters params = helper.createAlgorithmParameters(algName);
                params.init(algorithm.getParameters().toASN1Primitive().getEncoded(), "ASN.1");
                PSSParameterSpec spec = (PSSParameterSpec)params.getParameterSpec(PSSParameterSpec.class);
                sig.setParameter(spec);
            }
            */
        }
        catch (Exception e)
        {
            // deliberate best-effort: absence of the raw variant is signalled by null
            return null;
        }
        return sig;
    }
    /**
     * Resolves the JCA signature name for an AlgorithmIdentifier, handling
     * RSASSA-PSS specially (digest taken from the PSS parameters); falls back
     * to the dotted OID string when unmapped.
     */
    private static String getSignatureName(
        AlgorithmIdentifier sigAlgId)
    {
        ASN1Encodable params = sigAlgId.getParameters();
        if (params != null && !DERNull.INSTANCE.equals(params))
        {
            if (sigAlgId.getAlgorithm().equals(PKCSObjectIdentifiers.id_RSASSA_PSS))
            {
                RSASSAPSSparams rsaParams = RSASSAPSSparams.getInstance(params);
                return getDigestAlgName(rsaParams.getHashAlgorithm().getAlgorithm()) + "WITHRSAANDMGF1";
            }
        }
        if (oids.containsKey(sigAlgId.getAlgorithm()))
        {
            return (String)oids.get(sigAlgId.getAlgorithm());
        }
        return sigAlgId.getAlgorithm().getId();
    }
    /**
     * Maps a digest OID to its JCA name; unknown OIDs are returned as their
     * dotted string so the provider may still resolve them.
     */
    private static String getDigestAlgName(
        ASN1ObjectIdentifier digestAlgOID)
    {
        if (PKCSObjectIdentifiers.md5.equals(digestAlgOID))
        {
            return "MD5";
        }
        else if (OIWObjectIdentifiers.idSHA1.equals(digestAlgOID))
        {
            return "SHA1";
        }
        else if (NISTObjectIdentifiers.id_sha224.equals(digestAlgOID))
        {
            return "SHA224";
        }
        else if (NISTObjectIdentifiers.id_sha256.equals(digestAlgOID))
        {
            return "SHA256";
        }
        else if (NISTObjectIdentifiers.id_sha384.equals(digestAlgOID))
        {
            return "SHA384";
        }
        else if (NISTObjectIdentifiers.id_sha512.equals(digestAlgOID))
        {
            return "SHA512";
        }
        else if (TeleTrusTObjectIdentifiers.ripemd128.equals(digestAlgOID))
        {
            return "RIPEMD128";
        }
        else if (TeleTrusTObjectIdentifiers.ripemd160.equals(digestAlgOID))
        {
            return "RIPEMD160";
        }
        else if (TeleTrusTObjectIdentifiers.ripemd256.equals(digestAlgOID))
        {
            return "RIPEMD256";
        }
        else if (CryptoProObjectIdentifiers.gostR3411.equals(digestAlgOID))
        {
            return "GOST3411";
        }
        else
        {
            return digestAlgOID.getId();
        }
    }
    /**
     * Converts a BC certificate holder into a JCA X509Certificate by
     * re-parsing its DER encoding through a CertificateFactory.
     */
    public X509Certificate convertCertificate(X509CertificateHolder certHolder)
        throws CertificateException
    {
        try
        {
            CertificateFactory certFact = helper.createCertificateFactory("X.509");
            return (X509Certificate)certFact.generateCertificate(new ByteArrayInputStream(certHolder.getEncoded()));
        }
        catch (IOException e)
        {
            throw new OpCertificateException("cannot get encoded form of certificate: " + e.getMessage(), e);
        }
        catch (NoSuchAlgorithmException e)
        {
            throw new OpCertificateException("cannot create certificate factory: " + e.getMessage(), e);
        }
        catch (NoSuchProviderException e)
        {
            throw new OpCertificateException("cannot find factory provider: " + e.getMessage(), e);
        }
    }
    // TODO: put somewhere public so cause easily accessed
    /**
     * CertificateException that carries its cause explicitly - kept because
     * CertificateException(String, Throwable) is not available on the oldest
     * JDKs this code targets.
     */
    private static class OpCertificateException
        extends CertificateException
    {
        private Throwable cause;
        public OpCertificateException(String msg, Throwable cause)
        {
            super(msg);
            this.cause = cause;
        }
        public Throwable getCause()
        {
            return cause;
        }
    }
    /**
     * Maps a symmetric-key OID to its JCA key algorithm name, or returns the
     * dotted OID string when unmapped.
     */
    String getKeyAlgorithmName(ASN1ObjectIdentifier oid)
    {
        String name = (String)symmetricKeyAlgNames.get(oid);
        if (name != null)
        {
            return name;
        }
        return oid.getId();
    }
}
| |
package com.daedafusion.knowledge.trinity.triples.query;
import com.daedafusion.configuration.Configuration;
import com.daedafusion.knowledge.trinity.Query;
import com.daedafusion.knowledge.trinity.TripleMeta;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import org.apache.log4j.Logger;
import java.io.Closeable;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
/**
* Created by mphilpot on 8/25/14.
*/
/**
 * Fans a list of queries out over a pool of {@link QueryEngine}s and merges
 * the per-query results (model/result sets, cursor maps and triple metadata)
 * into a single {@link Result}.
 */
public class MultiQueryEngine implements Closeable
{
    private static final Logger log = Logger.getLogger(MultiQueryEngine.class);

    private final QueryEnginePool pool;
    private final ListeningExecutorService service;

    private MultiQueryEngine()
    {
        pool = QueryEnginePool.getInstance();
        // thread count is configurable; defaults to 10 workers
        service = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(Configuration.getInstance().getInteger("multiQueryEngine.threads", 10)));
    }

    /**
     * Creates and initialises a new engine instance.
     *
     * @return a ready-to-use MultiQueryEngine
     */
    public static MultiQueryEngine getInstance()
    {
        MultiQueryEngine mqe = new MultiQueryEngine();
        mqe.init();
        return mqe;
    }

    // reserved for future setup work; intentionally empty
    private void init()
    {
    }

    /**
     * Aggregated outcome of one or more queries. Either {@code model} (graph
     * queries) or {@code resultSet} (select queries) is populated.
     */
    public class Result
    {
        private Model model;
        private Map<String, Map<String, String>> cursorMap;
        private Map<Integer, List<TripleMeta>> metaData;
        private List<ResultSet> resultSet;

        public Result()
        {
            metaData = new HashMap<>();
            cursorMap = new HashMap<>();
            resultSet = new ArrayList<>();
        }

        /** @return the merged graph model, or null for select queries */
        public Model getModel()
        {
            return model;
        }

        /** @return cursor state collected from every engine context */
        public Map<String, Map<String, String>> getCursorMap()
        {
            return cursorMap;
        }

        /** @return triple metadata keyed by partition/epoch id */
        public Map<Integer, List<TripleMeta>> getMetaData()
        {
            return metaData;
        }

        /** @return one ResultSet per select query, in completion order */
        public List<ResultSet> getResultSet()
        {
            return resultSet;
        }
    }

    /** Runs a single graph query on a borrowed engine. */
    private class QueryCallable implements Callable<Result>
    {
        private final Query q;

        public QueryCallable(Query q)
        {
            this.q = q;
        }

        @Override
        public Result call() throws Exception
        {
            QueryEngine engine = pool.getPool().borrowObject();
            try
            {
                Model model = engine.graphQuery(q);
                Result r = new Result();
                r.model = model;
                r.cursorMap.putAll(engine.getContext().getCursorMap());
                r.metaData.putAll(engine.getContext().getMetadata());
                return r;
            }
            finally
            {
                // always hand the engine back to the pool
                if (engine != null)
                {
                    pool.getPool().returnObject(engine);
                }
            }
        }
    }

    /** Runs a single select query on a borrowed engine. */
    private class SelectCallable implements Callable<Result>
    {
        private final Query q;

        public SelectCallable(Query q)
        {
            this.q = q;
        }

        @Override
        public Result call() throws Exception
        {
            QueryEngine engine = pool.getPool().borrowObject();
            try
            {
                ResultSet rs = engine.execSelect(q);
                Result r = new Result();
                r.resultSet = Collections.singletonList(rs);
                r.cursorMap.putAll(engine.getContext().getCursorMap());
                r.metaData.putAll(engine.getContext().getMetadata());
                return r;
            }
            finally
            {
                // always hand the engine back to the pool
                if (engine != null)
                {
                    pool.getPool().returnObject(engine);
                }
            }
        }
    }

    /**
     * Merges the metadata of {@code source} into {@code target}.
     * Lists are defensively copied on first insertion so that later merges
     * never mutate the list still held by a source Result (the original code
     * stored the source list directly and then called addAll on it).
     */
    private static void mergeMetaData(Result target, Result source)
    {
        for (Map.Entry<Integer, List<TripleMeta>> entry : source.getMetaData().entrySet())
        {
            List<TripleMeta> existing = target.metaData.get(entry.getKey());
            if (existing != null)
            {
                existing.addAll(entry.getValue());
            }
            else
            {
                target.metaData.put(entry.getKey(), new ArrayList<>(entry.getValue()));
            }
        }
    }

    /**
     * Executes the graph queries in parallel and merges their models,
     * cursor maps and metadata.
     *
     * @param queries the graph queries to run
     * @return the merged result
     * @throws ExecutionException   if any query fails
     * @throws InterruptedException if the calling thread is interrupted
     */
    public Result multiQuery(List<Query> queries) throws ExecutionException, InterruptedException
    {
        List<ListenableFuture<Result>> futures = new ArrayList<>();
        for (Query q : queries)
        {
            futures.add(service.submit(new QueryCallable(q)));
        }
        // fan-in: fails fast if any individual query failed
        ListenableFuture<List<Result>> fanIn = Futures.allAsList(futures);
        List<Result> list = fanIn.get();

        Result result = new Result();
        result.model = ModelFactory.createDefaultModel();
        for (Result r : list)
        {
            result.model.add(r.model);
            result.cursorMap.putAll(r.cursorMap);
            mergeMetaData(result, r);
        }
        return result;
    }

    /**
     * Executes the select queries in parallel and merges their result sets,
     * cursor maps and metadata.
     *
     * @param queries the select queries to run
     * @return the merged result
     * @throws ExecutionException   if any query fails
     * @throws InterruptedException if the calling thread is interrupted
     */
    public Result multiSelect(List<Query> queries) throws ExecutionException, InterruptedException
    {
        List<ListenableFuture<Result>> futures = new ArrayList<>();
        for (Query q : queries)
        {
            futures.add(service.submit(new SelectCallable(q)));
        }
        ListenableFuture<List<Result>> fanIn = Futures.allAsList(futures);
        List<Result> list = fanIn.get();

        Result result = new Result();
        result.model = ModelFactory.createDefaultModel();
        for (Result r : list)
        {
            result.resultSet.addAll(r.resultSet);
            result.cursorMap.putAll(r.cursorMap);
            mergeMetaData(result, r);
        }
        return result;
    }

    /** Varargs convenience overload of {@link #multiQuery(List)}. */
    public Result multiQuery(Query... queries) throws ExecutionException, InterruptedException
    {
        List<Query> list = new ArrayList<>();
        Collections.addAll(list, queries);
        return multiQuery(list);
    }

    /** Varargs convenience overload of {@link #multiSelect(List)} (added for symmetry). */
    public Result multiSelect(Query... queries) throws ExecutionException, InterruptedException
    {
        List<Query> list = new ArrayList<>();
        Collections.addAll(list, queries);
        return multiSelect(list);
    }

    @Override
    public void close() throws IOException
    {
        // shut the executor down even if closing the pool throws
        try
        {
            pool.getPool().close();
        }
        finally
        {
            service.shutdown();
        }
    }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.util.AsciiString;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import static io.netty.handler.codec.http.HttpResponseStatus.*;
import static io.netty.util.ReferenceCountUtil.*;
/**
* Client-side handler for handling an HTTP upgrade handshake to another protocol. When the first
* HTTP request is sent, this handler will add all appropriate headers to perform an upgrade to the
* new protocol. If the upgrade fails (i.e. response is not 101 Switching Protocols), this handler
* simply removes itself from the pipeline. If the upgrade is successful, upgrades the pipeline to
* the new protocol.
*/
public class HttpClientUpgradeHandler extends HttpObjectAggregator {

    /**
     * User events that are fired to notify about upgrade status.
     */
    public enum UpgradeEvent {
        /**
         * The Upgrade request was sent to the server.
         */
        UPGRADE_ISSUED,

        /**
         * The Upgrade to the new protocol was successful.
         */
        UPGRADE_SUCCESSFUL,

        /**
         * The Upgrade was unsuccessful due to the server not issuing
         * with a 101 Switching Protocols response.
         */
        UPGRADE_REJECTED
    }

    /**
     * The source codec that is used in the pipeline initially.
     */
    public interface SourceCodec {
        /**
         * Removes this codec (i.e. all associated handlers) from the pipeline.
         */
        void upgradeFrom(ChannelHandlerContext ctx);
    }

    /**
     * A codec that the source can be upgraded to.
     */
    public interface UpgradeCodec {
        /**
         * Returns the name of the protocol supported by this codec, as indicated by the {@code 'UPGRADE'} header.
         */
        CharSequence protocol();

        /**
         * Sets any protocol-specific headers required to the upgrade request. Returns the names of
         * all headers that were added. These headers will be used to populate the CONNECTION header.
         */
        Collection<CharSequence> setUpgradeHeaders(ChannelHandlerContext ctx, HttpRequest upgradeRequest);

        /**
         * Performs an HTTP protocol upgrade from the source codec. This method is responsible for
         * adding all handlers required for the new protocol.
         *
         * @param ctx the context for the current handler.
         * @param upgradeResponse the 101 Switching Protocols response that indicates that the server
         *            has switched to this protocol.
         */
        void upgradeTo(ChannelHandlerContext ctx, FullHttpResponse upgradeResponse) throws Exception;
    }

    private final SourceCodec sourceCodec;
    private final UpgradeCodec upgradeCodec;
    // true once the first (and only) upgrade request has been written
    private boolean upgradeRequested;

    /**
     * Constructs the client upgrade handler.
     *
     * @param sourceCodec the codec that is being used initially.
     * @param upgradeCodec the codec that the client would like to upgrade to.
     * @param maxContentLength the maximum length of the aggregated content.
     */
    public HttpClientUpgradeHandler(SourceCodec sourceCodec, UpgradeCodec upgradeCodec,
                                    int maxContentLength) {
        super(maxContentLength);
        if (sourceCodec == null) {
            throw new NullPointerException("sourceCodec");
        }
        if (upgradeCodec == null) {
            throw new NullPointerException("upgradeCodec");
        }
        this.sourceCodec = sourceCodec;
        this.upgradeCodec = upgradeCodec;
    }

    @Override
    public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise)
            throws Exception {
        // Non-request traffic (e.g. content chunks) passes through untouched.
        if (!(msg instanceof HttpRequest)) {
            super.write(ctx, msg, promise);
            return;
        }

        // Only one request may be in flight while the handshake is pending.
        if (upgradeRequested) {
            promise.setFailure(new IllegalStateException(
                    "Attempting to write HTTP request with upgrade in progress"));
            return;
        }

        upgradeRequested = true;
        setUpgradeRequestHeaders(ctx, (HttpRequest) msg);

        // Continue writing the request.
        super.write(ctx, msg, promise);

        // Notify that the upgrade request was issued.
        ctx.fireUserEventTriggered(UpgradeEvent.UPGRADE_ISSUED);
        // Now we wait for the next HTTP response to see if we switch protocols.
    }

    @Override
    protected void decode(ChannelHandlerContext ctx, HttpObject msg, List<Object> out)
            throws Exception {
        FullHttpResponse response = null;
        try {
            if (!upgradeRequested) {
                throw new IllegalStateException("Read HTTP response without requesting protocol switch");
            }

            if (msg instanceof FullHttpResponse) {
                response = (FullHttpResponse) msg;
                // Need to retain since the base class will release after returning from this method.
                response.retain();
                out.add(response);
            } else {
                // Call the base class to handle the aggregation of the full request.
                super.decode(ctx, msg, out);
                if (out.isEmpty()) {
                    // The full request hasn't been created yet, still awaiting more data.
                    return;
                }

                assert out.size() == 1;
                response = (FullHttpResponse) out.get(0);
            }

            if (!SWITCHING_PROTOCOLS.equals(response.status())) {
                // The server does not support the requested protocol, just remove this handler
                // and continue processing HTTP.
                // NOTE: not releasing the response since we're letting it propagate to the
                // next handler.
                ctx.fireUserEventTriggered(UpgradeEvent.UPGRADE_REJECTED);
                removeThisHandler(ctx);
                return;
            }

            // A 101 with a mismatching UPGRADE header is a protocol violation.
            CharSequence upgradeHeader = response.headers().get(HttpHeaderNames.UPGRADE);
            if (upgradeHeader != null && !AsciiString.contentEqualsIgnoreCase(upgradeCodec.protocol(), upgradeHeader)) {
                throw new IllegalStateException(
                        "Switching Protocols response with unexpected UPGRADE protocol: " + upgradeHeader);
            }

            // Upgrade to the new protocol.
            sourceCodec.upgradeFrom(ctx);
            upgradeCodec.upgradeTo(ctx, response);

            // Notify that the upgrade to the new protocol completed successfully.
            ctx.fireUserEventTriggered(UpgradeEvent.UPGRADE_SUCCESSFUL);

            // We switched protocols, so we're done with the upgrade response.
            // Release it and clear it from the output.
            response.release();
            out.clear();
            removeThisHandler(ctx);
        } catch (Throwable t) {
            // On any failure, release the retained response (null-safe) and
            // take this handler out of the pipeline.
            release(response);
            ctx.fireExceptionCaught(t);
            removeThisHandler(ctx);
        }
    }

    private static void removeThisHandler(ChannelHandlerContext ctx) {
        ctx.pipeline().remove(ctx.name());
    }

    /**
     * Adds all upgrade request headers necessary for an upgrade to the supported protocols.
     */
    private void setUpgradeRequestHeaders(ChannelHandlerContext ctx, HttpRequest request) {
        // Set the UPGRADE header on the request.
        request.headers().set(HttpHeaderNames.UPGRADE, upgradeCodec.protocol());

        // Add all protocol-specific headers to the request.
        Set<CharSequence> connectionParts = new LinkedHashSet<CharSequence>(2);
        connectionParts.addAll(upgradeCodec.setUpgradeHeaders(ctx, request));

        // Set the CONNECTION header from the set of all protocol-specific headers that were added.
        // Every part is followed by a comma, then "upgrade" is appended last.
        StringBuilder builder = new StringBuilder();
        for (CharSequence part : connectionParts) {
            builder.append(part);
            builder.append(',');
        }
        builder.append(HttpHeaderValues.UPGRADE);
        request.headers().set(HttpHeaderNames.CONNECTION, builder.toString());
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.avs.v2020_03_20.implementation;
import com.microsoft.azure.AzureClient;
import com.microsoft.azure.AzureServiceClient;
import com.microsoft.rest.credentials.ServiceClientCredentials;
import com.microsoft.rest.RestClient;
/**
 * Initializes a new instance of the AvsClientImpl class.
 */
public class AvsClientImpl extends AzureServiceClient {
    /** The {@link AzureClient} backing all long running operations. */
    private AzureClient azureClient;

    /**
     * Gets the {@link AzureClient} used for long running operations.
     * @return the azure client;
     */
    public AzureClient getAzureClient() {
        return azureClient;
    }

    /** The API version to use for this operation. */
    private String apiVersion;

    /**
     * Gets The API version to use for this operation.
     *
     * @return the apiVersion value.
     */
    public String apiVersion() {
        return apiVersion;
    }

    /** The ID of the target subscription. */
    private String subscriptionId;

    /**
     * Gets The ID of the target subscription.
     *
     * @return the subscriptionId value.
     */
    public String subscriptionId() {
        return subscriptionId;
    }

    /**
     * Sets The ID of the target subscription.
     *
     * @param subscriptionId the subscriptionId value.
     * @return the service client itself
     */
    public AvsClientImpl withSubscriptionId(String subscriptionId) {
        this.subscriptionId = subscriptionId;
        return this;
    }

    /** The preferred language for the response. */
    private String acceptLanguage;

    /**
     * Gets The preferred language for the response.
     *
     * @return the acceptLanguage value.
     */
    public String acceptLanguage() {
        return acceptLanguage;
    }

    /**
     * Sets The preferred language for the response.
     *
     * @param acceptLanguage the acceptLanguage value.
     * @return the service client itself
     */
    public AvsClientImpl withAcceptLanguage(String acceptLanguage) {
        this.acceptLanguage = acceptLanguage;
        return this;
    }

    /** The retry timeout in seconds for Long Running Operations. Default value is 30. */
    private int longRunningOperationRetryTimeout;

    /**
     * Gets The retry timeout in seconds for Long Running Operations. Default value is 30.
     *
     * @return the longRunningOperationRetryTimeout value.
     */
    public int longRunningOperationRetryTimeout() {
        return longRunningOperationRetryTimeout;
    }

    /**
     * Sets The retry timeout in seconds for Long Running Operations. Default value is 30.
     *
     * @param longRunningOperationRetryTimeout the longRunningOperationRetryTimeout value.
     * @return the service client itself
     */
    public AvsClientImpl withLongRunningOperationRetryTimeout(int longRunningOperationRetryTimeout) {
        this.longRunningOperationRetryTimeout = longRunningOperationRetryTimeout;
        return this;
    }

    /** Whether a unique x-ms-client-request-id should be generated. When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true. */
    private boolean generateClientRequestId;

    /**
     * Gets Whether a unique x-ms-client-request-id should be generated. When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true.
     *
     * @return the generateClientRequestId value.
     */
    public boolean generateClientRequestId() {
        return generateClientRequestId;
    }

    /**
     * Sets Whether a unique x-ms-client-request-id should be generated. When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true.
     *
     * @param generateClientRequestId the generateClientRequestId value.
     * @return the service client itself
     */
    public AvsClientImpl withGenerateClientRequestId(boolean generateClientRequestId) {
        this.generateClientRequestId = generateClientRequestId;
        return this;
    }

    /** The OperationsInner object to access its operations. */
    private OperationsInner operations;

    /**
     * Gets the OperationsInner object to access its operations.
     * @return the OperationsInner object.
     */
    public OperationsInner operations() {
        return operations;
    }

    /** The LocationsInner object to access its operations. */
    private LocationsInner locations;

    /**
     * Gets the LocationsInner object to access its operations.
     * @return the LocationsInner object.
     */
    public LocationsInner locations() {
        return locations;
    }

    /** The PrivateCloudsInner object to access its operations. */
    private PrivateCloudsInner privateClouds;

    /**
     * Gets the PrivateCloudsInner object to access its operations.
     * @return the PrivateCloudsInner object.
     */
    public PrivateCloudsInner privateClouds() {
        return privateClouds;
    }

    /** The ClustersInner object to access its operations. */
    private ClustersInner clusters;

    /**
     * Gets the ClustersInner object to access its operations.
     * @return the ClustersInner object.
     */
    public ClustersInner clusters() {
        return clusters;
    }

    /** The HcxEnterpriseSitesInner object to access its operations. */
    private HcxEnterpriseSitesInner hcxEnterpriseSites;

    /**
     * Gets the HcxEnterpriseSitesInner object to access its operations.
     * @return the HcxEnterpriseSitesInner object.
     */
    public HcxEnterpriseSitesInner hcxEnterpriseSites() {
        return hcxEnterpriseSites;
    }

    /** The AuthorizationsInner object to access its operations. */
    private AuthorizationsInner authorizations;

    /**
     * Gets the AuthorizationsInner object to access its operations.
     * @return the AuthorizationsInner object.
     */
    public AuthorizationsInner authorizations() {
        return authorizations;
    }

    /**
     * Initializes an instance of AvsClient client.
     *
     * @param credentials the management credentials for Azure
     */
    public AvsClientImpl(ServiceClientCredentials credentials) {
        // Default to the public Azure Resource Manager endpoint.
        this("https://management.azure.com", credentials);
    }

    /**
     * Initializes an instance of AvsClient client.
     *
     * @param baseUrl the base URL of the host
     * @param credentials the management credentials for Azure
     */
    public AvsClientImpl(String baseUrl, ServiceClientCredentials credentials) {
        super(baseUrl, credentials);
        initialize();
    }

    /**
     * Initializes an instance of AvsClient client.
     *
     * @param restClient the REST client to connect to Azure.
     */
    public AvsClientImpl(RestClient restClient) {
        super(restClient);
        initialize();
    }

    /** Applies default settings and wires up the per-resource operation groups. */
    protected void initialize() {
        apiVersion = "2020-03-20";
        acceptLanguage = "en-US";
        longRunningOperationRetryTimeout = 30;
        generateClientRequestId = true;
        operations = new OperationsInner(restClient().retrofit(), this);
        locations = new LocationsInner(restClient().retrofit(), this);
        privateClouds = new PrivateCloudsInner(restClient().retrofit(), this);
        clusters = new ClustersInner(restClient().retrofit(), this);
        hcxEnterpriseSites = new HcxEnterpriseSitesInner(restClient().retrofit(), this);
        authorizations = new AuthorizationsInner(restClient().retrofit(), this);
        // The AzureClient must be created last: it wraps this fully-initialized client.
        azureClient = new AzureClient(this);
    }

    /**
     * Gets the User-Agent header for the client.
     *
     * @return the user agent string.
     */
    @Override
    public String userAgent() {
        // Produces the same string as
        // String.format("%s (%s, %s, auto-generated)", super.userAgent(), "AvsClient", "2020-03-20").
        return super.userAgent() + " (AvsClient, 2020-03-20, auto-generated)";
    }
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package poi.ss.examples;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.ss.usermodel.*;
import java.io.*;
import java.util.ArrayList;
/**
* Demonstrates <em>one</em> way to convert an Excel spreadsheet into a CSV
* file. This class makes the following assumptions;
* <list>
 * <li>1. Where the Excel workbook contains more than one worksheet, then a single
* CSV file will contain the data from all of the worksheets.</li>
* <li>2. The data matrix contained in the CSV file will be square. This means that
* the number of fields in each record of the CSV file will match the number
* of cells in the longest row found in the Excel workbook. Any short records
* will be 'padded' with empty fields - an empty field is represented in the
* the CSV file in this way - ,,.</li>
* <li>3. Empty fields will represent missing cells.</li>
* <li>4. A record consisting of empty fields will be used to represent an empty row
* in the Excel workbook.</li>
* </list>
* Therefore, if the worksheet looked like this;
*
* <pre>
* ___________________________________________
* | | | | | |
* | A | B | C | D | E |
* ___|_______|_______|_______|_______|_______|
* | | | | | |
* 1 | 1 | 2 | 3 | 4 | 5 |
* ___|_______|_______|_______|_______|_______|
* | | | | | |
* 2 | | | | | |
* ___|_______|_______|_______|_______|_______|
* | | | | | |
* 3 | | A | | B | |
* ___|_______|_______|_______|_______|_______|
* | | | | | |
* 4 | | | | | Z |
* ___|_______|_______|_______|_______|_______|
* | | | | | |
* 5 | 1,400 | | 250 | | |
* ___|_______|_______|_______|_______|_______|
*
* </pre>
*
* Then, the resulting CSV file will contain the following lines (records);
* <pre>
* 1,2,3,4,5
* ,,,,
* ,A,,B,
* ,,,,Z
* "1,400",,250,,
* </pre><p>
* Typically, the comma is used to separate each of the fields that, together,
* constitute a single record or line within the CSV file. This is not however
* a hard and fast rule and so this class allows the user to determine which
* character is used as the field separator and assumes the comma if none other
* is specified.
* </p><p>
* If a field contains the separator then it will be escaped. If the file should
* obey Excel's CSV formatting rules, then the field will be surrounded with
* speech marks whilst if it should obey UNIX conventions, each occurrence of
* the separator will be preceded by the backslash character.
* </p><p>
* If a field contains an end of line (EOL) character then it too will be
* escaped. If the file should obey Excel's CSV formatting rules then the field
* will again be surrounded by speech marks. On the other hand, if the file
* should follow UNIX conventions then a single backslash will precede the
* EOL character. There is no single applicable standard for UNIX and some
 * applications replace the CR with \r and the LF with \n but this class will
* not do so.
* </p><p>
* If the field contains double quotes then that character will be escaped. It
* seems as though UNIX does not define a standard for this whilst Excel does.
 * Should the CSV file have to obey Excel's formatting rules then the speech
 * mark character will be escaped with a second set of speech marks. Finally, an
 * enclosing set of speech marks will also surround the entire field. Thus, if
* the following line of text appeared in a cell - "Hello" he said - it would
* look like this when converted into a field within a CSV file - """Hello"" he
* said".
* </p><p>
* Finally, it is worth noting that talk of CSV 'standards' is really slightly
 * misleading as there is no such thing. It may well be that the code in this
* class has to be modified to produce files to suit a specific application
* or requirement.
* </p>
* @author Mark B
* @version 1.00 9th April 2010
* 1.10 13th April 2010 - Added support for processing all Excel
* workbooks in a folder along with the ability
* to specify a field separator character.
* 2.00 14th April 2010 - Added support for embedded characters; the
* field separator, EOL and double quotes or
* speech marks. In addition, gave the client
* the ability to select how these are handled,
* either obeying Excel's or UNIX formatting
* conventions.
*/
public class ToCSV {
    // Handle to the Excel workbook currently being converted.
    private Workbook workbook = null;
    // One ArrayList<String> per row of the workbook; together they hold all
    // of the CSV data recovered from every sheet. (The raw inner type is
    // preserved from the original code.)
    private ArrayList<ArrayList> csvData = null;
    // Width - in cells - of the widest row seen so far; used when saving to
    // pad shorter records so the CSV data matrix is square.
    private int maxRowWidth = 0;
    // Escaping convention in force: EXCEL_STYLE_ESCAPING or UNIX_STYLE_ESCAPING.
    private int formattingConvention = 0;
    // Converts a cell's value into the formatted String written to the CSV file.
    private DataFormatter formatter = null;
    // Forces evaluation of formulae so their results - not the formula text -
    // appear in the CSV output.
    private FormulaEvaluator evaluator = null;
    // Field separator character(s) requested by the client code.
    private String separator = null;
    // Extension given to every generated CSV file.
    private static final String CSV_FILE_EXTENSION = ".csv";
    // Separator assumed when the caller does not specify one.
    private static final String DEFAULT_SEPARATOR = ",";
    /**
     * Identifies that the CSV file should obey Excel's formatting conventions
     * with regard to escaping certain embedded characters - the field separator,
     * speech mark and end of line (EOL) character
     */
    public static final int EXCEL_STYLE_ESCAPING = 0;
    /**
     * Identifies that the CSV file should obey UNIX formatting conventions
     * with regard to escaping certain embedded characters - the field separator
     * and end of line (EOL) character
     */
    public static final int UNIX_STYLE_ESCAPING = 1;
/**
* Process the contents of a folder, convert the contents of each Excel
* workbook into CSV format and save the resulting file to the specified
* folder using the same name as the original workbook with the .xls or
* .xlsx extension replaced by .csv. This method will ensure that the
* CSV file created contains the comma field separator and that embedded
* characters such as the field separator, the EOL and double quotes are
* escaped in accordance with Excel's convention.
*
* @param strSource An instance of the String class that encapsulates the
* name of and path to either a folder containing those Excel
* workbook(s) or the name of and path to an individual Excel workbook
* that is/are to be converted.
* @param strDestination An instance of the String class encapsulating the
* name of and path to a folder that will contain the resulting CSV
* files.
* @throws java.io.FileNotFoundException Thrown if any file cannot be located
* on the filesystem during processing.
* @throws java.io.IOException Thrown if the filesystem encounters any
* problems during processing.
* @throws IllegalArgumentException Thrown if the values passed
* to the strSource parameter refers to a file or folder that does not
* exist or if the value passed to the strDestination paramater refers
* to a folder that does not exist or simply does not refer to a
* folder.
* @throws org.apache.poi.openxml4j.exceptions.InvalidFormatException Thrown
* if the xml markup encountered whilst parsing a SpreadsheetML
* file (.xlsx) is invalid.
*/
public void convertExcelToCSV(String strSource, String strDestination)
throws FileNotFoundException, IOException,
IllegalArgumentException, InvalidFormatException {
// Simply chain the call to the overloaded convertExcelToCSV(String,
// String, String, int) method, pass the default separator and ensure
// that certain embedded characters are escaped in accordance with
// Excel's formatting conventions
this.convertExcelToCSV(strSource, strDestination,
ToCSV.DEFAULT_SEPARATOR, ToCSV.EXCEL_STYLE_ESCAPING);
}
/**
* Process the contents of a folder, convert the contents of each Excel
* workbook into CSV format and save the resulting file to the specified
* folder using the same name as the original workbook with the .xls or
* .xlsx extension replaced by .csv. This method allows the client to
* define the field separator but will ensure that embedded characters such
* as the field separator, the EOL and double quotes are escaped in
* accordance with Excel's convention.
*
* @param strSource An instance of the String class that encapsulates the
* name of and path to either a folder containing those Excel
* workbook(s) or the name of and path to an individual Excel workbook
* that is/are to be converted.
* @param strDestination An instance of the String class encapsulating the
* name of and path to a folder that will contain the resulting CSV
* files.
* @param separator An instance of the String class that encapsulates the
* character or characters the client wishes to use as the field
* separator.
* @throws java.io.FileNotFoundException Thrown if any file cannot be located
* on the filesystem during processing.
* @throws java.io.IOException Thrown if the filesystem encounters any
* problems during processing.
* @throws IllegalArgumentException Thrown if the values passed
* to the strSource parameter refers to a file or folder that does not
* exist or if the value passed to the strDestination paramater refers
* to a folder that does not exist or simply does not refer to a
* folder.
* @throws org.apache.poi.openxml4j.exceptions.InvalidFormatException Thrown
* if the xml markup encounetered whilst parsing a SpreadsheetML
* file (.xlsx) is invalid.
*/
public void convertExcelToCSV(String strSource, String strDestination,
String separator)
throws FileNotFoundException, IOException,
IllegalArgumentException, InvalidFormatException {
// Simply chain the call to the overloaded convertExcelToCSV(String,
// String, String, int) method and ensure that certain embedded
// characters are escaped in accordance with Excel's formatting
// conventions
this.convertExcelToCSV(strSource, strDestination,
separator, ToCSV.EXCEL_STYLE_ESCAPING);
}
/**
* Process the contents of a folder, convert the contents of each Excel
* workbook into CSV format and save the resulting file to the specified
* folder using the same name as the original workbook with the .xls or
* .xlsx extension replaced by .csv
*
* @param strSource An instance of the String class that encapsulates the
* name of and path to either a folder containing those Excel
* workbook(s) or the name of and path to an individual Excel workbook
* that is/are to be converted.
* @param strDestination An instance of the String class encapsulating the name
* of and path to a folder that will contain the resulting CSV files.
* @param formattingConvention A primitive int whose value will determine
* whether certain embedded characters should be escaped in accordance
* with Excel's or UNIX formatting conventions. Two constants are
* defined to support this option; ToCSV.EXCEL_STYLE_ESCAPING and
* ToCSV.UNIX_STYLE_ESCAPING
* @param separator An instance of the String class encapsulating the
* characters or characters that should be used to separate items
* on a line within the CSV file.
* @throws java.io.FileNotFoundException Thrown if any file cannot be located
* on the filesystem during processing.
* @throws java.io.IOException Thrown if the filesystem encounters any
* problems during processing.
* @throws IllegalArgumentException Thrown if the values passed
* to the strSource parameter refers to a file or folder that does not
* exist, if the value passed to the strDestination paramater refers
* to a folder that does not exist, if the value passed to the
* strDestination parameter does not refer to a folder or if the
* value passed to the formattingConvention parameter is other than
* one of the values defined by the constants ToCSV.EXCEL_STYLE_ESCAPING
* and ToCSV.UNIX_STYLE_ESCAPING.
* @throws org.apache.poi.openxml4j.exceptions.InvalidFormatException Thrown
* if the xml markup encounetered whilst parsing a SpreadsheetML
* file (.xlsx) is invalid.
*/
public void convertExcelToCSV(String strSource, String strDestination,
String separator, int formattingConvention)
throws FileNotFoundException, IOException,
IllegalArgumentException, InvalidFormatException {
File source = new File(strSource);
File destination = new File(strDestination);
File[] filesList = null;
String destinationFilename = null;
// Check that the source file/folder exists.
if(!source.exists()) {
throw new IllegalArgumentException("The source for the Excel " +
"file(s) cannot be found.");
}
// Ensure thaat the folder the user has chosen to save the CSV files
// away into firstly exists and secondly is a folder rather than, for
// instance, a data file.
if(!destination.exists()) {
throw new IllegalArgumentException("The folder/directory for the " +
"converted CSV file(s) does not exist.");
}
if(!destination.isDirectory()) {
throw new IllegalArgumentException("The destination for the CSV " +
"file(s) is not a directory/folder.");
}
// Ensure the value passed to the formattingConvention parameter is
// within range.
if(formattingConvention != ToCSV.EXCEL_STYLE_ESCAPING &&
formattingConvention != ToCSV.UNIX_STYLE_ESCAPING) {
throw new IllegalArgumentException("The value passed to the " +
"formattingConvention parameter is out of range.");
}
// Copy the spearator character and formatting convention into local
// variables for use in other methods.
this.separator = separator;
this.formattingConvention = formattingConvention;
// Check to see if the sourceFolder variable holds a reference to
// a file or a folder full of files.
if(source.isDirectory()) {
// Get a list of all of the Excel spreadsheet files (workbooks) in
// the source folder/directory
filesList = source.listFiles(new ExcelFilenameFilter());
}
else {
// Assume that it must be a file handle - although there are other
// options the code should perhaps check - and store the reference
// into the filesList variable.
filesList = new File[]{source};
}
// Step through each of the files in the source folder and for each
// open the workbook, convert it's contents to CSV format and then
// save the resulting file away into the folder specified by the
// contents of the destination variable. Note that the name of the
// csv file will be created by taking the name of the Excel file,
// removing the extension and replacing it with .csv. Note that there
// is one drawback with this approach; if the folder holding the files
// contains two workbooks whose names match but one is a binary file
// (.xls) and the other a SpreadsheetML file (.xlsx), then the names
// for both CSV files will be identical and one CSV file will,
// therefore, over-write the other.
for(File excelFile : filesList) {
// Open the workbook
this.openWorkbook(excelFile);
// Convert it's contents into a CSV file
this.convertToCSV();
// Build the name of the csv folder from that of the Excel workbook.
// Simply replace the .xls or .xlsx file extension with .csv
destinationFilename = excelFile.getName();
destinationFilename = destinationFilename.substring(
0, destinationFilename.lastIndexOf(".")) +
ToCSV.CSV_FILE_EXTENSION;
// Save the CSV file away using the newly constricted file name
// and to the specified directory.
this.saveCSVFile(new File(destination, destinationFilename));
}
}
/**
* Open an Excel workbook ready for conversion.
*
* @param file An instance of the File class that encapsulates a handle
* to a valid Excel workbook. Note that the workbook can be in
* either binary (.xls) or SpreadsheetML (.xlsx) format.
* @throws java.io.FileNotFoundException Thrown if the file cannot be located.
* @throws java.io.IOException Thrown if a problem occurs in the file system.
* @throws org.apache.poi.openxml4j.exceptions.InvalidFormatException Thrown
* if invalid xml is found whilst parsing an input SpreadsheetML
* file.
*/
private void openWorkbook(File file) throws FileNotFoundException,
IOException, InvalidFormatException {
FileInputStream fis = null;
try {
System.out.println("Opening workbook [" + file.getName() + "]");
fis = new FileInputStream(file);
// Open the workbook and then create the FormulaEvaluator and
// DataFormatter instances that will be needed to, respectively,
// force evaluation of forumlae found in cells and create a
// formatted String encapsulating the cells contents.
this.workbook = WorkbookFactory.create(fis);
this.evaluator = this.workbook.getCreationHelper().createFormulaEvaluator();
this.formatter = new DataFormatter(true);
}
finally {
if(fis != null) {
fis.close();
}
}
}
/**
* Called to convert the contents of the currently opened workbook into
* a CSV file.
*/
private void convertToCSV() {
Sheet sheet = null;
Row row = null;
int lastRowNum = 0;
this.csvData = new ArrayList<ArrayList>();
System.out.println("Converting files contents to CSV format.");
// Discover how many sheets there are in the workbook....
int numSheets = this.workbook.getNumberOfSheets();
// and then iterate through them.
for(int i = 0; i < numSheets; i++) {
// Get a reference to a sheet and check to see if it contains
// any rows.
sheet = this.workbook.getSheetAt(i);
if(sheet.getPhysicalNumberOfRows() > 0) {
// Note down the index number of the bottom-most row and
// then iterate through all of the rows on the sheet starting
// from the very first row - number 1 - even if it is missing.
// Recover a reference to the row and then call another method
// which will strip the data from the cells and build lines
// for inclusion in the resylting CSV file.
lastRowNum = sheet.getLastRowNum();
for(int j = 0; j <= lastRowNum; j++) {
row = sheet.getRow(j);
this.rowToCSV(row);
}
}
}
}
/**
* Called to actually save the data recovered from the Excel workbook
* as a CSV file.
*
* @param file An instance of the File class that encapsulates a handle
* referring to the CSV file.
* @throws java.io.FileNotFoundException Thrown if the file cannot be found.
* @throws java.io.IOException Thrown to indicate and error occurred in the
* underylying file system.
*/
private void saveCSVFile(File file)
throws FileNotFoundException, IOException {
FileWriter fw = null;
BufferedWriter bw = null;
ArrayList<String> line = null;
StringBuffer buffer = null;
String csvLineElement = null;
try {
System.out.println("Saving the CSV file [" + file.getName() + "]");
// Open a writer onto the CSV file.
fw = new FileWriter(file);
bw = new BufferedWriter(fw);
// Step through the elements of the ArrayList that was used to hold
// all of the data recovered from the Excel workbooks' sheets, rows
// and cells.
for(int i = 0; i < this.csvData.size(); i++) {
buffer = new StringBuffer();
// Get an element from the ArrayList that contains the data for
// the workbook. This element will itself be an ArrayList
// containing Strings and each String will hold the data recovered
// from a single cell. The for() loop is used to recover elements
// from this 'row' ArrayList one at a time and to write the Strings
// away to a StringBuffer thus assembling a single line for inclusion
// in the CSV file. If a row was empty or if it was short, then
// the ArrayList that contains it's data will also be shorter than
// some of the others. Therefore, it is necessary to check within
// the for loop to ensure that the ArrayList contains data to be
// processed. If it does, then an element will be recovered and
// appended to the StringBuffer.
line = this.csvData.get(i);
for(int j = 0; j < this.maxRowWidth; j++) {
if(line.size() > j) {
csvLineElement = line.get(j);
if(csvLineElement != null) {
buffer.append(this.escapeEmbeddedCharacters(
csvLineElement));
}
}
if(j < (this.maxRowWidth - 1)) {
buffer.append(this.separator);
}
}
// Once the line is built, write it away to the CSV file.
bw.write(buffer.toString().trim());
// Condition the inclusion of new line characters so as to
// avoid an additional, superfluous, new line at the end of
// the file.
if(i < (this.csvData.size() - 1)) {
bw.newLine();
}
}
}
finally {
if(bw != null) {
bw.flush();
bw.close();
}
}
}
/**
* Called to convert a row of cells into a line of data that can later be
* output to the CSV file.
*
* @param row An instance of either the HSSFRow or XSSFRow classes that
* encapsulates information about a row of cells recovered from
* an Excel workbook.
*/
private void rowToCSV(Row row) {
Cell cell = null;
int lastCellNum = 0;
ArrayList<String> csvLine = new ArrayList<String>();
// Check to ensure that a row was recovered from the sheet as it is
// possible that one or more rows between other populated rows could be
// missing - blank. If the row does contain cells then...
if(row != null) {
// Get the index for the right most cell on the row and then
// step along the row from left to right recovering the contents
// of each cell, converting that into a formatted String and
// then storing the String into the csvLine ArrayList.
lastCellNum = row.getLastCellNum();
for(int i = 0; i <= lastCellNum; i++) {
cell = row.getCell(i);
if(cell == null) {
csvLine.add("");
}
else {
if(cell.getCellType() != Cell.CELL_TYPE_FORMULA) {
csvLine.add(this.formatter.formatCellValue(cell));
}
else {
csvLine.add(this.formatter.formatCellValue(cell, this.evaluator));
}
}
}
// Make a note of the index number of the right most cell. This value
// will later be used to ensure that the matrix of data in the CSV file
// is square.
if(lastCellNum > this.maxRowWidth) {
this.maxRowWidth = lastCellNum;
}
}
this.csvData.add(csvLine);
}
/**
* Checks to see whether the field - which consists of the formatted
* contents of an Excel worksheet cell encapsulated within a String - contains
* any embedded characters that must be escaped. The method is able to
* comply with either Excel's or UNIX formatting conventions in the
* following manner;
*
* With regard to UNIX conventions, if the field contains any embedded
* field separator or EOL characters they will each be escaped by prefixing
* a leading backspace character. These are the only changes that have yet
* emerged following some research as being required.
*
* Excel has other embedded character escaping requirements, some that emerged
* from empirical testing, other through research. Firstly, with regards to
* any embedded speech marks ("), each occurrence should be escaped with
* another speech mark and the whole field then surrounded with speech marks.
* Thus if a field holds <em>"Hello" he said</em> then it should be modified
* to appear as <em>"""Hello"" he said"</em>. Furthermore, if the field
* contains either embedded separator or EOL characters, it should also
* be surrounded with speech marks. As a result <em>1,400</em> would become
* <em>"1,400"</em> assuming that the comma is the required field separator.
* This has one consequence in, if a field contains embedded speech marks
* and embedded separator characters, checks for both are not required as the
* additional set of speech marks that should be placed around ay field
* containing embedded speech marks will also account for the embedded
* separator.
*
* It is worth making one further note with regard to embedded EOL
* characters. If the data in a worksheet is exported as a CSV file using
* Excel itself, then the field will be surounded with speech marks. If the
* resulting CSV file is then re-imports into another worksheet, the EOL
* character will result in the original simgle field occupying more than
* one cell. This same 'feature' is replicated in this classes behaviour.
*
* @param field An instance of the String class encapsulating the formatted
* contents of a cell on an Excel worksheet.
* @return A String that encapsulates the formatted contents of that
* Excel worksheet cell but with any embedded separator, EOL or
* speech mark characters correctly escaped.
*/
private String escapeEmbeddedCharacters(String field) {
StringBuffer buffer = null;
// If the fields contents should be formatted to confrom with Excel's
// convention....
if(this.formattingConvention == ToCSV.EXCEL_STYLE_ESCAPING) {
// Firstly, check if there are any speech marks (") in the field;
// each occurrence must be escaped with another set of spech marks
// and then the entire field should be enclosed within another
// set of speech marks. Thus, "Yes" he said would become
// """Yes"" he said"
if(field.contains("\"")) {
buffer = new StringBuffer(field.replaceAll("\"", "\\\"\\\""));
buffer.insert(0, "\"");
buffer.append("\"");
}
else {
// If the field contains either embedded separator or EOL
// characters, then escape the whole field by surrounding it
// with speech marks.
buffer = new StringBuffer(field);
if((buffer.indexOf(this.separator)) > -1 ||
(buffer.indexOf("\n")) > -1) {
buffer.insert(0, "\"");
buffer.append("\"");
}
}
return(buffer.toString().trim());
}
// The only other formatting convention this class obeys is the UNIX one
// where any occurrence of the field separator or EOL character will
// be escaped by preceding it with a backslash.
else {
if(field.contains(this.separator)) {
field = field.replaceAll(this.separator, ("\\\\" + this.separator));
}
if(field.contains("\n")) {
field = field.replaceAll("\n", "\\\\\n");
}
return(field);
}
}
/**
* The main() method contains code that demonstrates how to use the class.
*
* @param args An array containing zero, one or more elements all of type
* String. Each element will encapsulate an argument specified by the
* user when running the program from the command prompt.
*/
public static void main(String[] args) {
// Check the number of arguments passed to the main method. There
// must be two, three or four; the name of and path to either the folder
// containing the Excel files or an individual Excel workbook that is/are
// to be converted, the name of and path to the folder to which the CSV
// files should be written, - optionally - the separator character
// that should be used to separate individual items (fields) on the
// lines (records) of the CSV file and - again optionally - an integer
// that idicates whether the CSV file ought to obey Excel's or UNIX
// convnetions with regard to formatting fields that contain embedded
// separator, Speech mark or EOL character(s).
//
// Note that the names of the CSV files will be derived from those
// of the Excel file(s). Put simply the .xls or .xlsx extension will be
// replaced with .csv. Therefore, if the source folder contains files
// with matching names but different extensions - Test.xls and Test.xlsx
// for example - then the CSV file generated from one will overwrite
// that generated from the other.
ToCSV converter = null;
try {
converter = new ToCSV();
if(args.length == 2) {
// Just the Source File/Folder and Destination Folder were
// passed to the main method.
converter.convertExcelToCSV(args[0], args[1]);
}
else if(args.length == 3){
// The Source File/Folder, Destination Folder and Separator
// were passed to the main method.
converter.convertExcelToCSV(args[0], args[1], args[2]);
}
else if(args.length == 4) {
// The Source File/Folder, Destination Folder, Separator and
// Formatting Convnetion were passed to the main method.
converter.convertExcelToCSV(args[0], args[1],
args[2], Integer.parseInt(args[3]));
}
else {
// None or more than four parameters were passed so display
//a Usage message.
System.out.println("Usage: java ToCSV [Source File/Folder] " +
"[Destination Folder] [Separator] [Formatting Convention]\n" +
"\tSource File/Folder\tThis argument should contain the name of and\n" +
"\t\t\t\tpath to either a single Excel workbook or a\n" +
"\t\t\t\tfolder containing one or more Excel workbooks.\n" +
"\tDestination Folder\tThe name of and path to the folder that the\n" +
"\t\t\t\tCSV files should be written out into. The\n" +
"\t\t\t\tfolder must exist before running the ToCSV\n" +
"\t\t\t\tcode as it will not check for or create it.\n" +
"\tSeparator\t\tOptional. The character or characters that\n" +
"\t\t\t\tshould be used to separate fields in the CSV\n" +
"\t\t\t\trecord. If no value is passed then the comma\n" +
"\t\t\t\twill be assumed.\n" +
"\tFormatting Convention\tOptional. This argument can take one of two\n" +
"\t\t\t\tvalues. Passing 0 (zero) will result in a CSV\n" +
"\t\t\t\tfile that obeys Excel's formatting conventions\n" +
"\t\t\t\twhilst passing 1 (one) will result in a file\n" +
"\t\t\t\tthat obeys UNIX formatting conventions. If no\n" +
"\t\t\t\tvalue is passed, then the CSV file produced\n" +
"\t\t\t\twill obey Excel's formatting conventions.");
}
}
// It is not wise to have such a wide catch clause - Exception is very
// close to being at the top of the inheritance hierarchy - though it
// will suffice for this example as it is really not possible to recover
// easilly from an exceptional set of circumstances at this point in the
// program. It should however, ideally be replaced with one or more
// catch clauses optimised to handle more specific problems.
catch(Exception ex) {
System.out.println("Caught an: " + ex.getClass().getName());
System.out.println("Message: " + ex.getMessage());
System.out.println("Stacktrace follows:.....");
ex.printStackTrace(System.out);
}
}
    /**
     * An instance of this class can be used to control the files returned
     * by a call to the listFiles() method when made on an instance of the
     * File class and that object refers to a folder/directory
     */
class ExcelFilenameFilter implements FilenameFilter {
/**
* Determine those files that will be returned by a call to the
* listFiles() method. In this case, the name of the file must end with
* either of the following two extension; '.xls' or '.xlsx'. For the
* future, it is very possible to parameterise this and allow the
* containing class to pass, for example, an array of Strings to this
* class on instantiation. Each element in that array could encapsulate
* a valid file extension - '.xls', '.xlsx', '.xlt', '.xlst', etc. These
* could then be used to control which files were returned by the call
* to the listFiles() method.
*
* @param file An instance of the File class that encapsulates a handle
* referring to the folder/directory that contains the file.
* @param name An instance of the String class that encapsulates the
* name of the file.
* @return A boolean value that indicates whether the file should be
* included in the array retirned by the call to the listFiles()
* method. In this case true will be returned if the name of the
* file ends with either '.xls' or '.xlsx' and false will be
* returned in all other instances.
*/
public boolean accept(File file, String name) {
return(name.endsWith(".xls") || name.endsWith(".xlsx"));
}
}
}
| |
/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.util;
import com.google.common.collect.Lists;
import com.google.common.primitives.Booleans;
import com.google.common.primitives.Bytes;
import com.google.common.primitives.Chars;
import com.google.common.primitives.Doubles;
import com.google.common.primitives.Floats;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import com.google.common.primitives.Shorts;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.spongepowered.math.vector.Vector2i;
import org.spongepowered.math.vector.Vector3i;
import org.spongepowered.math.vector.Vector4i;
import org.spongepowered.math.vector.VectorNi;
import org.spongepowered.math.vector.Vectord;
import org.spongepowered.math.vector.Vectorf;
import org.spongepowered.math.vector.Vectorl;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Utility class for coercing unknown values to specific target types.
*/
public final class Coerce {

    // Matches an optional opening bracket, the list body, and an optional
    // closing bracket; groups 1 and 3 are checked for pairing below.
    private static final Pattern listPattern = Pattern.compile("^([\\(\\[\\{]?)(.+?)([\\)\\]\\}]?)$");

    // listPairings[0].indexOf(open) must equal listPairings[1].indexOf(close)
    // for the brackets to be considered a matched pair.
    private static final String[] listPairings = { "([{", ")]}" };

    // NOTE: this pattern defines exactly TWO capturing groups (the two
    // coordinates); it must never be handed to listBracketsMatch(), which
    // reads group 3.
    private static final Pattern vector2Pattern = Pattern.compile("^\\( *(-?[\\d\\.]{1,10}), *(-?[\\d\\.]{1,10}) *\\)$");

    /**
     * No subclasses for you.
     */
    private Coerce() {}

    /**
     * Coerce the supplied object to a string.
     *
     * @param obj Object to coerce
     * @return Object as a string, empty string if the object is null
     */
    public static String toString(@Nullable Object obj) {
        if (obj == null) {
            return "";
        }

        if (obj.getClass().isArray()) {
            return Coerce.toList(obj).toString();
        }

        return obj.toString();
    }

    /**
     * Gets the given object as a {@link String}.
     *
     * @param obj The object to translate
     * @return The string value, if available
     */
    public static Optional<String> asString(@Nullable Object obj) {
        if (obj instanceof String) {
            return Optional.of((String) obj);
        } else if (obj == null) {
            return Optional.empty();
        } else {
            return Optional.of(obj.toString());
        }
    }

    /**
     * Coerce the supplied object to a list. Accepts lists and all types of 1D
     * arrays. Also (naively) supports lists in Strings in a format like
     * <code>{1,2,3,I,am,a,list}</code>
     *
     * @param obj Object to coerce
     * @return Some kind of List filled with unimaginable horrors
     */
    public static List<?> toList(@Nullable Object obj) {
        if (obj == null) {
            return Collections.<Object>emptyList();
        }

        if (obj instanceof List) {
            return (List<?>) obj;
        }

        final Class<?> clazz = obj.getClass();
        if (clazz.isArray()) {
            if (clazz.getComponentType().isPrimitive()) {
                return Coerce.primitiveArrayToList(obj);
            }

            return Arrays.asList((Object[]) obj);
        }

        return Coerce.parseStringToList(obj.toString());
    }

    /**
     * Gets the given object as a {@link List}.
     *
     * @param obj The object to translate
     * @return The list, if available
     */
    public static Optional<List<?>> asList(@Nullable Object obj) {
        if (obj == null) {
            return Optional.empty();
        }

        if (obj instanceof List) {
            return Optional.<List<?>>of((List<?>) obj);
        }

        final Class<?> clazz = obj.getClass();
        if (clazz.isArray()) {
            if (clazz.getComponentType().isPrimitive()) {
                return Optional.<List<?>>of(Coerce.primitiveArrayToList(obj));
            }

            return Optional.<List<?>>of(Arrays.asList((Object[]) obj));
        }

        return Optional.<List<?>>of(Coerce.parseStringToList(obj.toString()));
    }

    /**
     * Coerce the specified object to a list containing only objects of type
     * specified by <code>ofClass</code>. Also coerces list values where
     * possible.
     *
     * @param obj Object to coerce
     * @param ofClass Class to coerce to
     * @param <T> type of list (notional)
     * @return List of coerced values
     */
    @SuppressWarnings("unchecked")
    public static <T> List<T> toListOf(@Nullable Object obj, Class<T> ofClass) {
        Objects.requireNonNull(ofClass, "ofClass");
        final List<T> filteredList = Lists.newArrayList();

        for (Object o : Coerce.toList(obj)) {
            if (o == null) {
                // Guard: a source list (e.g. an Object[] via Arrays.asList)
                // may contain nulls; o.getClass() below would otherwise NPE.
                // Nulls cannot be coerced, so they are skipped.
                continue;
            }
            if (ofClass.isAssignableFrom(o.getClass())) {
                filteredList.add((T) o);
            } else if (ofClass.equals(String.class)) {
                filteredList.add((T) Coerce.toString(o));
            } else if (ofClass.equals(Integer.TYPE) || ofClass.equals(Integer.class)) {
                filteredList.add((T) (Integer) Coerce.toInteger(o));
            } else if (ofClass.equals(Float.TYPE) || ofClass.equals(Float.class)) {
                filteredList.add((T) Float.valueOf((float) Coerce.toDouble(o)));
            } else if (ofClass.equals(Double.TYPE) || ofClass.equals(Double.class)) {
                filteredList.add((T) (Double) Coerce.toDouble(o));
            } else if (ofClass.equals(Boolean.TYPE) || ofClass.equals(Boolean.class)) {
                filteredList.add((T) (Boolean) Coerce.toBoolean(o));
            }
        }

        return filteredList;
    }

    /**
     * Coerce the supplied object to a boolean, matches strings such as "yes" as
     * well as literal boolean values.
     *
     * @param obj Object to coerce
     * @return Object as a boolean, <code>false</code> if the object is null
     */
    public static boolean toBoolean(@Nullable Object obj) {
        if (obj == null) {
            return false;
        }
        return (obj instanceof Boolean) ? (Boolean) obj : obj.toString().trim().matches("^(1|true|yes)$");
    }

    /**
     * Gets the given object as a {@link Boolean}.
     *
     * @param obj The object to translate
     * @return The boolean, if available
     */
    public static Optional<Boolean> asBoolean(@Nullable Object obj) {
        if (obj instanceof Boolean) {
            return Optional.of((Boolean) obj);
        } else if (obj instanceof Byte) {
            return Optional.of((Byte) obj != 0);
        }
        return Optional.empty();
    }

    /**
     * Coerce the supplied object to an integer, parse it if necessary.
     *
     * @param obj Object to coerce
     * @return Object as an integer, <code>0</code> if the object is null or
     *      cannot be parsed
     */
    public static int toInteger(@Nullable Object obj) {
        if (obj == null) {
            return 0;
        }

        if (obj instanceof Number) {
            return ((Number) obj).intValue();
        }

        final String strObj = Coerce.sanitiseNumber(obj);
        final Integer iParsed = Ints.tryParse(strObj);
        if (iParsed != null) {
            return iParsed;
        }

        // Fall back to a double parse so that strings like "1.5" coerce to 1.
        final Double dParsed = Doubles.tryParse(strObj);
        return dParsed != null ? dParsed.intValue() : 0;
    }

    /**
     * Gets the given object as a {@link Integer}.
     *
     * <p>Note that this does not translate numbers spelled out as strings.</p>
     *
     * @param obj The object to translate
     * @return The integer value, if available
     */
    public static Optional<Integer> asInteger(@Nullable Object obj) {
        if (obj == null) {
            // fail fast
            return Optional.empty();
        }
        if (obj instanceof Number) {
            return Optional.of(((Number) obj).intValue());
        }

        try {
            // Integer.valueOf never returns null; a failed parse throws.
            return Optional.of(Integer.valueOf(obj.toString()));
        } catch (NumberFormatException | NullPointerException e) {
            // do nothing
        }

        final String strObj = Coerce.sanitiseNumber(obj);
        final Integer iParsed = Ints.tryParse(strObj);
        if (iParsed == null) {
            final Double dParsed = Doubles.tryParse(strObj);
            // try parsing as double now
            return dParsed == null ? Optional.<Integer>empty() : Optional.of(dParsed.intValue());
        }
        return Optional.of(iParsed);
    }

    /**
     * Coerce the supplied object to a double-precision floating-point number,
     * parse it if necessary.
     *
     * @param obj Object to coerce
     * @return Object as a double, <code>0.0</code> if the object is null or
     *      cannot be parsed
     */
    public static double toDouble(@Nullable Object obj) {
        if (obj == null) {
            return 0.0;
        }

        if (obj instanceof Number) {
            return ((Number) obj).doubleValue();
        }

        final Double parsed = Doubles.tryParse(Coerce.sanitiseNumber(obj));
        return parsed != null ? parsed : 0.0;
    }

    /**
     * Gets the given object as a {@link Double}.
     *
     * <p>Note that this does not translate numbers spelled out as strings.</p>
     *
     * @param obj The object to translate
     * @return The double value, if available
     */
    public static Optional<Double> asDouble(@Nullable Object obj) {
        if (obj == null) {
            // fail fast
            return Optional.empty();
        }
        if (obj instanceof Number) {
            return Optional.of(((Number) obj).doubleValue());
        }

        try {
            // Double.valueOf never returns null; a failed parse throws.
            return Optional.of(Double.valueOf(obj.toString()));
        } catch (NumberFormatException | NullPointerException e) {
            // do nothing
        }

        final String strObj = Coerce.sanitiseNumber(obj);
        final Double dParsed = Doubles.tryParse(strObj);
        // try parsing as double now
        return dParsed == null ? Optional.<Double>empty() : Optional.of(dParsed);
    }

    /**
     * Coerce the supplied object to a single-precision floating-point number,
     * parse it if necessary.
     *
     * @param obj Object to coerce
     * @return Object as a float, <code>0.0</code> if the object is null or
     *      cannot be parsed
     */
    public static float toFloat(@Nullable Object obj) {
        if (obj == null) {
            return 0.0f;
        }

        if (obj instanceof Number) {
            return ((Number) obj).floatValue();
        }

        final Float parsed = Floats.tryParse(Coerce.sanitiseNumber(obj));
        return parsed != null ? parsed : 0.0f;
    }

    /**
     * Gets the given object as a {@link Float}.
     *
     * <p>Note that this does not translate numbers spelled out as strings.</p>
     *
     * @param obj The object to translate
     * @return The float value, if available
     */
    public static Optional<Float> asFloat(@Nullable Object obj) {
        if (obj == null) {
            // fail fast
            return Optional.empty();
        }
        if (obj instanceof Number) {
            return Optional.of(((Number) obj).floatValue());
        }

        try {
            // Float.valueOf never returns null; a failed parse throws.
            return Optional.of(Float.valueOf(obj.toString()));
        } catch (NumberFormatException | NullPointerException e) {
            // do nothing
        }

        final String strObj = Coerce.sanitiseNumber(obj);
        final Double dParsed = Doubles.tryParse(strObj);
        return dParsed == null ? Optional.<Float>empty() : Optional.of(dParsed.floatValue());
    }

    /**
     * Coerce the supplied object to a short number, parse it if necessary.
     *
     * @param obj Object to coerce
     * @return Object as a short, <code>0</code> if the object is null or cannot
     *      be parsed
     */
    public static short toShort(@Nullable Object obj) {
        if (obj == null) {
            return 0;
        }

        if (obj instanceof Number) {
            return ((Number) obj).shortValue();
        }
        try {
            return Short.parseShort(Coerce.sanitiseNumber(obj));
        } catch (NumberFormatException e) {
            return 0;
        }
    }

    /**
     * Gets the given object as a {@link Short}.
     *
     * <p>Note that this does not translate numbers spelled out as strings.</p>
     *
     * @param obj The object to translate
     * @return The short value, if available
     */
    public static Optional<Short> asShort(@Nullable Object obj) {
        if (obj == null) {
            // fail fast
            return Optional.empty();
        }
        if (obj instanceof Number) {
            return Optional.of(((Number) obj).shortValue());
        }

        try {
            // parseShort returns a primitive, so the boxed value is never
            // null; a failed parse throws instead.
            return Optional.of(Short.parseShort(Coerce.sanitiseNumber(obj)));
        } catch (NumberFormatException | NullPointerException e) {
            // do nothing
        }
        return Optional.empty();
    }

    /**
     * Coerce the supplied object to a byte number, parse it if necessary.
     *
     * @param obj Object to coerce
     * @return Object as a byte, <code>0</code> if the object is null or cannot
     *      be parsed
     */
    public static byte toByte(@Nullable Object obj) {
        if (obj == null) {
            return 0;
        }

        if (obj instanceof Number) {
            return ((Number) obj).byteValue();
        }
        try {
            return Byte.parseByte(Coerce.sanitiseNumber(obj));
        } catch (NumberFormatException e) {
            return 0;
        }
    }

    /**
     * Gets the given object as a {@link Byte}.
     *
     * <p>Note that this does not translate numbers spelled out as strings.</p>
     *
     * @param obj The object to translate
     * @return The byte value, if available
     */
    public static Optional<Byte> asByte(@Nullable Object obj) {
        if (obj == null) {
            // fail fast
            return Optional.empty();
        }
        if (obj instanceof Number) {
            return Optional.of(((Number) obj).byteValue());
        }

        try {
            // parseByte returns a primitive, so the boxed value is never
            // null; a failed parse throws instead.
            return Optional.of(Byte.parseByte(Coerce.sanitiseNumber(obj)));
        } catch (NumberFormatException | NullPointerException e) {
            // do nothing
        }
        return Optional.empty();
    }

    /**
     * Coerce the supplied object to a long number, parse it if necessary.
     *
     * @param obj Object to coerce
     * @return Object as a long, <code>0</code> if the object is null or cannot
     *      be parsed
     */
    public static long toLong(@Nullable Object obj) {
        if (obj == null) {
            return 0;
        }

        if (obj instanceof Number) {
            return ((Number) obj).longValue();
        }
        try {
            return Long.parseLong(Coerce.sanitiseNumber(obj));
        } catch (NumberFormatException e) {
            return 0;
        }
    }

    /**
     * Gets the given object as a {@link Long}.
     *
     * <p>Note that this does not translate numbers spelled out as strings.</p>
     *
     * @param obj The object to translate
     * @return The long value, if available
     */
    public static Optional<Long> asLong(@Nullable Object obj) {
        if (obj == null) {
            // fail fast
            return Optional.empty();
        }
        if (obj instanceof Number) {
            return Optional.of(((Number) obj).longValue());
        }

        try {
            // parseLong returns a primitive, so the boxed value is never
            // null; a failed parse throws instead.
            return Optional.of(Long.parseLong(Coerce.sanitiseNumber(obj)));
        } catch (NumberFormatException | NullPointerException e) {
            // do nothing
        }
        return Optional.empty();
    }

    /**
     * Coerce the supplied object to a character, parse it if necessary.
     *
     * @param obj Object to coerce
     * @return Object as a character, <code>'&#92;u0000'</code> if the object is
     *      null or cannot be parsed
     */
    public static char toChar(@Nullable Object obj) {
        if (obj == null) {
            return 0;
        }
        if (obj instanceof Character) {
            return (Character) obj;
        }
        try {
            return obj.toString().charAt(0);
        } catch (IndexOutOfBoundsException e) {
            // empty string - fall through to the default below
        }
        return '\u0000';
    }

    /**
     * Gets the given object as a {@link Character}.
     *
     * @param obj The object to translate
     * @return The character, if available
     */
    public static Optional<Character> asChar(@Nullable Object obj) {
        if (obj == null) {
            return Optional.empty();
        }
        if (obj instanceof Character) {
            return Optional.of((Character) obj);
        }
        try {
            return Optional.of(obj.toString().charAt(0));
        } catch (IndexOutOfBoundsException e) {
            // empty string - fall through to the default below
        }
        return Optional.empty();
    }

    /**
     * Coerce the specified object to an enum of the supplied type, returns the
     * first enum constant in the enum if parsing fails.
     *
     * @param obj Object to coerce
     * @param enumClass Enum class to coerce to
     * @param <E> enum type
     * @return Coerced enum value
     */
    public static <E extends Enum<E>> E toEnum(@Nullable Object obj, Class<E> enumClass) {
        return Coerce.toEnum(obj, enumClass, enumClass.getEnumConstants()[0]);
    }

    /**
     * Coerce the specified object to an enum of the supplied type, returns the
     * specified default value if parsing fails.
     *
     * @param obj Object to coerce
     * @param enumClass Enum class to coerce to
     * @param defaultValue default value to return if coercion fails
     * @param <E> enum type
     * @return Coerced enum value
     */
    public static <E extends Enum<E>> E toEnum(@Nullable Object obj, Class<E> enumClass, E defaultValue) {
        Objects.requireNonNull(enumClass, "enumClass");
        Objects.requireNonNull(defaultValue, "defaultValue");
        if (obj == null) {
            return defaultValue;
        }

        if (enumClass.isAssignableFrom(obj.getClass())) {
            @SuppressWarnings("unchecked")
            final E enumObj = (E) obj;
            return enumObj;
        }

        final String strObj = obj.toString().trim();

        try {
            // Efficient but case-sensitive lookup in the constant map
            return Enum.valueOf(enumClass, strObj);
        } catch (IllegalArgumentException ex) {
            // fall through
        }

        // Try a case-insensitive lookup
        for (E value : enumClass.getEnumConstants()) {
            if (value.name().equalsIgnoreCase(strObj)) {
                return value;
            }
        }

        return defaultValue;
    }

    /**
     * Coerce the specified object to the specified pseudo-enum type using the
     * supplied pseudo-enum dictionary class.
     *
     * @param obj Object to coerce
     * @param pseudoEnumClass The pseudo-enum class
     * @param dictionaryClass Pseudo-enum dictionary class to look in
     * @param defaultValue Value to return if lookup fails
     * @param <T> pseudo-enum type
     * @return Coerced value or default if coercion fails
     */
    public static <T> T toPseudoEnum(@Nullable Object obj, Class<T> pseudoEnumClass, Class<?> dictionaryClass, T defaultValue) {
        Objects.requireNonNull(pseudoEnumClass, "pseudoEnumClass");
        Objects.requireNonNull(dictionaryClass, "dictionaryClass");
        Objects.requireNonNull(defaultValue, "defaultValue");
        if (obj == null) {
            return defaultValue;
        }

        if (pseudoEnumClass.isAssignableFrom(obj.getClass())) {
            @SuppressWarnings("unchecked")
            final T enumObj = (T) obj;
            return enumObj;
        }

        final String strObj = obj.toString().trim();

        try {
            // Scan the dictionary's static fields for an entry of the target
            // type whose field name matches the supplied string.
            for (Field field : dictionaryClass.getFields()) {
                if ((field.getModifiers() & Modifier.STATIC) != 0 && pseudoEnumClass.isAssignableFrom(field.getType())) {
                    final String fieldName = field.getName();
                    @SuppressWarnings("unchecked")
                    final T entry = (T) field.get(null);
                    if (strObj.equalsIgnoreCase(fieldName)) {
                        return entry;
                    }
                }
            }
        } catch (Exception ex) {
            // well that went badly
        }

        return defaultValue;
    }

    /**
     * Coerce the supplied object to a Vector2i.
     *
     * @param obj Object to coerce
     * @return Vector2i, returns Vector2i.ZERO if coercion failed
     */
    public static Vector2i toVector2i(@Nullable Object obj) {
        if (obj == null) {
            return Vector2i.ZERO;
        }

        if (obj instanceof Vectorl) {
            obj = ((Vectorl) obj).toInt();
        } else if (obj instanceof Vectorf) {
            obj = ((Vectorf) obj).toInt();
        } else if (obj instanceof Vectord) {
            obj = ((Vectord) obj).toInt();
        }

        if (obj instanceof Vector2i) {
            return (Vector2i) obj;
        } else if (obj instanceof Vector3i) {
            return new Vector2i((Vector3i) obj);
        } else if (obj instanceof Vector4i) {
            return new Vector2i((Vector4i) obj);
        } else if (obj instanceof VectorNi) {
            return new Vector2i((VectorNi) obj);
        }

        final Matcher vecMatch = Coerce.vector2Pattern.matcher(obj.toString());
        // BUG FIX: the previous code routed this matcher through
        // listBracketsMatch(), which reads capturing group 3 - a group that
        // vector2Pattern does not define - so every matching input such as
        // "(1, 2)" threw IndexOutOfBoundsException instead of coercing.
        // Coerce.toInteger() is used instead of Integer.parseInt() so that
        // decimal coordinates the pattern admits (e.g. "(9.5, 10.6)") coerce
        // rather than throwing NumberFormatException, honouring the
        // documented "returns Vector2i.ZERO if coercion failed" contract.
        if (vecMatch.matches()) {
            return new Vector2i(Coerce.toInteger(vecMatch.group(1)), Coerce.toInteger(vecMatch.group(2)));
        }

        final List<?> list = Coerce.toList(obj);
        if (list.size() == 2) {
            return new Vector2i(Coerce.toInteger(list.get(0)), Coerce.toInteger(list.get(1)));
        }

        return Vector2i.ZERO;
    }

    /**
     * Sanitise a string containing a common representation of a number to make
     * it parsable. Strips thousand-separating commas and trims later members
     * of a comma-separated list. For example the string "(9.5, 10.6, 33.2)"
     * will be sanitised to "9.5".
     *
     * @param obj Object to sanitise
     * @return Sanitised number-format string to parse
     */
    private static String sanitiseNumber(Object obj) {
        String string = obj.toString().trim();
        if (string.length() < 1) {
            return "0";
        }

        // Strip matched surrounding brackets, if any.
        final Matcher candidate = Coerce.listPattern.matcher(string);
        if (Coerce.listBracketsMatch(candidate)) {
            string = candidate.group(2).trim();
        }

        // A comma after a decimal point marks a list: keep the first member.
        final int decimal = string.indexOf('.');
        final int comma = string.indexOf(',', decimal);
        if (decimal > -1 && comma > -1) {
            return Coerce.sanitiseNumber(string.substring(0, comma));
        }

        // A '-' anywhere but the first position is not a valid sign.
        if (string.indexOf('-', 1) != -1) {
            return "0";
        }

        // Strip thousand separators and anything after the first space.
        return string.replace(",", "").split(" ", 0)[0];
    }

    // True when the candidate matches listPattern AND its surrounding
    // brackets (groups 1 and 3, possibly both empty) form a matched pair.
    private static boolean listBracketsMatch(Matcher candidate) {
        return candidate.matches() && Coerce.listPairings[0].indexOf(candidate.group(1)) == Coerce.listPairings[1].indexOf(candidate.group(3));
    }

    // Wraps a 1D primitive array in the corresponding boxed List view.
    private static List<?> primitiveArrayToList(Object obj) {
        if (obj instanceof boolean[]) {
            return Booleans.asList((boolean[]) obj);
        } else if (obj instanceof char[]) {
            return Chars.asList((char[]) obj);
        } else if (obj instanceof byte[]) {
            return Bytes.asList((byte[]) obj);
        } else if (obj instanceof short[]) {
            return Shorts.asList((short[]) obj);
        } else if (obj instanceof int[]) {
            return Ints.asList((int[]) obj);
        } else if (obj instanceof long[]) {
            return Longs.asList((long[]) obj);
        } else if (obj instanceof float[]) {
            return Floats.asList((float[]) obj);
        } else if (obj instanceof double[]) {
            return Doubles.asList((double[]) obj);
        }

        return Collections.<Object>emptyList();
    }

    // Naive string-to-list parse: requires matched (or absent) surrounding
    // brackets, then splits the body on commas without any trimming.
    private static List<?> parseStringToList(String string) {
        final Matcher candidate = Coerce.listPattern.matcher(string);
        if (!Coerce.listBracketsMatch(candidate)) {
            return Collections.<Object>emptyList();
        }

        final List<String> list = Lists.newArrayList();
        for (final String part : candidate.group(2).split(",", -1)) {
            list.add(part);
        }
        return list;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.coders.DefaultCoder;
import org.apache.beam.sdk.io.AvroIO.Write.Bound;
import org.apache.beam.sdk.testing.NeedsRunner;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.util.IOChannelUtils;
import org.apache.beam.sdk.values.PCollection;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.reflect.Nullable;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
* Tests for AvroIO Read and Write transforms.
*/
@RunWith(JUnit4.class)
public class AvroIOTest {
  // Per-test scratch directory; JUnit creates it before each test method and
  // recursively deletes it afterwards.
  @Rule
  public TemporaryFolder tmpFolder = new TemporaryFolder();
@Test
public void testReadWithoutValidationFlag() throws Exception {
AvroIO.Read.Bound<GenericRecord> read = AvroIO.Read.from("gs://bucket/foo*/baz");
assertTrue(read.needsValidation());
assertFalse(read.withoutValidation().needsValidation());
}
@Test
public void testWriteWithoutValidationFlag() throws Exception {
AvroIO.Write.Bound<GenericRecord> write = AvroIO.Write.to("gs://bucket/foo/baz");
assertTrue(write.needsValidation());
assertFalse(write.withoutValidation().needsValidation());
}
@Test
public void testAvroIOGetName() {
assertEquals("AvroIO.Read", AvroIO.Read.from("gs://bucket/foo*/baz").getName());
assertEquals("AvroIO.Write", AvroIO.Write.to("gs://bucket/foo/baz").getName());
}
@DefaultCoder(AvroCoder.class)
static class GenericClass {
int intField;
String stringField;
public GenericClass() {}
public GenericClass(int intValue, String stringValue) {
this.intField = intValue;
this.stringField = stringValue;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(getClass())
.add("intField", intField)
.add("stringField", stringField)
.toString();
}
@Override
public int hashCode() {
return Objects.hash(intField, stringField);
}
@Override
public boolean equals(Object other) {
if (other == null || !(other instanceof GenericClass)) {
return false;
}
GenericClass o = (GenericClass) other;
return Objects.equals(intField, o.intField) && Objects.equals(stringField, o.stringField);
}
}
@Test
@Category(NeedsRunner.class)
public void testAvroIOWriteAndReadASingleFile() throws Throwable {
TestPipeline p = TestPipeline.create();
List<GenericClass> values = ImmutableList.of(new GenericClass(3, "hi"),
new GenericClass(5, "bar"));
File outputFile = tmpFolder.newFile("output.avro");
p.apply(Create.of(values))
.apply(AvroIO.Write.to(outputFile.getAbsolutePath())
.withoutSharding()
.withSchema(GenericClass.class));
p.run();
p = TestPipeline.create();
PCollection<GenericClass> input = p
.apply(AvroIO.Read.from(outputFile.getAbsolutePath()).withSchema(GenericClass.class));
PAssert.that(input).containsInAnyOrder(values);
p.run();
}
@DefaultCoder(AvroCoder.class)
static class GenericClassV2 {
int intField;
String stringField;
@Nullable String nullableField;
public GenericClassV2() {}
public GenericClassV2(int intValue, String stringValue, String nullableValue) {
this.intField = intValue;
this.stringField = stringValue;
this.nullableField = nullableValue;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(getClass())
.add("intField", intField)
.add("stringField", stringField)
.add("nullableField", nullableField)
.toString();
}
@Override
public int hashCode() {
return Objects.hash(intField, stringField, nullableField);
}
@Override
public boolean equals(Object other) {
if (other == null || !(other instanceof GenericClassV2)) {
return false;
}
GenericClassV2 o = (GenericClassV2) other;
return Objects.equals(intField, o.intField)
&& Objects.equals(stringField, o.stringField)
&& Objects.equals(nullableField, o.nullableField);
}
}
/**
* Tests that {@code AvroIO} can read an upgraded version of an old class, as long as the
* schema resolution process succeeds. This test covers the case when a new, {@code @Nullable}
* field has been added.
*
* <p>For more information, see http://avro.apache.org/docs/1.7.7/spec.html#Schema+Resolution
*/
@Test
@Category(NeedsRunner.class)
public void testAvroIOWriteAndReadSchemaUpgrade() throws Throwable {
TestPipeline p = TestPipeline.create();
List<GenericClass> values = ImmutableList.of(new GenericClass(3, "hi"),
new GenericClass(5, "bar"));
File outputFile = tmpFolder.newFile("output.avro");
p.apply(Create.of(values))
.apply(AvroIO.Write.to(outputFile.getAbsolutePath())
.withoutSharding()
.withSchema(GenericClass.class));
p.run();
List<GenericClassV2> expected = ImmutableList.of(new GenericClassV2(3, "hi", null),
new GenericClassV2(5, "bar", null));
p = TestPipeline.create();
PCollection<GenericClassV2> input = p
.apply(AvroIO.Read.from(outputFile.getAbsolutePath()).withSchema(GenericClassV2.class));
PAssert.that(input).containsInAnyOrder(expected);
p.run();
}
@SuppressWarnings("deprecation") // using AvroCoder#createDatumReader for tests.
private void runTestWrite(String[] expectedElements, int numShards) throws IOException {
File baseOutputFile = new File(tmpFolder.getRoot(), "prefix");
String outputFilePrefix = baseOutputFile.getAbsolutePath();
TestPipeline p = TestPipeline.create();
Bound<String> write = AvroIO.Write.to(outputFilePrefix).withSchema(String.class);
if (numShards > 1) {
write = write.withNumShards(numShards);
} else {
write = write.withoutSharding();
}
p.apply(Create.<String>of(expectedElements)).apply(write);
p.run();
String shardNameTemplate = write.getShardNameTemplate();
assertTestOutputs(expectedElements, numShards, outputFilePrefix, shardNameTemplate);
}
public static void assertTestOutputs(
String[] expectedElements, int numShards, String outputFilePrefix, String shardNameTemplate)
throws IOException {
// Validate that the data written matches the expected elements in the expected order
List<File> expectedFiles = new ArrayList<>();
for (int i = 0; i < numShards; i++) {
expectedFiles.add(
new File(
IOChannelUtils.constructName(
outputFilePrefix, shardNameTemplate, "" /* no suffix */, i, numShards)));
}
List<String> actualElements = new ArrayList<>();
for (File outputFile : expectedFiles) {
assertTrue("Expected output file " + outputFile.getName(), outputFile.exists());
try (DataFileReader<String> reader =
new DataFileReader<>(outputFile, AvroCoder.of(String.class).createDatumReader())) {
Iterators.addAll(actualElements, reader);
}
}
assertThat(actualElements, containsInAnyOrder(expectedElements));
}
@Test
@Category(NeedsRunner.class)
public void testAvroSinkWrite() throws Exception {
String[] expectedElements = new String[] {"first", "second", "third"};
runTestWrite(expectedElements, 1);
}
@Test
@Category(NeedsRunner.class)
public void testAvroSinkShardedWrite() throws Exception {
String[] expectedElements = new String[] {"first", "second", "third", "fourth", "fifth"};
runTestWrite(expectedElements, 4);
}
// TODO: for Write only, test withSuffix,
// withShardNameTemplate and withoutSharding.
@Test
public void testReadDisplayData() {
AvroIO.Read.Bound<?> read = AvroIO.Read.from("foo.*")
.withoutValidation();
DisplayData displayData = DisplayData.from(read);
assertThat(displayData, hasDisplayItem("filePattern", "foo.*"));
assertThat(displayData, hasDisplayItem("validation", false));
}
@Test
public void testWriteDisplayData() {
AvroIO.Write.Bound<?> write = AvroIO.Write
.to("foo")
.withShardNameTemplate("-SS-of-NN-")
.withSuffix("bar")
.withSchema(GenericClass.class)
.withNumShards(100)
.withoutValidation();
DisplayData displayData = DisplayData.from(write);
assertThat(displayData, hasDisplayItem("filePrefix", "foo"));
assertThat(displayData, hasDisplayItem("shardNameTemplate", "-SS-of-NN-"));
assertThat(displayData, hasDisplayItem("fileSuffix", "bar"));
assertThat(displayData, hasDisplayItem("schema", GenericClass.class));
assertThat(displayData, hasDisplayItem("numShards", 100));
assertThat(displayData, hasDisplayItem("validation", false));
}
}
| |
package com.zegoggles.smssync.preferences;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import android.util.Base64;
import android.util.Log;
import com.fsck.k9.mail.AuthType;
import com.zegoggles.smssync.R;
import com.zegoggles.smssync.auth.OAuth2Client;
import com.zegoggles.smssync.auth.TokenRefresher;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Locale;
import static android.util.Base64.NO_WRAP;
import static com.zegoggles.smssync.App.TAG;
import static com.zegoggles.smssync.preferences.Preferences.getDefaultType;
public class AuthPreferences {
private static final String UTF_8 = "UTF-8";
private final Context context;
private final SharedPreferences preferences;
private SharedPreferences credentials;
public static final String SERVER_AUTHENTICATION = "server_authentication";
private static final String OAUTH2_USER = "oauth2_user";
private static final String OAUTH2_TOKEN = "oauth2_token";
private static final String OAUTH2_REFRESH_TOKEN = "oauth2_refresh_token";
public static final String IMAP_USER = "login_user";
public static final String IMAP_PASSWORD = "login_password";
/**
* Preference key containing the server address
*/
public static final String SERVER_ADDRESS = "server_address";
/**
* Preference key containing the server protocol
*/
private static final String SERVER_PROTOCOL = "server_protocol";
private static final String SERVER_TRUST_ALL_CERTIFICATES = "server_trust_all_certificates";
/**
* IMAP URI.
*
* This should be in the form of:
* <ol>
* <li><code>imap+ssl+://XOAUTH2:ENCODED_USERNAME:ENCODED_TOKEN@imap.gmail.com:993</code></li>
* <li><code>imap+ssl+://XOAUTH:ENCODED_USERNAME:ENCODED_TOKEN@imap.gmail.com:993</code></li>
* <li><code>imap+ssl+://PLAIN:ENCODED_USERNAME:ENCODED_PASSWOR@imap.gmail.com:993</code></li>
* <li><code>imap://PLAIN:ENCODED_USERNAME:ENCODED_PASSWOR@imap.gmail.com:993</code></li>
* <li><code>imap://PLAIN:ENCODED_USERNAME:ENCODED_PASSWOR@imap.gmail.com</code></li>
* </ol>
*/
private static final String IMAP_URI = "imap%s://%s:%s:%s@%s";
private static final String DEFAULT_SERVER_ADDRESS = "imap.gmail.com:993";
private static final String DEFAULT_SERVER_PROTOCOL = "+ssl+";
public AuthPreferences(Context context) {
this.context = context.getApplicationContext();
this.preferences = PreferenceManager.getDefaultSharedPreferences(context);
}
public String getOauth2Token() {
return getCredentials().getString(OAUTH2_TOKEN, null);
}
public String getOauth2RefreshToken() {
return getCredentials().getString(OAUTH2_REFRESH_TOKEN, null);
}
public boolean hasOAuth2Tokens() {
return getOauth2Username() != null &&
getOauth2Token() != null;
}
public void setOauth2Token(String username, String accessToken, String refreshToken) {
preferences.edit()
.putString(OAUTH2_USER, username)
.commit();
getCredentials().edit()
.putString(OAUTH2_TOKEN, accessToken)
.commit();
getCredentials().edit()
.putString(OAUTH2_REFRESH_TOKEN, refreshToken)
.commit();
}
public void clearOauth2Data() {
final String oauth2token = getOauth2Token();
preferences.edit()
.remove(OAUTH2_USER)
.commit();
getCredentials().edit()
.remove(OAUTH2_TOKEN)
.remove(OAUTH2_REFRESH_TOKEN)
.commit();
if (!TextUtils.isEmpty(oauth2token)) {
new TokenRefresher(context, new OAuth2Client(getOAuth2ClientId()), this).invalidateToken(oauth2token);
}
}
public String getOAuth2ClientId() {
return context.getString(R.string.oauth2_client_id);
}
public void setImapPassword(String s) {
getCredentials().edit().putString(IMAP_PASSWORD, s).commit();
}
public void setImapUser(String s) {
preferences.edit().putString(IMAP_USER, s).commit();
}
public boolean useXOAuth() {
return getAuthMode() == AuthMode.XOAUTH;
}
public String getUserEmail() {
switch (getAuthMode()) {
case XOAUTH:
return getOauth2Username();
default:
return getImapUsername();
}
}
public boolean isLoginInformationSet() {
switch (getAuthMode()) {
case PLAIN:
return !TextUtils.isEmpty(getImapPassword()) &&
!TextUtils.isEmpty(getImapUsername()) &&
!TextUtils.isEmpty(getServerAddress());
case XOAUTH:
return hasOAuth2Tokens();
default:
return false;
}
}
public String getStoreUri() {
if (useXOAuth()) {
if (hasOAuth2Tokens()) {
return formatUri(
AuthType.XOAUTH2,
DEFAULT_SERVER_PROTOCOL,
getOauth2Username(),
generateXOAuth2Token(),
DEFAULT_SERVER_ADDRESS);
} else {
Log.w(TAG, "No valid xoauth2 tokens");
return null;
}
} else {
return formatUri(AuthType.PLAIN,
getServerProtocol(),
getImapUsername(),
getImapPassword(),
getServerAddress());
}
}
private String getServerAddress() {
return preferences.getString(SERVER_ADDRESS, DEFAULT_SERVER_ADDRESS);
}
private String getServerProtocol() {
return preferences.getString(SERVER_PROTOCOL, DEFAULT_SERVER_PROTOCOL);
}
public boolean isTrustAllCertificates() {
return preferences.getBoolean(SERVER_TRUST_ALL_CERTIFICATES, false);
}
private String formatUri(AuthType authType, String serverProtocol, String username, String password, String serverAddress) {
return String.format(IMAP_URI,
serverProtocol,
authType.name().toUpperCase(Locale.US),
// NB: there's a bug in K9mail-library which requires double-encoding of uris
// https://github.com/k9mail/k-9/commit/b0d401c3b73c6b57402dc81d3cfd6488a71a1b98
encode(encode(username)),
encode(encode(password)),
serverAddress);
}
public String getOauth2Username() {
return preferences.getString(OAUTH2_USER, null);
}
private AuthMode getAuthMode() {
return getDefaultType(preferences, SERVER_AUTHENTICATION, AuthMode.class, AuthMode.XOAUTH);
}
// All sensitive information is stored in a separate prefs file so we can
// backup the rest without exposing sensitive data
private SharedPreferences getCredentials() {
if (credentials == null) {
credentials = context.getSharedPreferences("credentials", Context.MODE_PRIVATE);
}
return credentials;
}
public String getServername() {
return preferences.getString(SERVER_ADDRESS, null);
}
public String getImapUsername() {
return preferences.getString(IMAP_USER, null);
}
private String getImapPassword() {
return getCredentials().getString(IMAP_PASSWORD, null);
}
/**
* TODO: this should probably be handled in K9
*
* <p>
* The SASL XOAUTH2 initial client response has the following format:
* </p>
* <code>base64("user="{User}"^Aauth=Bearer "{Access Token}"^A^A")</code>
* <p>
* For example, before base64-encoding, the initial client response might look like this:
* </p>
* <code>user=someuser@example.com^Aauth=Bearer vF9dft4qmTc2Nvb3RlckBhdHRhdmlzdGEuY29tCg==^A^A</code>
* <p/>
* <em>Note:</em> ^A represents a Control+A (\001).
*
* @see <a href="https://developers.google.com/google-apps/gmail/xoauth2_protocol#the_sasl_xoauth2_mechanism">
* The SASL XOAUTH2 Mechanism</a>
*/
private @NonNull String generateXOAuth2Token() {
final String username = getOauth2Username();
final String token = getOauth2Token();
final String formatted = "user=" + username + "\001auth=Bearer " + token + "\001\001";
try {
return Base64.encodeToString(formatted.getBytes(UTF_8), NO_WRAP);
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
private static String encode(String s) {
try {
return s == null ? "" : URLEncoder.encode(s, UTF_8);
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
public void migrate() {
if (useXOAuth()) {
return;
}
// convert deprecated authentication methods
if ("+ssl".equals(getServerProtocol()) ||
"+tls".equals(getServerProtocol())) {
preferences.edit()
.putBoolean(SERVER_TRUST_ALL_CERTIFICATES, true)
.putString(SERVER_PROTOCOL, getServerProtocol()+"+")
.commit();
}
}
}
| |
package org.scn.community.htmlgenerator;
import java.io.File;
import java.util.ArrayList;
import javax.management.RuntimeErrorException;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.scn.community.defgenerator.ZtlAndAps;
import org.scn.community.defgenerator.ParamSimpleSpec;
import org.scn.community.spec.ParamFullSpec;
import org.scn.community.spec.orgin.OrginSpec;
import org.scn.community.ui5.UI5Property;
import org.scn.community.utils.Helpers;
public class Property {
String name;
private String title;
private String tooltip;
String type;
String visible;
private String group;
private final ArrayList<Value> values = new ArrayList<Value>();
private String defaultValue;
private ParamSimpleSpec extendedSimpleSpec;
private ParamFullSpec extendedFullSpec;
private String componentName;
private String titleBig;
private String tooltipBig;
private String correctName;
private File specFile;
@SuppressWarnings("nls")
public Property(XMLStreamReader reader, String componentName, File specFile) {
this.componentName = componentName;
this.specFile = specFile;
this.name = reader.getAttributeValue("", "id");
this.title = reader.getAttributeValue("", "title");
this.tooltip = reader.getAttributeValue("", "tooltip");
this.type = reader.getAttributeValue("", "type");
this.visible = reader.getAttributeValue("", "visible");
this.group = reader.getAttributeValue("", "group");
this.defaultValue = "";
if (this.visible == null) {
this.visible = "true";
}
if (this.name == null) {
System.out.println("ISSUE: " + componentName + " - PARAMETER - property '" + this.name + "' is missing 'name'");
this.name = "";
}
if (this.title == null) {
System.out.println("ISSUE: " + componentName + " - PARAMETER - property '" + this.name + "' is missing 'title'");
this.title = "";
}
if (this.tooltip == null) {
// System.out.println("ISSUE: " + componentName + "Property '" + this.name + "' is missing 'tooltip'");
this.tooltip = this.title;
}
if (this.type == null) {
System.out.println("ISSUE: " + componentName + " - PARAMETER - property '" + this.name + "' is missing 'type'");
this.type = "";
}
if (this.group == null) {
System.out.println("ISSUE: " + componentName + " - PARAMETER - property '" + this.name + "' is missing 'group'");
// throw new RuntimeException("Group Must Be Assigned!");
this.group = "Display";
}
this.titleBig = Helpers.makeAllUpper(this.title);
this.tooltipBig = Helpers.makeAllUpper(this.tooltip);
}
public Property(String componentName, String propertyName, File specFile) {
this.componentName = componentName;
this.name = propertyName;
this.specFile = specFile;
}
public Property(UI5Property ui5property, File specFile) {
this.componentName = ui5property.getComponentName();
this.specFile = specFile;
this.name = ui5property.getAttr("name");
this.title = ui5property.getAttr("name");
this.tooltip = ui5property.getAttr("name");
this.type = ui5property.getAttr("name");
this.visible = "true";
this.group = "Ui5";
this.defaultValue = "";
if (this.visible == null) {
this.visible = "true";
}
if (this.name == null) {
System.out.println("ISSUE: " + componentName + " - PARAMETER - property '" + this.name + "' is missing 'name'");
this.name = "";
}
if (this.title == null) {
System.out.println("ISSUE: " + componentName + " - PARAMETER - property '" + this.name + "' is missing 'title'");
this.title = "";
}
if (this.tooltip == null) {
// System.out.println("ISSUE: " + componentName + "Property '" + this.name + "' is missing 'tooltip'");
this.tooltip = this.title;
}
if (this.type == null) {
System.out.println("ISSUE: " + componentName + " - PARAMETER - property '" + this.name + "' is missing 'type'");
this.type = "";
}
if (this.group == null) {
System.out.println("ISSUE: " + componentName + " - PARAMETER - property '" + this.name + "' is missing 'group'");
// throw new RuntimeException("Group Must Be Assigned!");
this.group = "Display";
}
this.titleBig = Helpers.makeAllUpper(this.title);
this.tooltipBig = Helpers.makeAllUpper(this.tooltip);
}
@Override
public String toString() {
return "\r\nProperty \r\n\t[\r\n\t\tname=" + name + ", \r\n\t\ttitle=" + title + ", \r\n\t\ttooltip="
+ tooltip + ", \r\n\t\ttype=" + type + ", \r\n\t\tvisible=" + visible
+ ", \r\n\t\tgroup=" + group + ", \r\n\t\tvalues=" + values + ", \r\n\t\tdefaultValue="
+ defaultValue + ", \r\n\t\textendedSimpleSpec=" + extendedSimpleSpec
+ ", \r\n\t\textendedFullSpec=" + extendedFullSpec + ", \r\n\t\tcomponentName="
+ componentName + ", \r\n\t\ttitleBig=" + titleBig + ", \r\n\t\ttooltipBig="
+ tooltipBig + ", \r\n\t\tcorrectName=" + correctName + "]";
}
public String toHtml() {
String template = Helpers.resource2String(this.getClass(), "property.html");
String templateValuesList = Helpers.resource2String(this.getClass(), "values_list.html");
template = template.replace("%PROPERTY_NAME%", this.name);
template = template.replace("%PROPERTY_TITLE%", this.title);
template = template.replace("%PROPERTY_TOOLTIP%", this.tooltip);
template = template.replace("%PROPERTY_TYPE%", this.type);
template = template.replace("%PROPERTY_GROUP%", this.group);
template = template.replace("%VISIBLE_FLAG%", this.visible);
String htmlDefault = this.defaultValue;
if (htmlDefault == "") {
htmlDefault = " ";
}
// cut default value in case too long
template = template.replace("%DEFAULT_VALUE%", htmlDefault.length() < 100 ? htmlDefault : htmlDefault.substring(0, 100) + " ... truncated");
boolean defaltInList = false;
if (this.values.size() > 0) {
for (Value value : this.values) {
templateValuesList = templateValuesList.replace("%VALUE_ENTRY%", value.toHtml() + "\r\n" + "%VALUE_ENTRY%");
if(this.defaultValue.equals(value.getName())) {
defaltInList = true;
}
}
templateValuesList = templateValuesList.replace("%VALUE_ENTRY%", "");
template = template.replace("%VALUES_LIST%", templateValuesList);
} else {
defaltInList = true;
}
if(!defaltInList) {
throw new RuntimeException("default "+this.defaultValue+" is not in value list! " + this.name + ", in component " + this.componentName);
}
template = template.replace("%VALUES_LIST%", "");
return template;
}
public void addValue(XMLStreamReader reader) {
try {
String elementText = reader.getElementText();
this.values.add(new Value(elementText, elementText));
} catch (XMLStreamException e) {
// TODO Auto-generated catch block
}
}
public void setDefaultValue(XMLStreamReader reader) {
try {
String defaultValue = reader.getElementText();
this.defaultValue = defaultValue;
for (Value value : this.values) {
if (value.name.equals(defaultValue)) {
value.isDefault = true;
}
}
} catch (XMLStreamException e) {
// TODO Auto-generated catch block
}
}
public void extendSpec(ParamSimpleSpec parameter) {
this.extendedSimpleSpec = parameter;
}
public void extendSpec(ParamFullSpec parameter) {
this.extendedFullSpec = parameter;
}
public boolean hasExtendSpec() {
return (this.extendedSimpleSpec != null) || (this.extendedFullSpec != null);
}
public ZtlAndAps generateZtlAndAps() {
if(this.extendedSimpleSpec != null) {
return this.extendedSimpleSpec.getFunctions();
}
if(this.extendedFullSpec != null) {
return this.extendedFullSpec.getFunctions();
}
return null;
}
public String getNameCut() {
String name = this.name;
if (this.extendedFullSpec != null){
name = this.extendedFullSpec.getCorrectName();
}
if(!name.startsWith("D")) {
return name;
}
return name.substring(1);
}
public String getName() {
return this.name;
}
public String getType() {
return this.type;
}
public String getHelp() {
return "/* " + this.tooltip + " */ ";
}
public String getTitle() {
return this.title;
}
public String getComponent() {
return componentName;
}
public ArrayList<Value> getValues() {
return this.values;
}
public String getDefaultValue() {
return this.defaultValue;
}
public ParamFullSpec getExtendedFullSpec() {
return this.extendedFullSpec;
}
public String toSpec20() {
boolean invisible = false;
if(!this.visible.equals("true")) {
// special case for invisible properties
invisible = true;
}
String template = Helpers.resource2String(OrginSpec.class, "org."+this.getType()+".tmpl");
if(this.values.size() > 0) {
template = Helpers.resource2String(OrginSpec.class, "org.Choice.tmpl");
}
if(template == null) {
template = Helpers.resource2String(OrginSpec.class, "org.default.tmpl");
}
template = template.replace("%NAME%", this.name);
template = template.replace("%DESCRIPTION%", this.titleBig);
template = template.replace("%TOOLTIP%", this.tooltip == "" ? this.titleBig : this.tooltipBig);
template = template.replace("%ZTL_TYPE%", this.getType(true));
template = template.replace("%CATEGORY%", this.group);
template = template.replace("%VISIBLE%", this.visible);
if(invisible) {
if(this.name.endsWith("s")) {
// plural case -> a list?
}
template = template.replace("%NO_APS%", "true");
template = template.replace("%NO_ZTL%", "false");
template = template.replace("%ZTL_FUNCTION%", "-get");
} else {
template = template.replace("%ZTL_FUNCTION%", "");
template = template.replace("%NO_APS%", "false");
template = template.replace("%NO_ZTL%", "false");
}
int currentI = 0;
boolean defaultIsInValues = false;
if (this.values.size() > 0) {
for (Value value : this.values) {
template = template.replace("%VALUE_ENTRY%", value.toSpec20() + (++currentI < this.values.size() ? "," : "") + "\r\n\t\t\t\t%VALUE_ENTRY%");
if(value.getName().equals(this.defaultValue)) {
defaultIsInValues = true;
}
}
template = template.replace("%VALUE_ENTRY%", "");
} else {
defaultIsInValues = true;
}
template = template.replace("%VALUE_ENTRY%", "");
if(!defaultIsInValues) {
throw new RuntimeException("default "+this.defaultValue+" is not in value list! " + this.name + ", in component " + this.componentName);
}
if (this.values.size() > 0) {
for (Value value : this.values) {
if(value.isDefault()) {
this.defaultValue = value.getName();
}
}
}
// cut default value in case too long
template = template.replace("%DEFAULT%", this.defaultValue);
return template;
}
public String getType(boolean convertToZtlCompatibility) {
String type = this.type;
if(type.contains(",")) {
type = type.substring(0, type.indexOf(","));
}
if(convertToZtlCompatibility) {
if(type.equals("Url") || type.equals("Color") || type.equals("Choice")) {
type = "String";
}
}
return type;
}
public void setName(String name) {
this.name = name;
}
public String getSpecFile() {
return specFile.getAbsolutePath();
}
public String[] getSubArraySpec20() {
ParamFullSpec spec = this.getExtendedFullSpec();
String parametersJson = "%ENTRY%\r\n";
String parametersList = "";
String parameterMode = spec.getParameter("opts").getPropertyValue("arrayMode");
if(!parameterMode.equals("StringArray")) {
ParamFullSpec parameter = spec.getParameter("opts").getParameter("arrayDefinition").getParameter(0);
String sequence = parameter.getPropertyValue("sequence");
String[] params = sequence.split(",");
ArrayList<ParamFullSpec> parameters = parameter.getParameters();
for (int i = 0; i < params.length; i++) {
String nameRequested = params[i];
if(nameRequested.equals("key") || nameRequested.equals("parentKey") || nameRequested.equals("leaf")) {
continue;
}
for (ParamFullSpec paramFullSpec : parameters) {
String nameChild = paramFullSpec.getName();
if(nameChild.equals(nameRequested)) {
String json = paramFullSpec.getJson();
parametersJson = parametersJson.replace("%ENTRY%", json + "\r\n%ENTRY%");
parametersList = parametersList + nameChild + ",";
break;
}
}
}
}
parametersJson = parametersJson.replace(",\r\n%ENTRY%", "");
if(parametersList.length() > 0) {
parametersList = parametersList.substring(0, parametersList.length()-1);
}
return new String[]{parametersList, parametersJson};
}
}
| |
package org.basex.qt3ts.fn;
import org.basex.tests.bxapi.*;
import org.basex.tests.qt3ts.*;
/**
* Tests for the current-time() function.
*
* @author BaseX Team 2005-15, BSD License
* @author Leo Woerteler
*/
@SuppressWarnings("all")
public class FnCurrentTime extends QT3TestSet {
/**
* A test whose essence is: `current-time("WRONG PARAM")`. .
*/
@org.junit.Test
public void kContextCurrentTimeFunc1() {
final XQuery query = new XQuery(
"current-time(\"WRONG PARAM\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("XPST0017")
);
}
/**
* Test that the Dynamic Context property 'current dateTime' when presented as a xs:time is stable during execution. .
*/
@org.junit.Test
public void kContextCurrentTimeFunc2() {
final XQuery query = new XQuery(
"current-time() eq current-time()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Evaluates a simple call to the fn:current-time" function. Uses a String Value. .
*/
@org.junit.Test
public void fnCurrentTime1() {
final XQuery query = new XQuery(
"fn:current-time()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertType("xs:time")
);
}
/**
* Evaluates The "current-time" function as an argument to the xs:string function. .
*/
@org.junit.Test
public void fnCurrentTime10() {
final XQuery query = new XQuery(
"xs:string(fn:current-time())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertType("xs:string")
);
}
/**
* Evaluates The "current-time" function as an argument to the timezone-from-time function. .
*/
@org.junit.Test
public void fnCurrentTime11() {
final XQuery query = new XQuery(
"fn:timezone-from-time(current-time())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertType("xs:dayTimeDuration")
);
}
/**
* Evaluates string value The "current-time" as part of a "numeric-equal" expression (eq operator) .
*/
@org.junit.Test
public void fnCurrentTime12() {
final XQuery query = new XQuery(
"fn:string(fn:current-time()) eq fn:string(fn:current-time())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Evaluates The "current-time" function as part of an equal expression (ne operator) .
*/
@org.junit.Test
public void fnCurrentTime13() {
final XQuery query = new XQuery(
"fn:string(fn:current-time()) ne fn:string(fn:current-time())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(false)
);
}
/**
* Evaluates The "current-time" function as part of an equal expression (le operator) .
*/
@org.junit.Test
public void fnCurrentTime14() {
final XQuery query = new XQuery(
"fn:string(fn:current-time()) le fn:string(fn:current-time())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Evaluates The "current-time" function as part of an equal expression (ge operator) .
*/
@org.junit.Test
public void fnCurrentTime15() {
final XQuery query = new XQuery(
"fn:string(fn:current-time()) ge fn:string(fn:current-time())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Evaluates The "current-time" function as part of a boolean expression ("and" opeartor and fn:true function. .
*/
@org.junit.Test
public void fnCurrentTime16() {
final XQuery query = new XQuery(
"fn:string(fn:current-time()) and fn:true()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Evaluates The "current-time" function as part of a boolean expression ("and" opeartor and fn:false function. .
*/
@org.junit.Test
public void fnCurrentTime17() {
final XQuery query = new XQuery(
"fn:string(fn:current-time()) and fn:false()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(false)
);
}
/**
* Evaluates The "current-time" function as part of a boolean expression ("or" opeartor and fn:true function. .
*/
@org.junit.Test
public void fnCurrentTime18() {
final XQuery query = new XQuery(
"fn:string(fn:current-time()) or fn:true()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Evaluates The "current-time" function as part of a boolean expression ("or" opeartor and fn:false function. .
*/
@org.junit.Test
public void fnCurrentTime19() {
final XQuery query = new XQuery(
"fn:string(fn:current-time()) or fn:false()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Evaluates The "current-time" function as argument to fn:hours-from-time function. .
*/
@org.junit.Test
public void fnCurrentTime2() {
final XQuery query = new XQuery(
"fn:hours-from-time(fn:current-time())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertType("xs:integer")
);
}
/**
* Evaluates The "current-time" function (string value)as an argument to the fn:not function. .
*/
@org.junit.Test
public void fnCurrentTime20() {
final XQuery query = new XQuery(
"fn:not(fn:string(fn:current-time()))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(false)
);
}
/**
* Test that the current time has a timezone.
*/
@org.junit.Test
public void fnCurrentTime21() {
final XQuery query = new XQuery(
"exists(timezone-from-time(current-time()))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Test that the current time is the same as the time part of current dateTime.
*/
@org.junit.Test
public void fnCurrentTime22() {
final XQuery query = new XQuery(
"current-time() = xs:time(current-dateTime())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Evaluates The "current-time" function as argument to fn-minutes-from-time-function. .
*/
@org.junit.Test
public void fnCurrentTime3() {
final XQuery query = new XQuery(
"fn:minutes-from-time(fn:current-time())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertType("xs:integer")
);
}
/**
* Evaluates The "current-time" function used as as argument to "seconds-from-time" function. .
*/
@org.junit.Test
public void fnCurrentTime4() {
final XQuery query = new XQuery(
"fn:seconds-from-time(fn:current-time())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertType("xs:decimal")
);
}
/**
* Evaluates The "current-time" function as part of a "-" operation. .
*/
@org.junit.Test
public void fnCurrentTime5() {
final XQuery query = new XQuery(
"fn:current-time() - fn:current-time()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "PT0S")
);
}
/**
* Evaluates The "fn-current-time" function used as part of a "+" expression and a dayTimeDuration. .
*/
@org.junit.Test
public void fnCurrentTime6() {
final XQuery query = new XQuery(
"fn:current-time() + xs:dayTimeDuration(\"P3DT1H15M\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertType("xs:time")
);
}
/**
 * Evaluates the "fn-current-time" function in a "-" expression with an
 * xs:dayTimeDuration operand; the result must remain an xs:time.
 */
@org.junit.Test
public void fnCurrentTime7() {
    final XQuery query = new XQuery("fn:current-time() - xs:dayTimeDuration(\"P3DT1H15M\")", ctx);
    try {
        result = new QT3Result(query.value());
    } catch (final Throwable err) {
        result = new QT3Result(err);
    } finally {
        query.close();
    }
    test(assertType("xs:time"));
}
/**
 * Evaluates the "current-time" function invoked with incorrect arity;
 * a static error XPST0017 is expected.
 */
@org.junit.Test
public void fnCurrentTime8() {
    final XQuery query = new XQuery("fn:current-time(\"Argument 1\")", ctx);
    try {
        result = new QT3Result(query.value());
    } catch (final Throwable err) {
        result = new QT3Result(err);
    } finally {
        query.close();
    }
    test(error("XPST0017"));
}
/**
 * Evaluates the "current-time" function in a subtraction expression where both
 * operands are current-time; the difference must be the zero duration PT0S.
 */
@org.junit.Test
public void fnCurrentTime9() {
    final XQuery query = new XQuery("fn:current-time() - fn:current-time()", ctx);
    try {
        result = new QT3Result(query.value());
    } catch (final Throwable err) {
        result = new QT3Result(err);
    } finally {
        query.close();
    }
    test(assertStringValue(false, "PT0S"));
}
}
| |
package jp.gauzau.MikuMikuDroidmod;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.SortedSet;
import java.util.TreeSet;
import android.opengl.Matrix;
import android.util.Log;
/**
 * A MikuMikuDance character instance: binds a {@link MikuModel} (geometry, bones,
 * faces, rigid bodies) to a {@link MikuMotion} (key-framed animation) and drives
 * per-frame bone posing, CCD inverse kinematics, face morphing and the native
 * (Bullet-backed, via JNI) physics simulation.
 *
 * NOTE(review): not thread-safe — all scratch fields below are shared, so this
 * class must be driven from a single (render) thread.
 */
public class Miku {
    /** One pass of the render scenario: a shader name paired with a render target. */
    public class RenderSet {
        public String shader;
        public String target;

        public RenderSet(String s, String t) {
            shader = s;
            target = t;
        }
    }

    // use NDK: true when native (ARM) helpers are available; selects the JNI
    // code paths for face morphing and physics instead of the pure-Java ones.
    private boolean mIsArm = false;

    // model data
    public MikuModel mModel;

    // motion data
    public MikuMotion mMotion;

    // render senario
    public ArrayList<RenderSet> mRenderSenario = new ArrayList<RenderSet>();

    // temporary data: preallocated scratch buffers reused every frame to avoid
    // per-frame allocations / GC pressure on the render thread.
    private MotionPair mMpWork = new MotionPair();
    private MotionIndex mMwork = new MotionIndex();
    private float effecterVecs[] = new float[4];   // homogeneous position of the IK effecter
    private float effecterInvs[] = new float[4];   // effecter position in local bone space
    private float targetVecs[] = new float[4];     // homogeneous position of the IK target
    private float targetInvs[] = new float[4];     // target position in local bone space
    private float axis[] = new float[3];           // rotation axis for CCD steps
    private float mMatworks[] = new float[16];     // scratch 4x4 matrix
    private double[] mQuatworks = new double[4];   // scratch quaternion
    private float tmpVecs[] = new float[3];
    private FacePair mFacePair = new FacePair();
    private FaceIndex mFaceIndex = new FaceIndex();
    private Bone mZeroBone;                        // identity bone used for unparented rigid bodies

    /**
     * Wraps the given model and prepares scratch state, including the identity
     * "zero bone" that rigid bodies with bone_index == -1 attach to.
     */
    public Miku(MikuModel model) {
        mModel = model;
        mMwork.location = new float[3];
        mMwork.rotation = new float[4];
        mIsArm = CoreLogic.isArm();

        // for physics simulation
        mZeroBone = new Bone();
        mZeroBone.matrix = new float[16];
        mZeroBone.head_pos = new float[3];
        Matrix.setIdentityM(mZeroBone.matrix, 0);
        mZeroBone.head_pos[0] = 0; mZeroBone.head_pos[1] = 0; mZeroBone.head_pos[2] = 0;
    }

    /**
     * Attaches a motion to this model. If the model has IK chains and the motion
     * carries no precomputed IK data, the IK poses are baked per key frame now
     * (expensive, done once) so run-time posing can skip the CCD solver.
     */
    public void attachMotion(MikuMotion mm) {
        mMotion = mm;
        mm.attachModel(mModel.mBone, mModel.mFace);
        if(mModel.mIK != null && mm.getIKMotion() == null) {
            // preCalcIK();
            Log.d("Miku", "IK calcuration");
            preCalcKeyFrameIK();
        }
    }

    /** Appends a shader/target pair to the render scenario. */
    public void addRenderSenario(String s, String t) {
        mRenderSenario.add(new RenderSet(s, t));
    }

    /**
     * First half of per-frame posing: evaluates every bone's local matrix at
     * frame {@code i}, runs CCD IK when the motion has no baked IK, composes
     * world matrices, and (on ARM) feeds kinematic bodies into the physics step.
     *
     * @param i           frame position (fractional frames are interpolated)
     * @param step        NOTE(review): unused in this body — possibly a physics
     *                    time step consumed elsewhere; confirm before removing.
     * @param initPhysics when true, (re)creates the rigid bodies and joints first
     */
    public void setBonePosByVMDFramePre(float i, float step, boolean initPhysics) {
        ArrayList<Bone> ba = mModel.mBone;

        if(ba != null) {
            int max = ba.size();

            for (int r = 0; r < max; r++) {
                Bone b = ba.get(r);
                setBoneMatrix(b, i);
            }

            if(mModel.mIK != null && mMotion.getIKMotion() == null) {
                ccdIK();
            }

            for (int r = 0; r < max; r++) {
                Bone b = ba.get(r);
                updateBoneMatrix(b);
            }

            if(mIsArm) {
                if(initPhysics) {
                    initializePhysics();
                }
                solvePhysicsPre();
            }
        }
    }

    /**
     * Second half of per-frame posing: reads simulated transforms back from
     * physics (ARM only), then converts each world matrix into a skinning matrix
     * by translating the bone's rest position back to the origin, and clears the
     * per-bone update flags for the next frame.
     */
    public void setBonePosByVMDFramePost(boolean physics) {
        ArrayList<Bone> ba = mModel.mBone;

        if(ba != null) {
            int max = ba.size();
            if(mIsArm && physics) {
                solvePhysicsPost();
            }
            for (int r = 0; r < max; r++) {
                Bone b = ba.get(r);
                Matrix.translateM(b.matrix, 0, -b.head_pos[0], -b.head_pos[1], -b.head_pos[2]);
                b.updated = false;
            }
        }
    }

    /**
     * Applies face (morph) weights for frame {@code i}: resets the base face to
     * its rest offsets, accumulates each active morph, and writes the result into
     * the vertex buffer. Uses JNI helpers on ARM, pure Java otherwise.
     */
    public void setFaceByVMDFrame(float i) {
        if (mModel.mFaceBase != null) {
            if(mIsArm) { // use native code for ARM machine
                initFaceNative(mModel.mAllBuffer, mModel.mFaceBase.face_vert_count, mModel.mFaceBase.face_vert_index_native, mModel.mFaceBase.face_vert_offset_native);
                for (Face f : mModel.mFace) {
                    setFaceForNative(f, i);
                }
            } else { // use Java code
                initFace(mModel.mFaceBase);
                for (Face f : mModel.mFace) {
                    setFace(f, i);
                }
                updateVertexFace(mModel.mFaceBase);
            }
        }
    }

    /** Resets the base face to its rest offsets and clears the per-vertex dirty flags. */
    private void initFace(Face f) {
        for (int i = 0; i < f.face_vert_count; i++) {
            f.face_vert_base[i*3+0] = f.face_vert_offset[i*3+0];
            f.face_vert_base[i*3+1] = f.face_vert_offset[i*3+1];
            f.face_vert_base[i*3+2] = f.face_vert_offset[i*3+2];
            f.face_vert_updated[i] = false;
        }
    }

    /**
     * Accumulates one morph into the base face: each of the morph's vertices adds
     * its weighted offset to the corresponding base-face vertex and marks it dirty.
     */
    private void setFace(Face f, float i) {
        FacePair mp = mMotion.findFace(f, i, mFacePair);
        FaceIndex m = mMotion.interpolateLinear(mp, f.motion, i, mFaceIndex);
        if (m != null && m.weight > 0) {
            for (int r = 0; r < f.face_vert_count; r++) {
                int baseidx = f.face_vert_index[r];
                mModel.mFaceBase.face_vert_base[baseidx*3+0] += f.face_vert_offset[r*3+0] * m.weight;
                mModel.mFaceBase.face_vert_base[baseidx*3+1] += f.face_vert_offset[r*3+1] * m.weight;
                mModel.mFaceBase.face_vert_base[baseidx*3+2] += f.face_vert_offset[r*3+2] * m.weight;
                mModel.mFaceBase.face_vert_updated[baseidx] = true;
            }
        }
    }

    /**
     * Copies changed base-face vertices into the shared vertex buffer. A vertex
     * is written when dirty this frame, or once more after it stops being dirty
     * (tracked via face_vert_cleared) so it returns to its rest position.
     *
     * NOTE(review): the buffer position is set to face_vert_index[r] directly
     * (not index*3) while 3 floats are written — this relies on mAllBuffer's
    * position unit matching the index convention; confirm against the buffer layout.
     */
    private void updateVertexFace(Face f) {
        for (int r = 0; r < f.face_vert_count; r++) {
            if (f.face_vert_updated[r] || !f.face_vert_cleared[r]) {
                mModel.mAllBuffer.position(f.face_vert_index[r]);
                mModel.mAllBuffer.put(f.face_vert_base, r*3, 3);
                f.face_vert_cleared[r] = !f.face_vert_updated[r];
            }
        }
        mModel.mAllBuffer.position(0);
    }

    /** Native-path counterpart of {@link #setFace}: delegates the accumulation to JNI. */
    private void setFaceForNative(Face f, float i) {
        FacePair mp = mMotion.findFace(f, i, mFacePair);
        FaceIndex m = mMotion.interpolateLinear(mp, f.motion, i, mFaceIndex);
        if (m != null && m.weight > 0) {
            setFaceNative(mModel.mAllBuffer, mModel.mFaceBase.face_vert_index_native, f.face_vert_count, f.face_vert_index_native, f.face_vert_offset_native, m.weight);
        }
    }

    // JNI bridge — implemented in the NDK library (face morphing + Bullet physics).
    native private void initFaceNative(FloatBuffer vertex, int count, IntBuffer index, FloatBuffer offset);

    native private void setFaceNative(FloatBuffer vertex, IntBuffer pointer, int count, IntBuffer index, FloatBuffer offset, float weight);

    native private int btAddRigidBody(int type, int shape, float w, float h, float d, float[] pos, float[] rot, float[] head_pos, float[] bone, float mass, float v_dim, float r_dim, float recoil, float friction, byte group_index, short group_target);

    native private int btAddJoint(int rb1, int rb2, float[] pos, float[] rot, float[] p1, float[] p2, float[] r1, float[] r2, float[] sp, float[] sr);

    native private float btGetOpenGLMatrix(int rb, float[] matrix, float[] pos, float[] rot);

    native private float btSetOpenGLMatrix(int rb, float[] matrix, float[] pos, float[] rot);

    /**
     * Registers the model's rigid bodies and joints with the native physics
     * world, storing the returned native handles on each RigidBody/Joint.
     */
    private void initializePhysics() {
        ///////////////////////////////////////////
        // MAKE RIGID BODIES
        ArrayList<RigidBody> rba = mModel.mRigidBody;
        if(rba != null) {
            for(int i = 0; i < rba.size(); i++) {
                RigidBody rb = rba.get(i);
                // bone_index == -1 means the body is not attached to a bone; use the identity bone.
                Bone b = rb.bone_index == -1 ? mZeroBone : mModel.mBone.get(rb.bone_index);
                rb.btrb = btAddRigidBody(rb.type, rb.shape,
                        rb.size[0], rb.size[1], rb.size[2],
                        rb.location, rb.rotation, b.head_pos, b.matrix,
                        rb.weight, rb.v_dim, rb.r_dim, rb.recoil, rb.friction,
                        rb.group_index, rb.group_target);
            }
        }

        ///////////////////////////////////////////
        // MAKE JOINTS
        ArrayList<Joint> ja = mModel.mJoint;
        if(ja != null) {
            for(int i = 0; i < ja.size(); i++) {
                Joint j = ja.get(i);
                int rb1 = rba.get(j.rigidbody_a).btrb;
                int rb2 = rba.get(j.rigidbody_b).btrb;
                j.btcst = btAddJoint(rb1, rb2, j.position, j.rotation, j.const_position_1, j.const_position_2,
                        j.const_rotation_1, j.const_rotation_2, j.spring_position, j.spring_rotation);
            }
        }
    }

    /** Pushes bone-driven (type == 0, kinematic) body transforms into the physics world. */
    private void solvePhysicsPre() {
        if(mModel.mRigidBody != null) {
            for(int i = 0; i < mModel.mRigidBody.size(); i++) {
                RigidBody rb = mModel.mRigidBody.get(i);
                if(rb.bone_index >= 0 && rb.type == 0) {
                    Bone b = mModel.mBone.get(rb.bone_index);
                    btSetOpenGLMatrix(rb.btrb, b.matrix, rb.location, rb.rotation);
                }
            }
        }
    }

    /**
     * Pulls simulated transforms back onto bones. type == 1 bodies overwrite the
     * bone's world matrix; type == 2 ("physics + bone alignment") copy only the
     * rotation/translation rows (first 12 floats) via matrix_current.
     */
    private void solvePhysicsPost() {
        if(mModel.mRigidBody != null) {
            for(int i = 0; i < mModel.mRigidBody.size(); i++) {
                RigidBody rb = mModel.mRigidBody.get(i);
                if(rb.bone_index >= 0 && rb.type != 0) {
                    Bone b = mModel.mBone.get(rb.bone_index);
                    if(rb.type == 1) {
                        btGetOpenGLMatrix(rb.btrb, b.matrix, rb.location, rb.rotation);
                    } else { // rb.type == 2
                        btGetOpenGLMatrix(rb.btrb, b.matrix_current, rb.location, rb.rotation);
                        for(int j = 0; j < 12; j++) {
                            b.matrix[j] = b.matrix_current[j];
                        }
                    }
                    b.updated = true;
                }
            }
        }
    }

    /**
     * Bakes IK results into ordinary key-framed bone motions: for every IK chain,
     * gathers the key frames of all bones influencing it, solves CCD at each of
     * those frames, and records the resulting chain-bone quaternions as motion
     * keys. The baked motions are attached to the bones and handed to the motion
     * object via setIKMotion so later playback skips the solver.
     *
     * NOTE(review): every generated MotionIndex shares the single zero-filled
     * {@code location} array — safe only as long as no caller mutates it.
     */
    private void preCalcKeyFrameIK() {
        float[] location = new float[3];
        location[0] = location[1] = location[2] = 0;
        HashMap<String, ArrayList<MotionIndex>> mhs = new HashMap<String, ArrayList<MotionIndex>>();
        for (IK ik : mModel.mIK) {
            // find parents: collect the ancestor chains of both the IK target and
            // the IK effecter; bones shared by both chains cancel out (removed).
            HashMap<Integer, Bone> parents = new HashMap<Integer, Bone>();
            int target = ik.ik_target_bone_index;
            while (target != -1) {
                Bone b = mModel.mBone.get(target);
                parents.put(target, b);
                target = b.parent;
            }
            int effecter = ik.ik_bone_index;
            while (effecter != -1) {
                Bone b = parents.get(effecter);
                if (b != null) {
                    parents.remove(effecter);
                } else {
                    b = mModel.mBone.get(effecter);
                    parents.put(effecter, b);
                }
                effecter = b.parent;
            }

            // gather frames: union of all key-frame numbers of the involved bones
            SortedSet<Integer> frames = new TreeSet<Integer>();
            for (Bone bone : parents.values()) {
                if (bone.motion != null) {
                    for(int fn : bone.motion.frame_no) {
                        frames.add(fn);
                    }
                    /*
                    for (MotionIndex frame : bones.getValue().motion) {
                        frames.put(frame.frame_no, frame.frame_no);
                    }
                    */
                }
            }

            // calc IK: pose the skeleton at each frame, solve, and snapshot the
            // chain bones' quaternions as new motion keys.
            HashMap<Short, ArrayList<MotionIndex>> mhash = new HashMap<Short, ArrayList<MotionIndex>>();
            for (Integer frame : frames) {
                for (Bone b : mModel.mBone) {
                    setBoneMatrix(b, frame);
                }
                ccdIK1(ik);
                for (int i = 0; i < ik.ik_chain_length; i++) {
                    Bone c = mModel.mBone.get(ik.ik_child_bone_index[i]);

                    MotionIndex cm = new MotionIndex();
                    cm.frame_no = frame;
                    cm.location = location;
                    cm.rotation = new float[4];
                    cm.rotation[0] = (float) c.quaternion[0]; // calc in ccdIK
                    cm.rotation[1] = (float) c.quaternion[1];
                    cm.rotation[2] = (float) c.quaternion[2];
                    cm.rotation[3] = (float) c.quaternion[3];
                    cm.interp = null;

                    ArrayList<MotionIndex> mi = mhash.get(ik.ik_child_bone_index[i]);
                    if (mi == null) {
                        mi = new ArrayList<MotionIndex>();
                        mhash.put(ik.ik_child_bone_index[i], mi);
                    }
                    mi.add(cm);
                }
            }

            // set motions to bones and motion
            for (Entry<Short, ArrayList<MotionIndex>> entry : mhash.entrySet()) {
                Bone b = mModel.mBone.get(entry.getKey());
                b.motion = MikuMotion.toMotionIndexA(entry.getValue());
                b.current_motion = 0;
                mhs.put(b.name, entry.getValue());
            }
        }
        mMotion.setIKMotion(mhs);
    }

    /**
     * Evaluates one bone's local transform (matrix_current + quaternion) at frame
     * {@code idx} by linear interpolation of its motion keys. Translation is made
     * relative to the parent bone's rest position; with no motion data the bone
     * falls back to its rest pose (identity rotation).
     */
    private void setBoneMatrix(Bone b, float idx) {
        MotionPair mp = mMotion.findMotion(b, idx, mMpWork);
        MotionIndex m = mMotion.interpolateLinear(mp, b.motion, idx, mMwork);

        if (m != null) {
            b.quaternion[0] = m.rotation[0];
            b.quaternion[1] = m.rotation[1];
            b.quaternion[2] = m.rotation[2];
            b.quaternion[3] = m.rotation[3];
            Quaternion.toMatrix(b.matrix_current, m.rotation);

            if (b.parent == -1) {
                b.matrix_current[12] = m.location[0] + b.head_pos[0];
                b.matrix_current[13] = m.location[1] + b.head_pos[1];
                b.matrix_current[14] = m.location[2] + b.head_pos[2];
            } else {
                Bone p = mModel.mBone.get(b.parent);
                b.matrix_current[12] = m.location[0] + (b.head_pos[0] - p.head_pos[0]);
                b.matrix_current[13] = m.location[1] + (b.head_pos[1] - p.head_pos[1]);
                b.matrix_current[14] = m.location[2] + (b.head_pos[2] - p.head_pos[2]);
            }
        } else {
            // no VMD info so assume that no rotation and translation are specified
            Matrix.setIdentityM(b.matrix_current, 0);
            Quaternion.setIndentity(b.quaternion);

            if (b.parent == -1) {
                Matrix.translateM(b.matrix_current, 0, b.head_pos[0], b.head_pos[1], b.head_pos[2]);
            } else {
                Bone p = mModel.mBone.get(b.parent);
                Matrix.translateM(b.matrix_current, 0, b.head_pos[0], b.head_pos[1], b.head_pos[2]);
                Matrix.translateM(b.matrix_current, 0, -p.head_pos[0], -p.head_pos[1], -p.head_pos[2]);
            }
        }
    }

    /**
     * Composes the bone's world matrix (matrix = parent.matrix * matrix_current),
     * recursing up the parent chain. The `updated` flag memoizes the result so
     * each bone is composed at most once per solve pass.
     */
    private void updateBoneMatrix(Bone b) {
        if (b.updated == false) {
            if (b.parent != -1) {
                Bone p = mModel.mBone.get(b.parent);
                updateBoneMatrix(p);
                Matrix.multiplyMM(b.matrix, 0, p.matrix, 0, b.matrix_current, 0);
            } else {
                for (int i = 0; i < 16; i++) {
                    b.matrix[i] = b.matrix_current[i];
                }
            }
            b.updated = true;
        }
    }

    /** Runs the CCD solver for every IK chain of the model. */
    private void ccdIK() {
        for (IK ik : mModel.mIK) {
            ccdIK1(ik);
        }
    }

    /**
     * Cyclic-coordinate-descent IK for one chain: per iteration, each chain bone
     * is rotated so the target bone moves toward the effecter position. Leg bones
     * (knees) are handled once analytically via the law of cosines and bend only
     * around the X axis. Stops early when target and effecter are within 1e-3.
     */
    private void ccdIK1(IK ik) {
        Bone effecter = mModel.mBone.get(ik.ik_bone_index);
        Bone target = mModel.mBone.get(ik.ik_target_bone_index);

        getCurrentPosition(effecterVecs, effecter);
        for (int i = 0; i < ik.iterations; i++) {
            for (int j = 0; j < ik.ik_chain_length; j++) {
                Bone b = mModel.mBone.get(ik.ik_child_bone_index[j]);
                clearUpdateFlags(b, target);
                getCurrentPosition(targetVecs, target);

                if (b.is_leg) {
                    if (i == 0) {
                        // analytic knee bend: law of cosines over the triangle
                        // (chain base, knee, target), rotation restricted to -X.
                        Bone base = mModel.mBone.get(ik.ik_child_bone_index[ik.ik_chain_length - 1]);
                        getCurrentPosition(targetInvs, b);
                        getCurrentPosition(effecterInvs, base);
                        double eff_len = Matrix.length(effecterVecs[0] - effecterInvs[0], effecterVecs[1] - effecterInvs[1], effecterVecs[2] - effecterInvs[2]);
                        double b_len = Matrix.length(targetInvs[0] - effecterInvs[0], targetInvs[1] - effecterInvs[1], targetInvs[2] - effecterInvs[2]);
                        double t_len = Matrix.length(targetVecs[0] - targetInvs[0], targetVecs[1] - targetInvs[1], targetVecs[2] - targetInvs[2]);
                        double angle = Math.acos((eff_len * eff_len - b_len * b_len - t_len * t_len) / (2 * b_len * t_len));
                        if (!Double.isNaN(angle)) {
                            axis[0] = -1;
                            axis[1] = axis[2] = 0;
                            Quaternion.createFromAngleAxis(mQuatworks, angle, axis);
                            Quaternion.mul(b.quaternion, b.quaternion, mQuatworks);
                            Quaternion.toMatrixPreserveTranslate(b.matrix_current, b.quaternion);
                        }
                    }
                    continue;
                }

                if (Matrix.length(targetVecs[0] - effecterVecs[0], targetVecs[1] - effecterVecs[1], targetVecs[2] - effecterVecs[2]) < 0.001f) {
                    // converged: clear all memoized world matrices and bail out
                    for (Bone c : mModel.mBone) {
                        c.updated = false;
                    }
                    return;
                }

                // transform both positions into this bone's local space
                float[] current = getCurrentMatrix(b);
                Vector.invertM(mMatworks, 0, current, 0);
                Matrix.multiplyMV(effecterInvs, 0, mMatworks, 0, effecterVecs, 0);
                Matrix.multiplyMV(targetInvs, 0, mMatworks, 0, targetVecs, 0);

                // calculate rotation angle/axis
                Vector.normalize(effecterInvs);
                Vector.normalize(targetInvs);
                double angle = Math.acos(Vector.dot(effecterInvs, targetInvs));
                angle *= ik.control_weight;  // damp the per-step rotation
                if (!Double.isNaN(angle)) {
                    Vector.cross(axis, targetInvs, effecterInvs);
                    Vector.normalize(axis);

                    // rotateM(mMatworks, 0, b.matrix_current, 0, degree, axis[0], axis[1], axis[2]);
                    // System.arraycopy(mMatworks, 0, b.matrix_current, 0, 16);
                    if (!Double.isNaN(axis[0]) && !Double.isNaN(axis[1]) && !Double.isNaN(axis[2])) {
                        Quaternion.createFromAngleAxis(mQuatworks, angle, axis);
                        Quaternion.mul(b.quaternion, b.quaternion, mQuatworks);
                        Quaternion.toMatrixPreserveTranslate(b.matrix_current, b.quaternion);
                    }
                }
            }
        }

        // clear all
        for (Bone b : mModel.mBone) {
            b.updated = false;
        }
    }

    /**
     * Invalidates the memoized world matrices on the path from {@code b} up to
     * {@code root} (inclusive) so they are recomposed after a rotation change.
     */
    private void clearUpdateFlags(Bone root, Bone b) {
        while (root != b) {
            b.updated = false;
            if (b.parent != -1) {
                b = mModel.mBone.get(b.parent);
            } else {
                return;
            }
        }
        root.updated = false;
    }

    /** Writes the bone's current world-space position (homogeneous, w = 1) into v. */
    private void getCurrentPosition(float v[], Bone b) {
        float[] current = getCurrentMatrix(b);
        System.arraycopy(current, 12, v, 0, 3);
        v[3] = 1;
    }

    /** Returns the bone's up-to-date world matrix, composing it on demand. */
    private float[] getCurrentMatrix(Bone b) {
        updateBoneMatrix(b);
        return b.matrix;
    }

    /** @return true when a motion has been attached via {@link #attachMotion}. */
    public boolean hasMotion() {
        return mMotion != null;
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.security.authc.saml;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.core.internal.io.Streams;
import org.elasticsearch.rest.RestUtils;
import org.elasticsearch.xpack.core.security.support.RestorableContextClassLoader;
import org.joda.time.DateTime;
import org.opensaml.core.xml.XMLObject;
import org.opensaml.core.xml.io.Unmarshaller;
import org.opensaml.core.xml.io.UnmarshallerFactory;
import org.opensaml.core.xml.io.UnmarshallingException;
import org.opensaml.saml.saml2.core.Issuer;
import org.opensaml.saml.saml2.encryption.Decrypter;
import org.opensaml.saml.security.impl.SAMLSignatureProfileValidator;
import org.opensaml.security.credential.Credential;
import org.opensaml.security.x509.X509Credential;
import org.opensaml.xmlsec.crypto.XMLSigningUtil;
import org.opensaml.xmlsec.encryption.support.ChainingEncryptedKeyResolver;
import org.opensaml.xmlsec.encryption.support.EncryptedKeyResolver;
import org.opensaml.xmlsec.encryption.support.InlineEncryptedKeyResolver;
import org.opensaml.xmlsec.encryption.support.SimpleKeyInfoReferenceEncryptedKeyResolver;
import org.opensaml.xmlsec.encryption.support.SimpleRetrievalMethodEncryptedKeyResolver;
import org.opensaml.xmlsec.keyinfo.KeyInfoCredentialResolver;
import org.opensaml.xmlsec.keyinfo.impl.ChainingKeyInfoCredentialResolver;
import org.opensaml.xmlsec.keyinfo.impl.CollectionKeyInfoCredentialResolver;
import org.opensaml.xmlsec.keyinfo.impl.LocalKeyInfoCredentialResolver;
import org.opensaml.xmlsec.keyinfo.impl.provider.DEREncodedKeyValueProvider;
import org.opensaml.xmlsec.keyinfo.impl.provider.InlineX509DataProvider;
import org.opensaml.xmlsec.keyinfo.impl.provider.KeyInfoReferenceProvider;
import org.opensaml.xmlsec.keyinfo.impl.provider.RSAKeyValueProvider;
import org.opensaml.xmlsec.signature.Signature;
import org.opensaml.xmlsec.signature.support.SignatureException;
import org.opensaml.xmlsec.signature.support.SignatureValidator;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.time.Clock;
import java.time.Instant;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;
import static org.elasticsearch.xpack.security.authc.saml.SamlUtils.samlException;
import static org.opensaml.core.xml.config.XMLObjectProviderRegistrySupport.getUnmarshallerFactory;
/**
 * Base class for handlers of SAML 2.0 messages: parses and schema-validates raw
 * SAML XML, verifies XML and HTTP-Redirect ("deflate") signatures against the
 * IdP's signing credentials, and decrypts encrypted elements with the SP's
 * encryption credentials.
 */
public class SamlObjectHandler {

    protected static final String SAML_NAMESPACE = "urn:oasis:names:tc:SAML:2.0:protocol";

    private static final String[] XSD_FILES = new String[] { "/org/elasticsearch/xpack/security/authc/saml/saml-schema-protocol-2.0.xsd",
            "/org/elasticsearch/xpack/security/authc/saml/saml-schema-assertion-2.0.xsd",
            "/org/elasticsearch/xpack/security/authc/saml/xenc-schema.xsd",
            "/org/elasticsearch/xpack/security/authc/saml/xmldsig-core-schema.xsd" };

    // DocumentBuilder is not thread-safe; keep one hardened (XXE-safe, schema
    // validating) instance per thread.
    private static final ThreadLocal<DocumentBuilder> THREAD_LOCAL_DOCUMENT_BUILDER = ThreadLocal.withInitial(() -> {
        try {
            return SamlUtils.getHardenedBuilder(XSD_FILES);
        } catch (Exception e) {
            throw samlException("Could not load XSD schema file", e);
        }
    });

    protected final Logger logger = LogManager.getLogger(getClass());

    @Nullable
    protected final Decrypter decrypter;

    private final Clock clock;
    private final IdpConfiguration idp;
    private final SpConfiguration sp;
    private final TimeValue maxSkew;
    private final UnmarshallerFactory unmarshallerFactory;

    /**
     * @param clock   time source for validity-window checks (injectable for tests)
     * @param idp     Identity Provider configuration (entity id + signing credentials)
     * @param sp      Service Provider configuration (encryption credentials)
     * @param maxSkew permitted clock skew between IdP and SP
     */
    public SamlObjectHandler(Clock clock, IdpConfiguration idp, SpConfiguration sp, TimeValue maxSkew) {
        this.clock = clock;
        this.idp = idp;
        this.sp = sp;
        this.maxSkew = maxSkew;
        this.unmarshallerFactory = getUnmarshallerFactory();
        // Only build a Decrypter when the SP actually has encryption keys.
        if (sp.getEncryptionCredentials().isEmpty()) {
            this.decrypter = null;
        } else {
            this.decrypter = new Decrypter(null, createResolverForEncryptionKeys(), createResolverForEncryptedKeyElements());
        }
    }

    /** Resolver that can locate the SP's decryption keys from KeyInfo elements or directly. */
    private KeyInfoCredentialResolver createResolverForEncryptionKeys() {
        final CollectionKeyInfoCredentialResolver collectionKeyInfoCredentialResolver =
                new CollectionKeyInfoCredentialResolver(Collections.unmodifiableCollection(sp.getEncryptionCredentials()));
        final LocalKeyInfoCredentialResolver localKeyInfoCredentialResolver =
                new LocalKeyInfoCredentialResolver(Arrays.asList(new InlineX509DataProvider(), new KeyInfoReferenceProvider(),
                        new RSAKeyValueProvider(), new DEREncodedKeyValueProvider()), collectionKeyInfoCredentialResolver);
        return new ChainingKeyInfoCredentialResolver(Arrays.asList(localKeyInfoCredentialResolver, collectionKeyInfoCredentialResolver));
    }

    /** Resolver for EncryptedKey elements in the common placements (inline, retrieval method, KeyInfo reference). */
    private EncryptedKeyResolver createResolverForEncryptedKeyElements() {
        return new ChainingEncryptedKeyResolver(Arrays.asList(new InlineEncryptedKeyResolver(),
                new SimpleRetrievalMethodEncryptedKeyResolver(), new SimpleKeyInfoReferenceEncryptedKeyResolver()));
    }

    protected SpConfiguration getSpConfiguration() {
        return sp;
    }

    /** Short human-readable description of a certificate for log/error messages. */
    protected String describe(X509Certificate certificate) {
        return "X509Certificate{Subject=" + certificate.getSubjectDN() + "; SerialNo=" +
                certificate.getSerialNumber().toString(16) + "}";
    }

    protected String describe(Collection<X509Credential> credentials) {
        return credentials.stream().map(credential -> describe(credential.getEntityCertificate())).collect(Collectors.joining(","));
    }

    /**
     * Validates an XML Signature: first checks the SAML signature profile, then
     * verifies the signature against each of the IdP's signing credentials.
     *
     * @throws ElasticsearchSecurityException if the profile is invalid or no credential matches
     */
    void validateSignature(Signature signature) {
        final String signatureText = text(signature, 32);
        SAMLSignatureProfileValidator profileValidator = new SAMLSignatureProfileValidator();
        try {
            profileValidator.validate(signature);
        } catch (SignatureException e) {
            throw samlSignatureException(idp.getSigningCredentials(), signatureText, e);
        }

        checkIdpSignature(credential -> {
            try {
                return AccessController.doPrivileged((PrivilegedExceptionAction<Boolean>) () -> {
                    // Swap the context classloader so the XML-security provider resolves correctly.
                    try (RestorableContextClassLoader ignore = new RestorableContextClassLoader(SignatureValidator.class)) {
                        SignatureValidator.validate(signature, credential);
                        logger.debug(() -> new ParameterizedMessage("SAML Signature [{}] matches credentials [{}] [{}]",
                                signatureText, credential.getEntityId(), credential.getPublicKey()));
                        return true;
                    } catch (PrivilegedActionException e) {
                        // NOTE(review): this inner catch type looks suspicious — the privileged
                        // action itself should not throw PrivilegedActionException; confirm
                        // whether SecurityException was intended here.
                        logger.warn("SecurityException while attempting to validate SAML signature", e);
                        return false;
                    }
                });
            } catch (PrivilegedActionException e) {
                throw new SecurityException("SecurityException while attempting to validate SAML signature", e);
            }
        }, signatureText);
    }

    /**
     * Tests whether the provided function returns {@code true} for any of the IdP's signing credentials.
     * @throws ElasticsearchSecurityException - A SAML exception if not matching credential is found.
     */
    protected void checkIdpSignature(CheckedFunction<Credential, Boolean, Exception> check, String signatureText) {
        final Predicate<Credential> predicate = credential -> {
            try {
                return check.apply(credential);
            } catch (SignatureException | SecurityException e) {
                // Expected when the credential does not match — log at debug and try the next one.
                logger.debug(() -> new ParameterizedMessage("SAML Signature [{}] does not match credentials [{}] [{}] -- {}",
                        signatureText, credential.getEntityId(), credential.getPublicKey(), e));
                logger.trace("SAML Signature failure caused by", e);
                return false;
            } catch (Exception e) {
                logger.warn("Exception while attempting to validate SAML Signature", e);
                return false;
            }
        };
        final List<Credential> credentials = idp.getSigningCredentials();
        if (credentials.stream().anyMatch(predicate) == false) {
            throw samlSignatureException(credentials, signatureText);
        }
    }

    /**
     * Constructs a SAML specific exception with a consistent message regarding SAML Signature validation failures
     */
    private ElasticsearchSecurityException samlSignatureException(List<Credential> credentials, String signature, Exception cause) {
        // FIX: added trailing space — the concatenated message previously read "...SAMLmetadata...".
        logger.warn("The XML Signature of this SAML message cannot be validated. Please verify that the saml realm uses the correct SAML " +
                "metadata file/URL for this Identity Provider");
        final String msg = "SAML Signature [{}] could not be validated against [{}]";
        return samlException(msg, cause, signature, describeCredentials(credentials));
    }

    private ElasticsearchSecurityException samlSignatureException(List<Credential> credentials, String signature) {
        // FIX: added trailing space — the concatenated message previously read "...SAMLmetadata...".
        logger.warn("The XML Signature of this SAML message cannot be validated. Please verify that the saml realm uses the correct SAML " +
                "metadata file/URL for this Identity Provider");
        final String msg = "SAML Signature [{}] could not be validated against [{}]";
        return samlException(msg, signature, describeCredentials(credentials));
    }

    /**
     * Describes a list of credentials as truncated Base64 of their encoded form,
     * suitable for inclusion in error messages.
     */
    private String describeCredentials(List<Credential> credentials) {
        return credentials.stream()
                .map(c -> {
                    if (c == null) {
                        return "<null>";
                    }
                    byte[] encoded;
                    if (c instanceof X509Credential) {
                        X509Credential x = (X509Credential) c;
                        try {
                            encoded = x.getEntityCertificate().getEncoded();
                        } catch (CertificateEncodingException e) {
                            encoded = c.getPublicKey().getEncoded();
                        }
                    } else {
                        encoded = c.getPublicKey().getEncoded();
                    }
                    // FIX: guard the truncation — a short encoded key (< 48 bytes) previously
                    // caused StringIndexOutOfBoundsException from substring(0, 64).
                    final String base64 = Base64.getEncoder().encodeToString(encoded);
                    return base64.substring(0, Math.min(64, base64.length())) + "...";
                })
                .collect(Collectors.joining(","));
    }

    /**
     * Verifies the message Issuer is present and matches the configured IdP entity id.
     *
     * @throws ElasticsearchSecurityException if the issuer is missing or different
     */
    protected void checkIssuer(Issuer issuer, XMLObject parent) {
        if (issuer == null) {
            throw samlException("Element {} ({}) has no issuer, but expected [{}]",
                    parent.getElementQName(), text(parent, 16), idp.getEntityId());
        }
        if (idp.getEntityId().equals(issuer.getValue()) == false) {
            throw samlException("SAML Issuer [{}] does not match expected value [{}]", issuer.getValue(), idp.getEntityId());
        }
    }

    protected long maxSkewInMillis() {
        return this.maxSkew.millis();
    }

    protected java.time.Instant now() {
        return clock.instant();
    }

    /**
     * Converts a Joda DateTime into a Java Instant
     */
    protected Instant toInstant(DateTime dateTime) {
        if (dateTime == null) {
            return null;
        }
        return Instant.ofEpochMilli(dateTime.getMillis());
    }

    // Package private for testing
    <T extends XMLObject> T buildXmlObject(Element element, Class<T> type) {
        try {
            Unmarshaller unmarshaller = unmarshallerFactory.getUnmarshaller(element);
            if (unmarshaller == null) {
                throw samlException("XML element [{}] cannot be unmarshalled to SAML type [{}] (no unmarshaller)",
                        element.getTagName(), type);
            }
            final XMLObject object = unmarshaller.unmarshall(element);
            if (type.isInstance(object)) {
                return type.cast(object);
            }
            Object[] args = new Object[] { element.getTagName(), type.getName(), object == null ? "<null>" : object.getClass().getName() };
            throw samlException("SAML object [{}] is incorrect type. Expected [{}] but was [{}]", args);
        } catch (UnmarshallingException e) {
            throw samlException("Failed to unmarshall SAML content [{}]", e, element.getTagName());
        }
    }

    /** Truncated text content of an XML object (prefix only), for log/error messages. */
    protected String text(XMLObject xml, int length) {
        return text(xml, length, 0);
    }

    /**
     * Truncated text content of an XML object, keeping up to {@code prefixLength}
     * leading and {@code suffixLength} trailing characters (avoiding a split
     * surrogate pair at the suffix boundary). Returns {@code null} if the object
     * has no DOM.
     */
    protected static String text(XMLObject xml, int prefixLength, int suffixLength) {
        final Element dom = xml.getDOM();
        if (dom == null) {
            return null;
        }
        final String text = dom.getTextContent().trim();
        final int totalLength = prefixLength + suffixLength;
        if (text.length() > totalLength) {
            final String prefix = Strings.cleanTruncate(text, prefixLength) + "...";
            if (suffixLength == 0) {
                return prefix;
            }
            int suffixIndex = text.length() - suffixLength;
            if (Character.isHighSurrogate(text.charAt(suffixIndex))) {
                suffixIndex++;
            }
            return prefix + text.substring(suffixIndex);
        } else {
            return text;
        }
    }

    /**
     * Parses raw bytes into a schema-validated DOM element using the hardened
     * per-thread builder.
     *
     * @throws ElasticsearchSecurityException if the content is not valid SAML XML
     */
    protected Element parseSamlMessage(byte[] content) {
        final Element root;
        try (ByteArrayInputStream input = new ByteArrayInputStream(content)) {
            // This will parse and validate the input
            final Document doc = THREAD_LOCAL_DOCUMENT_BUILDER.get().parse(input);
            root = doc.getDocumentElement();
            if (logger.isTraceEnabled()) {
                logger.trace("Received SAML Message: {} \n", SamlUtils.toString(root, true));
            }
        } catch (SAXException | IOException e) {
            throw samlException("Failed to parse SAML message", e);
        }
        return root;
    }

    /**
     * Enforces a NotOnOrAfter condition, allowing for the configured clock skew.
     * A {@code null} value means no restriction.
     */
    protected void validateNotOnOrAfter(DateTime notOnOrAfter) {
        if (notOnOrAfter == null) {
            return;
        }
        final Instant now = now();
        final Instant pastNow = now.minusMillis(this.maxSkew.millis());
        if (pastNow.isBefore(toInstant(notOnOrAfter)) == false) {
            throw samlException("Rejecting SAML assertion because [{}] is on/after [{}]", pastNow, notOnOrAfter);
        }
    }

    /**
     * Parses an HTTP-Redirect binding query string, extracting the SAML message,
     * RelayState and (if present) validating the detached signature.
     *
     * NOTE(review): the signature input is derived by stripping everything from
     * "&Signature=" to the end — this assumes Signature is the final parameter;
     * confirm against the redirect-binding spec ordering.
     */
    protected ParsedQueryString parseQueryStringAndValidateSignature(String queryString, String samlMessageParameterName) {
        final String signatureInput = queryString.replaceAll("&Signature=.*$", "");
        final Map<String, String> parameters = new HashMap<>();
        RestUtils.decodeQueryString(queryString, 0, parameters);
        final String samlMessage = parameters.get(samlMessageParameterName);
        if (samlMessage == null) {
            throw samlException("Could not parse {} from query string: [{}]", samlMessageParameterName, queryString);
        }

        final String relayState = parameters.get("RelayState");
        final String signatureAlgorithm = parameters.get("SigAlg");
        final String signature = parameters.get("Signature");
        if (signature == null || signatureAlgorithm == null) {
            return new ParsedQueryString(samlMessage, false, relayState);
        }

        validateSignature(signatureInput, signatureAlgorithm, signature);
        return new ParsedQueryString(samlMessage, true, relayState);
    }

    /** Verifies a detached (redirect-binding) signature over the raw query-string bytes. */
    private void validateSignature(String inputString, String signatureAlgorithm, String signature) {
        final byte[] sigBytes = decodeBase64(signature);
        final byte[] inputBytes = inputString.getBytes(StandardCharsets.US_ASCII);
        final String signatureText = Strings.cleanTruncate(signature, 32);
        checkIdpSignature(credential -> {
            if (XMLSigningUtil.verifyWithURI(credential, signatureAlgorithm, sigBytes, inputBytes)) {
                logger.debug(() -> new ParameterizedMessage("SAML Signature [{}] matches credentials [{}] [{}]",
                        signatureText, credential.getEntityId(), credential.getPublicKey()));
                return true;
            } else {
                logger.debug(() -> new ParameterizedMessage("SAML Signature [{}] failed against credentials [{}] [{}]",
                        signatureText, credential.getEntityId(), credential.getPublicKey()));
                return false;
            }
        }, signatureText);
    }

    /**
     * Base64-decodes content, tolerating embedded whitespace.
     *
     * @throws ElasticsearchSecurityException if the content is not valid Base64
     */
    protected byte[] decodeBase64(String content) {
        try {
            return Base64.getDecoder().decode(content.replaceAll("\\s+", ""));
        } catch (IllegalArgumentException e) {
            logger.info("Failed to decode base64 string [{}] - {}", content, e.toString());
            throw samlException("SAML message cannot be Base64 decoded", e);
        }
    }

    /**
     * Inflates raw-DEFLATE compressed bytes (as used by the HTTP-Redirect binding;
     * {@code new Inflater(true)} selects the headerless format).
     */
    protected byte[] inflate(byte[] bytes) {
        Inflater inflater = new Inflater(true);
        try (ByteArrayInputStream in = new ByteArrayInputStream(bytes);
             InflaterInputStream inflate = new InflaterInputStream(in, inflater);
             ByteArrayOutputStream out = new ByteArrayOutputStream(bytes.length * 3 / 2)) {
            Streams.copy(inflate, out);
            return out.toByteArray();
        } catch (IOException e) {
            throw samlException("SAML message cannot be inflated", e);
        }
    }

    /** Decoded components of an HTTP-Redirect binding query string. */
    static class ParsedQueryString {
        final String samlMessage;
        final boolean hasSignature;
        final String relayState;

        ParsedQueryString(String samlMessage, boolean hasSignature, String relayState) {
            this.samlMessage = samlMessage;
            this.hasSignature = hasSignature;
            this.relayState = relayState;
        }
    }
}
| |
/*
* Copyright 2015-2016 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.app.daemon;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
import org.opencb.commons.datastore.core.ObjectMap;
import org.opencb.commons.datastore.core.Query;
import org.opencb.commons.datastore.core.QueryOptions;
import org.opencb.commons.datastore.core.QueryResult;
import org.opencb.opencga.catalog.monitor.ExecutionOutputRecorder;
import org.opencb.opencga.catalog.monitor.exceptions.ExecutionException;
import org.opencb.opencga.catalog.monitor.executors.old.ExecutorManager;
import org.opencb.opencga.catalog.exceptions.CatalogException;
import org.opencb.opencga.catalog.managers.CatalogManager;
import org.opencb.opencga.catalog.config.CatalogConfiguration;
import org.opencb.opencga.catalog.db.api.FileDBAdaptor;
import org.opencb.opencga.catalog.models.File;
import org.opencb.opencga.catalog.models.Job;
import org.opencb.opencga.catalog.models.Study;
import org.opencb.opencga.catalog.managers.CatalogFileUtils;
import org.opencb.opencga.core.SgeManager;
import org.opencb.opencga.core.common.Config;
import org.opencb.opencga.core.common.TimeUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Properties;
/**
 * Legacy OpenCGA daemon loop.
 *
 * <p>On every wake-up the loop (1) polls the catalog for unfinished jobs, mirrors their
 * SGE scheduler status back into the catalog and launches jobs in PREPARED state, then
 * (2) physically deletes TRASHED files once the configured delete delay has elapsed.
 * It also runs an embedded Jetty/Jersey server exposing a small REST interface.
 *
 * <p>Created by jacobo on 23/10/14.
 *
 * @deprecated superseded by the newer catalog monitor/executor framework.
 */
@Deprecated
public class DaemonLoop implements Runnable {
    // Names of the properties used to configure the daemon.
    public static final String PORT = "OPENCGA.APP.DAEMON.PORT";
    public static final String SLEEP = "OPENCGA.APP.DAEMON.SLEEP";
    public static final String USER = "OPENCGA.APP.DAEMON.USER";
    public static final String PASSWORD = "OPENCGA.APP.DAEMON.PASSWORD";
    public static final String DELETE_DELAY = "OPENCGA.APP.DAEMON.DELETE_DELAY";

    private final Properties properties;

    private Server server;          // embedded Jetty server for the REST interface
    private Thread thread;          // thread that runs this Runnable's main loop
    private boolean exit = false;   // set by stop(); polled by run() to leave the loop
    private CatalogManager catalogManager;

    private static Logger logger = LoggerFactory.getLogger(DaemonLoop.class);
    private ExecutionOutputRecorder analysisOutputRecorder;
    private String sessionId;       // catalog session id; null until login and after logout

    @Deprecated
    public DaemonLoop(Properties properties) {
        this.properties = properties;
        try {
            CatalogConfiguration catalogConfiguration = CatalogConfiguration.load(new FileInputStream(Paths.get(Config.getOpenCGAHome(),
                    "conf", "catalog-configuration.yml").toFile()));
            catalogManager = new CatalogManager(catalogConfiguration);
        } catch (CatalogException | IOException e) {
            // NOTE(review): failure is only printed; catalogManager stays null and
            // run() would later fail with an NPE — confirm this is acceptable.
            e.printStackTrace();
        }
        // analysisFileIndexer = new AnalysisFileIndexer(catalogManager, Config.getAnalysisProperties());
        int port = Integer.parseInt(properties.getProperty(DaemonLoop.PORT, "61976"));
        // Wire a Jersey application into Jetty, served under /opencga/rest/*.
        ResourceConfig resourceConfig = new ResourceConfig();
        resourceConfig.packages(true, "org.opencb.opencga.app.daemon.rest");
        ServletContainer sc = new ServletContainer(resourceConfig);
        ServletHolder sh = new ServletHolder(sc);
        logger.info("Server in port : {}", port);
        server = new Server(port);
        ServletContextHandler context = new ServletContextHandler(server, null, ServletContextHandler.SESSIONS);
        context.addServlet(sh, "/opencga/rest/*");
        thread = new Thread(this);
    }

    @Override
    public void run() {
        int sleep = Integer.parseInt(properties.getProperty(SLEEP, "4000"));
        sessionId = null;
        try {
            // Log into the catalog as the daemon user; the session is reused for every call below.
            QueryResult<ObjectMap> login = catalogManager.login(properties.getProperty(USER), properties.getProperty(PASSWORD), "daemon");
            sessionId = login.getResult().get(0).getString("sessionId");
        } catch (CatalogException | IOException e) {
            e.printStackTrace();
            // Without a session the daemon cannot work: skip the loop entirely.
            exit = true;
        }
        analysisOutputRecorder = new ExecutionOutputRecorder(catalogManager, sessionId);
        while (!exit) {
            try {
                Thread.sleep(sleep);
            } catch (InterruptedException e) {
                // stop() interrupts this thread on purpose; only report unexpected interrupts.
                if (!exit) {
                    e.printStackTrace();
                }
            }
            logger.info("----- WakeUp {} -----", TimeUtils.getTimeMillis());
            logger.info("----- Pending jobs -----");
            try {
                QueryResult<Job> unfinishedJobs = catalogManager.getUnfinishedJobs(sessionId);
                for (Job job : unfinishedJobs.getResult()) {
                    // Ask SGE for the scheduler-side status of this job, if available.
                    String status = null;
                    try {
                        status = SgeManager.status(job.getResourceManagerAttributes().get(Job.JOB_SCHEDULER_NAME).toString());
                    } catch (Exception e) {
                        logger.warn(e.getMessage());
                    }
                    String jobStatusEnum = job.getStatus().getName();
                    // String type = job.getResourceManagerAttributes().get(Job.TYPE).toString();
                    // System.out.println("job : {id: " + job.getId() + ", status: '" + job.getName() + "', name: '" + job.getName() + "'}, sgeStatus : " + status);
                    logger.info("job : {id: " + job.getId() + ", status: '" + job.getStatus().getName() + "', name: '" + job.getName() + "'}, sgeStatus : " + status);
                    // Track SGEManager: translate the SGE status into a catalog job status update.
                    if (status != null) {
                        switch (status) {
                            case SgeManager.FINISHED:
                                if (!Job.JobStatus.DONE.equals(job.getStatus().getName())) {
                                    catalogManager.modifyJob(job.getId(), new ObjectMap("status", Job.JobStatus.DONE), sessionId);
                                    jobStatusEnum = Job.JobStatus.DONE;
                                }
                                break;
                            case SgeManager.ERROR:
                            case SgeManager.EXECUTION_ERROR:
                                if (!Job.JobStatus.DONE.equals(job.getStatus().getName())) {
                                    // Still marked DONE, but the execution error is recorded on the job.
                                    ObjectMap parameters = new ObjectMap();
                                    parameters.put("status", Job.JobStatus.DONE);
                                    String error = Job.ERRNO_FINISH_ERROR;
                                    parameters.put("error", error);
                                    parameters.put("errorDescription", Job.ERROR_DESCRIPTIONS.get(error));
                                    catalogManager.modifyJob(job.getId(), parameters, sessionId);
                                    jobStatusEnum = Job.JobStatus.DONE;
                                    job.setError(error);
                                }
                                break;
                            case SgeManager.QUEUED:
                                if (!Job.JobStatus.QUEUED.equals(job.getStatus().getName())) {
                                    catalogManager.modifyJob(job.getId(), new ObjectMap("status", Job.JobStatus.QUEUED), sessionId);
                                    jobStatusEnum = Job.JobStatus.QUEUED;
                                }
                                break;
                            case SgeManager.RUNNING:
                                if (!Job.JobStatus.RUNNING.equals(job.getStatus().getName())) {
                                    catalogManager.modifyJob(job.getId(), new ObjectMap("status", Job.JobStatus.RUNNING), sessionId);
                                    jobStatusEnum = Job.JobStatus.RUNNING;
                                }
                                break;
                            case SgeManager.TRANSFERRED:
                                break;
                            case SgeManager.UNKNOWN:
                                break;
                        }
                    }
                    // Track Catalog Job status: act on the (possibly just updated) catalog state.
                    switch (jobStatusEnum) {
                        case Job.JobStatus.DONE:
                            // Record job outputs; READY on success, ERROR if the job reported an error.
                            boolean jobOk = job.getError() == null || (job.getError() != null && job.getError().isEmpty());
                            analysisOutputRecorder.recordJobOutputAndPostProcess(job, !jobOk);
                            if (jobOk) {
                                catalogManager.modifyJob(job.getId(), new ObjectMap("status", Job.JobStatus.READY), sessionId);
                            } else {
                                catalogManager.modifyJob(job.getId(), new ObjectMap("status", Job.JobStatus.ERROR), sessionId);
                            }
                            break;
                        case Job.JobStatus.PREPARED:
                            try {
                                ExecutorManager.execute(catalogManager, job, sessionId);
                            } catch (ExecutionException e) {
                                // Could not launch the job: flag it ERROR with ERRNO_NO_QUEUE.
                                ObjectMap params = new ObjectMap("status", Job.JobStatus.ERROR);
                                String error = Job.ERRNO_NO_QUEUE;
                                params.put("error", error);
                                params.put("errorDescription", Job.ERROR_DESCRIPTIONS.get(error));
                                catalogManager.modifyJob(job.getId(), params, sessionId);
                            }
                            break;
                        case Job.JobStatus.QUEUED:
                            break;
                        case Job.JobStatus.RUNNING:
                            break;
                        case Job.JobStatus.ERROR:
                        case Job.JobStatus.READY:
                            //Never expected!
                            break;
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            logger.info("----- Pending deletions -----");
            try {
                // TRASHED files are physically deleted once DELETE_DELAY seconds have passed.
                QueryResult<File> files = catalogManager.searchFile(-1, new Query(FileDBAdaptor.QueryParams.FILE_STATUS.key(),
                        File.FileStatus.TRASHED), new QueryOptions(), sessionId);
                long currentTimeMillis = System.currentTimeMillis();
                for (File file : files.getResult()) {
                    try { //TODO: skip if the file is a non-empty folder
                        long deleteDate = new ObjectMap(file.getAttributes()).getLong("deleteDate", 0);
                        if (currentTimeMillis - deleteDate > Long.valueOf(properties.getProperty(DELETE_DELAY, "30")) * 1000) { //Seconds to millis
                            QueryResult<Study> studyQueryResult = catalogManager.getStudy(catalogManager.getStudyIdByFileId(file.getId()), sessionId);
                            Study study = studyQueryResult.getResult().get(0);
                            logger.info("Deleting file {} from study {id: {}, alias: {}}", file, study.getId(), study.getAlias());
                            new CatalogFileUtils(catalogManager).delete(file, sessionId);
                        } else {
                            logger.info("Don't delete file {id: {}, path: '{}', attributes: {}}}", file.getId(), file.getPath(), file.getAttributes());
                            logger.info("{}", (currentTimeMillis - deleteDate) / 1000);
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        // Loop finished: log out of the catalog and stop the embedded REST server.
        if (sessionId != null) {
            try {
                catalogManager.logout(properties.getProperty(USER), sessionId);
            } catch (CatalogException e) {
                e.printStackTrace();
            }
            sessionId = null;
        }
        try {
            Thread.sleep(200);
            server.stop();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Starts the embedded REST server and the daemon loop thread. */
    public void start() throws Exception {
        //Start services
        server.start();
        thread.start();
    }

    /**
     * Blocks until both the REST server and the loop thread have finished.
     *
     * @return 0 on normal termination, 2 if the join was interrupted
     */
    public int join() {
        //Join services
        try {
            logger.info("Join to Server");
            server.join();
            logger.info("Join to Thread");
            thread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
            return 2;
        }
        return 0;
    }

    /** Requests the loop to exit and interrupts its sleep so it reacts promptly. */
    synchronized public void stop() {
        exit = true;
        thread.interrupt();
    }
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2014 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.accessControl;
import java.nio.charset.StandardCharsets;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.swing.tree.TreeNode;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.httpclient.URI;
import org.apache.commons.httpclient.URIException;
import org.apache.log4j.Logger;
import org.parosproxy.paros.model.Session;
import org.zaproxy.zap.extension.accessControl.widgets.ContextSiteTree;
import org.zaproxy.zap.extension.accessControl.widgets.SiteTreeNode;
import org.zaproxy.zap.extension.accessControl.widgets.UriUtils;
import org.zaproxy.zap.model.Context;
import org.zaproxy.zap.users.User;
/**
 * An object that manages the access rules that have been configured for a {@link Context}.
 *
 * <p>Note: In order to store access rules for unauthenticated visitors, we'll use {@link
 * #UNAUTHENTICATED_USER_ID} as the id, which is an id that should not be generated for normal
 * users.
 *
 * @author cosminstefanxp
 */
public class ContextAccessRulesManager {

    private static final Logger log = Logger.getLogger(ContextAccessRulesManager.class);

    /**
     * In order to store access rules for unauthenticated visitors, we'll use -1 as the id, which is
     * an id that should not be generated for normal users.
     */
    public static final int UNAUTHENTICATED_USER_ID = -1;

    /** The separator used during the serialization of the rules. */
    private static final char SERIALIZATION_SEPARATOR = '`';

    private Context context;
    /** Explicitly defined rules, keyed first by user id, then by site-tree node. */
    private Map<Integer, Map<SiteTreeNode, AccessRule>> rules;
    private ContextSiteTree contextSiteTree;

    public ContextAccessRulesManager(Context context) {
        this.context = context;
        this.rules = new HashMap<>();
        this.contextSiteTree = new ContextSiteTree();
    }

    /**
     * Instantiates a new context access rules manager by performing a copy of the provided
     * ContextAccessRulesManager.
     *
     * @param context the context the new manager is associated to
     * @param sourceManager the rules manager whose rules are copied
     */
    public ContextAccessRulesManager(Context context, ContextAccessRulesManager sourceManager) {
        this.context = context;
        this.contextSiteTree = sourceManager.contextSiteTree;
        // Copy each per-user rule map so later modifications don't leak back into the source.
        this.rules = new HashMap<>(sourceManager.rules.size());
        for (Map.Entry<Integer, Map<SiteTreeNode, AccessRule>> entry :
                sourceManager.rules.entrySet()) {
            this.rules.put(entry.getKey(), new HashMap<>(entry.getValue()));
        }
    }

    /**
     * Get the mapping of rules for the user, initializing it if needed.
     *
     * @param userId the user id
     * @return the (mutable) map of rules stored for the user
     */
    private Map<SiteTreeNode, AccessRule> getUserRules(int userId) {
        return rules.computeIfAbsent(userId, id -> new HashMap<>());
    }

    /**
     * Gets the access rule for a user and a node, if any.
     *
     * @param userId the user id
     * @param node the node
     * @return the explicitly defined rule, or {@link AccessRule#INHERIT} if none was defined
     */
    public AccessRule getDefinedRule(int userId, SiteTreeNode node) {
        AccessRule rule = getUserRules(userId).get(node);
        return rule == null ? AccessRule.INHERIT : rule;
    }

    /**
     * Adds an access rule for a user and a node. Assigning {@link AccessRule#INHERIT} removes any
     * stored rule, as INHERIT is the implicit default.
     *
     * @param userId the user id
     * @param node the node
     * @param rule the rule
     */
    public void addRule(int userId, SiteTreeNode node, AccessRule rule) {
        if (log.isDebugEnabled()) {
            log.debug("Adding rule for user " + userId + " and node " + node + ": " + rule);
        }
        // If the rule is INHERIT (default), remove it from the rules mapping as there's no need to
        // store it there
        if (rule == AccessRule.INHERIT) {
            getUserRules(userId).remove(node);
        } else {
            getUserRules(userId).put(node, rule);
        }
    }

    /**
     * Infers the rule that corresponds to a site tree node.
     *
     * <p>If a rule was explicitly defined for the specified node, it is returned directly.
     * Otherwise, an inference algorithm is used to detect the matching rules for each node based on
     * its ancestors in the URL: the rule inferred is the one that has been explicitly defined for
     * the closest ancestor.
     *
     * <p>The root has a fixed corresponding value of {@link AccessRule#UNKNOWN}, so if no rules are
     * specified for any of the ancestors of a node, it defaults to {@link AccessRule#UNKNOWN}.
     *
     * @param userId the user id
     * @param node the node
     * @return the access rule inferred
     */
    public AccessRule inferRule(int userId, SiteTreeNode node) {
        Map<SiteTreeNode, AccessRule> userRules = getUserRules(userId);
        // First of all, check if we have an explicit rule for the node
        AccessRule rule = userRules.get(node);
        if (rule != null && rule != AccessRule.INHERIT) {
            return rule;
        }
        String hostname;
        List<String> path = null;
        try {
            path = context.getUrlParamParser().getTreePath(node.getUri());
            hostname = UriUtils.getHostName(node.getUri());
        } catch (URIException e) {
            log.error("An error occurred while infering access rules: " + e.getMessage(), e);
            return AccessRule.UNKNOWN;
        }
        // Find the node corresponding to the hostname of the url
        AccessRule inferredRule = AccessRule.UNKNOWN;
        SiteTreeNode parent = contextSiteTree.getRoot().findChild(hostname);
        if (parent != null) {
            rule = userRules.get(parent);
            if (rule != null && rule != AccessRule.INHERIT) {
                inferredRule = rule;
            }
        }
        if (parent == null || path == null || path.isEmpty()) {
            return inferredRule;
        }
        // Replace the last 'segment' of the path with the actual node name
        path.set(path.size() - 1, node.getNodeName());
        String pathSegment;
        // Navigate the tree down trying to find the target node, making sure we store the defined
        // access rule at each step, if different from INHERIT. This allows us to have the right
        // behavior and infer the access rule. We start with UNKNOWN
        for (int i = 0; i < path.size(); i++) {
            pathSegment = path.get(i);
            if (pathSegment != null && !pathSegment.equals("")) {
                // Find the child node that matches the segment
                parent = parent.findChild(pathSegment);
                if (parent == null) {
                    log.warn(
                            "Unable to find path segment while infering rule for "
                                    + node
                                    + ": "
                                    + pathSegment);
                    break;
                }
                // Save its access rule, if anything relevant
                rule = userRules.get(parent);
                if (rule != null && rule != AccessRule.INHERIT) {
                    inferredRule = rule;
                }
            }
        }
        return inferredRule;
    }

    /**
     * Clear any existing rules and copies the access rules from another rules manager for the
     * provided list of users (to which the "Unauthenticated user" is added).
     *
     * @param sourceManager the source manager
     * @param users the users for which to copy rules
     */
    public void copyRulesFrom(ContextAccessRulesManager sourceManager, List<User> users) {
        this.rules.clear();
        // Copy the user rules for the provided users
        for (User user : users) {
            Map<SiteTreeNode, AccessRule> sourceRules = sourceManager.rules.get(user.getId());
            if (sourceRules != null) {
                this.rules.put(user.getId(), new HashMap<>(sourceRules));
            }
        }
        // Also copy the rules for the unauthenticated user, which will always be there
        Map<SiteTreeNode, AccessRule> unauthenticatedRules =
                sourceManager.rules.get(UNAUTHENTICATED_USER_ID);
        if (unauthenticatedRules != null) {
            this.rules.put(UNAUTHENTICATED_USER_ID, new HashMap<>(unauthenticatedRules));
        }
        this.contextSiteTree = sourceManager.contextSiteTree;
    }

    public ContextSiteTree getContextSiteTree() {
        return contextSiteTree;
    }

    public void reloadContextSiteTree(Session session) {
        this.contextSiteTree.reloadTree(session, context);
    }

    /**
     * Generates a list of string representations (serialization) of the rules contained in this
     * rules manager. Each of the entries can later be imported using the {@link
     * #importSerializedRule(String)} method.
     *
     * @return the list of representations
     */
    protected List<String> exportSerializedRules() {
        List<String> exported = new LinkedList<>();
        StringBuilder serializedRule;
        for (Entry<Integer, Map<SiteTreeNode, AccessRule>> userRulesEntry : rules.entrySet()) {
            for (Entry<SiteTreeNode, AccessRule> ruleEntry : userRulesEntry.getValue().entrySet()) {
                serializedRule = new StringBuilder(50);
                serializedRule.append(userRulesEntry.getKey().toString());
                serializedRule.append(SERIALIZATION_SEPARATOR);
                serializedRule.append(ruleEntry.getValue().name()).append(SERIALIZATION_SEPARATOR);
                // Note: encode the name as it may contain special characters. Use an explicit
                // charset so the serialized form round-trips regardless of the platform default
                // (importSerializedRule decodes with the same charset).
                serializedRule.append(
                        Base64.encodeBase64String(
                                ruleEntry.getKey().getNodeName().getBytes(StandardCharsets.UTF_8)));
                serializedRule.append(SERIALIZATION_SEPARATOR);
                // Note: there's no need to escape the URI as it's the last value of the
                // serialization string and as we're using the URL escaped version (which cannot
                // contain the separator)
                serializedRule.append(ruleEntry.getKey().getUri().getEscapedURI());
                exported.add(serializedRule.toString());
            }
        }
        return exported;
    }

    /**
     * Import a rule from a serialized representation. The rule should have been exported via the
     * {@link #exportSerializedRules()} method.
     *
     * @param serializedRule the serialized rule
     */
    protected void importSerializedRule(String serializedRule) {
        try {
            // Limit 4: the URI is the last field and may not contain the separator, but be safe.
            String[] values = serializedRule.split(Character.toString(SERIALIZATION_SEPARATOR), 4);
            int userId = Integer.parseInt(values[0]);
            AccessRule rule = AccessRule.valueOf(values[1]);
            // Decode with the same charset used in exportSerializedRules().
            String nodeName = new String(Base64.decodeBase64(values[2]), StandardCharsets.UTF_8);
            URI uri = new URI(values[3], true);
            SiteTreeNode node = new SiteTreeNode(nodeName, uri);
            getUserRules(userId).put(node, rule);
            if (log.isDebugEnabled()) {
                log.debug(
                        String.format(
                                "Imported access control rule (context, userId, node, rule): (%d, %d, %s, %s) ",
                                context.getIndex(), userId, uri.toString(), rule));
            }
        } catch (Exception ex) {
            log.error(
                    "Unable to import serialized rule for context "
                            + context.getIndex()
                            + ":"
                            + serializedRule,
                    ex);
        }
    }

    /**
     * Generates and returns the map of rules that are associated to SiteTreeNodes which don't exist
     * in the current {@link ContextSiteTree}.
     *
     * @param userId the user id
     * @return the map of rules which are associated to nodes not in the context tree
     */
    public Map<SiteTreeNode, AccessRule> computeHangingRules(int userId) {
        Map<SiteTreeNode, AccessRule> rules = new HashMap<>(getUserRules(userId));
        if (rules.isEmpty()) {
            return rules;
        }
        // We make a traversal of the context site tree and remove all nodes from the map
        @SuppressWarnings("unchecked")
        Enumeration<TreeNode> en = contextSiteTree.getRoot().depthFirstEnumeration();
        while (en.hasMoreElements()) {
            // Unfortunately the enumeration isn't genericized so we need to downcast when calling
            // nextElement():
            SiteTreeNode node = (SiteTreeNode) en.nextElement();
            rules.remove(node);
        }
        if (log.isDebugEnabled()) {
            log.debug(
                    String.format(
                            "Identified hanging rules for context %d and user %d: %s",
                            context.getIndex(), userId, rules));
        }
        return rules;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.util.Collection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.MarshalException;
import org.apache.cassandra.io.util.DataOutputBuffer;
import org.apache.cassandra.utils.ByteBufferUtil;
/**
 * An immutable (name, value, timestamp) column; immutability prevents all kinds of
 * confusion in a multithreaded environment.
 * (TODO: look at making SuperColumn immutable too. This is trickier but is probably doable
 * with something like PCollections -- http://code.google.com)
 */
public class Column implements IColumn
{
    private static Logger logger = LoggerFactory.getLogger(Column.class);

    /** @return a fresh serializer for columns */
    public static ColumnSerializer serializer()
    {
        return new ColumnSerializer();
    }

    protected final ByteBuffer name;
    protected final ByteBuffer value;
    protected final long timestamp;

    Column(ByteBuffer name)
    {
        this(name, ByteBufferUtil.EMPTY_BYTE_BUFFER);
    }

    public Column(ByteBuffer name, ByteBuffer value)
    {
        this(name, value, 0);
    }

    public Column(ByteBuffer name, ByteBuffer value, long timestamp)
    {
        assert name != null;
        assert value != null;
        assert name.remaining() <= IColumn.MAX_NAME_LENGTH;
        this.name = name;
        this.value = value;
        this.timestamp = timestamp;
    }

    public ByteBuffer name()
    {
        return name;
    }

    /** Simple columns have no sub-columns; this always throws. */
    public Column getSubColumn(ByteBuffer columnName)
    {
        throw new UnsupportedOperationException("This operation is unsupported on simple columns.");
    }

    public ByteBuffer value()
    {
        return value;
    }

    /** Simple columns have no sub-columns; this always throws. */
    public Collection<IColumn> getSubColumns()
    {
        throw new UnsupportedOperationException("This operation is unsupported on simple columns.");
    }

    public long timestamp()
    {
        return timestamp;
    }

    /** A plain Column is never a tombstone. */
    public boolean isMarkedForDelete()
    {
        return false;
    }

    public long getMarkedForDeleteAt()
    {
        throw new IllegalStateException("column is not marked for delete");
    }

    public long mostRecentLiveChangeAt()
    {
        return timestamp;
    }

    /**
     * Serialized footprint: a 2-byte name length plus the name bytes, a 1-byte
     * deletion flag, an 8-byte timestamp, and a 4-byte value length plus the value bytes.
     */
    public int size()
    {
        int total = DBConstants.shortSize_ + name.remaining();
        total += DBConstants.boolSize_;
        total += DBConstants.tsSize_;
        total += DBConstants.intSize_ + value.remaining();
        return total;
    }

    /*
     * This returns the size of the column when serialized.
     * @see com.facebook.infrastructure.db.IColumn#serializedSize()
     */
    public int serializedSize()
    {
        return size();
    }

    public void addColumn(IColumn column)
    {
        throw new UnsupportedOperationException("This operation is not supported for simple columns.");
    }

    /** @return the given column when it is strictly newer than this one, otherwise null */
    public IColumn diff(IColumn column)
    {
        return timestamp() < column.timestamp() ? column : null;
    }

    /** Folds the name, value, timestamp and deletion flag into the given digest. */
    public void updateDigest(MessageDigest digest)
    {
        digest.update(name.duplicate());
        digest.update(value.duplicate());
        DataOutputBuffer metadata = new DataOutputBuffer();
        try
        {
            metadata.writeLong(timestamp);
            metadata.writeByte(isMarkedForDelete() ? ColumnSerializer.DELETION_MASK : 0);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        digest.update(metadata.getData(), 0, metadata.getLength());
    }

    public int getLocalDeletionTime()
    {
        throw new IllegalStateException("column is not marked for delete");
    }

    /**
     * Merges this column with another: tombstones win over live columns, otherwise the
     * higher timestamp wins, and timestamp ties are broken by comparing values.
     */
    public IColumn reconcile(IColumn column)
    {
        // tombstones take precedence. (if both are tombstones, then it doesn't matter which one we use.)
        if (isMarkedForDelete())
        {
            return column.timestamp() > timestamp() ? column : this;
        }
        if (column.isMarkedForDelete())
        {
            return column.timestamp() < timestamp() ? this : column;
        }
        // break ties by comparing values.
        if (column.timestamp() == timestamp())
        {
            return value().compareTo(column.value()) < 0 ? column : this;
        }
        // neither is tombstoned and timestamps are different
        return column.timestamp() > timestamp() ? column : this;
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        Column that = (Column) o;
        return timestamp == that.timestamp
               && name.equals(that.name)
               && value.equals(that.value);
    }

    @Override
    public int hashCode()
    {
        int h = name != null ? name.hashCode() : 0;
        h = 31 * h + (value != null ? value.hashCode() : 0);
        h = 31 * h + (int) (timestamp ^ (timestamp >>> 32));
        return h;
    }

    /** @return a copy backed by cloned buffers, sharing no buffer state with this column */
    public IColumn deepCopy()
    {
        return new Column(ByteBufferUtil.clone(name), ByteBufferUtil.clone(value), timestamp);
    }

    /** Human-readable form: name:deleted:valueLength@timestamp. */
    public String getString(AbstractType comparator)
    {
        return new StringBuilder()
               .append(comparator.getString(name))
               .append(":")
               .append(isMarkedForDelete())
               .append(":")
               .append(value.remaining())
               .append("@")
               .append(timestamp())
               .toString();
    }

    public boolean isLive()
    {
        return !isMarkedForDelete();
    }

    /** Validates the column name against the comparator appropriate for the column family type. */
    protected void validateName(CFMetaData metadata) throws MarshalException
    {
        AbstractType nameValidator = metadata.cfType == ColumnFamilyType.Super ? metadata.subcolumnComparator : metadata.comparator;
        nameValidator.validate(name());
    }

    /** Validates the name and, when a validator is configured for it, the value. */
    public void validateFields(CFMetaData metadata) throws MarshalException
    {
        validateName(metadata);
        AbstractType valueValidator = metadata.getValueValidator(name());
        if (valueValidator != null)
            valueValidator.validate(value());
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs.changes.ignore;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationAction;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.components.StoragePathMacros;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.AbstractVcs;
import com.intellij.openapi.vcs.VcsApplicationSettings;
import com.intellij.openapi.vcs.VcsBundle;
import com.intellij.openapi.vcs.VcsNotifier;
import com.intellij.openapi.vcs.changes.IgnoredFileContentProvider;
import com.intellij.openapi.vcs.changes.IgnoredFileGenerator;
import com.intellij.openapi.vcs.changes.IgnoredFileProvider;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.encoding.EncodingProjectManager;
import com.intellij.vcsUtil.VcsImplUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static com.intellij.openapi.vcs.VcsNotificationIdsHolder.MANAGE_IGNORE_FILES;
import static com.intellij.openapi.vcs.changes.ignore.IgnoreConfigurationProperty.ASKED_MANAGE_IGNORE_FILES_PROPERTY;
import static com.intellij.openapi.vcs.changes.ignore.IgnoreConfigurationProperty.MANAGE_IGNORE_FILES_PROPERTY;
import static java.lang.System.lineSeparator;
public class IgnoredFileGeneratorImpl implements IgnoredFileGenerator {
    private static final Logger LOG = Logger.getInstance(IgnoredFileGeneratorImpl.class);

    private final Project myProject;
    // Serializes ignore-file writes so concurrent generation requests don't interleave content.
    private final Object myWriteLock = new Object();
    // Guards the static notification state below.
    private static final Object myNotificationLock = new Object();
    // The currently displayed "manage ignore files" notification, if any.
    @Nullable
    private static Notification myNotification;
    // The ignore-file root the current notification was shown for; used to avoid duplicates.
    @Nullable
    private static VirtualFile myIgnoreFileRootNotificationShowFor;

    protected IgnoredFileGeneratorImpl(@NotNull Project project) {
        myProject = project;
    }
    /** Generates (or appends to) the VCS ignore file under {@code ignoreFileRoot}; delegates to doGenerate. */
    @Override
    public void generateFile(@NotNull VirtualFile ignoreFileRoot, @NotNull AbstractVcs vcs, boolean notify) {
        doGenerate(ignoreFileRoot, vcs, notify);
    }
    /**
     * Builds the ignore-file content for the given VCS and writes it under the root.
     * When {@code notify} is set and the user hasn't yet decided whether the IDE should
     * manage ignore files, a notification is shown first and the write only happens if
     * the user accepts it.
     */
    private void doGenerate(@NotNull VirtualFile ignoreFileRoot, @NotNull AbstractVcs vcs, boolean notify) {
        if (skipGeneration(ignoreFileRoot, notify)) return;
        IgnoredFileContentProvider ignoredFileContentProvider = VcsImplUtil.findIgnoredFileContentProvider(vcs);
        if (ignoredFileContentProvider == null) {
            LOG.debug("Cannot find content provider for vcs " + vcs.getName());
            return;
        }
        String ignoreFileName = ignoredFileContentProvider.getFileName();
        synchronized (myWriteLock) {
            String ignoreFileContent =
                ignoredFileContentProvider.buildIgnoreFileContent(ignoreFileRoot, IgnoredFileProvider.IGNORE_FILE.getExtensions());
            // Nothing to ignore — don't create/append an empty file.
            if (StringUtil.isEmptyOrSpaces(ignoreFileContent)) return;
            File ignoreFile = getIgnoreFile(ignoreFileRoot, ignoreFileName);
            if (notify && needAskToManageIgnoreFiles(myProject)) {
                // Ask first; the actual write runs only if the user picks a "manage" action.
                notifyVcsIgnoreFileManage(myProject, ignoreFileRoot, ignoredFileContentProvider,
                    () -> writeToFile(ignoreFileRoot, ignoreFile, ignoreFileContent, true));
            }
            else {
                writeToFile(ignoreFileRoot, ignoreFile, ignoreFileContent, false);
            }
        }
    }
private boolean skipGeneration(@NotNull VirtualFile ignoreFileRoot, boolean notify) {
if (notify && !needGenerateIgnoreFile(myProject, ignoreFileRoot)) { // notify == true for non-internal ignore file generation
LOG.debug("Skip VCS ignore file generation");
return true;
}
else if (!needGenerateInternalIgnoreFile(myProject, ignoreFileRoot)) {
LOG.debug("Skip VCS internal ignore file generation");
return true;
}
return false;
}
private void writeToFile(@NotNull VirtualFile ignoreFileRoot, @NotNull File ignoreFile, @NotNull String ignoreFileContent, boolean openFile) {
boolean append = ignoreFile.exists();
String projectCharsetName = EncodingProjectManager.getInstance(myProject).getDefaultCharsetName();
try {
if (append) {
FileUtil.writeToFile(ignoreFile, (lineSeparator() + ignoreFileContent).getBytes(projectCharsetName), true);
}
else {
//create ignore file with VFS to prevent externally added files detection
WriteAction.runAndWait(() -> {
VirtualFile newIgnoreFile = ignoreFileRoot.createChildData(ignoreFileRoot, ignoreFile.getName());
VfsUtil.saveText(newIgnoreFile, ignoreFileContent);
});
}
}
catch (IOException e) {
LOG.warn("Cannot write to file " + ignoreFile.getPath());
}
markIgnoreFileRootAsGenerated(myProject, ignoreFile.getParent());
LocalFileSystem.getInstance().refreshIoFiles(Collections.singleton(ignoreFile));
if (openFile) {
openFile(ignoreFile);
}
}
    /** Opens the given ignore file in the editor, asynchronously on the EDT. */
    private void openFile(@NotNull File file) {
        ApplicationManager.getApplication().invokeLater(() -> {
            VirtualFile vFile = VfsUtil.findFileByIoFile(file, true);
            if (vFile == null) return;
            new OpenFileDescriptor(myProject, vFile).navigate(true);
        });
    }
    /**
     * Shows a notification asking whether the IDE should manage VCS ignore files, with three
     * actions: manage for this project, manage for all projects, or don't manage. The first
     * two run {@code writeToIgnoreFile} before recording the choice. At most one active
     * notification per ignore-file root is shown at a time, tracked via the static fields
     * guarded by {@code myNotificationLock}.
     */
    private static void notifyVcsIgnoreFileManage(@NotNull Project project,
                                                  @NotNull VirtualFile ignoreFileRoot,
                                                  @NotNull IgnoredFileContentProvider ignoredFileContentProvider,
                                                  @NotNull Runnable writeToIgnoreFile) {
        PropertiesComponent propertiesComponent = PropertiesComponent.getInstance(project);
        VcsApplicationSettings applicationSettings = VcsApplicationSettings.getInstance();
        synchronized (myNotificationLock) {
            // Don't stack duplicates: bail out if a live notification already targets this root.
            if (myNotification != null &&
                myIgnoreFileRootNotificationShowFor != null &&
                !myNotification.isExpired() &&
                myIgnoreFileRootNotificationShowFor.equals(ignoreFileRoot)) {
                return;
            }
            myIgnoreFileRootNotificationShowFor = ignoreFileRoot;
            myNotification = VcsNotifier.getInstance(project).notifyMinorInfo(
                MANAGE_IGNORE_FILES,
                true,
                "",
                VcsBundle.message("ignored.file.manage.message",
                    ApplicationNamesInfo.getInstance().getFullProductName(), ignoredFileContentProvider.getFileName()),
                // "Manage for this project": write the file and remember the per-project choice.
                NotificationAction.create(VcsBundle.messagePointer("ignored.file.manage.this.project"), (event, notification) -> {
                    writeToIgnoreFile.run();
                    propertiesComponent.setValue(MANAGE_IGNORE_FILES_PROPERTY, true);
                    propertiesComponent.setValue(ASKED_MANAGE_IGNORE_FILES_PROPERTY, true);
                    synchronized (myNotificationLock) {
                        notification.expire();
                        myIgnoreFileRootNotificationShowFor = null;
                    }
                }),
                // "Manage for all projects": write the file and flip the application-wide setting.
                NotificationAction.create(VcsBundle.messagePointer("ignored.file.manage.all.project"), (event, notification) -> {
                    writeToIgnoreFile.run();
                    applicationSettings.MANAGE_IGNORE_FILES = true;
                    propertiesComponent.setValue(ASKED_MANAGE_IGNORE_FILES_PROPERTY, true);
                    synchronized (myNotificationLock) {
                        notification.expire();
                        myIgnoreFileRootNotificationShowFor = null;
                    }
                }),
                // "Don't manage": record only that the question was asked; no file is written.
                NotificationAction.create(VcsBundle.messagePointer("ignored.file.manage.notmanage"), (event, notification) -> {
                    propertiesComponent.setValue(ASKED_MANAGE_IGNORE_FILES_PROPERTY, true);
                    synchronized (myNotificationLock) {
                        notification.expire();
                        myIgnoreFileRootNotificationShowFor = null;
                    }
                }));
        }
    }
@NotNull
private static File getIgnoreFile(@NotNull VirtualFile ignoreFileRoot, @NotNull String ignoreFileName) {
File vcsRootFile = VfsUtilCore.virtualToIoFile(ignoreFileRoot);
return new File(vcsRootFile.getPath(), ignoreFileName);
}
public static boolean needGenerateInternalIgnoreFile(@NotNull Project project, @NotNull VirtualFile ignoreFileRoot) {
boolean wasGeneratedPreviously = IgnoredFileRootStore.getInstance(project).containsRoot(ignoreFileRoot.getPath());
if (wasGeneratedPreviously) {
LOG.debug("Ignore file generated previously for root " + ignoreFileRoot.getPath());
return false;
}
return true;
}
public static void markIgnoreFileRootAsGenerated(@NotNull Project project, @NotNull String ignoreFileRoot){
IgnoredFileRootStore.getInstance(project).addRoot(ignoreFileRoot);
}
private static boolean needGenerateIgnoreFile(@NotNull Project project, @NotNull VirtualFile ignoreFileRoot) {
VcsApplicationSettings vcsApplicationSettings = VcsApplicationSettings.getInstance();
if (vcsApplicationSettings.DISABLE_MANAGE_IGNORE_FILES) return false;
boolean wasGeneratedPreviously = IgnoredFileRootStore.getInstance(project).containsRoot(ignoreFileRoot.getPath());
if (wasGeneratedPreviously) {
LOG.debug("Ignore file generated previously for root " + ignoreFileRoot.getPath());
return false;
}
boolean needGenerateRegistryFlag = Registry.is("vcs.ignorefile.generation", true);
if (!needGenerateRegistryFlag) {
return false;
}
PropertiesComponent propertiesComponent = PropertiesComponent.getInstance(project);
boolean askedToManageIgnores = propertiesComponent.getBoolean(ASKED_MANAGE_IGNORE_FILES_PROPERTY, false);
return isManageIgnoreTurnOn(project) || !askedToManageIgnores;
}
private static boolean isManageIgnoreTurnOn(@NotNull Project project){
boolean globalManageIgnores = VcsApplicationSettings.getInstance().MANAGE_IGNORE_FILES;
PropertiesComponent propertiesComponent = PropertiesComponent.getInstance(project);
boolean manageIgnoresInProject = propertiesComponent.getBoolean(MANAGE_IGNORE_FILES_PROPERTY, false);
return globalManageIgnores || manageIgnoresInProject;
}
private static boolean needAskToManageIgnoreFiles(@NotNull Project project) {
PropertiesComponent propertiesComponent = PropertiesComponent.getInstance(project);
boolean askedToManageIgnores = propertiesComponent.getBoolean(ASKED_MANAGE_IGNORE_FILES_PROPERTY, false);
return !askedToManageIgnores && !isManageIgnoreTurnOn(project);
}
@State(name = "IgnoredFileRootStore", storages = @Storage(StoragePathMacros.PRODUCT_WORKSPACE_FILE))
final static class IgnoredFileRootStore implements PersistentStateComponent<IgnoredFileRootStore.State> {
static class State {
public Set<String> generatedRoots = new HashSet<>();
}
State myState = new State();
static IgnoredFileRootStore getInstance(Project project) {
return project.getService(IgnoredFileRootStore.class);
}
boolean containsRoot(@NotNull String root) {
return myState.generatedRoots.contains(root);
}
void addRoot(@NotNull String root) {
myState.generatedRoots.add(root);
}
@Nullable
@Override
public State getState() {
return myState;
}
@Override
public void loadState(@NotNull State state) {
myState = state;
}
}
}
| |
package com.github.lindenb.jvarkit.tools.splitbam;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.github.lindenb.jvarkit.io.IOUtils;
import com.github.lindenb.jvarkit.util.picard.AbstractCommandLineProgram;
import com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryFactory;
import com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress;
import com.github.lindenb.jvarkit.util.picard.SamFileReaderFactory;
import com.github.lindenb.jvarkit.util.picard.cmdline.Option;
import com.github.lindenb.jvarkit.util.picard.cmdline.StandardOptionDefinitions;
import com.github.lindenb.jvarkit.util.picard.cmdline.Usage;
import htsjdk.samtools.util.Log;
import htsjdk.samtools.BAMIndex;
import htsjdk.samtools.DefaultSAMRecordFactory;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMFileHeader.SortOrder;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SAMFileWriter;
import htsjdk.samtools.SAMFileWriterFactory;
//import htsjdk.samtools.SAMProgramRecord;
import htsjdk.samtools.SAMReadGroupRecord;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordFactory;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.SAMSequenceRecord;
import htsjdk.samtools.util.SequenceUtil;
/**
 * Splits a BAM file into one output BAM per chromosome group.
 *
 * <p>Reads are routed by reference name. Unmapped reads whose mate is also unmapped
 * (or that are unpaired) go to the {@code UNDERTERMINED_NAME} group; unmapped reads
 * with a mapped mate follow the mate's reference. Chromosomes not listed in the
 * optional group file each become their own group.</p>
 */
@Deprecated
public class SplitBam extends AbstractCommandLineProgram
	{
	private static final Log LOG=Log.getInstance(SplitBam.class);

	@Usage(programVersion="1.0")
	public String USAGE=getStandardUsagePreamble()+"Split a BAM by chromosome group.";

	@Option(shortName= StandardOptionDefinitions.REFERENCE_SHORT_NAME, doc="Indexed reference",optional=false)
	public File REF=null;
	@Option(shortName= StandardOptionDefinitions.INPUT_SHORT_NAME, doc="BAM file to process. Default stdin. ",optional=true)
	public File IN=null;
	@Option(shortName= "EMPTY_BAM", doc="generate EMPTY bams for chromosome having no read mapped. ",optional=true)
	public boolean GENERATE_EMPTY_BAM=false;
	@Option(shortName= "GP", doc="Chromosome group file. ",optional=true)
	public File CHROM_GROUP=null;
	@Option(shortName= "MOCK", doc="add a mock pair of sam records to the bam. ",optional=true)
	public boolean ADD_MOCK_RECORD=false;

	/** placeholder in OUT_FILE_PATTERN replaced by each group name */
	private final static String REPLACE_CHROM="__CHROM__";
	@Option(shortName= "OFP", doc="MUST contain "+REPLACE_CHROM+" and end with .bam. ",optional=false)
	public String OUT_FILE_PATTERN="";
	@Option(shortName= "UN", doc="Unmapped chromosome name. ",optional=true)
	public String UNDERTERMINED_NAME="Unmapped";
	@Option(shortName= "IS", doc="input is sorted. ",optional=true)
	public boolean INPUT_IS_SORTED=false;

	/** dictionary loaded from REF; the input BAM's dictionary must match it */
	private SAMSequenceDictionary samSequenceDictionary;
	/** seed for unique mock read names */
	private long id_generator=System.currentTimeMillis();
	/** files created during this run; deleted if an error aborts the run */
	private Set<File> deleteOnError=new HashSet<File>();

	public SplitBam()
		{
		}

	/**
	 * Appends one unmapped, paired mock read pair to the writer so downstream tools
	 * accept the otherwise-empty BAM. Mock reads carry the custom attribute MK=1.
	 */
	private void addMockPair(
			SAMFileWriter sw,
			SAMFileHeader header
			) throws IOException
		{
		List<SAMReadGroupRecord> G=header.getReadGroups();
		String bases="NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN";
		SAMRecordFactory f=new DefaultSAMRecordFactory();
		++id_generator;
		for(int i=0;i< 2;++i)
			{
			SAMRecord rec=f.createSAMRecord(header);
			rec.setFirstOfPairFlag(i%2==0);
			rec.setSecondOfPairFlag(i%2==1);
			rec.setReadBases(bases.getBytes());
			rec.setMappingQuality(0);
			rec.setBaseQualityString(bases.replace('N', '#'));
			rec.setReadUnmappedFlag(true);
			rec.setMateUnmappedFlag(true);
			rec.setReadPairedFlag(true);
			String readName="MOCKREAD"+(id_generator)+":6:190:289:82";
			rec.setReadName(readName);
			LOG.info("generating mock read: "+readName);
			rec.setAttribute("MK",1);
			if(G!=null && !G.isEmpty())
				{
				rec.setAttribute("RG", G.get(0).getId());
				}
			sw.addAlignment(rec);
			}
		}

	/** Creates an empty (or mock-pair-only) BAM for a group that received no reads. */
	private void createEmptyFile(
			SAMFileWriterFactory sf,
			SAMFileHeader header,
			String groupName
			) throws IOException
		{
		File fileout=new File(this.OUT_FILE_PATTERN.replaceAll(REPLACE_CHROM, groupName));
		LOG.info("creating mock BAM file "+fileout);
		File parent=fileout.getParentFile();
		if(parent!=null) parent.mkdirs();
		SAMFileWriter sw=sf.makeBAMWriter(header, true, fileout);
		deleteOnError.add(fileout);
		deleteOnError.add(indexFor(fileout));
		try
			{
			if(this.ADD_MOCK_RECORD)
				{
				addMockPair(sw,header);
				}
			}
		finally
			{
			// FIX: close the writer even if writing the mock pair failed
			sw.close();
			}
		}

	/**
	 * Bidirectional group&lt;-&gt;chromosome mapping; each chromosome belongs to
	 * exactly one group, while a group may contain several chromosomes.
	 */
	private static class ManyToMany
		{
		private java.util.Map<String,Set<String>> group2chroms=new java.util.HashMap<String, java.util.Set<String>>();
		private java.util.Map<String,String> chrom2group=new java.util.HashMap<String, String>();

		/** Registers {@code chrom} in {@code group}; a chromosome may only be declared once. */
		public void set(String group,String chrom)
			{
			if(containsChrom(chrom)) throw new IllegalArgumentException("chrom "+chrom+" already defined for group "+ chrom2group.get(chrom));
			java.util.Set<String> set=group2chroms.get(group);
			if(set==null)
				{
				set=new java.util.LinkedHashSet<String>();
				group2chroms.put(group,set);
				}
			set.add(chrom);
			chrom2group.put(chrom,group);
			}
		public boolean containsGroup(String s)
			{
			return group2chroms.containsKey(s);
			}
		public boolean containsChrom(String s)
			{
			return chrom2group.containsKey(s);
			}
		}

	/**
	 * Reads the BAM stream and dispatches each record to its group's writer,
	 * lazily opening one output BAM per group. Throws if the group file references
	 * unknown chromosomes or if the BAM/reference dictionaries differ.
	 */
	private void scan(InputStream in) throws Exception
		{
		ManyToMany many2many=new ManyToMany();
		many2many.set(this.UNDERTERMINED_NAME, this.UNDERTERMINED_NAME);

		if(this.CHROM_GROUP!=null)
			{
			Set<String> all_chromosomes=new HashSet<String>();
			for(SAMSequenceRecord seq:this.samSequenceDictionary.getSequences())
				{
				all_chromosomes.add(seq.getSequenceName());
				}
			BufferedReader r=IOUtils.openFileForBufferedReading(this.CHROM_GROUP);
			String line;
			while((line=r.readLine())!=null)
				{
				if(line.isEmpty() || line.startsWith("#")) continue;
				String tokens[] =line.split("[ \t,]+");
				String groupName=tokens[0].trim();
				if(groupName.isEmpty()) throw new IOException("Empty group name in "+line);
				if(many2many.containsGroup(groupName)) throw new IOException("Group defined twice "+groupName);
				for(int i=1;i< tokens.length;i++)
					{
					String chromName=tokens[i].trim();
					// FIX: skip blank tokens BEFORE validating against the dictionary;
					// the previous order made this check unreachable and rejected blanks
					if(chromName.isEmpty()) continue;
					if(!all_chromosomes.contains(chromName))
						{
						throw new IOException("chrom "+chromName+" undefined in ref dict");
						}
					many2many.set(groupName,chromName);
					}
				}
			r.close();
			}

		// every chromosome not covered by the group file becomes its own group
		for(SAMSequenceRecord seq:this.samSequenceDictionary.getSequences())
			{
			String chromName=seq.getSequenceName();
			if(many2many.containsChrom(chromName)) continue;
			if(many2many.containsGroup(chromName))
				{
				throw new IOException("cannot create chrom group "+chromName+" because it is already defined.");
				}
			many2many.set(chromName,chromName);
			}

		Map<String,SAMFileWriter> seen=new HashMap<String,SAMFileWriter>(many2many.group2chroms.size());
		SamReader samFileReader=SamFileReaderFactory.mewInstance().open(in);
		final SAMFileHeader header=samFileReader.getFileHeader();
		header.setSortOrder(SortOrder.coordinate);

		if(!SequenceUtil.areSequenceDictionariesEqual(
				header.getSequenceDictionary(),
				this.samSequenceDictionary)
				)
			{
			samFileReader.close();
			throw new RuntimeException("Not the same sequence dictionary BAM vs "+REF);
			}

		SAMFileWriterFactory sf=new SAMFileWriterFactory();
		if(!super.TMP_DIR.isEmpty())
			{
			sf.setTempDirectory(super.TMP_DIR.get(0));
			}
		if(super.CREATE_INDEX!=null)
			{
			sf.setCreateIndex(super.CREATE_INDEX);
			}
		SAMSequenceDictionaryProgress progress=new SAMSequenceDictionaryProgress(samFileReader.getFileHeader()==null?null:samFileReader.getFileHeader().getSequenceDictionary());
		for(Iterator<SAMRecord> iter=samFileReader.iterator();
				iter.hasNext(); )
			{
			SAMRecord record=iter.next();
			progress.watch(record);
			String recordChromName;
			if( record.getReadUnmappedFlag() )
				{
				if(!record.getReadPairedFlag() || record.getMateUnmappedFlag())
					{
					// fully unmapped (or unpaired unmapped) reads go to the "Unmapped" group
					recordChromName=this.UNDERTERMINED_NAME;
					}
				else
					{
					// unmapped read with a mapped mate: keep it next to its mate
					recordChromName=record.getMateReferenceName();
					}
				}
			else
				{
				recordChromName=record.getReferenceName();
				}
			String groupName=many2many.chrom2group.get(recordChromName);
			if(groupName==null)
				{
				samFileReader.close();
				throw new IOException("Undefined group/chrom for "+recordChromName+" (not in ref dictionary "+many2many.chrom2group.keySet()+").");
				}
			SAMFileWriter writer=seen.get(groupName);
			if(writer==null)
				{
				File fileout=new File(this.OUT_FILE_PATTERN.replaceAll(REPLACE_CHROM, groupName));
				LOG.info("opening "+fileout);
				File parent=fileout.getParentFile();
				if(parent!=null) parent.mkdirs();
				writer=sf.makeBAMWriter(
						header,
						this.INPUT_IS_SORTED,
						fileout,
						super.COMPRESSION_LEVEL
						);
				deleteOnError.add(fileout);
				deleteOnError.add(indexFor(fileout));
				seen.put(groupName, writer);
				}
			writer.addAlignment(record);
			}
		for(String k:seen.keySet())
			{
			LOG.info("closing group "+k);
			seen.get(k).close();
			}
		samFileReader.close();
		if(this.GENERATE_EMPTY_BAM)
			{
			for(String groupName:many2many.group2chroms.keySet())
				{
				if(seen.containsKey(groupName)) continue;
				createEmptyFile(sf,header,groupName);
				}
			}
		// the run completed: keep every file that was created
		deleteOnError.clear();
		}

	@Override
	protected int doWork()
		{
		try
			{
			if(this.ADD_MOCK_RECORD)
				{
				// a mock pair implies empty-BAM generation
				this.GENERATE_EMPTY_BAM=true;
				}
			if(!OUT_FILE_PATTERN.contains(REPLACE_CHROM))
				{
				LOG.error("output file pattern undefined or doesn't contain "+REPLACE_CHROM);
				return -1;
				}
			if(REF==null)
				{
				LOG.error("Reference file undefined");
				// FIX: return an error status like every other validation failure here
				// instead of System.exit(-1), which bypassed framework cleanup
				return -1;
				}
			this.samSequenceDictionary=new SAMSequenceDictionaryFactory().load(REF);
			if(this.samSequenceDictionary==null)
				{
				LOG.error("Reference file dictionary missing. use picard to create it.");
				return -1;
				}
			if(this.IN==null)
				{
				LOG.info("reading stdin");
				scan(System.in);
				}
			else
				{
				LOG.info("reading "+IN);
				FileInputStream fin=new FileInputStream(IN);
				scan(fin);
				fin.close();
				}
			return 0;
			}
		catch(Exception err)
			{
			err.printStackTrace();
			// remove partially written BAM and index files
			for(File f:deleteOnError)
				{
				if(f!=null && f.exists() && f.isFile())
					{
					LOG.info("Cleanup: delete "+f);
					f.delete();
					}
				}
			return -1;
			}
		}

	/** Returns the expected .bai index file path for the given BAM file. */
	private static File indexFor(File bamFile)
		{
		final String bamExtension = ".bam";
		String fileName=bamFile.getName();
		final String bai = fileName.substring(0, fileName.length() - bamExtension.length()) + BAMIndex.BAMIndexSuffix;
		return new File(bamFile.getParent(), bai);
		}

	@Override
	public String getProgramVersion() {
		return "1.0";
		}

	public static void main(String[] args) throws Exception
		{
		new SplitBam().instanceMainWithExit(args);
		}
	}
| |
package mapconstruction2;
/**
* Frechet-based map construction 2.0 Copyright 2013 Mahmuda Ahmed and Carola Wenk
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*
* ------------------------------------------------------------------------
*
* This software is based on the following article. Please cite this article when using this code
* as part of a research publication:
*
* Mahmuda Ahmed and Carola Wenk, "Constructing Street Networks from GPS Trajectories", European
* Symposium on Algorithms (ESA): 60-71, Ljubljana, Slovenia, 2012
*
* ------------------------------------------------------------------------
*
* Author: Mahmuda Ahmed Filename: MapConstruction.java
*
*/
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.StringTokenizer;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* An object that represents a track.
*
*/
class PoseFile {
  String fileName;
  ArrayList<Vertex> curve;

  PoseFile() {
    this("", new ArrayList<Vertex>());
  }

  PoseFile(String curveName, ArrayList<Vertex> curve) {
    this.fileName = curveName;
    this.curve = curve;
  }

  public String getFileName() {
    return fileName;
  }

  public ArrayList<Vertex> getPose() {
    return curve;
  }

  /** Returns the total polyline length: the sum of consecutive vertex distances. */
  public double getLength() {
    double total = 0;
    for (int i = 1; i < curve.size(); i++) {
      total += curve.get(i - 1).dist(curve.get(i));
    }
    return total;
  }

  /**
   * Parses a track file with one point per line, "x y timestamp" or
   * "x y z timestamp" depending on {@code hasAltitude}. Reading stops at a time
   * gap larger than 120; points closer than 2.0 to the previous kept point are
   * dropped (units presumably seconds/meters — TODO confirm against the data).
   */
  public static PoseFile readFile(File inputFile, boolean hasAltitude) {
    PoseFile result = new PoseFile();
    result.fileName = inputFile.getName();
    String line = "";
    try {
      BufferedReader reader = new BufferedReader(new FileReader(
          inputFile.getAbsolutePath()));
      double previousTime = 0;
      while ((line = reader.readLine()) != null) {
        StringTokenizer tokens = new StringTokenizer(line);
        double x = Double.parseDouble(tokens.nextToken());
        double y = Double.parseDouble(tokens.nextToken());
        double z = hasAltitude ? Double.parseDouble(tokens.nextToken()) : 0.0;
        double timestamp = Double.parseDouble(tokens.nextToken());
        Vertex point = new Vertex(x, y, z, timestamp);
        if (result.curve.isEmpty()) {
          result.curve.add(point);
        } else {
          // stop the track at a large time gap
          if (timestamp - previousTime > 120) {
            break;
          }
          // keep only points sufficiently far from the previous kept point
          double gap = point.dist(result.curve.get(result.curve.size() - 1));
          if (gap > 2.0) {
            result.curve.add(point);
          }
        }
        previousTime = timestamp;
      }
      reader.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
    return result;
  }
}
/**
* An object that takes a set of poses as input, construct graph and write two
* files one for vertices and one for edges.
*/
public class MapConstruction {
public static int curveid; // counter for pose
public static String curveName; // file name for the pose
private static final Logger logger = Logger.getAnonymousLogger();
/**
* Writes the constructed map into files.
*/
public static void writeToFile(List<Vertex> vList, String fileName) {
try {
int count = 0;
BufferedWriter bwedges = new BufferedWriter(new FileWriter(fileName
+ "edges.txt"));
BufferedWriter bvertex = new BufferedWriter(new FileWriter(fileName
+ "vertices.txt"));
for (int i = 0; i < vList.size(); i++) {
Vertex v = vList.get(i);
bvertex.write(i + "," + v.getX() + "," + v.getY() +","+ v.getZ() + "\n");
for (int j = 0; j < v.getDegree(); j++) {
if (i != v.getAdjacentElementAt(j)) {
bwedges.write(count + "," + i + ","
+ v.getAdjacentElementAt(j) + "\n");
count++;
}
}
}
bwedges.close();
bvertex.close();
} catch (Exception ex) {
System.out.println(ex.toString());
}
}
/**
* Computes interval on edge e for a line segment consists of
* (currentIndex-1)-th and currentIndex-th vertices of pose and return true
* if edge e has a part of white interval else false.
*/
public boolean isWhiteInterval(Edge edge, List<Vertex> pose,
int currentIndex, double eps, double altEps) {
Line line = new Line(pose.get(currentIndex - 1), pose.get(currentIndex));
if (Math.abs(line.avgAltitude() - edge.getLine().avgAltitude()) <= altEps) {
return line.pIntersection(edge, eps);
} else {
return false;
}
}
  /**
   * Records a completed white interval on {@code edge}: the interval spans pose
   * parameters [startIndex + cstart, (endIndex - 1) + fractional end] and starts at
   * edge parameter {@code vstart}.
   * NOTE(review): setCurveEnd must run before setCurveEndIndex — it reads the
   * fractional part previously stored via edge.getCurveEnd().
   */
  public void setEndPointsOnEdge(Edge edge, int startIndex, int endIndex,
      double cstart, double vstart) {
    edge.setCurveStartIndex(startIndex);
    edge.setCurveStart(startIndex + cstart);
    edge.setEdgeStart(vstart);
    // shift the integer part of the end parameter; the fractional part is kept
    edge.setCurveEnd(endIndex - 1 + edge.getCurveEnd());
    edge.setCurveEndIndex(endIndex);
  }
  /**
   * Scans for the next white interval on {@code edge}, starting from pose segment
   * index {@code newstart}. On success the interval endpoints are stored on the edge
   * via {@link #setEndPointsOnEdge}; if no further interval exists the edge is marked
   * done (and its curveEndIndex set past the pose).
   */
  public void computeNextInterval(Edge edge, List<Vertex> pose, int newstart,
      double eps, double altEps) {
    // Compute next white interval on edge.
    boolean first = true;
    boolean debug = false;
    int startIndex = 0;
    double cstart = 0, vstart = 0;
    // Past the end of the pose: nothing left to scan.
    if (newstart >= pose.size()) {
      edge.setCurveEndIndex(pose.size());
      edge.setDone(true);
      return;
    }
    for (int i = newstart; i < pose.size(); i++) {
      boolean result = isWhiteInterval(edge, pose, i, eps, altEps);
      // first = true means we are still looking for our first interval
      // starting from newstart.
      // !result indicate Line(pose.get(i), pose.get(i+1)) doesn't contain
      // white interval.
      // we can just ignore if(first && !result).
      if (first && result) {
        // first segment on the white interval
        first = false;
        startIndex = i - 1;
        cstart = edge.getCurveStart();
        vstart = edge.getEdgeStart();
        // if the white interval ends within the same segment
        if (edge.getCurveEnd() < 1) {
          this.setEndPointsOnEdge(edge, startIndex, i, cstart, vstart);
          return;
        }
      } else if (!first && result) {
        // not the first segment on the white interval
        if (edge.getCurveEnd() < 1) {
          // if the white interval ends within that segment
          this.setEndPointsOnEdge(edge, startIndex, i, cstart, vstart);
          return;
        }
      } else if (!first && !result) {
        // the white interval ends at 1.0 of previous segment
        this.setEndPointsOnEdge(edge, startIndex, i, cstart, vstart);
        return;
      }
    }
    if (first) {
      // if the last segment on the curve is the first segment of that
      // interval
      edge.setCurveEndIndex(pose.size());
      edge.setDone(true);
    } else {
      // the interval was still open when the pose ended: close it at the last segment
      edge.setCurveStartIndex(startIndex);
      edge.setCurveStart(startIndex + cstart);
      edge.setEdgeStart(vstart);
      edge.setCurveEnd(pose.size() - 2 + edge.getCurveEnd());
      edge.setCurveEndIndex(pose.size() - 2);
    }
    return;
  }
/**
* Updates constructedMap by adding an Edge. Detail description of the
* algorithm is in the publication.
*/
public void updateMap(List<Vertex> constructedMap,
Map<String, Integer> map, Edge edge) {
// update the map by adding a new edge
Vertex v;
int parent = -1;
int child = -1;
String keyParent = edge.getVertex1().toString();
String keyChild = edge.getVertex2().toString();
// find the index of parent node
if (map.containsKey(keyParent)) {
parent = map.get(keyParent).intValue();
} else {
v = edge.getVertex1();
constructedMap.add(v);
parent = constructedMap.indexOf(v);
map.put(keyParent, parent);
}
// find the index of child node
if (map.containsKey(keyChild)) {
child = map.get(keyChild).intValue();
} else {
v = edge.getVertex2();
constructedMap.add(v);
child = constructedMap.indexOf(v);
map.put(keyChild, child);
}
// update the map
if (parent == -1 || child == -1) {
logger.log(Level.SEVERE, "inconsistent graph child, parent :"
+ child + ", " + parent);
} else if (parent != child) {
constructedMap.get(parent).addElementAdjList(child);
constructedMap.get(child).addElementAdjList(parent);
logger.log(Level.FINEST, "child, parent :" + child + ", " + parent);
logger.log(Level.FINEST, "child, parent :" + parent + ", " + child);
}
}
  /**
   * Adds a split point on an Edge.
   *
   * @param newVertexPosition
   *          represents position of a new Vertex
   *
   * NOTE(review): assumes the split vertex's key is already present in {@code map}
   * (the comment below claims updateMap always runs first — verify at call sites;
   * a missing key would NPE on map.get(key).intValue()).
   */
  public void edgeSplit(List<Vertex> constructedMap,
      Map<String, Integer> map, Edge edge, double newVertexPosition) {
    Vertex v1 = edge.getVertex1();
    Vertex v2 = edge.getVertex2();
    String key1 = v1.toString();
    String key2 = v2.toString();
    // call of this method always after updateMap which ensures
    // map.containsKey(key1) is
    // always true.
    int index1 = map.get(key1).intValue();
    int index2 = map.get(key2).intValue();
    // the split point at parameter newVertexPosition along the edge's line
    Vertex v = edge.getLine().getVertex(newVertexPosition);
    // splitting an edge on split point vertex v
    String key = v.toString();
    int index = map.get(key).intValue();
    // a split coinciding with either endpoint is a no-op
    if (index == index1 || index == index2) {
      return;
    }
    logger.log(Level.FINER, "Index = " + index1 + " " + index2 + " "
        + index);
    edge.addSplit(newVertexPosition, index);
  }
  /**
   * Commits edge splitting listed in List<Integer> Edge.edgeSplitVertices.
   *
   * <p>Expects exactly two sibling edges (the same undirected edge traversed in both
   * directions); the second edge's split positions are mirrored (1 - p) onto the first,
   * then the edge (index1, index2) in {@code graph} is replaced by the chain
   * index1 - split_0 - ... - split_last - index2.</p>
   */
  public void commitEdgeSplits(List<Edge> edges, Map<String, Integer> map,
      List<Vertex> graph) {
    if (edges.size() != 2) {
      // logger.log(Level.SEVERE, "created.");
      return;
    }
    Edge edge = edges.get(0);
    // fold the sibling's splits into this edge, mirroring the parameter direction
    for (int i = 0; i < edges.get(1).getEdgeSplitPositions().size(); i++) {
      double newPosition = 1 - edges.get(1).getEdgeSplitPositions()
          .get(i).doubleValue();
      edge.addSplit(newPosition,
          edges.get(1).getEdgeSplitVertices().get(i));
    }
    List<Integer> edgeVertexSplits = edge.getEdgeSplitVertices();
    int splitSize = edgeVertexSplits.size();
    if (splitSize == 0) {
      return;
    }
    Vertex v1 = edge.getVertex1();
    Vertex v2 = edge.getVertex2();
    String key1 = v1.toString();
    String key2 = v2.toString();
    int index1 = map.get(key1).intValue();
    int index2 = map.get(key2).intValue();
    boolean updateV1 = false, updateV2 = false;
    logger.log(Level.FINER, "commitEdgeSplits " + splitSize);
    // rewire v1: its edge to v2 now points at the first split vertex
    for (int i = 0; i < v1.getDegree(); i++) {
      if (v1.getAdjacentElementAt(i) == index2) {
        v1.setAdjacentElementAt(i, edgeVertexSplits.get(0).intValue());
        graph.get(edgeVertexSplits.get(0).intValue())
            .addElementAdjList(index1);
        updateV1 = true;
      }
    }
    // rewire v2: its edge to v1 now points at the last split vertex
    for (int i = 0; i < v2.getDegree(); i++) {
      if (v2.getAdjacentElementAt(i) == index1) {
        v2.setAdjacentElementAt(i, edgeVertexSplits.get(splitSize - 1)
            .intValue());
        graph.get(edgeVertexSplits.get(splitSize - 1).intValue())
            .addElementAdjList(index2);
        updateV2 = true;
      }
    }
    // chain consecutive split vertices together
    for (int i = 0; i < splitSize - 1; i++) {
      int currentVertex = edgeVertexSplits.get(i).intValue();
      int nextVertex = edgeVertexSplits.get(i + 1).intValue();
      graph.get(currentVertex).addElementAdjList(nextVertex);
      graph.get(nextVertex).addElementAdjList(currentVertex);
    }
    if (!(updateV1 && updateV2)) {
      logger.log(Level.SEVERE, "inconsistent graph: (" + splitSize + ")"
          + index1 + " " + index2 + " "
          + v1.getAdjacencyList().toString() + " "
          + v2.getAdjacencyList().toString());
    }
  }
/**
* Commits edge splitting for all edges.
*/
public void commitEdgeSplitsAll(List<Vertex> constructedMap,
Map<String, Integer> map, Map<String, ArrayList<Edge>> siblingMap,
List<Edge> edges) {
for (int i = 0; i < edges.size(); i++) {
String key1 = edges.get(i).getVertex1().toString() + " "
+ edges.get(i).getVertex2().toString();
String key2 = edges.get(i).getVertex2().toString() + " "
+ edges.get(i).getVertex1().toString();
ArrayList<Edge> siblings1, siblings2;
if (siblingMap.containsKey(key1))
siblings1 = siblingMap.get(key1);
else {
siblings1 = new ArrayList<Edge>();
}
if (siblingMap.containsKey(key2))
siblings2 = siblingMap.get(key2);
else {
siblings2 = new ArrayList<Edge>();
}
if (siblings1.size() != 0) {
this.commitEdgeSplits(siblings1, map, constructedMap);
siblingMap.remove(key1);
} else if (siblings2.size() != 0) {
this.commitEdgeSplits(siblings2, map, constructedMap);
siblingMap.remove(key2);
}
}
}
/**
* Adds a portion of a pose as edges into constructedMap.
*/
public void addToGraph(List<Vertex> constructedMap, List<Vertex> pose,
Map<String, Integer> map, int startIndex, int endIndex) {
for (int i = startIndex; i < endIndex; i++) {
this.updateMap(constructedMap, map,
new Edge(pose.get(i), pose.get(i + 1)));
}
}
/**
* Updates siblingHashmap for an edge.
*/
public void updateSiblingHashMap(Map<String, ArrayList<Edge>> siblingMap,
Edge edge) {
String key1 = edge.getVertex1().toString() + " "
+ edge.getVertex2().toString();
String key2 = edge.getVertex2().toString() + " "
+ edge.getVertex1().toString();
Collection<Edge> siblings1, siblings2;
if (siblingMap.containsKey(key1)) {
siblings1 = siblingMap.get(key1);
} else {
siblings1 = new ArrayList<Edge>();
}
if (siblingMap.containsKey(key1)) {
siblings2 = siblingMap.get(key2);
} else {
siblings2 = new ArrayList<Edge>();
}
if (siblings1.size() == 0 && siblings2.size() == 0) {
siblingMap.put(key1, new ArrayList<Edge>());
siblingMap.get(key1).add(edge);
} else if (siblings1.size() != 0) {
siblings1.add(edge);
} else if (siblings2.size() != 0) {
siblings2.add(edge);
}
}
  /**
   * Update the map for a pose/curve. Definition of black and white interval.
   *
   * <p>White intervals (parts of the pose within eps of an existing edge) are matched
   * onto those edges via a priority queue ordered by interval start; black intervals
   * (parts matching no edge) are inserted into the map as new edges, stitched to the
   * surrounding white intervals through split points.</p>
   */
  // @TODO(mahmuda): extract some shorter well-named methods.
  public void mapConstruction(List<Vertex> constructedMap, List<Edge> edges,
      Map<String, Integer> map, List<Vertex> pose, double eps,
      double altEps) {
    // seed the queue with each edge's first white interval (edges with none are done)
    PriorityQueue<Edge> pq = new PriorityQueue<Edge>();
    for (int i = 0; i < edges.size(); i++) {
      this.computeNextInterval(edges.get(i), pose, 1, eps, altEps);
      if (!edges.get(i).getDone()) {
        pq.add(edges.get(i));
      }
    }
    try {
      // The whole curve will be added as an edge because no white
      // interval
      if (pq.isEmpty()) {
        logger.log(Level.FINER, MapConstruction.curveName
            + " inserted as an edge");
        this.addToGraph(constructedMap, pose, map, 0, pose.size() - 1);
        logger.log(Level.FINER, MapConstruction.curveName
            + " inserted as an edge");
        return;
      }
      Edge edge = pq.poll();
      // cend tracks how far along the pose has been covered; cedge is the edge
      // whose white interval reaches cend
      double cend = edge.getCurveEnd();
      Edge cedge = edge;
      // There is a black interval until edge.curveStart
      if (edge.getCurveStart() > 0) {
        logger.log(Level.FINER, MapConstruction.curveName
            + " inserted as an edge until " + edge.getCurveStart());
        int index = (int) Math.floor(edge.getCurveStart());
        this.addToGraph(constructedMap, pose, map, 0, index);
        Line newLine = new Line(pose.get(index), pose.get(index + 1));
        double t = edge.getCurveStart()
            - Math.floor(edge.getCurveStart());
        this.updateMap(constructedMap, map, new Edge(pose.get(index),
            newLine.getVertex(t)));
        this.updateMap(constructedMap, map,
            new Edge(newLine.getVertex(t), edge.getLine()
                .getVertex(edge.getEdgeStart())));
        this.edgeSplit(constructedMap, map, edge, edge.getEdgeStart());
      }
      // the while loop will search through all the intervals until we
      // reach the end of the pose
      while (cend < pose.size()) {
        logger.log(Level.FINEST, MapConstruction.curveName
            + " has white interval " + edge.getCurveStart() + " "
            + edge.getCurveEnd() + " " + cend);
        if (cend < edge.getCurveEnd()) {
          cend = edge.getCurveEnd();
          cedge = edge;
        }
        if (edge.getCurveEnd() == pose.size() - 1) {
          logger.log(Level.FINER, MapConstruction.curveName
              + " processing completed.");
          return;
        }
        // advance this edge to its next white interval and re-queue it if any
        this.computeNextInterval(edge, pose,
            edge.getCurveEndIndex() + 1, eps, altEps);
        if (!edge.getDone()) {
          pq.add(edge);
        }
        // no interval left anywhere: the tail of the pose is one black interval
        if (pq.isEmpty()) {
          logger.log(Level.FINER, MapConstruction.curveName
              + " inserted as an edge from " + cend + " to end");
          int index = (int) Math.floor(cend);
          Line newLine = new Line(pose.get(index),
              pose.get(index + 1));
          double t = cend - Math.floor(cend);
          this.updateMap(
              constructedMap,
              map,
              new Edge(cedge.getLine().getVertex(
                  cedge.getEdgeEnd()), newLine.getVertex(t)));
          this.edgeSplit(constructedMap, map, cedge,
              cedge.getEdgeEnd());
          this.updateMap(constructedMap, map,
              new Edge(newLine.getVertex(t), pose.get(index + 1)));
          this.addToGraph(constructedMap, pose, map, index + 1,
              pose.size() - 1);
          return;
        }
        edge = pq.poll();
        // a gap between covered prefix (cend) and the next white interval:
        // insert the black interval in between
        if (edge.getCurveStart() > cend) {
          logger.log(Level.FINER, MapConstruction.curveName
              + " inserted as an edge from " + cend + " to "
              + edge.getCurveStart());
          // need to add rest of the line segment
          int index = (int) Math.floor(cend);
          int indexStart = (int) Math.floor(edge.getCurveStart());
          Line newLine = new Line(pose.get(index),
              pose.get(index + 1));
          double t = cend - Math.floor(cend);
          this.updateMap(
              constructedMap,
              map,
              new Edge(cedge.getLine().getVertex(
                  cedge.getEdgeEnd()), newLine.getVertex(t)));
          this.edgeSplit(constructedMap, map, cedge,
              cedge.getEdgeEnd());
          if (index == indexStart) {
            // gap starts and ends inside the same pose segment
            this.updateMap(
                constructedMap,
                map,
                new Edge(newLine.getVertex(t),
                    newLine.getVertex(edge.getCurveStart()
                        - index)));
            index = (int) Math.floor(edge.getCurveStart());
            newLine = new Line(pose.get(index), pose.get(index + 1));
            t = edge.getCurveStart()
                - Math.floor(edge.getCurveStart());
          } else {
            // gap spans several pose segments: close the current one,
            // add the whole segments, then open the final partial one
            this.updateMap(
                constructedMap,
                map,
                new Edge(newLine.getVertex(t), pose
                    .get(index + 1)));
            this.addToGraph(constructedMap, pose, map, index + 1,
                (int) Math.floor(edge.getCurveStart()));
            index = (int) Math.floor(edge.getCurveStart());
            newLine = new Line(pose.get(index), pose.get(index + 1));
            t = edge.getCurveStart()
                - Math.floor(edge.getCurveStart());
            this.updateMap(constructedMap, map,
                new Edge(pose.get(index), newLine.getVertex(t)));
          }
          // connect the black interval's end to the white interval's start on the edge
          this.updateMap(constructedMap, map,
              new Edge(newLine.getVertex(t), edge.getLine()
                  .getVertex(edge.getEdgeStart())));
          this.edgeSplit(constructedMap, map, edge,
              edge.getEdgeStart());
        }
      }
    } catch (Exception ex) {
      logger.log(Level.SEVERE, ex.toString());
      throw new RuntimeException(ex);
    }
    return;
  }
/**
 * Reads every track file in the given folder into memory.
 *
 * @param folder      directory containing the input track files
 * @param hasAltitude whether the input files contain altitude information
 * @return list of parsed {@link PoseFile}s, one per file in the folder
 * @throws IllegalArgumentException if {@code folder} is not a readable directory
 */
public List<PoseFile> readAllFiles(File folder, boolean hasAltitude) {
	List<PoseFile> poseFiles = new ArrayList<PoseFile>();
	// File.listFiles() returns null (not an empty array) when the path is
	// not a directory or an I/O error occurs; guard against the NPE.
	File[] files = folder.listFiles();
	if (files == null) {
		throw new IllegalArgumentException(
				"Cannot list files in folder: " + folder);
	}
	for (File file : files) {
		poseFiles.add(PoseFile.readFile(file, hasAltitude));
	}
	return poseFiles;
}
/**
 * Constructs map from poses and returns string representation of the map.
 *
 * Incrementally merges each input pose (trajectory) into the map built so
 * far: for every pose it rebuilds the current edge list from the graph,
 * runs {@code mapConstruction} to insert the pose, then commits any
 * pending edge splits.
 *
 * @param poseFiles input trajectories, one per file
 * @param eps       spatial matching tolerance (see the paper for detail)
 * @param altEps    minimum altitude difference between two streets
 * @return the constructed map as a list of vertices (adjacency stored on
 *         the vertices themselves)
 */
public List<Vertex> constructMapMain(List<PoseFile> poseFiles, double eps,
		double altEps) {
	List<Vertex> constructedMap = new ArrayList<Vertex>();
	// map contains mapping between vertex keys and their indices in
	// constructedMap
	Map<String, Integer> map = new HashMap<String, Integer>();
	try {
		// total length of all poses processed so far, for the progress log
		double length = 0;
		// generate list of files in the folder to process
		for (int k = 0; k < poseFiles.size(); k++) {
			// per-pose wall-clock timing, reported in minutes below
			Long startTime = System.currentTimeMillis();
			// NOTE(review): these static fields make this method
			// non-reentrant; they identify the current curve in log output
			MapConstruction.curveid = k;
			MapConstruction.curveName = poseFiles.get(k).getFileName();
			length += poseFiles.get(k).getLength();
			// a pose with fewer than 2 points has no line segment to merge
			if (poseFiles.get(k).getPose().size() < 2) {
				continue;
			}
			// snapshot of all directed edges currently in the graph
			List<Edge> edges = new ArrayList<Edge>();
			/*
			 * siblingMap contains map of key and sibling edges, sibling
			 * edges are line segments between two vertices but going in
			 * opposite direction.
			 */
			Map<String, ArrayList<Edge>> siblingMap = new HashMap<String, ArrayList<Edge>>();
			// rebuild the edge list from the graph's adjacency lists;
			// self-loops (v == v1) are skipped
			for (int i = 0; i < constructedMap.size(); i++) {
				Vertex v = constructedMap.get(i);
				for (int j = 0; j < v.getDegree(); j++) {
					Vertex v1 = constructedMap.get(v
							.getAdjacentElementAt(j));
					if (!v.equals(v1)) {
						Edge newEdge = new Edge(v, v1);
						edges.add(newEdge);
						updateSiblingHashMap(siblingMap, newEdge);
					}
				}
			}
			// merge the current pose into the graph, then commit the
			// edge splits produced during the merge
			this.mapConstruction(constructedMap, edges, map,
					poseFiles.get(k).getPose(), eps, altEps);
			this.commitEdgeSplitsAll(constructedMap, map, siblingMap, edges);
			logger.info("k :" + k + " " + MapConstruction.curveName + " "
					+ length + " :"
					+ (System.currentTimeMillis() - startTime) / 60000.00);
		}
	} catch (Exception e) {
		logger.log(Level.SEVERE, e.toString());
		throw new RuntimeException(e);
	}
	return constructedMap;
}
/**
 * Command-line entry point.
 *
 * Expected arguments:
 * <ol>
 * <li>path to the folder that contains input tracks</li>
 * <li>path to the folder where the output will be written</li>
 * <li>epsilon; see the paper for detail</li>
 * <li>whether the input files contain altitude information</li>
 * <li>(optional) minimum altitude difference between two streets,
 *     defaults to 4.0</li>
 * </ol>
 */
public static void main(String args[]) {
	// validate up front instead of failing later with a cryptic
	// ArrayIndexOutOfBoundsException / NumberFormatException
	if (args.length < 4) {
		System.err.println("Usage: MapConstruction <inputPath> "
				+ "<outputPath> <eps> <hasAltitude> [altEps]");
		return;
	}
	MapConstruction mapConstruction = new MapConstruction();
	// path to the folder that contains input tracks.
	String inputPath = args[0];
	// path to the folder where the output will be written.
	String outputPath = args[1];
	// epsilon; see the paper for detail
	double eps = Double.parseDouble(args[2]);
	// if the input files contains altitude information
	boolean hasAltitude = Boolean.parseBoolean(args[3]);
	// minimum altitude difference between two streets.
	double altEps;
	if (args.length > 4) {
		altEps = Double.parseDouble(args[4]);
	} else {
		altEps = 4.0;
	}
	System.out.println(inputPath);
	List<Vertex> constructedMap = mapConstruction.constructMapMain(
			mapConstruction.readAllFiles(new File(inputPath), hasAltitude),
			eps, altEps);
	MapConstruction.writeToFile(constructedMap, outputPath);
}
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.datamgt.model;
import com.liferay.portal.kernel.util.Validator;
import com.liferay.portal.model.ModelWrapper;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* <p>
* This class is a wrapper for {@link DictMetaData}.
* </p>
*
* @author NQMINH
* @see DictMetaData
* @generated
*/
public class DictMetaDataWrapper implements DictMetaData,
	ModelWrapper<DictMetaData> {
	/**
	 * Creates a wrapper that delegates every call to the given model.
	 *
	 * @param dictMetaData the model instance to wrap
	 */
	public DictMetaDataWrapper(DictMetaData dictMetaData) {
		_dictMetaData = dictMetaData;
	}

	@Override
	public Class<?> getModelClass() {
		return DictMetaData.class;
	}

	@Override
	public String getModelClassName() {
		return DictMetaData.class.getName();
	}

	@Override
	public Map<String, Object> getModelAttributes() {
		Map<String, Object> attributes = new HashMap<String, Object>();

		attributes.put("dictMetaDataId", getDictMetaDataId());
		attributes.put("companyId", getCompanyId());
		attributes.put("groupId", getGroupId());
		attributes.put("userId", getUserId());
		attributes.put("createDate", getCreateDate());
		attributes.put("modifiedDate", getModifiedDate());
		attributes.put("dictDataId", getDictDataId());
		attributes.put("attributeName", getAttributeName());
		attributes.put("attributeValue", getAttributeValue());

		return attributes;
	}

	@Override
	public void setModelAttributes(Map<String, Object> attributes) {
		// each attribute is optional; only non-null values are applied
		Long dictMetaDataId = (Long)attributes.get("dictMetaDataId");

		if (dictMetaDataId != null) {
			setDictMetaDataId(dictMetaDataId);
		}

		Long companyId = (Long)attributes.get("companyId");

		if (companyId != null) {
			setCompanyId(companyId);
		}

		Long groupId = (Long)attributes.get("groupId");

		if (groupId != null) {
			setGroupId(groupId);
		}

		Long userId = (Long)attributes.get("userId");

		if (userId != null) {
			setUserId(userId);
		}

		Date createDate = (Date)attributes.get("createDate");

		if (createDate != null) {
			setCreateDate(createDate);
		}

		Date modifiedDate = (Date)attributes.get("modifiedDate");

		if (modifiedDate != null) {
			setModifiedDate(modifiedDate);
		}

		Long dictDataId = (Long)attributes.get("dictDataId");

		if (dictDataId != null) {
			setDictDataId(dictDataId);
		}

		String attributeName = (String)attributes.get("attributeName");

		if (attributeName != null) {
			setAttributeName(attributeName);
		}

		String attributeValue = (String)attributes.get("attributeValue");

		if (attributeValue != null) {
			setAttributeValue(attributeValue);
		}
	}

	/**
	 * Returns the primary key of this dictionary meta data.
	 *
	 * @return the primary key of this dictionary meta data
	 */
	@Override
	public long getPrimaryKey() {
		return _dictMetaData.getPrimaryKey();
	}

	/**
	 * Sets the primary key of this dictionary meta data.
	 *
	 * @param primaryKey the primary key of this dictionary meta data
	 */
	@Override
	public void setPrimaryKey(long primaryKey) {
		_dictMetaData.setPrimaryKey(primaryKey);
	}

	/**
	 * Returns the dict meta data ID of this dictionary meta data.
	 *
	 * @return the dict meta data ID of this dictionary meta data
	 */
	@Override
	public long getDictMetaDataId() {
		return _dictMetaData.getDictMetaDataId();
	}

	/**
	 * Sets the dict meta data ID of this dictionary meta data.
	 *
	 * @param dictMetaDataId the dict meta data ID of this dictionary meta data
	 */
	@Override
	public void setDictMetaDataId(long dictMetaDataId) {
		_dictMetaData.setDictMetaDataId(dictMetaDataId);
	}

	/**
	 * Returns the company ID of this dictionary meta data.
	 *
	 * @return the company ID of this dictionary meta data
	 */
	@Override
	public long getCompanyId() {
		return _dictMetaData.getCompanyId();
	}

	/**
	 * Sets the company ID of this dictionary meta data.
	 *
	 * @param companyId the company ID of this dictionary meta data
	 */
	@Override
	public void setCompanyId(long companyId) {
		_dictMetaData.setCompanyId(companyId);
	}

	/**
	 * Returns the group ID of this dictionary meta data.
	 *
	 * @return the group ID of this dictionary meta data
	 */
	@Override
	public long getGroupId() {
		return _dictMetaData.getGroupId();
	}

	/**
	 * Sets the group ID of this dictionary meta data.
	 *
	 * @param groupId the group ID of this dictionary meta data
	 */
	@Override
	public void setGroupId(long groupId) {
		_dictMetaData.setGroupId(groupId);
	}

	/**
	 * Returns the user ID of this dictionary meta data.
	 *
	 * @return the user ID of this dictionary meta data
	 */
	@Override
	public long getUserId() {
		return _dictMetaData.getUserId();
	}

	/**
	 * Sets the user ID of this dictionary meta data.
	 *
	 * @param userId the user ID of this dictionary meta data
	 */
	@Override
	public void setUserId(long userId) {
		_dictMetaData.setUserId(userId);
	}

	/**
	 * Returns the user uuid of this dictionary meta data.
	 *
	 * @return the user uuid of this dictionary meta data
	 * @throws SystemException if a system exception occurred
	 */
	@Override
	public java.lang.String getUserUuid()
		throws com.liferay.portal.kernel.exception.SystemException {
		return _dictMetaData.getUserUuid();
	}

	/**
	 * Sets the user uuid of this dictionary meta data.
	 *
	 * @param userUuid the user uuid of this dictionary meta data
	 */
	@Override
	public void setUserUuid(java.lang.String userUuid) {
		_dictMetaData.setUserUuid(userUuid);
	}

	/**
	 * Returns the create date of this dictionary meta data.
	 *
	 * @return the create date of this dictionary meta data
	 */
	@Override
	public java.util.Date getCreateDate() {
		return _dictMetaData.getCreateDate();
	}

	/**
	 * Sets the create date of this dictionary meta data.
	 *
	 * @param createDate the create date of this dictionary meta data
	 */
	@Override
	public void setCreateDate(java.util.Date createDate) {
		_dictMetaData.setCreateDate(createDate);
	}

	/**
	 * Returns the modified date of this dictionary meta data.
	 *
	 * @return the modified date of this dictionary meta data
	 */
	@Override
	public java.util.Date getModifiedDate() {
		return _dictMetaData.getModifiedDate();
	}

	/**
	 * Sets the modified date of this dictionary meta data.
	 *
	 * @param modifiedDate the modified date of this dictionary meta data
	 */
	@Override
	public void setModifiedDate(java.util.Date modifiedDate) {
		_dictMetaData.setModifiedDate(modifiedDate);
	}

	/**
	 * Returns the dict data ID of this dictionary meta data.
	 *
	 * @return the dict data ID of this dictionary meta data
	 */
	@Override
	public long getDictDataId() {
		return _dictMetaData.getDictDataId();
	}

	/**
	 * Sets the dict data ID of this dictionary meta data.
	 *
	 * @param dictDataId the dict data ID of this dictionary meta data
	 */
	@Override
	public void setDictDataId(long dictDataId) {
		_dictMetaData.setDictDataId(dictDataId);
	}

	/**
	 * Returns the attribute name of this dictionary meta data.
	 *
	 * @return the attribute name of this dictionary meta data
	 */
	@Override
	public java.lang.String getAttributeName() {
		return _dictMetaData.getAttributeName();
	}

	/**
	 * Sets the attribute name of this dictionary meta data.
	 *
	 * @param attributeName the attribute name of this dictionary meta data
	 */
	@Override
	public void setAttributeName(java.lang.String attributeName) {
		_dictMetaData.setAttributeName(attributeName);
	}

	/**
	 * Returns the attribute value of this dictionary meta data.
	 *
	 * @return the attribute value of this dictionary meta data
	 */
	@Override
	public java.lang.String getAttributeValue() {
		return _dictMetaData.getAttributeValue();
	}

	/**
	 * Sets the attribute value of this dictionary meta data.
	 *
	 * @param attributeValue the attribute value of this dictionary meta data
	 */
	@Override
	public void setAttributeValue(java.lang.String attributeValue) {
		_dictMetaData.setAttributeValue(attributeValue);
	}

	@Override
	public boolean isNew() {
		return _dictMetaData.isNew();
	}

	@Override
	public void setNew(boolean n) {
		_dictMetaData.setNew(n);
	}

	@Override
	public boolean isCachedModel() {
		return _dictMetaData.isCachedModel();
	}

	@Override
	public void setCachedModel(boolean cachedModel) {
		_dictMetaData.setCachedModel(cachedModel);
	}

	@Override
	public boolean isEscapedModel() {
		return _dictMetaData.isEscapedModel();
	}

	@Override
	public java.io.Serializable getPrimaryKeyObj() {
		return _dictMetaData.getPrimaryKeyObj();
	}

	@Override
	public void setPrimaryKeyObj(java.io.Serializable primaryKeyObj) {
		_dictMetaData.setPrimaryKeyObj(primaryKeyObj);
	}

	@Override
	public com.liferay.portlet.expando.model.ExpandoBridge getExpandoBridge() {
		return _dictMetaData.getExpandoBridge();
	}

	@Override
	public void setExpandoBridgeAttributes(
		com.liferay.portal.model.BaseModel<?> baseModel) {
		_dictMetaData.setExpandoBridgeAttributes(baseModel);
	}

	@Override
	public void setExpandoBridgeAttributes(
		com.liferay.portlet.expando.model.ExpandoBridge expandoBridge) {
		_dictMetaData.setExpandoBridgeAttributes(expandoBridge);
	}

	@Override
	public void setExpandoBridgeAttributes(
		com.liferay.portal.service.ServiceContext serviceContext) {
		_dictMetaData.setExpandoBridgeAttributes(serviceContext);
	}

	@Override
	public java.lang.Object clone() {
		// clone the wrapped model and wrap the copy, so the result stays
		// a DictMetaDataWrapper
		return new DictMetaDataWrapper((DictMetaData)_dictMetaData.clone());
	}

	@Override
	public int compareTo(org.oep.datamgt.model.DictMetaData dictMetaData) {
		return _dictMetaData.compareTo(dictMetaData);
	}

	@Override
	public int hashCode() {
		return _dictMetaData.hashCode();
	}

	@Override
	public com.liferay.portal.model.CacheModel<org.oep.datamgt.model.DictMetaData> toCacheModel() {
		return _dictMetaData.toCacheModel();
	}

	@Override
	public org.oep.datamgt.model.DictMetaData toEscapedModel() {
		return new DictMetaDataWrapper(_dictMetaData.toEscapedModel());
	}

	@Override
	public org.oep.datamgt.model.DictMetaData toUnescapedModel() {
		return new DictMetaDataWrapper(_dictMetaData.toUnescapedModel());
	}

	@Override
	public java.lang.String toString() {
		return _dictMetaData.toString();
	}

	@Override
	public java.lang.String toXmlString() {
		return _dictMetaData.toXmlString();
	}

	@Override
	public void persist()
		throws com.liferay.portal.kernel.exception.SystemException {
		_dictMetaData.persist();
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}

		if (!(obj instanceof DictMetaDataWrapper)) {
			return false;
		}

		DictMetaDataWrapper dictMetaDataWrapper = (DictMetaDataWrapper)obj;

		// two wrappers are equal exactly when their wrapped models are equal
		return Validator.equals(
			_dictMetaData, dictMetaDataWrapper._dictMetaData);
	}

	/**
	 * @deprecated As of 6.1.0, replaced by {@link #getWrappedModel}
	 */
	@Deprecated
	public DictMetaData getWrappedDictMetaData() {
		return _dictMetaData;
	}

	@Override
	public DictMetaData getWrappedModel() {
		return _dictMetaData;
	}

	@Override
	public void resetOriginalValues() {
		_dictMetaData.resetOriginalValues();
	}

	// the wrapped model; never reassigned after construction
	private final DictMetaData _dictMetaData;
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.indexstore;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CarbonLRUCache;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.dev.DataMap;
import org.apache.carbondata.core.datastore.block.SegmentPropertiesAndSchemaHolder;
import org.apache.carbondata.core.indexstore.blockletindex.BlockDataMap;
import org.apache.carbondata.core.indexstore.blockletindex.BlockletDataMapFactory;
import org.apache.carbondata.core.indexstore.blockletindex.BlockletDataMapModel;
import org.apache.carbondata.core.indexstore.blockletindex.SegmentIndexFileStore;
import org.apache.carbondata.core.memory.MemoryException;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.util.BlockletDataMapUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
/**
* Class to handle loading, unloading,clearing,storing of the table
* blocks
*/
public class BlockletDataMapIndexStore
    implements Cache<TableBlockIndexUniqueIdentifierWrapper, BlockletDataMapIndexWrapper> {
  private static final Logger LOGGER =
      LogServiceFactory.getLogService(BlockletDataMapIndexStore.class.getName());
  /**
   * CarbonLRU cache
   */
  protected CarbonLRUCache lruCache;

  /**
   * map of block info to lock object map, while loading the btree this will be filled
   * and removed after loading the tree for that particular block info, this will be useful
   * while loading the tree concurrently so only block level lock will be applied and another
   * block can be loaded concurrently
   */
  private final Map<String, Object> segmentLockMap;

  /**
   * constructor to initialize the SegmentTaskIndexStore
   *
   * @param lruCache LRU cache in which loaded datamap wrappers are kept
   */
  public BlockletDataMapIndexStore(CarbonLRUCache lruCache) {
    this.lruCache = lruCache;
    segmentLockMap = new ConcurrentHashMap<String, Object>();
  }

  @Override
  public BlockletDataMapIndexWrapper get(TableBlockIndexUniqueIdentifierWrapper identifierWrapper)
      throws IOException {
    return get(identifierWrapper, null);
  }

  /**
   * Returns the datamap wrapper for the given identifier, loading and caching it on a miss.
   *
   * @param identifierWrapper identifies the index file (or merge index file) to load
   * @param segInfoCache shared per-call cache of segment file path to block meta info,
   *        may be null; avoids re-reading the same segment metadata across identifiers
   * @throws IOException if loading the index data fails
   */
  private BlockletDataMapIndexWrapper get(TableBlockIndexUniqueIdentifierWrapper identifierWrapper,
      Map<String, Map<String, BlockMetaInfo>> segInfoCache) throws IOException {
    TableBlockIndexUniqueIdentifier identifier =
        identifierWrapper.getTableBlockIndexUniqueIdentifier();
    String lruCacheKey = identifier.getUniqueTableSegmentIdentifier();
    BlockletDataMapIndexWrapper blockletDataMapIndexWrapper =
        (BlockletDataMapIndexWrapper) lruCache.get(lruCacheKey);
    List<BlockDataMap> dataMaps = new ArrayList<>();
    if (blockletDataMapIndexWrapper == null) {
      try {
        SegmentIndexFileStore indexFileStore =
            new SegmentIndexFileStore(identifierWrapper.getConfiguration());
        Set<String> filesRead = new HashSet<>();
        String segmentFilePath = identifier.getIndexFilePath();
        if (segInfoCache == null) {
          segInfoCache = new HashMap<String, Map<String, BlockMetaInfo>>();
        }
        Map<String, BlockMetaInfo> carbonDataFileBlockMetaInfoMapping =
            segInfoCache.get(segmentFilePath);
        if (carbonDataFileBlockMetaInfoMapping == null) {
          carbonDataFileBlockMetaInfoMapping =
              BlockletDataMapUtil.createCarbonDataFileBlockMetaInfoMapping(segmentFilePath,
                  identifierWrapper.getConfiguration());
          segInfoCache.put(segmentFilePath, carbonDataFileBlockMetaInfoMapping);
        }
        // if the identifier is not a merge file we can directly load the datamaps
        if (identifier.getMergeIndexFileName() == null) {
          Map<String, BlockMetaInfo> blockMetaInfoMap = BlockletDataMapUtil
              .getBlockMetaInfoMap(identifierWrapper, indexFileStore, filesRead,
                  carbonDataFileBlockMetaInfoMapping);
          BlockDataMap blockletDataMap =
              loadAndGetDataMap(identifier, indexFileStore, blockMetaInfoMap,
                  identifierWrapper.getCarbonTable(),
                  identifierWrapper.isAddTableBlockToUnsafeAndLRUCache(),
                  identifierWrapper.getConfiguration());
          dataMaps.add(blockletDataMap);
          blockletDataMapIndexWrapper =
              new BlockletDataMapIndexWrapper(identifier.getSegmentId(), dataMaps);
        } else {
          // if the identifier is a merge file then collect the index files and load the datamaps
          List<TableBlockIndexUniqueIdentifier> tableBlockIndexUniqueIdentifiers =
              BlockletDataMapUtil.getIndexFileIdentifiersFromMergeFile(identifier, indexFileStore);
          for (TableBlockIndexUniqueIdentifier blockIndexUniqueIdentifier :
              tableBlockIndexUniqueIdentifiers) {
            Map<String, BlockMetaInfo> blockMetaInfoMap = BlockletDataMapUtil.getBlockMetaInfoMap(
                new TableBlockIndexUniqueIdentifierWrapper(blockIndexUniqueIdentifier,
                    identifierWrapper.getCarbonTable()), indexFileStore, filesRead,
                carbonDataFileBlockMetaInfoMapping);
            if (!blockMetaInfoMap.isEmpty()) {
              BlockDataMap blockletDataMap =
                  loadAndGetDataMap(blockIndexUniqueIdentifier, indexFileStore, blockMetaInfoMap,
                      identifierWrapper.getCarbonTable(),
                      identifierWrapper.isAddTableBlockToUnsafeAndLRUCache(),
                      identifierWrapper.getConfiguration());
              dataMaps.add(blockletDataMap);
            }
          }
          blockletDataMapIndexWrapper =
              new BlockletDataMapIndexWrapper(identifier.getSegmentId(), dataMaps);
        }
        if (identifierWrapper.isAddTableBlockToUnsafeAndLRUCache()) {
          lruCache.put(identifier.getUniqueTableSegmentIdentifier(), blockletDataMapIndexWrapper,
              blockletDataMapIndexWrapper.getMemorySize());
        }
      } catch (Throwable e) {
        // clear all the memory used by datamaps loaded
        for (DataMap dataMap : dataMaps) {
          dataMap.clear();
        }
        // log the full throwable so the stack trace is not lost
        LOGGER.error("memory exception when loading datamap: " + e.getMessage(), e);
        throw new RuntimeException(e.getMessage(), e);
      }
    }
    return blockletDataMapIndexWrapper;
  }

  @Override public List<BlockletDataMapIndexWrapper> getAll(
      List<TableBlockIndexUniqueIdentifierWrapper> tableSegmentUniqueIdentifiers)
      throws IOException {
    // shared per-call cache so segment metadata is read once across identifiers
    Map<String, Map<String, BlockMetaInfo>> segInfoCache
        = new HashMap<String, Map<String, BlockMetaInfo>>();

    List<BlockletDataMapIndexWrapper> blockletDataMapIndexWrappers =
        new ArrayList<>(tableSegmentUniqueIdentifiers.size());
    List<TableBlockIndexUniqueIdentifierWrapper> missedIdentifiersWrapper = new ArrayList<>();
    BlockletDataMapIndexWrapper blockletDataMapIndexWrapper = null;
    // Get the datamaps for each index file from cache first; load the misses afterwards.
    try {
      for (TableBlockIndexUniqueIdentifierWrapper
               identifierWrapper : tableSegmentUniqueIdentifiers) {
        BlockletDataMapIndexWrapper dataMapIndexWrapper =
            getIfPresent(identifierWrapper);
        if (dataMapIndexWrapper != null) {
          blockletDataMapIndexWrappers.add(dataMapIndexWrapper);
        } else {
          missedIdentifiersWrapper.add(identifierWrapper);
        }
      }
      if (missedIdentifiersWrapper.size() > 0) {
        for (TableBlockIndexUniqueIdentifierWrapper identifierWrapper : missedIdentifiersWrapper) {
          blockletDataMapIndexWrapper = get(identifierWrapper, segInfoCache);
          blockletDataMapIndexWrappers.add(blockletDataMapIndexWrapper);
        }
      }
    } catch (Throwable e) {
      // release memory held by the wrapper that failed mid-load
      if (null != blockletDataMapIndexWrapper) {
        List<BlockDataMap> dataMaps = blockletDataMapIndexWrapper.getDataMaps();
        for (DataMap dataMap : dataMaps) {
          dataMap.clear();
        }
      }
      throw new IOException("Problem in loading segment blocks: " + e.getMessage(), e);
    }
    return blockletDataMapIndexWrappers;
  }

  /**
   * returns the SegmentTaskIndexWrapper
   *
   * @param tableSegmentUniqueIdentifierWrapper identifies the cached entry
   * @return the cached wrapper, or null when it is not in the LRU cache
   */
  @Override public BlockletDataMapIndexWrapper getIfPresent(
      TableBlockIndexUniqueIdentifierWrapper tableSegmentUniqueIdentifierWrapper) {
    return (BlockletDataMapIndexWrapper) lruCache.get(
        tableSegmentUniqueIdentifierWrapper.getTableBlockIndexUniqueIdentifier()
            .getUniqueTableSegmentIdentifier());
  }

  /**
   * method invalidate the segment cache for segment
   *
   * @param tableSegmentUniqueIdentifierWrapper identifies the entry to drop
   */
  @Override public void invalidate(
      TableBlockIndexUniqueIdentifierWrapper tableSegmentUniqueIdentifierWrapper) {
    BlockletDataMapIndexWrapper blockletDataMapIndexWrapper =
        getIfPresent(tableSegmentUniqueIdentifierWrapper);
    if (null != blockletDataMapIndexWrapper) {
      // clear the segmentProperties cache
      List<BlockDataMap> dataMaps = blockletDataMapIndexWrapper.getDataMaps();
      if (null != dataMaps && !dataMaps.isEmpty()) {
        String segmentId =
            tableSegmentUniqueIdentifierWrapper.getTableBlockIndexUniqueIdentifier().getSegmentId();
        // as segmentId will be same for all the dataMaps and segmentProperties cache is
        // maintained at segment level so it need to be called only once for clearing
        SegmentPropertiesAndSchemaHolder.getInstance()
            .invalidate(segmentId, dataMaps.get(0).getSegmentPropertiesIndex(),
                tableSegmentUniqueIdentifierWrapper.isAddTableBlockToUnsafeAndLRUCache());
      }
    }
    lruCache.remove(tableSegmentUniqueIdentifierWrapper.getTableBlockIndexUniqueIdentifier()
        .getUniqueTableSegmentIdentifier());
  }

  @Override
  public void put(TableBlockIndexUniqueIdentifierWrapper tableBlockIndexUniqueIdentifierWrapper,
      BlockletDataMapIndexWrapper wrapper) throws IOException, MemoryException {
    // As dataMap will use unsafe memory, it is not recommended to overwrite an existing entry
    // as in that case clearing unsafe memory need to be taken care. If at all datamap entry
    // in the cache need to be overwritten then use the invalidate interface
    // and then use the put interface
    if (null == getIfPresent(tableBlockIndexUniqueIdentifierWrapper)) {
      List<BlockDataMap> dataMaps = wrapper.getDataMaps();
      try {
        for (BlockDataMap blockletDataMap : dataMaps) {
          blockletDataMap.convertToUnsafeDMStore();
        }
        // Locking is not required here because in LRU cache map add method is synchronized to add
        // only one entry at a time and if a key already exists it will not overwrite the entry
        lruCache.put(tableBlockIndexUniqueIdentifierWrapper.getTableBlockIndexUniqueIdentifier()
            .getUniqueTableSegmentIdentifier(), wrapper, wrapper.getMemorySize());
      } catch (Throwable e) {
        // clear all the memory acquired by data map in case of any failure
        for (DataMap blockletDataMap : dataMaps) {
          blockletDataMap.clear();
        }
        throw new IOException("Problem in adding datamap to cache.", e);
      }
    }
  }

  /**
   * Below method will be used to load the segment of segments
   * One segment may have multiple tasks, so table segment will be loaded
   * based on task id and will return the map of taskId to table segment
   * map
   *
   * @return map of task id to segment mapping
   * @throws IOException if reading the index file fails
   */
  private BlockDataMap loadAndGetDataMap(TableBlockIndexUniqueIdentifier identifier,
      SegmentIndexFileStore indexFileStore, Map<String, BlockMetaInfo> blockMetaInfoMap,
      CarbonTable carbonTable, boolean addTableBlockToUnsafe, Configuration configuration)
      throws IOException, MemoryException {
    String uniqueTableSegmentIdentifier =
        identifier.getUniqueTableSegmentIdentifier();
    Object lock = segmentLockMap.get(uniqueTableSegmentIdentifier);
    if (lock == null) {
      lock = addAndGetSegmentLock(uniqueTableSegmentIdentifier);
    }
    BlockDataMap dataMap;
    // segment-level lock so the same index file is not loaded twice concurrently
    synchronized (lock) {
      dataMap = (BlockDataMap) BlockletDataMapFactory.createDataMap(carbonTable);
      dataMap.init(new BlockletDataMapModel(carbonTable,
          identifier.getIndexFilePath() + CarbonCommonConstants.FILE_SEPARATOR + identifier
              .getIndexFileName(), indexFileStore.getFileData(identifier.getIndexFileName()),
          blockMetaInfoMap, identifier.getSegmentId(), addTableBlockToUnsafe, configuration));
    }
    return dataMap;
  }

  /**
   * Below method will be used to get the segment level lock object
   *
   * @param uniqueIdentifier unique segment identifier used as the lock key
   * @return lock object
   */
  private synchronized Object addAndGetSegmentLock(String uniqueIdentifier) {
    // get the segment lock object if it is present then return
    // otherwise add the new lock and return
    Object segmentLoderLockObject = segmentLockMap.get(uniqueIdentifier);
    if (null == segmentLoderLockObject) {
      segmentLoderLockObject = new Object();
      segmentLockMap.put(uniqueIdentifier, segmentLoderLockObject);
    }
    return segmentLoderLockObject;
  }

  /**
   * The method clears the access count of table segments
   *
   * @param tableSegmentUniqueIdentifiersWrapper entries whose access count is released
   */
  @Override public void clearAccessCount(
      List<TableBlockIndexUniqueIdentifierWrapper> tableSegmentUniqueIdentifiersWrapper) {
    for (TableBlockIndexUniqueIdentifierWrapper
             identifierWrapper : tableSegmentUniqueIdentifiersWrapper) {
      BlockDataMap cacheable = (BlockDataMap) lruCache.get(
          identifierWrapper.getTableBlockIndexUniqueIdentifier().getUniqueTableSegmentIdentifier());
      // entry may already have been evicted from the LRU cache; guard against NPE
      if (cacheable != null) {
        cacheable.clear();
      }
    }
  }
}
| |
/**
* Copyright (C) 2009 GIP RECIA http://www.recia.fr
* @Author (C) 2009 GIP RECIA <contact@recia.fr>
* @Contributor (C) 2009 SOPRA http://www.sopragroup.com/
* @Contributor (C) 2011 Pierre Legay <pierre.legay@recia.fr>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.shared_impl.renderkit.html;
import org.apache.myfaces.shared_impl.renderkit.JSFAttr;
import org.apache.myfaces.shared_impl.renderkit.RendererUtils;
import org.apache.myfaces.shared_impl.renderkit.html.HTML;
import org.apache.myfaces.shared_impl.renderkit.html.HtmlMessageRendererBase;
import org.apache.myfaces.shared_impl.util.NullIterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.faces.application.FacesMessage;
import javax.faces.component.UIComponent;
import javax.faces.component.UIMessages;
import javax.faces.component.html.HtmlMessages;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
/**
* @author Manfred Geiler (latest modification by $Author: grantsmith $)
* @version $Revision: 472618 $ $Date: 2006-11-08 21:06:54 +0100 (Mi, 08 Nov 2006) $
*/
public abstract class HtmlMessagesRendererBase
extends HtmlMessageRendererBase
{
private static final Log log = LogFactory.getLog(HtmlMessagesRendererBase.class);
protected static final String LAYOUT_LIST = "list";
protected static final String LAYOUT_TABLE = "table";
/**
 * Renders all queued faces messages for the given component, choosing the
 * markup from the component's layout attribute: "table" renders an HTML
 * table, anything else (including no layout) falls back to a list.
 *
 * @param facesContext current faces context
 * @param messages     the UIMessages component being rendered
 * @throws IOException if writing to the response fails
 */
protected void renderMessages(FacesContext facesContext,
                              UIComponent messages)
    throws IOException
{
    MessagesIterator messagesIterator
            = new MessagesIterator(facesContext, isGlobalOnly(messages));

    // nothing queued: render no markup at all
    if (!messagesIterator.hasNext())
    {
        return;
    }

    String layout = getLayout(messages);
    if (layout == null)
    {
        if (log.isDebugEnabled())
        {
            log.debug("No messages layout given, using default layout 'list'.");
        }
        renderList(facesContext, messages, messagesIterator);
    }
    else if (layout.equalsIgnoreCase(LAYOUT_TABLE))
    {
        renderTable(facesContext, messages, messagesIterator);
    }
    else
    {
        // unknown layouts fall back to the list rendering with a warning
        if (log.isWarnEnabled() && !layout.equalsIgnoreCase(LAYOUT_LIST))
        {
            log.warn("Unsupported messages layout '" + layout + "' - using default layout 'list'.");
        }
        renderList(facesContext, messages, messagesIterator);
    }
}
/**
 * Renders the remaining messages of the iterator as an HTML unordered
 * list, one {@code <li>} element per message.
 *
 * @param facesContext     current faces context
 * @param messages         the UIMessages component being rendered
 * @param messagesIterator iterator over the messages still to render
 * @throws IOException if writing to the response fails
 */
protected void renderList(FacesContext facesContext,
                          UIComponent messages,
                          MessagesIterator messagesIterator)
    throws IOException
{
    ResponseWriter writer = facesContext.getResponseWriter();

    writer.startElement(HTML.UL_ELEM, messages);
    HtmlRendererUtils.writeIdIfNecessary(writer, messages, facesContext);
    while(messagesIterator.hasNext())
    {
        // use the imported HTML constant consistently instead of the
        // fully qualified class name used only for LI_ELEM before
        writer.startElement(HTML.LI_ELEM, messages);
        renderSingleFacesMessage(facesContext,
                messages,
                (FacesMessage)messagesIterator.next(),
                messagesIterator.getClientId());
        writer.endElement(HTML.LI_ELEM);
    }
    writer.endElement(HTML.UL_ELEM);
}
protected void renderTable(FacesContext facesContext,
UIComponent messages,
MessagesIterator messagesIterator)
throws IOException
{
ResponseWriter writer = facesContext.getResponseWriter();
writer.startElement(HTML.TABLE_ELEM, messages);
HtmlRendererUtils.writeIdIfNecessary(writer, messages, facesContext);
while(messagesIterator.hasNext())
{
writer.startElement(HTML.TR_ELEM, messages);
writer.startElement(HTML.TD_ELEM, messages);
renderSingleFacesMessage(facesContext,
messages,
(FacesMessage)messagesIterator.next(),
messagesIterator.getClientId());
writer.endElement(HTML.TD_ELEM);
writer.endElement(HTML.TR_ELEM);
}
writer.endElement(HTML.TABLE_ELEM);
}
public static String[] getStyleAndStyleClass(UIComponent messages,
FacesMessage.Severity severity)
{
String style = null;
String styleClass = null;
if (messages instanceof HtmlMessages)
{
if (severity == FacesMessage.SEVERITY_INFO)
{
style = ((HtmlMessages)messages).getInfoStyle();
styleClass = ((HtmlMessages)messages).getInfoClass();
}
else if (severity == FacesMessage.SEVERITY_WARN)
{
style = ((HtmlMessages)messages).getWarnStyle();
styleClass = ((HtmlMessages)messages).getWarnClass();
}
else if (severity == FacesMessage.SEVERITY_ERROR)
{
style = ((HtmlMessages)messages).getErrorStyle();
styleClass = ((HtmlMessages)messages).getErrorClass();
}
else if (severity == FacesMessage.SEVERITY_FATAL)
{
style = ((HtmlMessages)messages).getFatalStyle();
styleClass = ((HtmlMessages)messages).getFatalClass();
}
if (style == null)
{
style = ((HtmlMessages)messages).getStyle();
}
if (styleClass == null)
{
styleClass = ((HtmlMessages)messages).getStyleClass();
}
}
else
{
Map attr = messages.getAttributes();
if (severity == FacesMessage.SEVERITY_INFO)
{
style = (String)attr.get(org.apache.myfaces.shared_impl.renderkit.JSFAttr.INFO_STYLE_ATTR);
styleClass = (String)attr.get(org.apache.myfaces.shared_impl.renderkit.JSFAttr.INFO_CLASS_ATTR);
}
else if (severity == FacesMessage.SEVERITY_WARN)
{
style = (String)attr.get(org.apache.myfaces.shared_impl.renderkit.JSFAttr.WARN_STYLE_ATTR);
styleClass = (String)attr.get(org.apache.myfaces.shared_impl.renderkit.JSFAttr.WARN_CLASS_ATTR);
}
else if (severity == FacesMessage.SEVERITY_ERROR)
{
style = (String)attr.get(org.apache.myfaces.shared_impl.renderkit.JSFAttr.ERROR_STYLE_ATTR);
styleClass = (String)attr.get(org.apache.myfaces.shared_impl.renderkit.JSFAttr.ERROR_CLASS_ATTR);
}
else if (severity == FacesMessage.SEVERITY_FATAL)
{
style = (String)attr.get(org.apache.myfaces.shared_impl.renderkit.JSFAttr.FATAL_STYLE_ATTR);
styleClass = (String)attr.get(JSFAttr.FATAL_CLASS_ATTR);
}
if (style == null)
{
style = (String)attr.get(org.apache.myfaces.shared_impl.renderkit.JSFAttr.STYLE_CLASS_ATTR);
}
if (styleClass == null)
{
styleClass = (String)attr.get(org.apache.myfaces.shared_impl.renderkit.JSFAttr.STYLE_CLASS_ATTR);
}
}
return new String[] {style, styleClass};
}
protected String getTitle(UIComponent component)
{
if (component instanceof HtmlMessages)
{
return ((HtmlMessages)component).getTitle();
}
else
{
return (String)component.getAttributes().get(org.apache.myfaces.shared_impl.renderkit.JSFAttr.TITLE_ATTR);
}
}
protected boolean isTooltip(UIComponent component)
{
if (component instanceof HtmlMessages)
{
return ((HtmlMessages)component).isTooltip();
}
else
{
return org.apache.myfaces.shared_impl.renderkit.RendererUtils.getBooleanAttribute(component, org.apache.myfaces.shared_impl.renderkit.JSFAttr.TOOLTIP_ATTR, false);
}
}
protected boolean isShowSummary(UIComponent component)
{
if (component instanceof UIMessages)
{
return ((UIMessages)component).isShowSummary();
}
else
{
return RendererUtils.getBooleanAttribute(component, JSFAttr.SHOW_SUMMARY_ATTR, false);
}
}
protected boolean isShowDetail(UIComponent component)
{
if (component instanceof UIMessages)
{
return ((UIMessages)component).isShowDetail();
}
else
{
return org.apache.myfaces.shared_impl.renderkit.RendererUtils.getBooleanAttribute(component, JSFAttr.SHOW_DETAIL_ATTR, false);
}
}
protected boolean isGlobalOnly(UIComponent component)
{
if (component instanceof UIMessages)
{
return ((UIMessages)component).isGlobalOnly();
}
else
{
return org.apache.myfaces.shared_impl.renderkit.RendererUtils.getBooleanAttribute(component, JSFAttr.GLOBAL_ONLY_ATTR, false);
}
}
protected String getLayout(UIComponent component)
{
if (component instanceof HtmlMessages)
{
return ((HtmlMessages)component).getLayout();
}
else
{
return (String)component.getAttributes().get(JSFAttr.LAYOUT_ATTR);
}
}
private static class MessagesIterator implements Iterator
{
private FacesContext _facesContext;
private Iterator _globalMessagesIterator;
private Iterator _clientIdsWithMessagesIterator;
private Iterator _componentMessagesIterator = null;
private String _clientId = null;
public MessagesIterator(FacesContext facesContext, boolean globalOnly)
{
_facesContext = facesContext;
if (globalOnly)
{
_globalMessagesIterator = facesContext.getMessages(null);
_clientIdsWithMessagesIterator = NullIterator.instance();
}
else
{
_globalMessagesIterator = org.apache.myfaces.shared_impl.util.NullIterator.instance();
_clientIdsWithMessagesIterator = facesContext.getClientIdsWithMessages();
}
_componentMessagesIterator = null;
_clientId = null;
}
public boolean hasNext()
{
return _globalMessagesIterator.hasNext() ||
_clientIdsWithMessagesIterator.hasNext() ||
(_componentMessagesIterator != null && _componentMessagesIterator.hasNext());
}
public Object next()
{
if (_globalMessagesIterator.hasNext())
{
return _globalMessagesIterator.next();
}
else if (_componentMessagesIterator != null && _componentMessagesIterator.hasNext())
{
return _componentMessagesIterator.next();
}
else
{
_clientId = (String)_clientIdsWithMessagesIterator.next();
_componentMessagesIterator = _facesContext.getMessages(_clientId);
return _componentMessagesIterator.next();
}
}
public void remove()
{
throw new UnsupportedOperationException(this.getClass().getName() + " UnsupportedOperationException");
}
public String getClientId()
{
return _clientId;
}
}
}
| |
/*
* Copyright 2011-2013 David Karnok
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hu.akarnokd.reactive4java.util;
import hu.akarnokd.reactive4java.base.Action0;
import hu.akarnokd.reactive4java.base.Action2;
import hu.akarnokd.reactive4java.base.Func0;
import hu.akarnokd.reactive4java.base.Func2;
import hu.akarnokd.reactive4java.base.Pred2;
import java.util.Comparator;
import java.util.concurrent.Callable;
import javax.annotation.Nonnull;
/**
* Class holding a Func2 object and providing various relevant methods
* of the {@code Functions} utility class as instance methods.
* <p>The class itself is of type {@code Func2<T, U, V>} and can be used where this type is needed.</p>
* @author akarnokd, 2012.02.02.
* @param <T> the function first parameter type
* @param <U> the function second parameter type
* @param <V> the return type
* @since 0.96.1
*/
public class Func2Builder<T, U, V> implements Func2<T, U, V> {
    /** The wrapped function. */
    @Nonnull
    protected final Func2<T, U, V> f;
    /**
     * Construct an instance of this builder with the wrapped function.
     * @param f the function to wrap
     */
    protected Func2Builder(@Nonnull Func2<T, U, V> f) {
        this.f = f;
    }
    /**
     * Wrap the given function into a function builder instance.
     * @param <T> the function first parameter type
     * @param <U> the function second parameter type
     * @param <V> the return type
     * @param f the function to wrap
     * @return the function builder
     */
    @Nonnull
    public static <T, U, V> Func2Builder<T, U, V> from(@Nonnull Func2<T, U, V> f) {
        return new Func2Builder<T, U, V>(f);
    }
    /**
     * Wraps the given Func0 function into a Func2 object which ignores
     * both of its parameters.
     * @param <T> the function first parameter type
     * @param <U> the function second parameter type
     * @param <V> the return type
     * @param f the function to wrap
     * @return the function builder
     */
    @Nonnull
    public static <T, U, V> Func2Builder<T, U, V> from(@Nonnull Func0<V> f) {
        return from(Functions.<T, U, V>asFunc2(f));
    }
    /**
     * Wraps the given value and the function returns this value
     * regardless of the parameters.
     * @param <T> the function parameter type, irrelevant
     * @param <U> the function parameter type, irrelevant
     * @param <V> the return type
     * @param value the value to return
     * @return the function builder
     */
    @Nonnull
    public static <T, U, V> Func2Builder<T, U, V> from(final V value) {
        return from(Functions.<T, U, V>constant2(value));
    }
    @Override
    public V invoke(T param1, U param2) {
        return f.invoke(param1, param2);
    }
    /**
     * @param <T> the parameter type (irrelevant)
     * @param <U> the parameter type (irrelevant)
     * @return a function builder which returns always true.
     */
    @Nonnull
    public static <T, U> Func2Builder<T, U, Boolean> alwaysTrue() {
        return from(Functions.<T, U>alwaysTrue2());
    }
    /**
     * @param <T> the parameter type (irrelevant)
     * @param <U> the parameter type (irrelevant)
     * @return a function builder which returns always false.
     */
    @Nonnull
    public static <T, U> Func2Builder<T, U, Boolean> alwaysFalse() {
        return from(Functions.<T, U>alwaysFalse2());
    }
    /**
     * Convert this function into a zero parameter function builder by fixing the parameters
     * to the given values.
     * @param param1 the fixed first parameter value
     * @param param2 the fixed second parameter value
     * @return the function builder
     */
    @Nonnull
    public Func0Builder<V> toFunc0(final T param1, final U param2) {
        return Func0Builder.from(new Func0<V>() {
            @Override
            public V invoke() {
                return f.invoke(param1, param2);
            }
        });
    }
    /**
     * Returns a function which takes the logical not of the wrapped boolean returning function.
     * <p><b>Note: this function will throw a ClassCastException if the current function return type
     * is not Boolean.</b></p>
     * @return the function builder.
     */
    @SuppressWarnings("unchecked")
    @Nonnull
    public Func2Builder<T, U, Boolean> not() {
        return from(Functions.not((Func2<T, U, Boolean>)f));
    }
    /**
     * Returns a function which produces the logical AND value of this and the other function.
     * <p><b>Note: this function will throw a ClassCastException if the current function return type
     * is not Boolean.</b></p>
     * @param func the function to AND with
     * @return the function builder
     */
    @SuppressWarnings("unchecked")
    @Nonnull
    public Func2Builder<T, U, Boolean> and(@Nonnull final Func2<? super T, ? super U, Boolean> func) {
        return from(Functions.and((Func2<T, U, Boolean>)f, func));
    }
    /**
     * Returns a function which produces the logical AND value of this and the other function.
     * <p><b>Note: this function will throw a ClassCastException if the current function return type
     * is not Boolean.</b></p>
     * @param func the function to AND with
     * @return the function builder
     */
    @Nonnull
    public Func2Builder<T, U, Boolean> and(@Nonnull final Func0<Boolean> func) {
        return from(new Pred2<T, U>() {
            @Override
            public Boolean invoke(T param1, U param2) {
                return ((Boolean)f.invoke(param1, param2)) && func.invoke();
            }
        });
    }
    /**
     * Returns a function which produces the logical OR value of this and the other function.
     * <p><b>Note: this function will throw a ClassCastException if the current function return type
     * is not Boolean.</b></p>
     * @param func the function to OR with
     * @return the function builder
     */
    @SuppressWarnings("unchecked")
    @Nonnull
    public Func2Builder<T, U, Boolean> or(
            @Nonnull Func2<? super T, ? super U, Boolean> func) {
        return from(Functions.or((Func2<T, U, Boolean>)f, func));
    }
    /**
     * Returns a function which produces the logical OR value of this and the other function.
     * (Javadoc fixed: previously claimed AND by copy/paste.)
     * <p><b>Note: this function will throw a ClassCastException if the current function return type
     * is not Boolean.</b></p>
     * @param func the function to OR with
     * @return the function builder
     */
    @Nonnull
    public Func2Builder<T, U, Boolean> or(@Nonnull final Func0<Boolean> func) {
        return from(new Pred2<T, U>() {
            @Override
            public Boolean invoke(T param1, U param2) {
                return ((Boolean)f.invoke(param1, param2)) || func.invoke();
            }
        });
    }
    /**
     * Returns a function which produces the logical XOR value of this and the other function.
     * <p><b>Note: this function will throw a ClassCastException if the current function return type
     * is not Boolean.</b></p>
     * @param func the function to XOR with
     * @return the function builder
     */
    @SuppressWarnings("unchecked")
    @Nonnull
    public Func2Builder<T, U, Boolean> xor(
            @Nonnull Func2<? super T, ? super U, Boolean> func) {
        return from(Functions.xor((Func2<T, U, Boolean>)f, func));
    }
    /**
     * Returns a function which produces the logical XOR value of this and the other function.
     * (Javadoc fixed: previously claimed AND by copy/paste.)
     * <p><b>Note: this function will throw a ClassCastException if the current function return type
     * is not Boolean.</b></p>
     * @param func the function to XOR with
     * @return the function builder
     */
    @Nonnull
    public Func2Builder<T, U, Boolean> xor(@Nonnull final Func0<Boolean> func) {
        return from(new Pred2<T, U>() {
            @Override
            public Boolean invoke(T param1, U param2) {
                return ((Boolean)f.invoke(param1, param2)) ^ func.invoke();
            }
        });
    }
    /**
     * Construct a function which invokes the given action and
     * returns a constant value.
     * @param <T> the function first parameter type
     * @param <U> the function second parameter type
     * @param <V> the return type
     * @param action the action to invoke on each function invocation
     * @param result the return value by this function
     * @return the function builder
     */
    @Nonnull
    public static <T, U, V> Func2Builder<T, U, V> from(@Nonnull Action0 action, V result) {
        return from(Functions.<T, U, V>asFunc2(action, result));
    }
    /**
     * Construct a function which invokes the given runnable and
     * returns a constant value.
     * @param <T> the function first parameter type
     * @param <U> the function second parameter type
     * @param <V> the return type
     * @param run the runnable to wrap
     * @param result the return value by this function
     * @return the function builder
     */
    @Nonnull
    public static <T, U, V> Func2Builder<T, U, V> from(
            @Nonnull Runnable run, V result) {
        return from(Functions.<T, U, V>asFunc2(run, result));
    }
    /**
     * Construct a function which invokes the given action and
     * returns a constant value.
     * @param <T> the function first parameter type
     * @param <U> the function second parameter type
     * @param <V> the return type
     * @param action the action to invoke on each function invocation
     * @param result the return value by this function
     * @return the function builder
     */
    @Nonnull
    public static <T, U, V> Func2Builder<T, U, V> from(
            @Nonnull Action2<? super T, ? super U> action, V result) {
        return from(Functions.<T, U, V>asFunc2(action, result));
    }
    /**
     * Wrap the given callable into a new builder; its parameters are ignored.
     * @param <T> the function first parameter type
     * @param <U> the function second parameter type
     * @param <V> the return type
     * @param f the callable to wrap
     * @return the function builder
     */
    @Nonnull
    public static <T, U, V> Func2Builder<T, U, V> from(
            @Nonnull Callable<? extends V> f) {
        return from(Functions.<T, U, V>asFunc2(f));
    }
    /**
     * Wraps the given comparator into a 2 parameter function.
     * @param <T> the value type
     * @param comp the comparator to wrap
     * @return the function builder
     */
    @Nonnull
    public static <T> Func2Builder<T, T, Integer> from(
            @Nonnull Comparator<? super T> comp) {
        return from(Functions.<T>asFunc2(comp));
    }
    /**
     * Convert this two parameter function into a comparator.
     * <p><b>Note, if the parameter types are not the same or the return type is not Integer
     * you might expect to get a ClassCastException.</b></p>
     * @return the comparator representing this function
     */
    @Nonnull
    public Comparator<T> toComparator() {
        return new Comparator<T>() {
            @Override
            @SuppressWarnings("unchecked")
            public int compare(T o1, T o2) {
                return (Integer)f.invoke(o1, (U)o2);
            }
        };
    }
    /**
     * Returns a matrix indexer function.
     * @param <T> the element type
     * @param matrix the matrix to index
     * @return the function builder
     */
    @Nonnull
    public static <T> Func2Builder<Integer, Integer, T> from(
            @Nonnull T[][] matrix) {
        return from(Functions.asFunc2(matrix));
    }
    /**
     * Returns a matrix indexer function.
     * @param matrix the matrix to index
     * @return the function builder
     */
    @Nonnull
    public static Func2Builder<Integer, Integer, Integer> from(
            @Nonnull int[][] matrix) {
        return from(Functions.asFunc2(matrix));
    }
    /**
     * Returns a matrix indexer function.
     * @param matrix the matrix to index
     * @return the function builder
     */
    @Nonnull
    public static Func2Builder<Integer, Integer, Double> from(
            @Nonnull double[][] matrix) {
        return from(Functions.asFunc2(matrix));
    }
    /**
     * Returns a matrix indexer function.
     * @param matrix the matrix to index
     * @return the function builder
     */
    @Nonnull
    public static Func2Builder<Integer, Integer, Long> from(
            @Nonnull long[][] matrix) {
        return from(Functions.asFunc2(matrix));
    }
}
| |
/*
* Copyright (c) 2014, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.oryx.app.speed.als;
import javax.xml.bind.JAXBException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.typesafe.config.Config;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.dmg.pmml.PMML;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;
import com.cloudera.oryx.api.KeyMessage;
import com.cloudera.oryx.api.speed.SpeedModelManager;
import com.cloudera.oryx.app.als.ALSUtils;
import com.cloudera.oryx.app.common.fn.MLFunctions;
import com.cloudera.oryx.app.pmml.AppPMMLUtils;
import com.cloudera.oryx.common.math.VectorMath;
import com.cloudera.oryx.common.pmml.PMMLUtils;
import com.cloudera.oryx.common.text.TextUtils;
import com.cloudera.oryx.common.math.SingularMatrixSolverException;
import com.cloudera.oryx.common.math.Solver;
import com.cloudera.oryx.lambda.Functions;
/**
* Implementation of {@link SpeedModelManager} that maintains and updates an ALS model in memory.
*/
public final class ALSSpeedModelManager implements SpeedModelManager<String,String,String> {
  private static final Logger log = LoggerFactory.getLogger(ALSSpeedModelManager.class);
  private static final ObjectMapper MAPPER = new ObjectMapper();
  // In-memory ALS model; null until the first "MODEL" message is consumed.
  private ALSSpeedModel model;
  // Whether strengths are implicit-feedback values (oryx.als.implicit).
  private final boolean implicit;
  // If true, emitted updates omit the "known items" list (oryx.als.no-known-items).
  private final boolean noKnownItems;
  public ALSSpeedModelManager(Config config) {
    implicit = config.getBoolean("oryx.als.implicit");
    noKnownItems = config.getBoolean("oryx.als.no-known-items");
  }
  /**
   * Consumes messages from the update topic. Two keys are handled:
   * "UP" carries a JSON list ["X"|"Y", id, vector] with one user ("X") or
   * item ("Y") vector update; "MODEL" carries a PMML model whose "features"
   * extension decides whether the current in-memory model is kept (and
   * pruned to the model's X/Y IDs) or replaced with a fresh one.
   *
   * @param updateIterator stream of key/message pairs from the update topic
   * @throws IOException if a message cannot be parsed
   */
  @Override
  public void consume(Iterator<KeyMessage<String,String>> updateIterator) throws IOException {
    while (updateIterator.hasNext()) {
      KeyMessage<String,String> km = updateIterator.next();
      String key = km.getKey();
      String message = km.getMessage();
      switch (key) {
        case "UP":
          if (model == null) {
            continue; // No model to interpret with yet, so skip it
          }
          List<?> update = MAPPER.readValue(message, List.class);
          // Update
          String id = update.get(1).toString();
          float[] vector = MAPPER.convertValue(update.get(2), float[].class);
          switch (update.get(0).toString()) {
            case "X":
              model.setUserVector(id, vector);
              break;
            case "Y":
              model.setItemVector(id, vector);
              break;
            default:
              throw new IllegalStateException("Bad update " + message);
          }
          break;
        case "MODEL":
          // New model
          PMML pmml;
          try {
            pmml = PMMLUtils.fromString(message);
          } catch (JAXBException e) {
            throw new IOException(e);
          }
          int features = Integer.parseInt(AppPMMLUtils.getExtensionValue(pmml, "features"));
          if (model == null) {
            log.info("No previous model; creating new model");
            model = new ALSSpeedModel(features);
          } else if (features != model.getFeatures()) {
            log.warn("# features has changed! removing old model and creating new one");
            model = new ALSSpeedModel(features);
          } else {
            log.info("Updating current model");
            // First, remove users/items no longer in the model
            List<String> XIDs = AppPMMLUtils.getExtensionContent(pmml, "XIDs");
            List<String> YIDs = AppPMMLUtils.getExtensionContent(pmml, "YIDs");
            model.pruneX(XIDs);
            model.pruneY(YIDs);
          }
          break;
        default:
          throw new IllegalStateException("Unexpected key " + key);
      }
    }
  }
  /**
   * Computes the user/item vector updates implied by new interaction data
   * and returns them serialized as JSON update messages. Returns nothing
   * until a model exists, or when the model's normal matrices are singular.
   *
   * @param newData new input records (values parsed as user,item,strength)
   * @return JSON-encoded "X"/"Y" vector updates for the update topic
   */
  @Override
  public Iterable<String> buildUpdates(JavaPairRDD<String,String> newData) {
    if (model == null) {
      return Collections.emptyList();
    }
    // Order by timestamp and parse as tuples
    JavaRDD<String> sortedValues =
        newData.values().sortBy(MLFunctions.TO_TIMESTAMP_FN, true, newData.partitions().size());
    JavaPairRDD<Tuple2<String,String>,Double> tuples = sortedValues.mapToPair(TO_TUPLE_FN);
    JavaPairRDD<Tuple2<String,String>,Double> aggregated;
    if (implicit) {
      // See comments in ALSUpdate for explanation of how deletes are handled by this.
      aggregated = tuples.groupByKey().mapValues(MLFunctions.SUM_WITH_NAN);
    } else {
      // For non-implicit, last wins.
      aggregated = tuples.foldByKey(Double.NaN, Functions.<Double>last());
    }
    // NaN values mark deletes and are filtered out before solving.
    Collection<UserItemStrength> input = aggregated
        .filter(MLFunctions.<Tuple2<String,String>>notNaNValue())
        .map(TO_UIS_FN).collect();
    Solver XTXsolver;
    Solver YTYsolver;
    try {
      XTXsolver = model.getXTXSolver();
      YTYsolver = model.getYTYSolver();
    } catch (SingularMatrixSolverException smse) {
      // Can't solve against a singular normal matrix; skip this batch.
      return Collections.emptyList();
    }
    Collection<String> result = new ArrayList<>();
    for (UserItemStrength uis : input) {
      String user = uis.getUser();
      String item = uis.getItem();
      double value = uis.getStrength();
      // Xu is the current row u in the X user-feature matrix
      float[] Xu = model.getUserVector(user);
      // Yi is the current row i in the Y item-feature matrix
      float[] Yi = model.getItemVector(item);
      double[] newXu = newVector(YTYsolver, value, Xu, Yi);
      // Similarly for Y vs X
      double[] newYi = newVector(XTXsolver, value, Yi, Xu);
      if (newXu != null) {
        result.add(toUpdateJSON("X", user, newXu, item));
      }
      if (newYi != null) {
        result.add(toUpdateJSON("Y", item, newYi, user));
      }
    }
    return result;
  }
  /**
   * Computes an updated vector Xu' from one new interaction, or null when
   * no update is possible (item vector unknown, or target Qui is NaN).
   * The same routine serves the symmetric item-vector case with arguments
   * swapped.
   *
   * @param solver solver over the normal matrix of the opposite factor
   * @param value  new interaction strength
   * @param Xu     current vector being updated; may be null if unknown
   * @param Yi     opposite-side vector; may be null if unknown
   * @return the updated vector, or null
   */
  private double[] newVector(Solver solver, double value, float[] Xu, float[] Yi) {
    double[] newXu = null;
    if (Yi != null) {
      // Let Qui = Xu * (Yi)^t -- it's the current estimate of user-item interaction
      // in Q = X * Y^t
      // 0.5 reflects a "don't know" state
      double currentValue = Xu == null ? 0.5 : VectorMath.dot(Xu, Yi);
      double targetQui = computeTargetQui(value, currentValue);
      // The entire vector Qu' is just 0, with Qui' in position i
      // More generally we are looking for Qu' = Xu' * Y^t
      if (!Double.isNaN(targetQui)) {
        // Solving Qu' = Xu' * Y^t for Xu', now that we have Qui', as:
        // Qu' * Y * (Y^t * Yi)^-1 = Xu'
        // Qu' is 0 except for one value at position i, so it's really (Qui')*Yi
        float[] QuiYi = Yi.clone();
        for (int i = 0; i < QuiYi.length; i++) {
          QuiYi[i] *= targetQui;
        }
        newXu = solver.solveFToD(QuiYi);
      }
    }
    return newXu;
  }
  /**
   * Serializes one vector update as JSON: [matrix, ID, vector], plus a
   * singleton list with the other side's ID unless no-known-items is set.
   */
  private String toUpdateJSON(String matrix, String ID, double[] vector, String otherID) {
    List<?> args;
    if (noKnownItems) {
      args = Arrays.asList(matrix, ID, vector);
    } else {
      args = Arrays.asList(matrix, ID, vector, Collections.singletonList(otherID));
    }
    return TextUtils.joinJSON(args);
  }
  @Override
  public void close() {
    // do nothing
  }
  /**
   * Returns the target value Qui' that the new interaction should drive the
   * estimated user-item value toward.
   */
  private double computeTargetQui(double value, double currentValue) {
    // We want Qui to change based on value. What's the target value, Qui'?
    // Then we find a new vector Xu' such that Qui' = Xu' * (Yi)^t
    if (implicit) {
      return ALSUtils.implicitTargetQui(value, currentValue);
    } else {
      // Non-implicit -- value is supposed to be the new value
      return value;
    }
  }
  // Parses one delimited input line into ((user, item), strength).
  private static final PairFunction<String,Tuple2<String,String>,Double> TO_TUPLE_FN =
      new PairFunction<String,Tuple2<String,String>,Double>() {
        @Override
        public Tuple2<Tuple2<String,String>,Double> call(String line) throws Exception {
          String[] tokens = MLFunctions.PARSE_FN.call(line);
          String user = tokens[0];
          String item = tokens[1];
          Double strength = Double.valueOf(tokens[2]);
          return new Tuple2<>(new Tuple2<>(user, item), strength);
        }
      };
  // Converts an aggregated ((user, item), strength) tuple to a UserItemStrength.
  private static final Function<Tuple2<Tuple2<String,String>,Double>,UserItemStrength> TO_UIS_FN =
      new Function<Tuple2<Tuple2<String, String>, Double>, UserItemStrength>() {
        @Override
        public UserItemStrength call(Tuple2<Tuple2<String,String>,Double> tuple) {
          return new UserItemStrength(tuple._1()._1(), tuple._1()._2(), tuple._2().floatValue());
        }
      };
}
| |
package jgrid.sunflow.renderer.task;
import java.io.Serializable;
import jgrid.sunflow.data.Bucket;
import org.sunflow.PluginRegistry;
import org.sunflow.core.BucketOrder;
import org.sunflow.core.Display;
import org.sunflow.core.Filter;
import org.sunflow.core.Instance;
import org.sunflow.core.IntersectionState;
import org.sunflow.core.Options;
import org.sunflow.core.Scene;
import org.sunflow.core.Shader;
import org.sunflow.core.ShadingState;
import org.sunflow.core.bucket.BucketOrderFactory;
import org.sunflow.core.filter.BoxFilter;
import org.sunflow.image.Color;
import org.sunflow.image.formats.GenericBitmap;
import org.sunflow.math.MathUtils;
import org.sunflow.math.QMC;
import org.sunflow.system.UI;
import org.sunflow.system.UI.Module;
/**
* A wrapper class for the Sunflow BucketRenderer.
*
* @author Szabolcs Pota
* @version 0.1.2
* @since 0.1
*/
public class BucketRendererTask implements RendererType, Serializable {
    // Scene being rendered; assigned in prepare().
    private Scene scene;
    // NOTE(review): never assigned in the visible code — possibly vestigial; confirm.
    private Display display;
    // resolution (pixels)
    private int imageWidth;
    private int imageHeight;
    // bucketing
    private String bucketOrderName;
    private BucketOrder bucketOrder;
    // bucket edge length in pixels (clamped to [16, 512] in prepare())
    private int bucketSize;
    private int bucketCounter;
    // flattened (x, y) bucket traversal sequence from the bucket order
    private int[] bucketCoords;
    private boolean dumpBuckets;
    // anti-aliasing
    private int minAADepth;
    private int maxAADepth;
    private int superSampling;
    private float contrastThreshold;
    private boolean jitter;
    private boolean displayAA;
    // derived quantities (computed in prepare() from the AA settings)
    private double invSuperSampling;
    private int subPixelSize;
    private int minStepSize;
    private int maxStepSize;
    private int sigmaOrder;
    private int sigmaLength;
    private float thresh;
    private boolean useJitter;
    // filtering
    private String filterName;
    private Filter filter;
    // filter extent in subpixels, and filter half-size in pixels
    private int fs;
    private float fhs;
    // patched: cooperative cancellation flag checked inside sample loops
    private boolean stop = false;
    // -------
    /**
     * Creates a bucket renderer with default settings; actual values are
     * fetched from the scene options later, in prepare().
     *
     * @param mainTask the owning renderer task
     *                 (NOTE(review): not used in this constructor — confirm
     *                 whether it is needed elsewhere or can be dropped)
     */
    public BucketRendererTask(RendererTask mainTask) {
        bucketSize = 32;
        bucketOrderName = "hilbert";
        displayAA = false;
        contrastThreshold = 0.1f;
        filterName = "box";
        jitter = false; // off by default
        dumpBuckets = false; // for debugging only - not user settable
    }
public void stop() {
stop = true;
}
    /**
     * Reads render settings from the scene options, clamps them to valid
     * ranges, derives the sub-pixel/stepping quantities used during bucket
     * rendering, and logs the effective configuration.
     *
     * @param options scene options to read settings from
     * @param scene   the scene to render
     * @param w       image width in pixels
     * @param h       image height in pixels
     * @return always true in the visible code
     */
    public boolean prepare(Options options, Scene scene, int w, int h) {
        // patched: reset the cancellation flag so the task can be reused
        stop = false;
        // -------
        this.scene = scene;
        imageWidth = w;
        imageHeight = h;
        // fetch options (field values act as defaults when an option is absent)
        bucketSize = options.getInt("bucket.size", bucketSize);
        bucketOrderName = options.getString("bucket.order", bucketOrderName);
        minAADepth = options.getInt("aa.min", minAADepth);
        maxAADepth = options.getInt("aa.max", maxAADepth);
        superSampling = options.getInt("aa.samples", superSampling);
        displayAA = options.getBoolean("aa.display", displayAA);
        jitter = options.getBoolean("aa.jitter", jitter);
        contrastThreshold = options.getFloat("aa.contrast", contrastThreshold);
        // limit bucket size and compute number of buckets in each direction
        bucketSize = MathUtils.clamp(bucketSize, 16, 512);
        int numBucketsX = (imageWidth + bucketSize - 1) / bucketSize;
        int numBucketsY = (imageHeight + bucketSize - 1) / bucketSize;
        bucketOrder = BucketOrderFactory.create(bucketOrderName);
        bucketCoords = bucketOrder.getBucketSequence(numBucketsX, numBucketsY);
        // validate AA options (negative depth = fewer than 1 sample per pixel)
        minAADepth = MathUtils.clamp(minAADepth, -4, 5);
        maxAADepth = MathUtils.clamp(maxAADepth, minAADepth, 5);
        superSampling = MathUtils.clamp(superSampling, 1, 256);
        invSuperSampling = 1.0 / superSampling;
        // compute AA stepping sizes
        subPixelSize = (maxAADepth > 0) ? (1 << maxAADepth) : 1;
        minStepSize = maxAADepth >= 0 ? 1 : 1 << (-maxAADepth);
        if (minAADepth == maxAADepth) {
            maxStepSize = minStepSize;
        } else {
            maxStepSize = minAADepth > 0 ? 1 << minAADepth : subPixelSize << (-minAADepth);
        }
        // jitter is only meaningful when supersampling within a pixel
        useJitter = jitter && maxAADepth > 0;
        // compute anti-aliasing contrast thresholds
        contrastThreshold = MathUtils.clamp(contrastThreshold, 0, 1);
        thresh = contrastThreshold * (float) Math.pow(2.0f, minAADepth);
        // read filter settings from scene
        filterName = options.getString("filter", filterName);
        filter = PluginRegistry.filterPlugins.createObject(filterName);
        // adjust filter
        if (filter == null) {
            UI.printWarning(Module.BCKT, "Unrecognized filter type: \"%s\" - defaulting to box", filterName);
            filter = new BoxFilter();
            filterName = "box";
        }
        fhs = filter.getSize() * 0.5f;
        fs = (int) Math.ceil(subPixelSize * (fhs - 0.5f));
        // prepare QMC sampling
        sigmaOrder = Math.min(QMC.MAX_SIGMA_ORDER, Math.max(0, maxAADepth) + 13); // FIXME: how big should the table be?
        sigmaLength = 1 << sigmaOrder;
        UI.printInfo(Module.BCKT, "Bucket renderer settings:");
        UI.printInfo(Module.BCKT, "  * Resolution:         %dx%d", imageWidth, imageHeight);
        UI.printInfo(Module.BCKT, "  * Bucket size:        %d", bucketSize);
        UI.printInfo(Module.BCKT, "  * Number of buckets:  %dx%d", numBucketsX, numBucketsY);
        if (minAADepth != maxAADepth) {
            UI.printInfo(Module.BCKT, "  * Anti-aliasing:      %s -> %s (adaptive)", aaDepthToString(minAADepth), aaDepthToString(maxAADepth));
        } else {
            UI.printInfo(Module.BCKT, "  * Anti-aliasing:      %s (fixed)", aaDepthToString(minAADepth));
        }
        UI.printInfo(Module.BCKT, "  * Rays per sample:    %d", superSampling);
        UI.printInfo(Module.BCKT, "  * Subpixel jitter:    %s", useJitter ? "on" : (jitter ? "auto-off" : "off"));
        UI.printInfo(Module.BCKT, "  * Contrast threshold: %.2f", contrastThreshold);
        UI.printInfo(Module.BCKT, "  * Filter type:        %s", filterName);
        UI.printInfo(Module.BCKT, "  * Filter size:        %.2f pixels", filter.getSize());
        return true;
    }
private String aaDepthToString(int depth) {
int pixelAA = (depth) < 0 ? -(1 << (-depth)) : (1 << depth);
return String.format("%s%d sample%s", depth < 0 ? "1/" : "", pixelAA * pixelAA, depth == 0 ? "" : "s");
}
    /**
     * Renders a single image bucket: generates QMC-jittered subpixel samples,
     * adaptively refines them, then either dumps/visualizes them or filters
     * them down into final pixel colors and pushes the result to the display.
     * The subpixel grid is padded by the filter extent (fs) on each side and
     * rounded up so it tiles evenly with maxStepSize.
     */
    public void renderBucket(Display display, Bucket bucket, int threadID, IntersectionState istate) {
        // pixel sized extents
        int bx = bucket.x0;
        int by = bucket.y0;
        int x0 = bx * bucket.width;
        int y0 = by * bucket.height;
        // clamp bucket to the image border (last row/column may be partial)
        int bw = Math.min(bucket.width, bucket.imageWidth - x0);
        int bh = Math.min(bucket.height, bucket.imageHeight - y0);
        // prepare bucket
        display.imagePrepare(x0, y0, bw, bh, threadID);
        Color[] bucketRGB = new Color[bw * bh];
        float[] bucketAlpha = new float[bw * bh];
        // subpixel extents, padded by the filter footprint on every side
        int sx0 = x0 * subPixelSize - fs;
        int sy0 = y0 * subPixelSize - fs;
        int sbw = bw * subPixelSize + fs * 2;
        int sbh = bh * subPixelSize + fs * 2;
        // round up to align with maximum step size (maxStepSize is a power of two)
        sbw = (sbw + (maxStepSize - 1)) & (~(maxStepSize - 1));
        sbh = (sbh + (maxStepSize - 1)) & (~(maxStepSize - 1));
        // extra padding as needed so the refinement quads have a right/bottom edge
        if (maxStepSize > 1) {
            sbw++;
            sbh++;
        }
        // allocate bucket memory
        ImageSample[] samples = new ImageSample[sbw * sbh];
        // allocate samples and compute jitter offsets
        float invSubPixelSize = 1.0f / subPixelSize;
        for (int y = 0, index = 0; y < sbh; y++) {
            for (int x = 0; x < sbw; x++, index++) {
                if (stop) return;
                int sx = sx0 + x;
                int sy = sy0 + y;
                // (j, k) tile the sigma permutation table; i is the QMC instance index
                int j = sx & (sigmaLength - 1);
                int k = sy & (sigmaLength - 1);
                int i = (j << sigmaOrder) + QMC.sigma(k, sigmaOrder);
                float dx = useJitter ? (float) QMC.halton(0, k) : 0.5f;
                float dy = useJitter ? (float) QMC.halton(0, j) : 0.5f;
                float rx = (sx + dx) * invSubPixelSize;
                float ry = (sy + dy) * invSubPixelSize;
                // flip vertically: image space has y growing downward
                ry = imageHeight - ry;
                samples[index] = new ImageSample(rx, ry, i);
            }
        }
        // adaptive refinement over maxStepSize-sized quads
        for (int x = 0; x < sbw - 1; x += maxStepSize)
            for (int y = 0; y < sbh - 1; y += maxStepSize)
                refineSamples(samples, sbw, x, y, maxStepSize, thresh, istate);
        if (dumpBuckets) {
            UI.printInfo(Module.BCKT, "Dumping bucket [%d, %d] to file ...", bx, by);
            GenericBitmap bitmap = new GenericBitmap(sbw, sbh);
            // write rows top-down (samples are stored bottom-up)
            for (int y = sbh - 1, index = 0; y >= 0; y--)
                for (int x = 0; x < sbw; x++, index++)
                    bitmap.writePixel(x, y, samples[index].c, samples[index].alpha);
            bitmap.save(String.format("bucket_%04d_%04d.png", bx, by));
        }
        if (displayAA) {
            // color coded image of what is visible: brightness = fraction of
            // subpixels that were actually traced (vs. interpolated)
            float invArea = invSubPixelSize * invSubPixelSize;
            for (int y = 0, index = 0; y < bh; y++) {
                for (int x = 0; x < bw; x++, index++) {
                    int sampled = 0;
                    for (int i = 0; i < subPixelSize; i++) {
                        for (int j = 0; j < subPixelSize; j++) {
                            if (stop) return;
                            int sx = x * subPixelSize + fs + i;
                            int sy = y * subPixelSize + fs + j;
                            int s = sx + sy * sbw;
                            sampled += samples[s].sampled() ? 1 : 0;
                        }
                    }
                    bucketRGB[index] = new Color(sampled * invArea);
                    bucketAlpha[index] = 1.0f;
                }
            }
        } else {
            // filter samples into pixels; (cx, cy) is the pixel center in image space
            float cy = imageHeight - (y0 + 0.5f);
            for (int y = 0, index = 0; y < bh; y++, cy--) {
                float cx = x0 + 0.5f;
                for (int x = 0; x < bw; x++, index++, cx++) {
                    Color c = Color.black();
                    float a = 0;
                    float weight = 0.0f;
                    // gather the (2*fs+1)^2 subpixel neighborhood around the pixel
                    for (int j = -fs, sy = y * subPixelSize; j <= fs; j++, sy++) {
                        for (int i = -fs, sx = x * subPixelSize, s = sx + sy * sbw; i <= fs; i++, sx++, s++) {
                            if (stop) return;
                            float dx = samples[s].rx - cx;
                            if (Math.abs(dx) > fhs)
                                continue;
                            float dy = samples[s].ry - cy;
                            if (Math.abs(dy) > fhs)
                                continue;
                            float f = filter.get(dx, dy);
                            c.madd(f, samples[s].c);
                            a += f * samples[s].alpha;
                            weight += f;
                        }
                    }
                    // normalize by total filter weight
                    float invWeight = 1.0f / weight;
                    c.mul(invWeight);
                    a *= invWeight;
                    bucketRGB[index] = c;
                    bucketAlpha[index] = a;
                }
            }
        }
        // update pixels
        display.imageUpdate(x0, y0, bw, bh, bucketRGB, bucketAlpha);
    }
    /**
     * Traces radiance for one subpixel sample. The first three Halton
     * dimensions (bases indexed 1..3) seed time (q0) and lens (q1, q2)
     * coordinates; with super-sampling enabled, additional rays use
     * Cranley-Patterson rotated offsets (QMC.mod1) and the result is
     * averaged via scale().
     */
    private void computeSubPixel(ImageSample sample, IntersectionState istate) {
        if (stop) return;
        float x = sample.rx;
        float y = sample.ry;
        double q0 = QMC.halton(1, sample.i);
        double q1 = QMC.halton(2, sample.i);
        double q2 = QMC.halton(3, sample.i);
        if (superSampling > 1) {
            // multiple sampling: accumulate then average
            sample.add(scene.getRadiance(istate, x, y, q1, q2, q0, sample.i, 4, null));
            for (int i = 1; i < superSampling; i++) {
                // rotate the base QMC point per extra ray to decorrelate samples
                double time = QMC.mod1(q0 + i * invSuperSampling);
                double lensU = QMC.mod1(q1 + QMC.halton(0, i));
                double lensV = QMC.mod1(q2 + QMC.halton(1, i));
                sample.add(scene.getRadiance(istate, x, y, lensU, lensV, time, sample.i + i, 4, null));
            }
            sample.scale((float) invSuperSampling);
        } else {
            // single sample
            sample.set(scene.getRadiance(istate, x, y, q1, q2, q0, sample.i, 4, null));
        }
    }
    /**
     * Adaptively refines the quad of samples whose top-left corner is (x, y)
     * and whose side is stepSize. The four corners are traced if needed; if
     * any pair differs beyond thresh and the step is still above minStepSize,
     * the quad is split into four sub-quads (with a doubled threshold, so
     * deeper levels are progressively more tolerant). Otherwise the interior
     * samples are filled by bilinear interpolation of the corners.
     */
    private void refineSamples(ImageSample[] samples, int sbw, int x, int y, int stepSize, float thresh, IntersectionState istate) {
        if (stop) return;
        int dx = stepSize;
        int dy = stepSize * sbw;
        int i00 = x + y * sbw;
        ImageSample s00 = samples[i00];
        ImageSample s01 = samples[i00 + dy];
        ImageSample s10 = samples[i00 + dx];
        ImageSample s11 = samples[i00 + dx + dy];
        // trace any corner not yet sampled
        if (!s00.sampled())
            computeSubPixel(s00, istate);
        if (!s01.sampled())
            computeSubPixel(s01, istate);
        if (!s10.sampled())
            computeSubPixel(s10, istate);
        if (!s11.sampled())
            computeSubPixel(s11, istate);
        if (stepSize > minStepSize) {
            // compare all 6 corner pairs for contrast/instance/shader/normal changes
            if (s00.isDifferent(s01, thresh) || s00.isDifferent(s10, thresh) || s00.isDifferent(s11, thresh) || s01.isDifferent(s11, thresh) || s10.isDifferent(s11, thresh) || s01.isDifferent(s10, thresh)) {
                stepSize >>= 1;
                thresh *= 2;
                refineSamples(samples, sbw, x, y, stepSize, thresh, istate);
                refineSamples(samples, sbw, x + stepSize, y, stepSize, thresh, istate);
                refineSamples(samples, sbw, x, y + stepSize, stepSize, thresh, istate);
                refineSamples(samples, sbw, x + stepSize, y + stepSize, stepSize, thresh, istate);
                return;
            }
        }
        // interpolate remaining samples (skips corners, which are already processed)
        float ds = 1.0f / stepSize;
        for (int i = 0; i <= stepSize; i++)
            for (int j = 0; j <= stepSize; j++)
                if (!samples[x + i + (y + j) * sbw].processed())
                    ImageSample.bilerp(samples[x + i + (y + j) * sbw], s00, s01, s10, s11, i * ds, j * ds);
    }
    /**
     * One subpixel sample: its image-space position (rx, ry), QMC instance
     * index i, accumulated radiance c and alpha, plus the hit instance,
     * shader and shading normal used by the adaptive-AA contrast test.
     * Lifecycle: constructed unsampled (n == 0, c == null) -> set()/add()
     * mark it sampled -> bilerp() fills unprocessed samples from neighbors.
     */
    private static final class ImageSample {
        float rx, ry;     // sample position in image space
        int i, n;         // i: QMC instance index; n: number of rays accumulated
        Color c;          // accumulated/interpolated color; null until processed
        float alpha;
        Instance instance;
        Shader shader;
        float nx, ny, nz; // shading normal of the primary hit (defaults to (1,1,1))
        ImageSample(float rx, float ry, int i) {
            this.rx = rx;
            this.ry = ry;
            this.i = i;
            n = 0;
            c = null;
            alpha = 0;
            instance = null;
            shader = null;
            nx = ny = nz = 1;
        }
        // Record the result of a single primary ray; null state means a miss.
        final void set(ShadingState state) {
            if (state == null)
                // NOTE(review): assigns the shared Color.BLACK constant; assumes
                // callers never mutate c after set() without super-sampling — verify
                c = Color.BLACK;
            else {
                c = state.getResult();
                shader = state.getShader();
                instance = state.getInstance();
                if (state.getNormal() != null) {
                    nx = state.getNormal().x;
                    ny = state.getNormal().y;
                    nz = state.getNormal().z;
                }
                // alpha is 1 only when something was actually hit
                alpha = state.getInstance() == null ? 0 : 1;
            }
            n = 1;
        }
        // Accumulate one more ray's contribution (used by super-sampling).
        final void add(ShadingState state) {
            if (n == 0)
                c = Color.black();
            if (state != null) {
                c.add(state.getResult());
                alpha += state.getInstance() == null ? 0 : 1;
            }
            n++;
        }
        // Average accumulated values by multiplying with 1/sampleCount.
        final void scale(float s) {
            c.mul(s);
            alpha *= s;
        }
        // True once the sample has a color (traced or interpolated).
        final boolean processed() {
            return c != null;
        }
        // True once at least one ray has been traced for this sample.
        final boolean sampled() {
            return n > 0;
        }
        /**
         * Contrast test for adaptive refinement: differs when the two samples
         * hit different instances/shaders, their colors contrast beyond
         * thresh, their alphas diverge, or their normals disagree.
         */
        final boolean isDifferent(ImageSample sample, float thresh) {
            if (instance != sample.instance)
                return true;
            if (shader != sample.shader)
                return true;
            if (Color.hasContrast(c, sample.c, thresh))
                return true;
            // NOTE(review): 0/0 when both alphas are 0 yields NaN, and NaN > thresh
            // is false, so two empty samples compare as equal — presumably intended
            if (Math.abs(alpha - sample.alpha) / (alpha + sample.alpha) > thresh)
                return true;
            // only compare normals if this pixel has not been averaged
            float dot = (nx * sample.nx + ny * sample.ny + nz * sample.nz);
            return dot < 0.9f;
        }
        // Bilinearly interpolate color and alpha from four corner samples;
        // (dx, dy) in [0,1] are the fractional offsets within the quad.
        static final ImageSample bilerp(ImageSample result, ImageSample i00, ImageSample i01, ImageSample i10, ImageSample i11, float dx, float dy) {
            float k00 = (1.0f - dx) * (1.0f - dy);
            float k01 = (1.0f - dx) * dy;
            float k10 = dx * (1.0f - dy);
            float k11 = dx * dy;
            Color c00 = i00.c;
            Color c01 = i01.c;
            Color c10 = i10.c;
            Color c11 = i11.c;
            Color c = Color.mul(k00, c00);
            c.madd(k01, c01);
            c.madd(k10, c10);
            c.madd(k11, c11);
            result.c = c;
            result.alpha = k00 * i00.alpha + k01 * i01.alpha + k10 * i10.alpha + k11 * i11.alpha;
            return result;
        }
    }
}
| |
package com.google.android.finsky.activities;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.view.ViewPager;
import android.support.v4.view.ViewPager.OnPageChangeListener;
import android.text.TextUtils;
import android.transition.Transition;
import android.util.DisplayMetrics;
import android.util.Pair;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.MeasureSpec;
import android.view.ViewGroup;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.widget.FrameLayout.LayoutParams;
import com.android.volley.VolleyError;
import com.google.android.finsky.FinskyApp;
import com.google.android.finsky.analytics.FinskyEventLog;
import com.google.android.finsky.analytics.PlayStore.PlayStoreUiElement;
import com.google.android.finsky.api.model.DfeBrowse;
import com.google.android.finsky.api.model.DfeList;
import com.google.android.finsky.api.model.DfeToc;
import com.google.android.finsky.api.model.Document;
import com.google.android.finsky.config.G;
import com.google.android.finsky.config.PreferenceFile.PrefixSharedPreference;
import com.google.android.finsky.config.PreferenceFile.SharedPreference;
import com.google.android.finsky.experiments.FinskyExperiments;
import com.google.android.finsky.fragments.PageFragment;
import com.google.android.finsky.fragments.PageFragmentHost;
import com.google.android.finsky.fragments.UrlBasedPageFragment;
import com.google.android.finsky.layout.ContentFrame;
import com.google.android.finsky.layout.ControlsContainerBackground;
import com.google.android.finsky.layout.ControlsContainerBackgroundCoordinator;
import com.google.android.finsky.layout.FinskyTabStrip;
import com.google.android.finsky.layout.HeaderLayoutSwitcher;
import com.google.android.finsky.layout.HeroGraphicView;
import com.google.android.finsky.layout.InsetsFrameLayout;
import com.google.android.finsky.layout.LayoutSwitcher;
import com.google.android.finsky.layout.actionbar.ActionBarBackgroundUpdater;
import com.google.android.finsky.layout.actionbar.ActionBarController;
import com.google.android.finsky.layout.play.FinskyHeaderListLayout;
import com.google.android.finsky.layout.play.FinskyHeaderListLayout.FinskyConfigurator;
import com.google.android.finsky.layout.play.FinskyViewPager;
import com.google.android.finsky.layout.play.FinskyViewPager.MeasureOverrider;
import com.google.android.finsky.layout.play.FinskyViewPagerScroller;
import com.google.android.finsky.layout.play.PlayHighlightsOverlayView;
import com.google.android.finsky.navigationmanager.NavigationManager;
import com.google.android.finsky.protos.Browse.BrowseResponse;
import com.google.android.finsky.protos.Browse.BrowseTab;
import com.google.android.finsky.protos.PrivacySetting;
import com.google.android.finsky.protos.Toc.CorpusMetadata;
import com.google.android.finsky.protos.Toc.TocResponse;
import com.google.android.finsky.protos.UserContext;
import com.google.android.finsky.transition.PageFade;
import com.google.android.finsky.utils.ClientMutationCache;
import com.google.android.finsky.utils.CorpusResourceUtils;
import com.google.android.finsky.utils.FinskyLog;
import com.google.android.finsky.utils.FinskyPreferences;
import com.google.android.finsky.utils.IntMath;
import com.google.android.finsky.utils.LocationHelper;
import com.google.android.finsky.utils.ObjectMap;
import com.google.android.finsky.utils.UiUtils;
import com.google.android.finsky.utils.Utils;
import com.google.android.finsky.utils.hats.HatsUtils;
import com.google.android.libraries.bind.bidi.BidiPagingHelper;
import com.google.android.play.headerlist.PlayHeaderListLayout;
import com.google.android.play.headerlist.PlayHeaderListLayout.OnTabSelectedListener;
import com.google.android.play.headerlist.PlayHeaderListTabStrip;
import com.google.android.play.search.PlaySearchToolbar;
import com.google.android.play.utils.config.GservicesValue;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public final class TabbedBrowseFragment
extends UrlBasedPageFragment
implements ViewPager.OnPageChangeListener, SpacerHeightProvider, TabbedAdapter.TabDataListener, PlayHeaderListLayout.OnTabSelectedListener
{
  // --- Action bar / header state ---
  private ActionBarBackgroundUpdater mActionBarBackgroundUpdater;
  public int mBackendId = 0;              // current corpus/backend id (0 = default)
  private int mBackgroundType;            // 1 when the page is search-box-only (see onCreate)
  private ViewGroup mBackgroundViewGroup;
  private String mBreadcrumb;             // action bar title, set from the browse response
  // --- Browse data ---
  public DfeBrowse mBrowseData;
  public ControlsContainerBackgroundCoordinator mControlsContainerBackgroundCoordinator;
  private boolean mDataIsReady;           // true once advanceState() has a usable response
  private ObjectMap mFragmentObjectMap = new ObjectMap(); // saved adapter/highlights state across view recreation
  // --- Highlights banner pager ---
  private boolean mHasHighlightsPager;
  private FinskyHeaderListLayout mHeaderListLayout;
  private int mHeaderShadowMode = 3;
  private HeroGraphicView mHeroGraphicView;
  private ViewGroup mHeroViewRoot;
  private DfeList[] mHighlightsData;      // one optional highlights list per browse tab
  private PlayHighlightsOverlayView mHighlightsOverlay;
  private FinskyViewPager mHighlightsPager;
  private HighlightsPagerAdapter mHighlightsPagerAdapter;
  private boolean mIgnoreHighlights;      // set after a highlights load error to fall back gracefully
  private int mLeadingSpacerHeight;
  private int mRestorePrevSelectedTabLogicalIndex = -1; // tab to restore after view recreation
  // --- Tabs / paging ---
  private FinskyTabStrip mTabStrip;
  public TabbedAdapter mTabbedAdapter;
  private PlayStore.PlayStoreUiElement mUiElementProto = FinskyEventLog.obtainPlayStoreUiElement(1);
  private boolean mUseFixedTabs;
  private UserContext mUserContext;       // optional location context attached to browse requests
  public FinskyViewPager mViewPager;
  // Drives the page's loading state machine: kick off the browse request if it
  // is not ready yet, then (once ready) optionally load per-tab highlights data
  // and finally mark the page ready with the server log cookie attached.
  // NOTE(review): this is decompiled output — the for(;;) { return; ... } shape
  // encodes the original branch targets, so statement order must not be changed.
  private void advanceState()
  {
    if ((this.mBrowseData == null) || (!this.mBrowseData.isReady()))
    {
      // start (or restart) the browse request; listeners re-enter via onDataChanged
      this.mIgnoreHighlights = false;
      this.mBrowseData = new DfeBrowse(this.mDfeApi, this.mUrl, this.mUserContext);
      this.mBrowseData.addDataChangedListener(this);
      this.mBrowseData.addErrorListener(this);
    }
    int i;
    for (;;)
    {
      return;
      // (reachable only via decompiled branch targets below)
      i = this.mBrowseData.mBrowseResponse.landingTabIndex;
      Browse.BrowseTab[] arrayOfBrowseTab = this.mBrowseData.mBrowseResponse.browseTab;
      if ((this.mIgnoreHighlights) || (TextUtils.isEmpty(arrayOfBrowseTab[i].highlightsBannerUrl))) {
        break label237;
      }
      if ((this.mHighlightsData != null) && (this.mHighlightsData[i].isReady())) {
        break;
      }
      // allocate a highlights list per tab, but only load the landing tab's now
      this.mHighlightsData = new DfeList[arrayOfBrowseTab.length];
      for (int j = 0; j < arrayOfBrowseTab.length; j++)
      {
        DfeList localDfeList = new DfeList(this.mDfeApi, arrayOfBrowseTab[j].highlightsBannerUrl, false);
        this.mHighlightsData[j] = localDfeList;
        if (j == this.mBrowseData.mBrowseResponse.landingTabIndex)
        {
          localDfeList.addDataChangedListener(this);
          localDfeList.addErrorListener(this);
          localDfeList.startLoadItems();
        }
      }
    }
    // highlights pager is shown only when the landing tab actually has items
    int k = this.mHighlightsData[i].getCount();
    boolean bool = false;
    if (k > 0) {
      bool = true;
    }
    this.mHasHighlightsPager = bool;
    label237:
    // attach the server-side log cookie (if any) to this page's UI element
    PlayStore.PlayStoreUiElement localPlayStoreUiElement = this.mUiElementProto;
    DfeBrowse localDfeBrowse = this.mBrowseData;
    if ((localDfeBrowse.mBrowseResponse == null) || (localDfeBrowse.mBrowseResponse.serverLogsCookie.length == 0)) {}
    for (byte[] arrayOfByte = null;; arrayOfByte = localDfeBrowse.mBrowseResponse.serverLogsCookie)
    {
      FinskyEventLog.setServerLogCookie(localPlayStoreUiElement, arrayOfByte);
      this.mDataIsReady = true;
      return;
    }
  }
private ColorDrawable getBackgroundColorDrawable()
{
return new ColorDrawable(CorpusResourceUtils.getPrimaryColor(getActivity(), this.mBackendId));
}
private boolean hasFixedTabs()
{
return (this.mBrowseData.mBrowseResponse.browseTab.length > 1) && (this.mUseFixedTabs);
}
  // True when the action bar should collapse to just the search box: either a
  // highlights pager is showing, or this is the (aggregated) home URL with
  // multiple corpora in the toc.
  // NOTE(review): decompiled control flow — the for-loop/return interleaving
  // encodes original branch targets; do not reorder.
  private boolean isSearchBoxOnly()
  {
    if (this.mHasHighlightsPager) {}
    for (;;)
    {
      return true;
      DfeToc localDfeToc = this.mDfeToc;
      String str = this.mUrl;
      // home (or entertainment home) URL check
      if ((TextUtils.equals(str, localDfeToc.mToc.homeUrl)) || (TextUtils.equals(str, localDfeToc.mToc.entertainmentHomeUrl))) {}
      for (int i = 1; (i == 0) || (localDfeToc.getCorpusList().size() <= 1); i = 0) {
        return false;
      }
    }
  }
public static TabbedBrowseFragment newInstance$30e04d94(String paramString1, String paramString2, int paramInt, DfeToc paramDfeToc, boolean paramBoolean)
{
TabbedBrowseFragment localTabbedBrowseFragment = new TabbedBrowseFragment();
if (paramInt >= 0) {
localTabbedBrowseFragment.mBackendId = paramInt;
}
if (!TextUtils.isEmpty(paramString2)) {
localTabbedBrowseFragment.mBreadcrumb = paramString2;
}
localTabbedBrowseFragment.setDfeTocAndUrl(paramDfeToc, paramString1);
localTabbedBrowseFragment.setArgument("TabbedBrowseFragment.UseFixedTabs", paramBoolean);
localTabbedBrowseFragment.mUseFixedTabs = paramBoolean;
return localTabbedBrowseFragment;
}
protected final LayoutSwitcher createLayoutSwitcher(ContentFrame paramContentFrame)
{
return new HeaderLayoutSwitcher(paramContentFrame, this);
}
public final int getActionBarColor()
{
if ((isSearchBoxOnly()) || (!this.mDataIsReady)) {
return getResources().getColor(2131689681);
}
return CorpusResourceUtils.getPrimaryColor(getActivity(), this.mBackendId);
}
@TargetApi(22)
protected final Transition getCustomExitTransition()
{
return new PageFade(this.mBackendId);
}
protected final int getDefaultHeaderShadowMode()
{
return this.mHeaderShadowMode;
}
protected final int getLayoutRes()
{
return 2130968781;
}
public final int getLeadingSpacerHeight()
{
return this.mLeadingSpacerHeight;
}
public final PlayStore.PlayStoreUiElement getPlayStoreUiElement()
{
return this.mUiElementProto;
}
  // Kicks off loading (or rebinds existing views) once the host activity
  // exists, and shows the HaTS survey for the relevant backend.
  // NOTE(review): decompiled label/while flow — the while loop encodes the
  // original branch targets; statement order must be preserved.
  public final void onActivityCreated(Bundle paramBundle)
  {
    super.onActivityCreated(paramBundle);
    if (!this.mDataIsReady)
    {
      // first load: show spinner, start the browse request, configure the bar
      switchToLoading();
      advanceState();
      rebindActionBar();
      this.mActionBarController.enableActionBarOverlay();
      if (this.mBackendId != 3) {
        break label57;
      }
      HatsUtils.showSurveyIfAvailable(3, this.mPageFragmentHost);
    }
    label57:
    while (this.mBackendId != 0)
    {
      return;
      // (branch target: data already ready — just rebind the views)
      rebindViews();
      break;
    }
    HatsUtils.showSurveyIfAvailable(1, this.mPageFragmentHost);
  }
public final boolean onBackPressed()
{
if (!FinskyApp.get().getExperiments().isH20StoreEnabled()) {
return super.onBackPressed();
}
if ((this.mNavigationManager.getCurrentPageType() == 1) && (this.mBackendId != 3))
{
DfeToc localDfeToc = this.mDfeToc;
if (TextUtils.equals(this.mUrl, localDfeToc.mToc.homeUrl))
{
FinskyApp.get().getEventLogger().logClickEvent(600, null, this.mNavigationManager.getActivePage());
this.mNavigationManager.goToAggregatedHome(localDfeToc);
return true;
}
}
return super.onBackPressed();
}
  // Notifies the adapter's tab-selection tracker before the tab switch occurs.
  // (access$300$... is a compiler-generated synthetic accessor from decompilation.)
  public final void onBeforeTabSelected(int paramInt)
  {
    TabbedAdapter.TabSelectionTracker.access$300$4437258e(this.mTabbedAdapter.mTabSelectionTracker);
  }
  // Restores the fixed-tabs argument, picks the background type (1 for
  // search-box-only pages), and — behind an experiment flag plus the user's
  // location-privacy setting — decides whether to attach a UserContext with
  // the device location to browse requests for configured home URLs.
  // NOTE(review): heavily decompiled label/goto flow; every label/break pair
  // encodes an original branch — do not reorder any statement.
  public final void onCreate(Bundle paramBundle)
  {
    int i = 1;
    super.onCreate(paramBundle);
    setRetainInstance$1385ff();
    this.mUseFixedTabs = this.mArguments.getBoolean("TabbedBrowseFragment.UseFixedTabs");
    int j;
    Integer localInteger;
    int k;
    label113:
    int n;
    if (isSearchBoxOnly())
    {
      // background type 1 = search-box-only styling
      j = i;
      this.mBackgroundType = j;
      if (!FinskyApp.get().getExperiments().isEnabled(12603100L)) {
        break label229;
      }
      // experiment on: consult the per-account location suggestion preference
      String str = FinskyApp.get().getCurrentAccountName();
      localInteger = (Integer)FinskyPreferences.locationSuggestionsEnabled.get(str).get();
      PrivacySetting localPrivacySetting = this.mDfeToc.getUserPrivacySetting(i);
      if (localPrivacySetting == null) {
        break label229;
      }
      if (!localPrivacySetting.enabledByDefault) {
        break label199;
      }
      // enabled by default: opted out only when the preference is 2
      if (localInteger.intValue() == 2) {
        break label193;
      }
      k = i;
      if (k == 0) {
        break label241;
      }
      // location allowed: check whether this URL is in the configured list
      String[] arrayOfString = Utils.commaUnpackStrings((String)G.homeBrowseUrlsForUserContextHeader.get());
      int m = arrayOfString.length;
      n = 0;
      label140:
      if (n >= m) {
        break label241;
      }
      if (!arrayOfString[n].equals(this.mUrl)) {
        break label235;
      }
    }
    for (;;)
    {
      if (i == 0) {
        break label246;
      }
      // URL matched: attach the device location to browse requests
      this.mUserContext = new UserContext();
      this.mUserContext.location = LocationHelper.getLocationProto();
      return;
      j = 0;
      break;
      label193:
      k = 0;
      break label113;
      label199:
      // not enabled by default: opted in only for preference values 1 or 3
      if ((localInteger.intValue() == i) || (localInteger.intValue() == 3))
      {
        k = i;
        break label113;
      }
      k = 0;
      break label113;
      label229:
      k = 0;
      break label113;
      label235:
      n++;
      break label140;
      label241:
      i = 0;
    }
    label246:
    this.mUserContext = null;
  }
public final void onDataChanged()
{
if (!this.mDataIsReady) {
advanceState();
}
if (this.mDataIsReady) {
super.onDataChanged();
}
}
  // Tears down the view layer while preserving restorable state: remembers
  // the selected tab, snapshots per-tab instance states and DfeLists into
  // mFragmentObjectMap, then nulls out all view references and detaches
  // listeners/coordinators.
  // NOTE(review): decompiled flow — the for-loop with label286 encodes the
  // original branches; the trailing loop builds the per-tab state list.
  public final void onDestroyView()
  {
    ObjectMap localObjectMap2;
    TabbedAdapter localTabbedAdapter;
    if ((this.mDataIsReady) && (this.mViewPager != null))
    {
      // remember the logical tab index so it can be restored after recreation
      this.mRestorePrevSelectedTabLogicalIndex = BidiPagingHelper.getLogicalPosition(this.mTabbedAdapter, this.mViewPager.getCurrentItem());
      localObjectMap2 = new ObjectMap();
      localTabbedAdapter = this.mTabbedAdapter;
      localTabbedAdapter.mIsViewDestroyed = true;
      if ((localTabbedAdapter.mTabDataList != null) && (!localTabbedAdapter.mTabDataList.isEmpty())) {
        break label286;
      }
    }
    label286:
    ArrayList localArrayList;
    for (Object localObject = null;; localObject = localArrayList)
    {
      // snapshot adapter state so rebindViews() can restore it later
      localObjectMap2.put("TabbedAdapter.TabInstanceStates", localObject);
      localObjectMap2.put("TabbedAdapter.TabDfeLists", localTabbedAdapter.getMultiDfeLists());
      this.mFragmentObjectMap.put("TabbedBrowseFragment.AdapterState", localObjectMap2);
      this.mPageFragmentHost.hideSatisfactionSurveyV2();
      if (this.mHeaderListLayout != null)
      {
        this.mHeaderListLayout.setOnPageChangeListener(null);
        this.mHeaderListLayout.setOnTabSelectedListener(null);
      }
      if (this.mViewPager != null)
      {
        this.mViewPager.setAdapter(null);
        this.mViewPager = null;
      }
      this.mTabbedAdapter = null;
      if (this.mHeaderListLayout != null) {
        this.mHeaderListLayout.detachIfNeeded();
      }
      if (this.mActionBarBackgroundUpdater != null)
      {
        this.mActionBarBackgroundUpdater.reset();
        this.mActionBarBackgroundUpdater = null;
      }
      this.mBackgroundViewGroup = null;
      this.mHeroGraphicView = null;
      if (this.mHighlightsPagerAdapter != null)
      {
        // snapshot highlights pager state as well
        ObjectMap localObjectMap1 = new ObjectMap();
        this.mHighlightsPagerAdapter.onDestroyView(localObjectMap1);
        this.mFragmentObjectMap.put("TabbedBrowseFragment.HighlightsState", localObjectMap1);
      }
      this.mHighlightsPager = null;
      this.mHighlightsPagerAdapter = null;
      this.mHighlightsOverlay = null;
      this.mHeroViewRoot = null;
      this.mTabStrip = null;
      if (this.mControlsContainerBackgroundCoordinator != null)
      {
        this.mControlsContainerBackgroundCoordinator.detach();
        this.mControlsContainerBackgroundCoordinator = null;
      }
      super.onDestroyView();
      return;
      // (branch target: collect each tab's saved instance state)
      localArrayList = new ArrayList();
      Iterator localIterator = localTabbedAdapter.mTabDataList.iterator();
      while (localIterator.hasNext())
      {
        TabbedAdapter.TabData localTabData = (TabbedAdapter.TabData)localIterator.next();
        if (localTabData.viewPagerTab != null) {
          localArrayList.add(localTabData.viewPagerTab.onDestroyView());
        } else {
          localArrayList.add(localTabData.instanceState);
        }
      }
    }
  }
public final void onEnterActionBarSearchMode()
{
if ((this.mTabbedAdapter != null) && (this.mTabbedAdapter.getCount() == 1))
{
super.onEnterActionBarSearchMode();
if (this.mActionBarBackgroundUpdater != null) {
this.mActionBarBackgroundUpdater.setSearchMode(true);
}
}
}
public final void onErrorResponse(VolleyError paramVolleyError)
{
if ((this.mBrowseData != null) && (this.mBrowseData.isReady()))
{
this.mIgnoreHighlights = true;
this.mHighlightsData = null;
advanceState();
if (this.mDataIsReady) {
super.onDataChanged();
}
return;
}
super.onErrorResponse(paramVolleyError);
}
public final void onExitActionBarSearchMode()
{
if ((this.mTabbedAdapter != null) && (this.mTabbedAdapter.getCount() == 1))
{
super.onExitActionBarSearchMode();
if (this.mActionBarBackgroundUpdater != null) {
this.mActionBarBackgroundUpdater.setSearchMode(false);
}
}
}
  // Forwards the ViewPager scroll state to the tab-selection tracker.
  // (access$400 is a compiler-generated synthetic accessor from decompilation.)
  public final void onPageScrollStateChanged(int paramInt)
  {
    TabbedAdapter.TabSelectionTracker.access$400(this.mTabbedAdapter.mTabSelectionTracker, paramInt);
  }
  // Intentionally a no-op: this fragment reacts to page selection only, not
  // to intermediate scroll offsets (required by ViewPager.OnPageChangeListener).
  public final void onPageScrolled(int paramInt1, float paramFloat, int paramInt2) {}
public final void onPageSelected(int paramInt)
{
int i = BidiPagingHelper.getLogicalPosition(this.mTabbedAdapter, paramInt);
String str = this.mTabbedAdapter.getPageTitle(i);
if ((!TextUtils.isEmpty(str)) && (this.mNavigationManager != null) && (this.mNavigationManager.isOnBrowsePage())) {
UiUtils.sendAccessibilityEventWithText(this.mContext, this.mContext.getString(2131361816, new Object[] { str }), this.mViewPager);
}
}
  // Called when a tab's data arrives: for a single-tab page with a background
  // view, shows the hero graphic and configures the header layout around it;
  // otherwise hides the hero and falls back to floating-background mode.
  // Skipped entirely when the highlights pager owns the header area.
  // NOTE(review): decompiled flow — the trailing for(;;) encodes the original
  // branch for computing the floating-background flag; do not reorder.
  public final void onTabDataReady(final BackgroundViewConfigurator paramBackgroundViewConfigurator)
  {
    int i = 1;
    if (this.mHasHighlightsPager) {
      return;
    }
    if ((this.mTabbedAdapter.getCount() == i) && (paramBackgroundViewConfigurator.hasBackgroundView()))
    {
      // single tab with a hero background: show and configure it after layout
      this.mHeroGraphicView.setVisibility(0);
      this.mHeaderListLayout.post(new Runnable()
      {
        public final void run()
        {
          paramBackgroundViewConfigurator.configureBackgroundView(TabbedBrowseFragment.this.mHeroGraphicView, TabbedBrowseFragment.this.mBackendId);
        }
      });
      this.mHeaderListLayout.setAlwaysUseFloatingBackground(false);
      this.mHeaderListLayout.setHeaderShadowMode(paramBackgroundViewConfigurator.getHeaderShadowMode());
      Resources localResources = this.mContext.getResources();
      // spacer = hero height plus a fraction of the header dimension
      int k = HeroGraphicView.getSpacerHeight(this.mContext, localResources.getDisplayMetrics().widthPixels, i, 0.0F) + (int)(0.3F * localResources.getDimensionPixelSize(2131492936));
      if (InsetsFrameLayout.SUPPORTS_IMMERSIVE_MODE) {
        k -= UiUtils.getStatusBarHeight(this.mContext);
      }
      this.mHeaderListLayout.setTabMode(2, k);
      this.mActionBarBackgroundUpdater = new ActionBarBackgroundUpdater(getActivity().getWindow(), this.mHeaderListLayout);
      this.mHeaderListLayout.setOnLayoutChangedListener(this.mActionBarBackgroundUpdater);
      this.mActionBarBackgroundUpdater.updateActionBar();
      return;
    }
    // no hero background: hide it and pick the floating-background mode
    this.mHeroGraphicView.setVisibility(4);
    FinskyHeaderListLayout localFinskyHeaderListLayout = this.mHeaderListLayout;
    if (this.mBackgroundType != i) {}
    for (;;)
    {
      localFinskyHeaderListLayout.setAlwaysUseFloatingBackground(i);
      this.mHeaderListLayout.setOnLayoutChangedListener(null);
      return;
      int j = 0;
    }
  }
public final void onTabSelected(int paramInt)
{
onTabSelectedInternal(paramInt, true);
}
  // Handles a tab switch: logs the click (when user-initiated), updates the
  // backend id / action bar colors for fixed-tab pages, and keeps the
  // highlights pager + overlay in sync with the newly selected logical tab.
  // NOTE(review): decompiled label/goto flow (label219/320/345/365 and the
  // trailing for-loop are branch targets); statement order must be preserved.
  public void onTabSelectedInternal(int paramInt, boolean paramBoolean)
  {
    int i = BidiPagingHelper.getLogicalPosition(this.mTabbedAdapter, paramInt);
    if (paramBoolean)
    {
      // user-initiated: log a click on the tab's UI element
      TabbedAdapter localTabbedAdapter = this.mTabbedAdapter;
      if ((i >= 0) && (i < localTabbedAdapter.mTabDataList.size())) {
        FinskyApp.get().getEventLogger().logClickEvent(((TabbedAdapter.TabData)localTabbedAdapter.mTabDataList.get(i)).elementNode);
      }
    }
    ControlsContainerBackgroundCoordinator localControlsContainerBackgroundCoordinator;
    ColorDrawable localColorDrawable;
    HighlightsPagerAdapter localHighlightsPagerAdapter1;
    Document localDocument;
    label219:
    PlayHighlightsOverlayView localPlayHighlightsOverlayView2;
    HighlightsPagerAdapter localHighlightsPagerAdapter2;
    if (hasFixedTabs())
    {
      // fixed tabs carry their own backend: retint background + status bar
      this.mBackendId = this.mBrowseData.mBrowseResponse.browseTab[i].backendId;
      this.mPageFragmentHost.updateCurrentBackendId(this.mBackendId, true);
      localControlsContainerBackgroundCoordinator = this.mControlsContainerBackgroundCoordinator;
      int j = this.mBackendId;
      localColorDrawable = new ColorDrawable(CorpusResourceUtils.getPrimaryColor(localControlsContainerBackgroundCoordinator.mContext, j));
      if (localControlsContainerBackgroundCoordinator.mBackground != null) {
        break label320;
      }
      // background view not attached yet: queue the drawable for later
      localControlsContainerBackgroundCoordinator.mQueuedBackgroundDrawable = localColorDrawable;
      this.mHeaderListLayout.setStatusBarColors(this.mContext.getResources().getColor(2131689730), Color.alpha(0));
      if (this.mHasHighlightsPager)
      {
        // keep the highlights pager and its overlay on the same logical tab
        this.mHighlightsPager.setCurrentItem(paramInt);
        PlayHighlightsOverlayView localPlayHighlightsOverlayView1 = this.mHighlightsOverlay;
        localHighlightsPagerAdapter1 = this.mHighlightsPagerAdapter;
        if ((i >= 0) && (i < localHighlightsPagerAdapter1.mHighlightsTabList.size())) {
          break label345;
        }
        localDocument = null;
        int k = this.mHighlightsPagerAdapter.getHighlightBannerCountForPage(i);
        if (i != localPlayHighlightsOverlayView1.mCurrentHighlightsLogicalSection)
        {
          localPlayHighlightsOverlayView1.onPagesConfigurationChanged(k);
          localPlayHighlightsOverlayView1.mCurrentHighlightsLogicalSection = i;
          localPlayHighlightsOverlayView1.loadTitles(localDocument);
        }
        localPlayHighlightsOverlayView2 = this.mHighlightsOverlay;
        localHighlightsPagerAdapter2 = this.mHighlightsPagerAdapter;
        if ((i >= 0) && (i < localHighlightsPagerAdapter2.mHighlightsTabList.size())) {
          break label365;
        }
      }
    }
    label320:
    label345:
    label365:
    HighlightsPagerAdapter.HighlightsTab localHighlightsTab;
    for (int m = 0;; m = localHighlightsTab.translatePosition(localHighlightsTab.mCurrentBanner))
    {
      localPlayHighlightsOverlayView2.setCurrentPage(m);
      this.mHighlightsPagerAdapter.setCurrentPage(i);
      this.mHighlightsPager.setBackgroundColor(CorpusResourceUtils.getPrimaryColor(this.mContext, 0));
      return;
      // (branch target: background already attached — animate the swap)
      localControlsContainerBackgroundCoordinator.mQueuedBackgroundDrawable = null;
      localControlsContainerBackgroundCoordinator.mBackground.setBackgroundDrawable(localColorDrawable, localControlsContainerBackgroundCoordinator.mLastTouchX, true);
      break;
      localDocument = HighlightsPagerAdapter.HighlightsTab.access$300((HighlightsPagerAdapter.HighlightsTab)localHighlightsPagerAdapter1.mHighlightsTabList.get(i));
      break label219;
      localHighlightsTab = (HighlightsPagerAdapter.HighlightsTab)localHighlightsPagerAdapter2.mHighlightsTabList.get(i);
    }
  }
  // Pushes the breadcrumb title and backend id to the host, then switches the
  // action bar between search-box-only and regular modes depending on page
  // type; for wide H2O layouts it also constrains the search box width.
  // NOTE(review): decompiled output — the while loop with declarations after
  // `return` is a branch-target artifact; do not reorder statements.
  public final void rebindActionBar()
  {
    this.mPageFragmentHost.updateActionBarTitle(this.mBreadcrumb);
    this.mPageFragmentHost.updateCurrentBackendId(this.mBackendId, this.mDataIsReady);
    if (isSearchBoxOnly())
    {
      localResources = this.mContext.getResources();
      if ((FinskyApp.get().getExperiments().isH20StoreEnabled()) && (localResources.getBoolean(2131427336)))
      {
        // wide layout: derive the search box width from the header dimensions
        i = IntMath.heightToWidth$4868d301(localResources.getDimensionPixelSize(2131492939) - localResources.getDimensionPixelSize(2131493027));
        j = 2 * localResources.getDimensionPixelOffset(2131492893);
        this.mPageFragmentHost.overrideSearchBoxWidth(i - j);
      }
      this.mPageFragmentHost.switchToSearchBoxOnlyActionBar(1);
    }
    while (!this.mDataIsReady)
    {
      Resources localResources;
      int i;
      int j;
      return;
    }
    this.mPageFragmentHost.switchToRegularActionBar();
  }
public final void rebindViews()
{
ClientMutationCache localClientMutationCache;
boolean bool2;
if ((this.mBrowseData != null) && (this.mBrowseData.isFamilySafeSearchModeDefined()))
{
localClientMutationCache = FinskyApp.get().getClientMutationCache(FinskyApp.get().getCurrentAccountName());
DfeBrowse localDfeBrowse = this.mBrowseData;
if ((!localDfeBrowse.isFamilySafeSearchModeDefined()) || (!localDfeBrowse.mBrowseResponse.isFamilySafe)) {
break label1172;
}
bool2 = true;
}
for (;;)
{
localClientMutationCache.mFamilySafeSearchMode = bool2;
final Resources localResources1 = getResources();
this.mHeaderListLayout = ((FinskyHeaderListLayout)this.mDataView);
this.mHeaderListLayout.configure(new PlayHeaderListConfigurator(this.mHeaderListLayout.getContext(), this.mBackgroundType));
this.mHeaderListLayout.setContentViewId(2131755589);
this.mHeaderListLayout.setBackgroundViewForTouchPassthrough(this.mBackgroundViewGroup);
if (this.mBackgroundType != 1)
{
this.mHeaderListLayout.setAlwaysUseFloatingBackground(true);
this.mHeaderListLayout.setFloatingControlsBackground(getBackgroundColorDrawable());
}
this.mBackendId = this.mBrowseData.mBrowseResponse.backendId;
String str = this.mBrowseData.mBrowseResponse.title;
Toc.CorpusMetadata localCorpusMetadata;
label207:
LayoutInflater localLayoutInflater1;
final int i;
label392:
boolean bool1;
label563:
LayoutInflater localLayoutInflater2;
final int i2;
FinskyViewPager localFinskyViewPager;
AccelerateDecelerateInterpolator localAccelerateDecelerateInterpolator;
if (str == null)
{
localCorpusMetadata = this.mDfeToc.getCorpus(this.mBackendId);
if (localCorpusMetadata == null) {
str = "";
}
}
else
{
this.mBreadcrumb = str;
rebindActionBar();
if ((!TextUtils.isEmpty(this.mBreadcrumb)) && (this.mNavigationManager != null) && (this.mNavigationManager.isOnBrowsePage())) {
UiUtils.sendAccessibilityEventWithText(this.mContext, this.mBreadcrumb, this.mView);
}
localLayoutInflater1 = getActivity().getLayoutInflater();
this.mTabbedAdapter = new TabbedAdapter(this.mContext, localLayoutInflater1, this.mNavigationManager, this.mDfeToc, this.mDfeApi, FinskyApp.get().getClientMutationCache(FinskyApp.get().getCurrentAccountName()), this.mUserContext, this.mBitmapLoader, this.mBrowseData.mBrowseResponse.browseTab, this.mBrowseData.mBrowseResponse.quickLink, this.mBrowseData.mBrowseResponse.quickLinkTabIndex, this.mBrowseData.mBrowseResponse.quickLinkFallbackTabIndex, this.mBackendId, (ObjectMap)this.mFragmentObjectMap.get("TabbedBrowseFragment.AdapterState"), this, this.mActionBarController, this, this);
if (this.mTabbedAdapter.getCount() <= 1) {
break label1211;
}
i = 0;
this.mTabStrip.setAnimateOnTabClick(hasFixedTabs());
if ((hasFixedTabs()) || (this.mHasHighlightsPager))
{
FinskyTabStrip localFinskyTabStrip = this.mTabStrip;
localFinskyTabStrip.mEnableFixedTabs = true;
Resources localResources2 = localFinskyTabStrip.getResources();
localFinskyTabStrip.setSelectedUnderlineThickness(localResources2.getDimensionPixelSize(2131493304));
localFinskyTabStrip.mUseFixedTabWideLayout = localResources2.getBoolean(2131427334);
if (localFinskyTabStrip.mUseFixedTabWideLayout)
{
View localView = localFinskyTabStrip.findViewById(2131755916);
FrameLayout.LayoutParams localLayoutParams = (FrameLayout.LayoutParams)localView.getLayoutParams();
localLayoutParams.gravity = 17;
localView.setLayoutParams(localLayoutParams);
}
localFinskyTabStrip.requestLayout();
this.mTabStrip.notifyPagerAdapterChanged();
this.mHeaderListLayout.setAlwaysUseFloatingBackground(false);
ControlsContainerBackgroundCoordinator localControlsContainerBackgroundCoordinator = this.mControlsContainerBackgroundCoordinator;
localControlsContainerBackgroundCoordinator.mHeaderListLayout = this.mHeaderListLayout;
localControlsContainerBackgroundCoordinator.updateBackgroundHeightAndFades();
}
FinskyHeaderListLayout localFinskyHeaderListLayout1 = this.mHeaderListLayout;
if (this.mBackendId == 9) {
break label1217;
}
bool1 = true;
localFinskyHeaderListLayout1.setShouldUseScrollLocking(bool1);
this.mViewPager = ((FinskyViewPager)this.mDataView.findViewById(2131755589));
this.mViewPager.setAdapter(this.mTabbedAdapter);
this.mViewPager.setPageMargin(localResources1.getDimensionPixelSize(2131493528));
this.mHeaderListLayout.mTabStrip.notifyPagerAdapterChanged();
this.mHeaderListLayout.setOnPageChangeListener(this);
this.mHeaderListLayout.setOnTabSelectedListener(this);
if (!hasFixedTabs()) {
this.mHeaderListLayout.setStatusBarColors(localResources1.getColor(2131689730), CorpusResourceUtils.getPrimaryColor(this.mContext, this.mBackendId));
}
localLayoutInflater2 = LayoutInflater.from(getContext());
if ((this.mHasHighlightsPager) || (hasFixedTabs()))
{
this.mViewPager.mAreTouchEventsDisabled = true;
this.mHeaderListLayout.setHeaderMode(0);
}
if (!this.mHasHighlightsPager) {
break label1261;
}
this.mHighlightsPagerAdapter = new HighlightsPagerAdapter(this.mDfeApi, this.mHighlightsData, this.mContext, localLayoutInflater1, this.mBitmapLoader, this.mNavigationManager, this.mTabbedAdapter, (ObjectMap)this.mFragmentObjectMap.get("TabbedBrowseFragment.HighlightsState"));
this.mHighlightsPager = ((FinskyViewPager)localLayoutInflater2.inflate(2130968787, this.mBackgroundViewGroup, false));
if (i != 2) {
break label1223;
}
i2 = 0;
this.mHighlightsPager.setMeasureOverrider(new FinskyViewPager.MeasureOverrider()
{
public final Pair<Integer, Integer> overrideMeasure$2816499f(int paramAnonymousInt)
{
int i = localResources1.getDimensionPixelSize(2131493027);
if (localResources1.getBoolean(2131427336)) {}
for (int j = localResources1.getDimensionPixelSize(2131492939);; j = i + 9 * View.MeasureSpec.getSize(paramAnonymousInt) / 16)
{
TabbedBrowseFragment.access$002(TabbedBrowseFragment.this, j + i2);
if (InsetsFrameLayout.SUPPORTS_IMMERSIVE_MODE) {
TabbedBrowseFragment.access$020(TabbedBrowseFragment.this, UiUtils.getStatusBarHeight(TabbedBrowseFragment.this.mContext));
}
TabbedBrowseFragment.this.mHeaderListLayout.setTabMode(i, TabbedBrowseFragment.this.mLeadingSpacerHeight);
if (TabbedBrowseFragment.this.mHighlightsOverlay != null) {
TabbedBrowseFragment.this.mHighlightsOverlay.setHighlightWidth(IntMath.heightToWidth$4868d301(j - i));
}
return Pair.create(Integer.valueOf(paramAnonymousInt), Integer.valueOf(View.MeasureSpec.makeMeasureSpec(j, 1073741824)));
}
}
});
this.mHighlightsPager.setAdapter(this.mHighlightsPagerAdapter);
this.mHighlightsPager.setPageTransformer$382b7817(new CrossfadeTransformer(this.mHighlightsPagerAdapter));
this.mHighlightsPager.mAreTouchEventsDisabled = true;
localFinskyViewPager = this.mHighlightsPager;
localAccelerateDecelerateInterpolator = new AccelerateDecelerateInterpolator();
}
try
{
Field localField = ViewPager.class.getDeclaredField("mScroller");
localField.setAccessible(true);
localField.set(localFinskyViewPager, new FinskyViewPagerScroller(localFinskyViewPager.getContext(), localAccelerateDecelerateInterpolator, 500));
FinskyHeaderListLayout localFinskyHeaderListLayout3 = this.mHeaderListLayout;
if (this.mHighlightsPagerAdapter.getCount() > 1)
{
i3 = 3;
localFinskyHeaderListLayout3.setHeaderShadowMode(i3);
this.mHeaderListLayout.setAlwaysUseFloatingBackground(false);
this.mHighlightsPager.setClickable(true);
this.mHeaderListLayout.setBackgroundViewForTouchPassthrough(this.mHighlightsPager);
this.mBackgroundViewGroup.addView(this.mHighlightsPager);
if ((this.mHighlightsOverlay == null) && (this.mHeroViewRoot != null))
{
this.mHighlightsOverlay = ((PlayHighlightsOverlayView)localLayoutInflater1.inflate(2130968991, this.mHeroViewRoot, false));
this.mHeroViewRoot.getLayoutParams().width = -1;
this.mHeroViewRoot.getLayoutParams().height = -1;
this.mHeroViewRoot.addView(this.mHighlightsOverlay);
this.mHighlightsOverlay.setPadding(0, 0, 0, i2);
this.mHighlightsOverlay.onPagesConfigurationChanged(this.mHighlightsPagerAdapter.getHighlightBannerCountForPage(0));
this.mHighlightsPagerAdapter.setHighlightsPageListener(this.mHighlightsOverlay);
}
if (this.mBackgroundType != 1) {
this.mHeaderListLayout.setFloatingControlsBackground(getBackgroundColorDrawable());
}
if (this.mRestorePrevSelectedTabLogicalIndex == -1) {
break label1353;
}
m = this.mRestorePrevSelectedTabLogicalIndex;
this.mRestorePrevSelectedTabLogicalIndex = -1;
int n = BidiPagingHelper.getLogicalPosition(this.mTabbedAdapter, this.mViewPager.getCurrentItem());
i1 = BidiPagingHelper.getVisualPosition(this.mTabbedAdapter, m);
if (n != m) {
break label1368;
}
onTabSelectedInternal(i1, false);
this.mLayoutSwitcher.switchToDataMode();
this.mAttachToFrameRunnable.run();
return;
label1172:
bool2 = false;
continue;
if (!this.mNavigationManager.canGoUp())
{
str = this.mContext.getString(2131362289);
break label207;
}
str = localCorpusMetadata.name;
break label207;
label1211:
i = 2;
break label392;
label1217:
bool1 = false;
break label563;
label1223:
i2 = localResources1.getDimensionPixelSize(2131493424);
}
}
catch (NoSuchFieldException localNoSuchFieldException)
{
for (;;)
{
int i1;
FinskyLog.wtf("Error setting animation parameters", new Object[] { localNoSuchFieldException });
continue;
int i3 = 2;
continue;
this.mHeroGraphicView = ((HeroGraphicView)localLayoutInflater2.inflate(2130968785, this.mBackgroundViewGroup, false));
this.mHeroGraphicView.setVisibility(4);
this.mBackgroundViewGroup.addView(this.mHeroGraphicView);
Context localContext = this.mContext;
int j = this.mHeaderListLayout.getActionBarHeight();
this.mLeadingSpacerHeight = FinskyHeaderListLayout.getMinimumHeaderHeight(localContext, i, 0, j);
FinskyHeaderListLayout localFinskyHeaderListLayout2 = this.mHeaderListLayout;
int k = this.mLeadingSpacerHeight;
localFinskyHeaderListLayout2.setTabMode(i, k);
continue;
int m = this.mBrowseData.mBrowseResponse.landingTabIndex;
continue;
this.mViewPager.setCurrentItem(i1, false);
onTabSelectedInternal(i1, false);
}
}
catch (IllegalArgumentException localIllegalArgumentException)
{
label1353:
label1368:
break label1237;
}
catch (IllegalAccessException localIllegalAccessException)
{
label1237:
label1261:
break label1237;
}
}
}
// Decompiled source (JD-Core). Triggers the fragment's data load by advancing
// its internal loading state machine.
protected final void requestData()
{
    advanceState();
}
// Always defer attaching the data view; presumably it is attached explicitly
// later via mAttachToFrameRunnable.run() once tab setup completes — confirm.
protected final boolean shouldDelayAttachingDataView()
{
    return true;
}
/**
 * Contract for components that supply and configure an optional hero/background
 * view for this fragment (decompiled source — parameter semantics not visible here).
 */
public static abstract interface BackgroundViewConfigurator
{
    // Configures the hero view; the meaning of paramInt is not visible from here.
    public abstract void configureBackgroundView(HeroGraphicView paramHeroGraphicView, int paramInt);
    // Shadow mode constant consumed by the header-list layout.
    public abstract int getHeaderShadowMode();
    // Whether a background view should be created at all.
    public abstract boolean hasBackgroundView();
}
/**
 * Header-list configurator for the tabbed browse UI (decompiled source — JD-Core).
 * Supplies layout ids, colors, and sizing hints to FinskyHeaderListLayout.
 * Numeric resource ids below are raw values from decompilation; their symbolic
 * names are unknown from here.
 */
private final class PlayHeaderListConfigurator
    extends FinskyHeaderListLayout.FinskyConfigurator
{
    // Background mode supplied by the creator; drives getHeaderShadowMode().
    private final int mBackgroundMode;
    public PlayHeaderListConfigurator(Context paramContext, int paramInt)
    {
        super();
        this.mBackgroundMode = paramInt;
    }
    // Captures the background container so the fragment can add hero/highlight views.
    protected final void addBackgroundView(LayoutInflater paramLayoutInflater, ViewGroup paramViewGroup)
    {
        TabbedBrowseFragment.access$602(TabbedBrowseFragment.this, paramViewGroup);
    }
    // Inflates the main content layout into the provided container.
    protected final void addContentView(LayoutInflater paramLayoutInflater, ViewGroup paramViewGroup)
    {
        paramLayoutInflater.inflate(2130968782, paramViewGroup);
    }
    // Stores the hero-view root for later overlay insertion.
    protected final void addHeroView$39fc0c(ViewGroup paramViewGroup)
    {
        TabbedBrowseFragment.access$902(TabbedBrowseFragment.this, paramViewGroup);
    }
    protected final boolean allowImmersiveBackground()
    {
        return true;
    }
    protected final boolean alwaysUseFloatingBackground()
    {
        return false;
    }
    // Background scrolls at 80% of content speed for a parallax effect.
    protected final float getBackgroundViewParallaxRatio()
    {
        return 0.8F;
    }
    // Highlights pager gets a dedicated protection mode; otherwise defer to default.
    protected final int getContentProtectionMode()
    {
        if (TabbedBrowseFragment.this.mHasHighlightsPager) {
            return 1;
        }
        return super.getContentProtectionMode();
    }
    protected final int getHeaderMode()
    {
        return 1;
    }
    // Shadow mode depends on the background mode passed at construction.
    protected final int getHeaderShadowMode()
    {
        if (this.mBackgroundMode != 1) {
            return 3;
        }
        return 2;
    }
    // Experiment-gated hero animation variant.
    protected final int getHeroAnimationMode()
    {
        if (FinskyApp.get().getExperiments().isEnabled(12603505L)) {
            return 3;
        }
        return super.getHeroAnimationMode();
    }
    public final int getLeadingSpacerHeight()
    {
        return TabbedBrowseFragment.this.mLeadingSpacerHeight;
    }
    protected final int getListViewId()
    {
        return 2131755329;
    }
    protected final int getStatusBarOverlayColor()
    {
        return this.mContext.getResources().getColor(2131689730);
    }
    // Status bar underlay tinted to the corpus (backend) primary color.
    protected final int getStatusBarUnderlayColor()
    {
        return CorpusResourceUtils.getPrimaryColor(this.mContext, TabbedBrowseFragment.this.mBackendId);
    }
    protected final int getTabPaddingMode()
    {
        return 1;
    }
    // Taller toolbar when fixed tabs or the highlights pager are present.
    protected final int getToolBarHeight(Context paramContext)
    {
        if ((TabbedBrowseFragment.this.hasFixedTabs()) || (TabbedBrowseFragment.this.mHasHighlightsPager)) {
            return PlaySearchToolbar.getToolbarHeight(this.mContext) + TabbedBrowseFragment.this.getResources().getDimensionPixelSize(2131493013);
        }
        return super.getToolBarHeight(paramContext);
    }
    protected final int getViewPagerId()
    {
        return 2131755589;
    }
    protected final boolean hasViewPager()
    {
        return true;
    }
}
}
/* Location: F:\apktool\apktool\Google_Play_Store6.0.5\classes-dex2jar.jar
* Qualified Name: com.google.android.finsky.activities.TabbedBrowseFragment
* JD-Core Version: 0.7.0.1
*/
| |
package com.popdeem.sdk.uikit.fragment.multilogin;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.content.res.TypedArray;
import android.location.Location;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AlertDialog;
import android.support.v7.view.ContextThemeWrapper;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.facebook.AccessToken;
import com.facebook.CallbackManager;
import com.facebook.FacebookCallback;
import com.facebook.FacebookException;
import com.facebook.login.LoginManager;
import com.facebook.login.LoginResult;
import com.google.android.gms.location.LocationListener;
import com.popdeem.sdk.PDLoginCallback;
import com.popdeem.sdk.R;
import com.popdeem.sdk.core.PopdeemSDK;
import com.popdeem.sdk.core.api.PDAPICallback;
import com.popdeem.sdk.core.api.PDAPIClient;
import com.popdeem.sdk.core.api.abra.PDAbraConfig;
import com.popdeem.sdk.core.api.abra.PDAbraLogEvent;
import com.popdeem.sdk.core.api.abra.PDAbraProperties;
import com.popdeem.sdk.core.interfaces.PDFragmentCommunicator;
import com.popdeem.sdk.core.location.PDLocationManager;
import com.popdeem.sdk.core.model.PDInstagramResponse;
import com.popdeem.sdk.core.model.PDReward;
import com.popdeem.sdk.core.model.PDUser;
import com.popdeem.sdk.core.realm.PDRealmGCM;
import com.popdeem.sdk.core.realm.PDRealmUserDetails;
import com.popdeem.sdk.core.utils.PDLog;
import com.popdeem.sdk.core.utils.PDPreferencesUtils;
import com.popdeem.sdk.core.utils.PDSocialUtils;
import com.popdeem.sdk.core.utils.PDUtils;
import com.popdeem.sdk.uikit.fragment.PDUIInstagramLoginFragment;
import com.popdeem.sdk.uikit.fragment.PDUIRewardsFragment;
import com.popdeem.sdk.uikit.fragment.PDUISocialLoginFragment;
import com.popdeem.sdk.uikit.utils.PDUIDialogUtils;
import com.twitter.sdk.android.core.Callback;
import com.twitter.sdk.android.core.Result;
import com.twitter.sdk.android.core.TwitterAuthConfig;
import com.twitter.sdk.android.core.TwitterException;
import com.twitter.sdk.android.core.TwitterSession;
import com.twitter.sdk.android.core.identity.TwitterLoginButton;
import java.util.ArrayList;
import java.util.Arrays;
import io.realm.Realm;
/**
* Created by dave on 21/04/2017.
* Project: Popdeem-SDK-Android
*/
public class PDUISocialMultiLoginFragment_V2 extends Fragment implements View.OnClickListener {
// Log tag used throughout this fragment.
private static String TAG = PDUISocialMultiLoginFragment_V2.class.getSimpleName();
// Root view inflated in onCreateView().
private View view;
// Request code for the runtime location permission dialog.
private final int LOCATION_PERMISSION_REQUEST = 90;
// Wrapper around Google Play location services; started after a social login.
private PDLocationManager mLocationManager;
// Spinner shown while a login/registration round-trip is in flight.
private ProgressBar mProgressFacebook;
// Container for the progress UI; also used as the "login in progress" flag via visibility.
private LinearLayout progressView;
// NOTE(review): the two fields below are never touched in the visible code — possibly dead.
private TextView mRewardsInfoTextView;
private Button mContinueButton;
// The three social network entry buttons, wired in setupSocialButtons().
private Button mFacebookLoginButton;
private Button mTwitterLoginButton;
private Button mInstaLoginButton;
// Facebook SDK callback dispatcher created in registerCallBacks().
private CallbackManager mCallbackManager;
// Whether to re-prompt once after a location permission denial.
private boolean mAskForPermission = true;
// Exactly one of these is true once the user picks a network in onClick().
private boolean isFacebook = false, isTwitter = false, isInstagram = false;
private boolean isInternetAvailable= false;
// Last location fix, forwarded to the Popdeem backend with the login.
private Location location;
private PDFragmentCommunicator communicator; //used for certain instances where login does not occur at the beginning
// Optional reward shown on the takeover; drives the header image/text in onCreateView().
private ArrayList<PDReward> rewards;
// Re-entrancy guard for doLogin().
private boolean doingLogin = false;
// Fallback timer: doLogin() runs after 3s even if no location fix arrives.
private Handler handler;
private Runnable runny;
// Ensures updateUser() only runs once per successful registration.
private boolean loggedIn = false;
// Fragment enter/exit animation resources supplied via newInstance(); -1 = none.
private int animIn = -1;
private int animOut = -1;
// Host-app callbacks notified on login success/cancel.
private PDLoginCallback callback;
private PDLoginListener loginListener;
/**
 * Listener alternative to PDLoginCallback for observing the outcome of this
 * login takeover. Invoked from the back-button handler and the permission
 * denial paths in this fragment.
 */
public interface PDLoginListener {
    void onPDLoginSuccess();
    void onPDLoginCancel();
}
// Required public no-arg constructor for framework re-instantiation.
// Use one of the newInstance() factories to create this fragment.
public PDUISocialMultiLoginFragment_V2() {
}
/**
 * Factory for the login takeover when no reward context is available.
 *
 * @param callBack            notified of login success/cancel
 * @param isInternetAvailable false switches the UI to its offline variant
 */
public static PDUISocialMultiLoginFragment_V2 newInstance(PDLoginCallback callBack, boolean isInternetAvailable) {
    final PDUISocialMultiLoginFragment_V2 fragment = new PDUISocialMultiLoginFragment_V2();
    fragment.animIn = PopdeemSDK.animIn;
    fragment.animOut = PopdeemSDK.animOut;
    fragment.callback = callBack;
    fragment.isInternetAvailable = isInternetAvailable;
    return fragment;
}
/**
 * Factory for the login takeover with an attached reward list (used to show
 * the reward image/description on the takeover screen).
 *
 * @param rewards             rewards to scan for a "social_login" incentive
 * @param callBack            notified of login success/cancel
 * @param isInternetAvailable false switches the UI to its offline variant
 */
public static PDUISocialMultiLoginFragment_V2 newInstance(ArrayList<PDReward> rewards, PDLoginCallback callBack, boolean isInternetAvailable) {
    // Delegate to the no-reward factory so animation/callback wiring lives in one place.
    PDUISocialMultiLoginFragment_V2 frag = newInstance(callBack, isInternetAvailable);
    frag.rewards = rewards;
    return frag;
}
/**
 * Uses the SDK-configured enter/exit animations when both are set; otherwise
 * falls back to the framework default.
 */
@Override
public Animation onCreateAnimation(int transit, boolean enter, int nextAnim) {
    final boolean hasCustomAnimations = animIn > 0 && animOut > 0;
    if (!hasCustomAnimations) {
        return super.onCreateAnimation(transit, enter, nextAnim);
    }
    final int animRes = enter ? animIn : animOut;
    return AnimationUtils.loadAnimation(getActivity(), animRes);
}
/**
 * Inflates the multi-login takeover, logs analytics, wires the social buttons
 * and back button, and (when a "social_login" reward is present) shows its
 * image and text. Offline mode swaps in the no-internet strings.
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    view = inflater.inflate(R.layout.fragment_pd_social_multi_login_v2, container, false);
    TextView titleTextView = view.findViewById(R.id.pd_social_rewards_title_text_view);
    TextView infoTextView = view.findViewById(R.id.pd_social_rewards_info_text_view);
    Log.i(TAG, "onCreateView: " + PDPreferencesUtils.getLoginUsesCount(getActivity()));
    // Always log the page view; additionally log the takeover event for the
    // first few (<= 5) login prompts.
    PDAbraLogEvent.log(PDAbraConfig.ABRA_EVENT_PAGE_VIEWED, new PDAbraProperties.Builder()
            .add(PDAbraConfig.ABRA_PROPERTYNAME_SOURCE_PAGE, PDAbraConfig.ABRA_PROPERTYVALUE_PAGE_LOGINTAKEOVER)
            .create());
    if (PDPreferencesUtils.getLoginUsesCount(getActivity()) <= 5) {
        PDAbraLogEvent.log(PDAbraConfig.ABRA_EVENT_SHOW_LOGIN_TAKEOVER + " " + PDPreferencesUtils.getLoginUsesCount(getActivity()), new PDAbraProperties.Builder()
                .add(PDAbraConfig.ABRA_PROPERTYNAME_SOURCE_PAGE, PDAbraConfig.ABRA_PROPERTYVALUE_PAGE_LOGINTAKEOVER)
                .create());
    }
    registerCallBacks();
    setupBackButton();
    setupSocialButtons();
    // Reward strip hidden unless a matching "social_login" reward is found below.
    view.findViewById(R.id.pd_login_reward_layout).setVisibility(View.GONE);
    if (rewards != null) {
        final ImageView logoImageView = (ImageView) view.findViewById(R.id.pd_reward_star_image_view);
        logoImageView.setVisibility(View.GONE);
        // Show the first reward whose action is "social_login", then stop.
        for (int i = 0; i < rewards.size(); i++) {
            if (rewards.get(i).getAction().equalsIgnoreCase("social_login")) {
                final String imageUrl = rewards.get(i).getCoverImage();
                // Missing/placeholder cover image -> fall back to the star icon.
                if (imageUrl == null || imageUrl.isEmpty() || imageUrl.contains("default")) {
                    Glide.with(getActivity())
                            .load(R.drawable.pd_ui_star_icon)
                            .error(R.drawable.pd_ui_star_icon)
                            .dontAnimate()
                            .placeholder(R.drawable.pd_ui_star_icon)
                            .into(logoImageView);
                } else {
                    Glide.with(getActivity())
                            .load(imageUrl)
                            .error(R.drawable.pd_ui_star_icon)
                            .dontAnimate()
                            .placeholder(R.drawable.pd_ui_star_icon)
                            .into(logoImageView);
                }
                // NOTE(review): rules go into the info view and description into the
                // title view, despite the swapped comments below — confirm intended.
                // Reward Description
                infoTextView.setText(rewards.get(i).getRules());
                // Rules
                titleTextView.setText(rewards.get(i).getDescription());
                if (rewards.get(i).getRules() == null || rewards.get(i).getRules().isEmpty()) {
                    titleTextView.setVisibility(View.GONE);
                }
                view.findViewById(R.id.pd_login_reward_layout).setVisibility(View.VISIBLE);
                break;
            }
        }
    }
    // Rotates a counter used to vary the header image between showings
    // (the selection code itself is currently commented out).
    SharedPreferences sp = getActivity().getSharedPreferences("popdeem_prefs", Activity.MODE_PRIVATE);
    int variationNumImages = sp.getInt("variation_num_images_login", 0);
    TypedArray imagesArray = getResources().obtainTypedArray(R.array.pd_login_images);
    ImageView infoImage = view.findViewById(R.id.pd_social_login_header_image_view);
    // if (imagesArray.length() == 1) {
    // int id = imagesArray.getResourceId(0, R.drawable.pd_social_login_header);
    // loginImage.setImageResource(id);
    // } else if (imagesArray.length() > 1) {
    // int showNum = variationNumImages % imagesArray.length();
    // loginImage.setImageResource(R.drawable.pd_social_login_image_1);
    // }
    if(!isInternetAvailable){
        titleTextView.setText(R.string.pd_sociallogin_title_no_internet);
        infoTextView.setText(R.string.pd_sociallogin_body_no_internet);
    }
    variationNumImages++;
    SharedPreferences.Editor editor = sp.edit();
    editor.putInt("variation_num_images_login", variationNumImages);
    // NOTE(review): commit() blocks the calling thread; apply() is preferred here.
    editor.commit();
    // setTitleAndsBody(getActivity());
    return view;
}
/**
 * Sets the takeover's title and body from the configured A/B-test string
 * arrays, rotating through the variations on each call, or falls back to the
 * static defaults when no variations are configured.
 *
 * Fixes over the previous version: removed a stray empty statement, guarded
 * against a title array that is empty or shorter than the body array
 * (previously an ArrayIndexOutOfBoundsException), and switched the
 * SharedPreferences write to the non-blocking apply().
 *
 * @param context used to resolve resources and preferences
 */
public void setTitleAndsBody(Context context) {
    TextView titleView = view.findViewById(R.id.pd_social_rewards_title_text_view);
    TextView infoView = view.findViewById(R.id.pd_social_rewards_info_text_view);
    String title;
    String body;
    String[] stringsArrayTitle = context.getResources().getStringArray(R.array.pd_sociallogin_title);
    String[] stringsArrayBody = context.getResources().getStringArray(R.array.pd_sociallogin_body);
    int numVar = stringsArrayBody.length;
    SharedPreferences sp = context.getSharedPreferences("popdeem_prefs", Activity.MODE_PRIVATE);
    int variationNum = sp.getInt("variation_num_login_text", 0);
    if (numVar == 0 || stringsArrayTitle.length == 0) {
        // No variations configured: use the static default copy.
        title = getActivity().getResources().getString(R.string.pd_social_login_tagline_text);
        body = getActivity().getResources().getString(R.string.pd_social_login_body_text);
    } else if (numVar == 1) {
        title = stringsArrayTitle[0];
        body = stringsArrayBody[0];
    } else {
        int showNum = variationNum % numVar;
        // Clamp in case the title array has fewer entries than the body array.
        title = stringsArrayTitle[Math.min(showNum, stringsArrayTitle.length - 1)];
        body = stringsArrayBody[showNum];
    }
    titleView.setText(title);
    infoView.setText(body);
    variationNum++;
    SharedPreferences.Editor editor = sp.edit();
    editor.putInt("variation_num_login_text", variationNum);
    editor.apply(); // asynchronous; commit() would block the UI thread
}
// No-op override: only forwards to super. Kept, presumably, as a hook point —
// could be removed without behavior change.
@Override
public void onResume() {
    super.onResume();
}
////////////////////////////////////////////////////
// Facebook Callbacks
//////////////////////////////////////////////////
/**
 * Creates the location manager and Facebook CallbackManager and registers the
 * Facebook login callback. On success the flow continues by requesting a
 * location fix; cancel/error paths hide the progress UI and show an alert.
 */
private void registerCallBacks() {
    mLocationManager = new PDLocationManager(getActivity());
    mCallbackManager = CallbackManager.Factory.create();
    //Facebook specific callback - starts location
    LoginManager.getInstance().registerCallback(mCallbackManager, new FacebookCallback<LoginResult>() {
        @Override
        public void onSuccess(LoginResult loginResult) {
            // Facebook auth succeeded; show progress and continue with location.
            mProgressFacebook.setVisibility(View.VISIBLE);
            progressView.setVisibility(View.VISIBLE);
            mFacebookLoginButton.setText(R.string.pd_log_out_facebook_text);
            PDLog.d(PDUISocialMultiLoginFragment_V2.class, "Facebook Login onSuccess(): " + loginResult.getAccessToken().getToken());
            checkForLocationPermissionAndStartLocationManager();
        }
        @Override
        public void onCancel() {
            PDAbraLogEvent.log(PDAbraConfig.ABRA_EVENT_CANCELLED_FACEBOOK_LOGIN, null);
            // NOTE(review): log tag references PDUISocialLoginFragment (not _V2) —
            // looks like a copy/paste leftover; confirm.
            PDLog.d(PDUISocialLoginFragment.class, "Facebook Login onCancel()");
            progressView.setVisibility(View.GONE);
            new AlertDialog.Builder(new ContextThemeWrapper(getActivity(), R.style.AlertDialogCustom))
                    .setTitle(R.string.pd_common_facebook_login_cancelled_title_text)
                    .setMessage(R.string.pd_common_facebook_login_cancelled_message_text)
                    .setPositiveButton(android.R.string.ok, null)
                    .create()
                    .show();
        }
        @Override
        public void onError(FacebookException error) {
            PDLog.d(PDUISocialLoginFragment.class, "Facebook Login onError(): " + error.getMessage());
            progressView.setVisibility(View.GONE);
            new AlertDialog.Builder(new ContextThemeWrapper(getActivity(), R.style.AlertDialogCustom))
                    .setTitle(R.string.pd_common_sorry_text)
                    .setMessage(error.getMessage())
                    .setPositiveButton(android.R.string.ok, null)
                    .create()
                    .show();
        }
    });
}
/**
 * Shared callback for all three social registration calls. Success persists
 * the user and continues with updateUser(); failure logs the user out of
 * Facebook (when applicable), restores the button state, and shows an alert.
 */
private final PDAPICallback<PDUser> PD_API_CALLBACK = new PDAPICallback<PDUser>() {
    @Override
    public void success(PDUser user) {
        PDLog.d(PDUISocialMultiLoginFragment_V2.class, "registered with Social A/C: " + user.toString());
        PDUtils.updateSavedUser(user);
        updateUser();
        PDAbraLogEvent.log(PDAbraConfig.ABRA_EVENT_LOGIN, new PDAbraProperties.Builder()
                .add("Source", "Login Takeover")
                .create());
        PDAbraLogEvent.onboardUser();
    }
    @Override
    public void failure(int statusCode, Exception e) {
        PDLog.d(PDUISocialMultiLoginFragment_V2.class, "failed register with social a/c: statusCode=" + statusCode + ", message=" + e.getMessage());
        // Undo the partial Facebook session so the user can retry cleanly.
        if (isFacebook)
            LoginManager.getInstance().logOut();
        // Restore the pre-login button/progress state.
        mProgressFacebook.setVisibility(View.GONE);
        progressView.setVisibility(View.GONE);
        mFacebookLoginButton.setVisibility(View.VISIBLE);
        mTwitterLoginButton.setVisibility(View.VISIBLE);
        mInstaLoginButton.setVisibility(View.VISIBLE);
        mFacebookLoginButton.setText(R.string.pd_log_in_with_facebook_text);
        mTwitterLoginButton.setText(R.string.pd_log_in_with_twitter_text);
        mInstaLoginButton.setText(R.string.pd_log_in_with_instagram_text);
        new AlertDialog.Builder(new ContextThemeWrapper(getActivity(), R.style.AlertDialogCustom))
                .setTitle(R.string.pd_common_sorry_text)
                .setMessage("An error occurred while registering. Please try again")
                .setPositiveButton(android.R.string.ok, null)
                .create()
                .show();
    }
};
////////////////////////////////////////////////////
// Social Login Buttons
///////////////////////////////////////////////////
/**
 * Finds the three social login buttons plus the progress widgets and wires
 * this fragment up as their click listener. Buttons are hidden when offline
 * and removed entirely for networks the host app has not enabled.
 */
private void setupSocialButtons() {
    mFacebookLoginButton = (Button) view.findViewById(R.id.pd_facebook_login_button);
    mTwitterLoginButton = (Button) view.findViewById(R.id.pd_twitter_login_button);
    mInstaLoginButton = (Button) view.findViewById(R.id.pd_instagram_login_button);
    mFacebookLoginButton.setOnClickListener(this);
    mTwitterLoginButton.setOnClickListener(this);
    mInstaLoginButton.setOnClickListener(this);
    // No connectivity: keep the buttons laid out but not visible.
    if (!isInternetAvailable) {
        mFacebookLoginButton.setVisibility(View.INVISIBLE);
        mTwitterLoginButton.setVisibility(View.INVISIBLE);
        mInstaLoginButton.setVisibility(View.INVISIBLE);
    }
    // Drop networks not configured for this app (GONE overrides INVISIBLE).
    if (!PDSocialUtils.usesTwitter(getContext())) {
        mTwitterLoginButton.setVisibility(View.GONE);
    }
    if (!PDSocialUtils.usesInstagram(getContext())) {
        mInstaLoginButton.setVisibility(View.GONE);
    }
    if (!PDSocialUtils.usesFacebook()) {
        mFacebookLoginButton.setVisibility(View.GONE);
    }
    mProgressFacebook = (ProgressBar) view.findViewById(R.id.pd_progress_bar);
    progressView = (LinearLayout) view.findViewById(R.id.pd_progress_layout);
}
////////////////////////////////////////////////////
// Social Login Buttons Click Listeners
//////////////////////////////////////////////////
/**
 * Routes a tap on one of the three social buttons: records which network was
 * chosen in the isFacebook/isTwitter/isInstagram flags, then starts that
 * network's flow. Facebook goes through loginFacebook() (GPS check first);
 * Twitter and Instagram go straight to the location/permission step.
 */
@Override
public void onClick(View v) {
    final int ID = v.getId();
    if (ID == R.id.pd_facebook_login_button) {
        Log.i(TAG, "onClick: Facebook Login Selected");
        isFacebook = true;
        isTwitter = false;
        isInstagram = false;
        loginFacebook();
    } else if (ID == R.id.pd_twitter_login_button) {
        Log.i(TAG, "onClick: Twitter Login Selected");
        isFacebook = false;
        isTwitter = true;
        isInstagram = false;
        checkForLocationPermissionAndStartLocationManager();
    } else if (ID == R.id.pd_instagram_login_button) {
        Log.i(TAG, "onClick: Instagram Login Selected");
        isFacebook = false;
        isTwitter = false;
        isInstagram = true;
        checkForLocationPermissionAndStartLocationManager();
        // Instagram additionally shows an informational permission dialog while
        // the location flow proceeds in the background.
        getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                final AlertDialog.Builder alertadd = new AlertDialog.Builder(getContext());
                LayoutInflater factory = LayoutInflater.from(getContext());
                final View view = factory.inflate(R.layout.alert_pd_permission, null);
                Button button = view.findViewById(R.id.pd_instagram_permission);
                alertadd.setView(view);
                final AlertDialog dialog = alertadd.create();
                dialog.show();
                // The single button simply dismisses the dialog.
                button.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        dialog.dismiss();
                    }
                });
            }
        });
    }
}
////////////////////////////////////////////////////
// Social Login Methods //
//////////////////////////////////////////////////
/**
* Facebook
*/
/**
 * Starts the Facebook read-permission login, but only when GPS is enabled;
 * otherwise prompts the user to open the location settings screen. The result
 * is delivered to the callback registered in registerCallBacks().
 */
private void loginFacebook() {
    if (PDLocationManager.isGpsEnabled(getActivity())) {
        mProgressFacebook.setVisibility(View.VISIBLE);
        progressView.setVisibility(View.VISIBLE);
        LoginManager.getInstance().logInWithReadPermissions(PDUISocialMultiLoginFragment_V2.this, Arrays.asList(PDSocialUtils.FACEBOOK_READ_PERMISSIONS));
    } else {
        // Location services are off: offer to open the system location settings.
        new AlertDialog.Builder(new ContextThemeWrapper(getActivity(), R.style.AlertDialogCustom))
                .setTitle(R.string.pd_location_disabled_title_text)
                .setMessage(R.string.pd_location_disabled_message_text)
                .setNegativeButton(android.R.string.no, null)
                .setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        PDLocationManager.startLocationSettingsActivity(getActivity());
                    }
                })
                .create().show();
    }
}
/**
* Twitter
*/
/**
 * Starts the Twitter OAuth flow; on success registers the session with the
 * Popdeem backend, on failure/empty result hides the progress UI and alerts.
 * Clears the doingLogin guard on every terminal path.
 */
private void loginTwitter() {
    Log.i(TAG, "loginTwitter: Activity = " + getActivity().getClass().getSimpleName());
    PDSocialUtils.loginWithTwitter(getActivity(), new Callback<TwitterSession>() {
        @Override
        public void success(Result<TwitterSession> result) {
            if (result.data != null) {
                Log.i(TAG, "success: Twitter Data present");
                registerTwitterAccount(result.data);
            } else {
                // Auth "succeeded" but returned no session — treat as failure.
                progressView.setVisibility(View.GONE);
                showGenericAlert();
            }
            // PDSocialUtils.client.cancelAuthorize();
            // PDSocialUtils.client = null;
            doingLogin = false;
        }
        @Override
        public void failure(TwitterException e) {
            // Guard: the fragment may have been detached while OAuth was in flight.
            if (getActivity() != null) {
                progressView.setVisibility(View.GONE);
                PDUIDialogUtils.showBasicOKAlertDialog(getActivity(), R.string.pd_claim_twitter_button_text, e.getMessage());
                // PDSocialUtils.client.cancelAuthorize();
                // PDSocialUtils.client = null;
                doingLogin = false;
            }
        }
    });
}
/**
 * Registers the Popdeem user with the credentials from a completed Twitter
 * session; the shared PD_API_CALLBACK handles the outcome.
 */
private void registerTwitterAccount(TwitterSession session) {
    final String token = session.getAuthToken().token;
    final String secret = session.getAuthToken().secret;
    final String twitterUserId = String.valueOf(session.getUserId());
    PDAPIClient.instance().registerUserwithTwitterParams(token, secret, twitterUserId, PD_API_CALLBACK);
}
/**
* Instagram
*/
/**
 * Opens the Instagram web-login fragment (full-screen, on the back stack)
 * when Instagram is configured. Success registers the account; error/cancel
 * hides the progress UI. Clears the doingLogin guard on every terminal path.
 */
private void loginInstagram() {
    if (PDSocialUtils.canUseInstagram()) {
        mProgressFacebook.setVisibility(View.VISIBLE);
        progressView.setVisibility(View.VISIBLE);
        PDUIInstagramLoginFragment fragment = PDUIInstagramLoginFragment.newInstance(new PDUIInstagramLoginFragment.PDInstagramLoginCallback() {
            @Override
            public void loggedIn(PDInstagramResponse response) {
                Log.i(TAG, "loggedIn: Instagram Logged In");
                registerInstagramAccount(response);
                doingLogin = false;
            }
            @Override
            public void error(String message) {
                mProgressFacebook.setVisibility(View.GONE);
                progressView.setVisibility(View.GONE);
                showGenericAlert();
                doingLogin = false;
            }
            @Override
            public void canceled() {
                doingLogin = false;
                mProgressFacebook.setVisibility(View.GONE);
                progressView.setVisibility(View.GONE);
            }
        });
        // Show the login fragment over the whole activity content.
        getFragmentManager().beginTransaction()
                .add(android.R.id.content, fragment, PDUIInstagramLoginFragment.getName())
                .addToBackStack(PDUIInstagramLoginFragment.getName())
                .commit();
    } else {
        // NOTE(review): doingLogin is NOT reset on this path — a later login
        // attempt via doLogin() would be blocked; confirm intended.
        PDLog.w(getClass(), "Could not initialise Instagram");
    }
}
/**
 * Registers the Popdeem user from an Instagram auth response. The three empty
 * strings are unused profile fields; the shared PD_API_CALLBACK handles the result.
 */
private void registerInstagramAccount(PDInstagramResponse instagramResponse) {
    final String userId = instagramResponse.getUserId();
    final String accessToken = instagramResponse.getAccessToken();
    PDAPIClient.instance().registerWithInstagramId(userId, accessToken, "", "", "", PD_API_CALLBACK);
}
//////////////////////////////////////////////////////////////////////////////////
// Back Button - just closes the login fragment to continue with a Non-Social User
//////////////////////////////////////////////////////////////////////////////////
/**
 * Wires the back/dismiss button: ignores the tap while a login is in progress
 * (progress view visible), otherwise logs the dismissal, pops this fragment,
 * and notifies both cancel callbacks. Offline mode relabels it "continue".
 */
private void setupBackButton() {
    Button backButton = (Button) view.findViewById(R.id.pd_social_login_back_button);
    if(!isInternetAvailable){
        backButton.setText(R.string.pd_connect_continue);
    }
    backButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Only dismissible when no login round-trip is running.
            if (progressView.getVisibility() == View.GONE) {
                PDAbraLogEvent.log(PDAbraConfig.ABRA_EVENT_CLICKED_CLOSE_LOGIN_TAKEOVER, new PDAbraProperties.Builder()
                        .add("Source", "Dismiss Button")
                        .create());
                removeThisFragment();
                if (callback != null) {
                    callback.onPDLoginCancel();
                }
                if (loginListener != null) {
                    loginListener.onPDLoginCancel();
                }
            }
        }
    });
}
////////////////////////////////////////////////////
// Pop this fragment off the stack //
//////////////////////////////////////////////////
/**
 * Pops this fragment (and everything above it) off the back stack, guarding
 * against the fragment no longer being attached to a live activity.
 */
public void removeThisFragment() {
    if (getActivity() == null || getActivity().getSupportFragmentManager() == null || !isAdded()) {
        return;
    }
    getActivity().getSupportFragmentManager().popBackStack(
            PDUISocialMultiLoginFragment_V2.class.getSimpleName(),
            FragmentManager.POP_BACK_STACK_INCLUSIVE);
}
////////////////////////////////////////////////////
// Location Methods //
//////////////////////////////////////////////////
/**
 * Ensures fine+coarse location permission before continuing the login flow.
 * Granted -> startLocationManagerAfterLogin(); otherwise shows the rationale
 * dialog when the system recommends it, else requests the permission directly.
 * The result lands in onRequestPermissionsResult().
 */
private void checkForLocationPermissionAndStartLocationManager() {
    if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED
            || ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
        if (shouldShowRequestPermissionRationale(Manifest.permission.ACCESS_FINE_LOCATION)) {
            // User previously denied: explain why the permission is needed first.
            new AlertDialog.Builder(new ContextThemeWrapper(getActivity(), R.style.AlertDialogCustom))
                    .setTitle(R.string.pd_location_permission_title_text)
                    .setMessage(R.string.pd_location_permission_rationale_text)
                    .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            requestPermissions(new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION},
                                    LOCATION_PERMISSION_REQUEST);
                        }
                    })
                    .create()
                    .show();
        } else {
            requestPermissions(new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION},
                    LOCATION_PERMISSION_REQUEST);
        }
    } else {
        startLocationManagerAfterLogin();
    }
}
/**
 * Starts location updates and arms a 3-second fallback: if no fix arrives in
 * time, doLogin() proceeds without a location; if a fix does arrive, the
 * fallback is cancelled and handleLocationUpdate() continues the flow.
 */
private void startLocationManagerAfterLogin() {
    //
    // mFacebookLoginButton.setVisibility(View.INVISIBLE);
    // mTwitterLoginButton.setVisibility(View.INVISIBLE);
    // mInstaLoginButton.setVisibility(View.INVISIBLE);
    if (handler == null) {
        handler = new Handler();
    }
    if (runny == null) {
        runny = new Runnable() {
            @Override
            public void run() {
                // Timeout path: continue the login without a fresh location fix.
                doLogin();
            }
        };
    }
    handler.postDelayed(runny, 3000);
    mLocationManager.startLocationUpdates(new LocationListener() {
        @Override
        public void onLocationChanged(Location location) {
            if (location != null) {
                handleLocationUpdate(location);
                // Got a fix in time: cancel the timeout fallback.
                handler.removeCallbacks(runny);
            }
        }
    });
}
/**
 * One fix is enough: stop listening, record and persist it, then continue the
 * login flow.
 */
private void handleLocationUpdate(final Location freshLocation) {
    mLocationManager.stop();
    this.location = freshLocation;
    PDUtils.updateSavedUserLocation(this.location);
    doLogin();
}
/**
 * Kicks off the backend login for whichever provider flag is set. Guarded by
 * doingLogin so the timeout runnable and the location callback cannot both
 * start a login.
 */
public void doLogin() {
    if (!doingLogin) {
        doingLogin = true;
        if (isFacebook) {
            PDAPIClient.instance().registerUserWithFacebook(AccessToken.getCurrentAccessToken().getToken(), AccessToken.getCurrentAccessToken().getUserId(), PD_API_CALLBACK);
            // NOTE(review): the guard is released immediately rather than in the
            // API callback, so a second trigger could re-enter while the Facebook
            // registration is still in flight — confirm intended.
            doingLogin = false;
        } else if (isTwitter) {
            loginTwitter();
        } else if (isInstagram) {
            loginInstagram();
        }
    }
}
/**
 * Handles the location permission result. Granted -> continue the login flow.
 * Denied -> re-ask once (mAskForPermission), then log out of Facebook and
 * dismiss the takeover.
 */
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    switch (requestCode) {
        case LOCATION_PERMISSION_REQUEST:
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                startLocationManagerAfterLogin();
            } else {
                // Permission was not given
                PDLog.d(getClass(), "permission for location not granted");
                PDAbraLogEvent.log(PDAbraConfig.ABRA_EVENT_DENIED_LOCATION, null);
                if (mAskForPermission) {
                    // First denial: ask once more via an "are you sure" dialog.
                    mAskForPermission = false;
                    new AlertDialog.Builder(new ContextThemeWrapper(getActivity(), R.style.AlertDialogCustom))
                            .setTitle(R.string.pd_location_permission_title_text)
                            .setMessage(R.string.pd_location_permission_are_you_sure_text)
                            .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    requestPermissions(new String[]{Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION},
                                            LOCATION_PERMISSION_REQUEST);
                                }
                            })
                            .setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    LoginManager.getInstance().logOut();
                                    removeThisFragment();
                                    // NOTE(review): success callbacks are fired on the
                                    // DENIAL path — onPDLoginCancel() looks intended; confirm.
                                    if (callback != null) {
                                        callback.onPDLoginSuccess();
                                    }
                                    if (loginListener != null) {
                                        loginListener.onPDLoginSuccess();
                                    }
                                }
                            })
                            .create()
                            .show();
                } else {
                    // Second denial: bail out silently on the next UI pass.
                    new Handler().post(new Runnable() {
                        @Override
                        public void run() {
                            LoginManager.getInstance().logOut();
                            removeThisFragment();
                            if (callback != null) {
                                callback.onPDLoginSuccess();
                            }
                            if (loginListener != null) {
                                loginListener.onPDLoginSuccess();
                            }
                        }
                    });
                }
            }
            break;
    }
}
////////////////////////////////////////////////////
// User Methods //
//////////////////////////////////////////////////
    /**
     * One-time post-login sync: pushes the user's social type, device (GCM)
     * token and last known location to the Popdeem API, then updates the view.
     * Guarded by {@code loggedIn} so it runs at most once per login.
     */
    private void updateUser() {
        if (!loggedIn) {
            loggedIn = true;
            Realm realm = Realm.getDefaultInstance();
            PDRealmGCM gcm = realm.where(PDRealmGCM.class).findFirst();
            // Empty token when GCM registration has not happened yet.
            String deviceToken = gcm == null ? "" : gcm.getRegistrationToken();
            PDRealmUserDetails userDetails = realm.where(PDRealmUserDetails.class).findFirst();
            if (userDetails == null) {
                // No persisted user — nothing to update; close Realm before bailing.
                realm.close();
                return;
            }
            // Independent ifs: if several flags were somehow set, the LAST match wins.
            String socialType = "";
            if (isFacebook)
                socialType = PDSocialUtils.SOCIAL_TYPE_FACEBOOK;
            if (isTwitter)
                socialType = PDSocialUtils.SOCIAL_TYPE_TWITTER;
            if (isInstagram)
                socialType = PDSocialUtils.SOCIAL_TYPE_INSTAGRAM;
            // Empty lat/lon strings are sent when no location fix was obtained.
            String lat = "";
            String lon = "";
            if (location != null) {
                lat = String.valueOf(location.getLatitude());
                lon = String.valueOf(location.getLongitude());
            }
            PDAPIClient.instance().updateUserLocationAndDeviceToken(socialType, userDetails.getId(), deviceToken, lat, lon, new PDAPICallback<PDUser>() {
                @Override
                public void success(PDUser user) {
                    PDLog.d(PDUISocialMultiLoginFragment_V2.class, "update user: " + user);
                    PDUtils.updateSavedUser(user);
                    // Send broadcast to any registered receivers that user has logged in
                    if (getActivity() != null) {
                        // PopdeemSDK.showHome = false;
                        getActivity().sendBroadcast(new Intent(PDUIRewardsFragment.PD_LOGGED_IN_RECEIVER_FILTER));
                    }
                    // Update view
                    updateViewAfterLogin();
                }
                @Override
                public void failure(int statusCode, Exception e) {
                    PDLog.d(PDUISocialMultiLoginFragment_V2.class, "failed update user: status=" + statusCode + ", e=" + e.getMessage());
                    // Send broadcast to any registered receivers that user has logged in
                    updateViewAfterLogin();
                }
            });
            // Realm is closed while the async API call may still be in flight; the
            // callback above does not touch this Realm instance, so that is safe.
            realm.close();
        }
    }
////////////////////////////////////////////////////
// Generic Methods //
//////////////////////////////////////////////////
private void showGenericAlert() {
if (getActivity() != null) {
PDUIDialogUtils.showBasicOKAlertDialog(getActivity(), R.string.pd_common_sorry_text, R.string.pd_common_something_wrong_text);
}
}
private void updateViewAfterLogin() {
// mProgressFacebook.setVisibility(View.GONE);
// mRewardsInfoTextView = (TextView) view.findViewById(R.id.pd_social_rewards_info_text_view);
// mRewardsInfoTextView.setText(R.string.pd_social_login_success_description_text);
//
// mFacebookLoginButton.setVisibility(View.GONE);
// mTwitterLoginButton.setVisibility(View.GONE);
// mInstaLoginButton.setVisibility(View.GONE);
//
// mContinueButton = (Button) view.findViewById(R.id.pd_social_continue_button);
// mContinueButton.setVisibility(View.VISIBLE);
// mContinueButton.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
// removeThisFragment();
// }
// });
removeThisFragment();
if (callback != null) {
callback.onPDLoginSuccess();
}
if (loginListener != null) {
loginListener.onPDLoginSuccess();
}
}
    /**
     * Routes social-SDK activity results: Twitter results are forwarded to the
     * shared TwitterAuthClient and toggle the progress indicators; every result
     * is also forwarded to Facebook's CallbackManager and the superclass.
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        Log.i(TAG, "onActivityResult");
        if (PDSocialUtils.getTwitterAuthClient() != null) {
            PDSocialUtils.getTwitterAuthClient().onActivityResult(requestCode, resultCode, data);
        }
        if (requestCode == TwitterAuthConfig.DEFAULT_AUTH_REQUEST_CODE) {
            // 0 == Activity.RESULT_CANCELED: user backed out, hide progress.
            if (resultCode == 0) {
                mProgressFacebook.setVisibility(View.GONE);
                progressView.setVisibility(View.GONE);
            } else {
                mProgressFacebook.setVisibility(View.VISIBLE);
                progressView.setVisibility(View.VISIBLE);
                Log.i(TAG, "onActivityResult: twitter Auth Config");
                // NOTE(review): a brand-new TwitterLoginButton is created solely to
                // forward this result — confirm this is intended rather than reusing
                // the button that initiated the login.
                TwitterLoginButton loginButton = new TwitterLoginButton(getActivity());
                loginButton.onActivityResult(requestCode, resultCode, data);
            }
        }
        mCallbackManager.onActivityResult(requestCode, resultCode, data);
        super.onActivityResult(requestCode, resultCode, data);
    }
public static String getName() {
return PDUISocialMultiLoginFragment_V2.class.getSimpleName();
}
/**
* Used to allow the client a hook into the SDK, in order to determine when the LoginFragments are detached
* allows for custom func client side
*
* @param context
*/
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof PDFragmentCommunicator) {
communicator = (PDFragmentCommunicator) context;
}
try {
loginListener = (PDLoginListener) getActivity();
} catch (ClassCastException e) {
loginListener = null;
}
}
@Override
public void onDetach() {
super.onDetach();
if (communicator != null) {
communicator.fragmentDetached();
}
}
////////////////////////////////////////////////////
// Generic Methods //
//////////////////////////////////////////////////
}
| |
/*
* Copyright 2001-2009 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.juddi.api.impl;
/**
* @author <a href="mailto:jfaath@apache.org">Jeff Faath</a>
* @author <a href="mailto:kstam@apache.org">Kurt T Stam</a>
* @author <a href="mailto:alexoree@apache.org">Alex O'Ree</a>
*/
import java.rmi.RemoteException;
import java.security.cert.CertificateException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import javax.xml.bind.JAXB;
import javax.xml.ws.Holder;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.juddi.Registry;
import org.apache.juddi.jaxb.EntityCreator;
import org.apache.juddi.v3.client.UDDIConstants;
import org.apache.juddi.v3.client.cryptor.DigSigUtil;
import org.apache.juddi.v3.tck.TckBusiness;
import org.apache.juddi.v3.tck.TckCommon;
import org.apache.juddi.v3.tck.TckFindEntity;
import org.apache.juddi.v3.tck.TckPublisher;
import org.apache.juddi.v3.tck.TckPublisherAssertion;
import org.apache.juddi.v3.tck.TckSecurity;
import org.apache.juddi.v3.tck.TckTModel;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.uddi.api_v3.AddPublisherAssertions;
import org.uddi.api_v3.AssertionStatusItem;
import org.uddi.api_v3.CompletionStatus;
import org.uddi.api_v3.DeletePublisherAssertions;
import org.uddi.api_v3.KeyedReference;
import org.uddi.api_v3.PublisherAssertion;
import org.uddi.v3_service.DispositionReportFaultMessage;
import org.uddi.v3_service.UDDISecurityPortType;
/**
 * Integration tests for the jUDDI publisher-assertion API: adding, deleting,
 * finding, replacing (set) and digitally signing publisher assertions between
 * the Joe, Sam and Mary TCK publishers, run against an embedded registry.
 */
public class API_060_PublisherAssertionTest {
        private static Log logger = LogFactory.getLog(API_060_PublisherAssertionTest.class);
        // TCK helpers, each bound to the in-VM publication/inquiry endpoints.
        private static API_010_PublisherTest api010 = new API_010_PublisherTest();
        private static TckTModel tckTModel = new TckTModel(new UDDIPublicationImpl(), new UDDIInquiryImpl());
        private static TckBusiness tckBusiness = new TckBusiness(new UDDIPublicationImpl(), new UDDIInquiryImpl());
        private static TckPublisherAssertion tckAssertion = new TckPublisherAssertion(new UDDIPublicationImpl());
        private static TckFindEntity tckFindEntity = new TckFindEntity(new UDDIInquiryImpl());
        // Auth tokens for the three TCK publishers, populated once in setup().
        private static String authInfoJoe = null;
        private static String authInfoSam = null;
        private static String authInfoMary = null;
        private static UDDIPublicationImpl pub = new UDDIPublicationImpl();
        /**
         * Starts the embedded registry, registers the TCK publishers, and caches
         * an auth token for each of them; the root publisher token is used to
         * seed the base tModels.
         */
        @BeforeClass
        public static void setup() throws Exception {
                Registry.start();
                logger.debug("Getting auth token..");
                try {
                        api010.saveJoePublisher();
                        api010.saveSamSyndicator();
                        UDDISecurityPortType security = new UDDISecurityImpl();
                        authInfoJoe = TckSecurity.getAuthToken(security, TckPublisher.getJoePublisherId(), TckPublisher.getJoePassword());
                        authInfoSam = TckSecurity.getAuthToken(security, TckPublisher.getSamPublisherId(), TckPublisher.getSamPassword());
                        authInfoMary = TckSecurity.getAuthToken(security, TckPublisher.getMaryPublisherId(), TckPublisher.getMaryPassword());
                        TckCommon.DumpAllTModelsOpInfo(authInfoJoe, new UDDIInquiryImpl());
                        String root = TckSecurity.getAuthToken(security, TckPublisher.getUDDIPublisherId(), TckPublisher.getUDDIPassword());
                        tckTModel.saveUDDIPublisherTmodel(root);
                        tckTModel.saveTmodels(root);
                } catch (RemoteException e) {
                        System.out.println("the test failed, dumping ownership information for all tmodels....");
                        logger.error(e.getMessage(), e);
                        Assert.fail("Could not obtain authInfo token." + e.getMessage());
                }
        }
        /** Shuts the embedded registry down after all tests have run. */
        @AfterClass
        public static void stopRegistry() throws ConfigurationException {
                Registry.stop();
        }
        /**
         * Saves and then deletes a one-sided assertion from Joe towards Sam's
         * business; the finally block removes businesses and tModels regardless.
         */
        @Test
        public void testJoepublisherToSamSyndicator() {
                try {
                        tckTModel.saveJoePublisherTmodel(authInfoJoe);
                        tckTModel.saveSamSyndicatorTmodel(authInfoSam);
                        tckBusiness.saveJoePublisherBusiness(authInfoJoe);
                        tckBusiness.saveSamSyndicatorBusiness(authInfoSam);
                        tckAssertion.saveJoePublisherPublisherAssertion(authInfoJoe);
                        tckAssertion.deleteJoePublisherPublisherAssertion(authInfoJoe);
                } finally {
                        tckBusiness.deleteJoePublisherBusiness(authInfoJoe);
                        tckBusiness.deleteSamSyndicatorBusiness(authInfoSam);
                        tckTModel.deleteJoePublisherTmodel(authInfoJoe);
                        tckTModel.deleteSamSyndicatorTmodel(authInfoSam);
                }
        }
        /**
         * This test should find no publisher assertions because we only save
         * them from the joe publisher side.
         */
        @Test
        public void testFindNoAssertions() {
                try {
                        tckTModel.saveJoePublisherTmodel(authInfoJoe);
                        tckTModel.saveSamSyndicatorTmodel(authInfoSam);
                        tckTModel.saveMaryPublisherTmodel(authInfoMary);
                        tckBusiness.saveJoePublisherBusiness(authInfoJoe);
                        tckBusiness.saveSamSyndicatorBusiness(authInfoSam);
                        tckBusiness.saveMaryPublisherBusiness(authInfoMary);
                        tckAssertion.saveJoePublisherPublisherAssertion(authInfoJoe);
                        tckAssertion.saveJoePublisherPublisherAssertion2(authInfoJoe);
                        // 'true' == expect the related-business queries to come back empty.
                        tckFindEntity.findRelatedBusiness_sortByName(true);
                        tckFindEntity.findRelatedBusinessToKey(true);
                        tckFindEntity.findRelatedBusinessFromKey(true);
                        tckAssertion.deleteJoePublisherPublisherAssertion(authInfoJoe);
                        tckAssertion.deleteJoePublisherPublisherAssertion2(authInfoJoe);
                } finally {
                        tckBusiness.deleteJoePublisherBusiness(authInfoJoe);
                        tckBusiness.deleteMaryPublisherBusiness(authInfoMary);
                        tckBusiness.deleteSamSyndicatorBusiness(authInfoSam);
                        tckTModel.deleteJoePublisherTmodel(authInfoJoe);
                        tckTModel.deleteSamSyndicatorTmodel(authInfoSam);
                        tckTModel.deleteMaryPublisherTmodel(authInfoMary);
                }
        }
        /**
         * This test should find 2 publisher assertions.
         */
        @Test
        public void testFindAssertions() {
                try {
                        tckTModel.saveJoePublisherTmodel(authInfoJoe);
                        tckTModel.saveSamSyndicatorTmodel(authInfoSam);
                        tckTModel.saveMaryPublisherTmodel(authInfoMary);
                        tckBusiness.saveJoePublisherBusiness(authInfoJoe);
                        tckBusiness.saveSamSyndicatorBusiness(authInfoSam);
                        tckBusiness.saveMaryPublisherBusiness(authInfoMary);
                        // Both sides assert this time, so the assertions are "complete".
                        tckAssertion.saveJoePublisherPublisherAssertion(authInfoJoe);
                        tckAssertion.saveJoePublisherPublisherAssertion2(authInfoJoe);
                        tckAssertion.saveSamPublisherPublisherAssertion(authInfoSam);
                        tckAssertion.saveMaryPublisherPublisherAssertion(authInfoMary);
                        // 'false' == expect the related-business queries to return results.
                        tckFindEntity.findRelatedBusiness_sortByName(false);
                        tckFindEntity.findRelatedBusinessToKey(false);
                        tckFindEntity.findRelatedBusinessFromKey(false);
                        tckAssertion.deleteJoePublisherPublisherAssertion(authInfoJoe);
                        tckAssertion.deleteJoePublisherPublisherAssertion2(authInfoJoe);
                } finally {
                        tckBusiness.deleteJoePublisherBusiness(authInfoJoe);
                        tckBusiness.deleteMaryPublisherBusiness(authInfoMary);
                        tckBusiness.deleteSamSyndicatorBusiness(authInfoSam);
                        tckTModel.deleteJoePublisherTmodel(authInfoJoe);
                        tckTModel.deleteSamSyndicatorTmodel(authInfoSam);
                        tckTModel.deleteMaryPublisherTmodel(authInfoMary);
                }
        }
        /**
         * Mary (not the owner) attempts to delete Joe's assertion; the service
         * must reject it with a DispositionReportFaultMessage.
         * covers
         * <a href="https://issues.apache.org/jira/browse/JUDDI-908">JUDDI-908</a>
         *
         * @throws Exception
         */
        @Test(expected = DispositionReportFaultMessage.class)
        public void deleteAssertionNonowner() throws Exception {
                try {
                        tckTModel.saveJoePublisherTmodel(authInfoJoe);
                        tckTModel.saveSamSyndicatorTmodel(authInfoSam);
                        tckBusiness.saveJoePublisherBusiness(authInfoJoe);
                        tckBusiness.saveSamSyndicatorBusiness(authInfoSam);
                        tckAssertion.saveJoePublisherPublisherAssertion(authInfoJoe);
                        DeletePublisherAssertions dp = new DeletePublisherAssertions();
                        dp.setAuthInfo(authInfoMary);
                        PublisherAssertion paIn = (PublisherAssertion) EntityCreator.buildFromDoc(TckPublisherAssertion.JOE_ASSERT_XML, "org.uddi.api_v3");
                        dp.getPublisherAssertion().add(paIn);
                        // Expected to throw: Mary does not own either end of the assertion.
                        new UDDIPublicationImpl().deletePublisherAssertions(dp);
                        //
                } finally {
                        tckAssertion.deleteJoePublisherPublisherAssertion(authInfoJoe);
                        tckBusiness.deleteJoePublisherBusiness(authInfoJoe);
                        tckBusiness.deleteSamSyndicatorBusiness(authInfoSam);
                        tckTModel.deleteJoePublisherTmodel(authInfoJoe);
                        tckTModel.deleteSamSyndicatorTmodel(authInfoSam);
                }
        }
        /**
         * setPublisherAssertions with an empty list must delete every existing
         * assertion for the caller.
         */
        @Test
        public void testSetPublisherAssertions() throws Exception {
                //create 1/2 PA
                //use Set with no inputs
                //confirm all are deleted
                try {
                        tckTModel.saveJoePublisherTmodel(authInfoJoe);
                        tckTModel.saveSamSyndicatorTmodel(authInfoSam);
                        tckBusiness.saveJoePublisherBusiness(authInfoJoe);
                        tckBusiness.saveSamSyndicatorBusiness(authInfoSam);
                        Holder<List<PublisherAssertion>> x = new Holder<List<PublisherAssertion>>();
                        x.value = new ArrayList<PublisherAssertion>();
                        logger.info("Clearing all Joe's publisher assertions....");
                        pub.setPublisherAssertions(authInfoJoe, x);
                        logger.info("Clearing all Sam's publisher assertions....");
                        pub.setPublisherAssertions(authInfoSam, x);
                        logger.info("Confirming we're clear");
                        List<PublisherAssertion> before = pub.getPublisherAssertions(authInfoJoe);
                        Assert.assertNotNull(before);
                        Assert.assertTrue(before.isEmpty());
                        // Diagnostic dump (empty list expected, so this prints nothing).
                        System.out.println(before.size());
                        for (int i = 0; i < before.size(); i++) {
                                JAXB.marshal(before.get(i), System.out);
                        }
                        before = pub.getPublisherAssertions(authInfoSam);
                        Assert.assertNotNull(before);
                        Assert.assertTrue(before.isEmpty());
                        System.out.println(before.size());
                        for (int i = 0; i < before.size(); i++) {
                                JAXB.marshal(before.get(i), System.out);
                        }
                        List<AssertionStatusItem> assertionStatusReport = pub.getAssertionStatusReport(authInfoJoe, null);
                        Assert.assertTrue(assertionStatusReport.isEmpty());
                        assertionStatusReport = pub.getAssertionStatusReport(authInfoSam, null);
                        Assert.assertTrue(assertionStatusReport.isEmpty());
                        logger.info("Saving 1/2 publisher assertion....");
                        List<PublisherAssertion> onehalfPA = tckAssertion.saveJoePublisherPublisherAssertion(authInfoJoe);
                        before = pub.getPublisherAssertions(authInfoJoe);
                        Assert.assertNotNull(before);
                        Assert.assertFalse(before.isEmpty());
                        System.out.println(before.size());
                        for (int i = 0; i < before.size(); i++) {
                                JAXB.marshal(before.get(i), System.out);
                        }
                        //PublisherAssertion paIn = (PublisherAssertion)EntityCreator.buildFromDoc(TckPublisherAssertion.JOE_ASSERT_XML, "org.uddi.api_v3");
                        //dp.getPublisherAssertion().add(paIn);
                        // Empty holder: set() should wipe the assertion just created.
                        x = new Holder<List<PublisherAssertion>>();
                        x.value = new ArrayList<PublisherAssertion>();
                        logger.info("Clearing all publisher assertions....");
                        pub.setPublisherAssertions(authInfoJoe, x);
                        System.out.println(x.value.size());
                        for (int i = 0; i < x.value.size(); i++) {
                                JAXB.marshal(x.value.get(i), System.out);
                        }
                        logger.info("Fetch all publisher assertions....there should be none");
                        List<PublisherAssertion> publisherAssertions = pub.getPublisherAssertions(authInfoJoe);
                        System.out.println(publisherAssertions.size());
                        for (int i = 0; i < publisherAssertions.size(); i++) {
                                JAXB.marshal(publisherAssertions.get(i), System.out);
                        }
                        Assert.assertNotNull(publisherAssertions);
                        Assert.assertTrue(publisherAssertions.isEmpty());
                        //
                } finally {
                        //tckAssertion.deleteJoePublisherPublisherAssertion(authInfoJoe);
                        tckBusiness.deleteJoePublisherBusiness(authInfoJoe);
                        tckBusiness.deleteSamSyndicatorBusiness(authInfoSam);
                        tckTModel.deleteJoePublisherTmodel(authInfoJoe);
                        tckTModel.deleteSamSyndicatorTmodel(authInfoSam);
                }
        }
        /**
         * setPublisherAssertions with the caller's current assertion must be a
         * no-op: the assertion remains present afterwards.
         */
        @Test
        public void testSetPublisherAssertions2() throws Exception {
                //create 1/2 PA
                //use Set with the same 1/2 PA
                //confirm still present
                try {
                        tckTModel.saveJoePublisherTmodel(authInfoJoe);
                        tckTModel.saveSamSyndicatorTmodel(authInfoSam);
                        tckBusiness.saveJoePublisherBusiness(authInfoJoe);
                        tckBusiness.saveSamSyndicatorBusiness(authInfoSam);
                        List<PublisherAssertion> onehalfPA = tckAssertion.saveJoePublisherPublisherAssertion(authInfoJoe);
                        List<PublisherAssertion> before = pub.getPublisherAssertions(authInfoJoe);
                        Assert.assertNotNull(before);
                        Assert.assertFalse(before.isEmpty());
                        pub.setPublisherAssertions(authInfoJoe, new Holder<List<PublisherAssertion>>(onehalfPA));
                        List<PublisherAssertion> publisherAssertions = pub.getPublisherAssertions(authInfoJoe);
                        Assert.assertNotNull(publisherAssertions);
                        Assert.assertFalse(publisherAssertions.isEmpty());
                        //
                } finally {
                        //tckAssertion.deleteJoePublisherPublisherAssertion(authInfoJoe);
                        tckBusiness.deleteJoePublisherBusiness(authInfoJoe);
                        tckBusiness.deleteSamSyndicatorBusiness(authInfoSam);
                        tckTModel.deleteJoePublisherTmodel(authInfoJoe);
                        tckTModel.deleteSamSyndicatorTmodel(authInfoSam);
                }
        }
        /**
         * setPublisherAssertions with a different assertion must replace the
         * caller's existing one: the old assertion is gone, the new one exists.
         */
        @Test
        public void testSetPublisherAssertions3() throws Exception {
                //create 1/2 PA
                //use Set with a new PA
                //confirm first PA is gone and the new PA exists
                try {
                        tckTModel.saveJoePublisherTmodel(authInfoJoe);
                        tckTModel.saveSamSyndicatorTmodel(authInfoSam);
                        tckTModel.saveMaryPublisherTmodel(authInfoMary);
                        tckBusiness.saveJoePublisherBusiness(authInfoJoe);
                        tckBusiness.saveSamSyndicatorBusiness(authInfoSam);
                        tckBusiness.saveMaryPublisherBusiness(authInfoMary);
                        Holder<List<PublisherAssertion>> x = new Holder<List<PublisherAssertion>>();
                        x.value = new ArrayList<PublisherAssertion>();
                        logger.info("Clearing all Joe's publisher assertions....");
                        pub.setPublisherAssertions(authInfoJoe, x);
                        logger.info("Clearing all Sam's publisher assertions....");
                        pub.setPublisherAssertions(authInfoSam, x);
                        logger.info("Clearing all Mary's publisher assertions....");
                        pub.setPublisherAssertions(authInfoMary, x);
                        logger.info("Confirming we're clear");
                        List<PublisherAssertion> before = pub.getPublisherAssertions(authInfoJoe);
                        Assert.assertNotNull(before);
                        System.out.println(before.size());
                        for (int i = 0; i < before.size(); i++) {
                                JAXB.marshal(before.get(i), System.out);
                        }
                        Assert.assertTrue(before.isEmpty());
                        before = pub.getPublisherAssertions(authInfoSam);
                        Assert.assertNotNull(before);
                        System.out.println(before.size());
                        for (int i = 0; i < before.size(); i++) {
                                JAXB.marshal(before.get(i), System.out);
                        }
                        Assert.assertTrue(before.isEmpty());
                        before = pub.getPublisherAssertions(authInfoMary);
                        Assert.assertNotNull(before);
                        System.out.println(before.size());
                        for (int i = 0; i < before.size(); i++) {
                                JAXB.marshal(before.get(i), System.out);
                        }
                        Assert.assertTrue(before.isEmpty());
                        List<AssertionStatusItem> assertionStatusReport = pub.getAssertionStatusReport(authInfoJoe, null);
                        Assert.assertTrue(assertionStatusReport.isEmpty());
                        assertionStatusReport = pub.getAssertionStatusReport(authInfoSam, null);
                        Assert.assertTrue(assertionStatusReport.isEmpty());
                        assertionStatusReport = pub.getAssertionStatusReport(authInfoMary, null);
                        Assert.assertTrue(assertionStatusReport.isEmpty());
                        logger.info("Saving 1/2 publisher assertion....");
                        List<PublisherAssertion> onehalfPA = tckAssertion.saveJoePublisherPublisherAssertion(authInfoJoe);
                        before = pub.getPublisherAssertions(authInfoJoe);
                        Assert.assertNotNull(before);
                        Assert.assertFalse(before.isEmpty());
                        System.out.println(before.size());
                        for (int i = 0; i < before.size(); i++) {
                                JAXB.marshal(before.get(i), System.out);
                        }
                        //PublisherAssertion paIn = (PublisherAssertion)EntityCreator.buildFromDoc(TckPublisherAssertion.JOE_ASSERT_XML, "org.uddi.api_v3");
                        //dp.getPublisherAssertion().add(paIn);
                        // Build a replacement assertion Joe -> Mary.
                        x = new Holder<List<PublisherAssertion>>();
                        x.value = new ArrayList<PublisherAssertion>();
                        PublisherAssertion pa = new PublisherAssertion();
                        pa.setFromKey(TckBusiness.JOE_BUSINESS_KEY);
                        pa.setToKey(TckBusiness.MARY_BUSINESS_KEY);
                        pa.setKeyedReference(new KeyedReference(UDDIConstants.RELATIONSHIPS, "parent-child", "child"));
                        x.value.add(pa);
                        logger.info("Using set to clear existing and add a new publisher assertion....");
                        pub.setPublisherAssertions(authInfoJoe, x);
                        System.out.println(x.value.size());
                        for (int i = 0; i < x.value.size(); i++) {
                                JAXB.marshal(x.value.get(i), System.out);
                        }
                        logger.info("Fetch all publisher assertions....there should be 1");
                        List<PublisherAssertion> publisherAssertions = pub.getPublisherAssertions(authInfoJoe);
                        System.out.println(publisherAssertions.size());
                        for (int i = 0; i < publisherAssertions.size(); i++) {
                                JAXB.marshal(publisherAssertions.get(i), System.out);
                        }
                        // The surviving assertion must be the replacement, field by field.
                        Assert.assertEquals(publisherAssertions.get(0).getFromKey(), pa.getFromKey());
                        Assert.assertEquals(publisherAssertions.get(0).getToKey(), pa.getToKey());
                        Assert.assertEquals(publisherAssertions.get(0).getKeyedReference().getKeyName(), pa.getKeyedReference().getKeyName());
                        Assert.assertEquals(publisherAssertions.get(0).getKeyedReference().getKeyValue(), pa.getKeyedReference().getKeyValue());
                        Assert.assertEquals(publisherAssertions.get(0).getKeyedReference().getTModelKey(), pa.getKeyedReference().getTModelKey());
                        //
                } finally {
                        //tckAssertion.deleteJoePublisherPublisherAssertion(authInfoJoe);
                        tckBusiness.deleteJoePublisherBusiness(authInfoJoe);
                        tckBusiness.deleteSamSyndicatorBusiness(authInfoSam);
                        tckBusiness.deleteMaryPublisherBusiness(authInfoMary);
                        tckTModel.deleteJoePublisherTmodel(authInfoJoe);
                        tckTModel.deleteSamSyndicatorTmodel(authInfoSam);
                        tckTModel.deleteMaryPublisherTmodel(authInfoMary);
                }
        }
        // Digital-signature helper, (re)initialized per test by Default().
        DigSigUtil ds;
        // NOTE(review): method name misspells "Settings"; left as-is since it is
        // only called within this class. Points ds at the test key/trust stores.
        void SetCertStoreSettigns() {
                ds.put(DigSigUtil.SIGNATURE_KEYSTORE_FILE, "./src/test/resources/keystore.jks");
                ds.put(DigSigUtil.SIGNATURE_KEYSTORE_FILETYPE, "JKS");
                ds.put(DigSigUtil.SIGNATURE_KEYSTORE_FILE_PASSWORD, "Test");
                ds.put(DigSigUtil.SIGNATURE_KEYSTORE_KEY_ALIAS, "Test");
                ds.put(DigSigUtil.TRUSTSTORE_FILE, "./src/test/resources/truststore.jks");
                ds.put(DigSigUtil.TRUSTSTORE_FILETYPE, "JKS");
                ds.put(DigSigUtil.TRUSTSTORE_FILE_PASSWORD, "Test");
        }
        // Builds a DigSigUtil with the default (base64 cert inclusion) configuration.
        void Default() throws CertificateException {
                ds = new DigSigUtil();
                SetCertStoreSettigns();
                ds.put(DigSigUtil.SIGNATURE_OPTION_CERT_INCLUSION_BASE64, "true");
        }
        /**
         * Signs a publisher assertion, saves it, and verifies the signature
         * survives the save/fetch round trip intact.
         */
        @Test
        public void testPublisherAssertionSignatures() throws Exception {
                try {
                        Default();
                        tckTModel.saveJoePublisherTmodel(authInfoJoe);
                        tckTModel.saveSamSyndicatorTmodel(authInfoSam);
                        tckBusiness.saveJoePublisherBusiness(authInfoJoe);
                        tckBusiness.saveSamSyndicatorBusiness(authInfoSam);
                        AddPublisherAssertions ap = new AddPublisherAssertions();
                        ap.setAuthInfo(authInfoJoe);
                        PublisherAssertion paIn = (PublisherAssertion) EntityCreator.buildFromDoc(TckPublisherAssertion.JOE_ASSERT_XML, "org.uddi.api_v3");
                        paIn = ds.signUddiEntity(paIn);
                        ap.getPublisherAssertion().add(paIn);
                        Assert.assertFalse(paIn.getSignature().isEmpty());
                        pub.addPublisherAssertions(ap);
                        List<PublisherAssertion> onehalfPA = tckAssertion.saveJoePublisherPublisherAssertion(authInfoJoe);
                        Assert.assertNotNull(onehalfPA);
                        Assert.assertFalse(onehalfPA.get(0).getSignature().isEmpty());
                        Assert.assertFalse(onehalfPA.isEmpty());
                        // NOTE(review): the three assertions below duplicate the three above
                        // verbatim — harmless, but likely a copy/paste leftover.
                        Assert.assertNotNull(onehalfPA);
                        Assert.assertFalse(onehalfPA.get(0).getSignature().isEmpty());
                        Assert.assertFalse(onehalfPA.isEmpty());
                        // The stored signature must match the one we submitted, piece by piece.
                        Assert.assertEquals(paIn.getSignature().size(),onehalfPA.get(0).getSignature().size());
                        Assert.assertEquals(paIn.getSignature().get(0).getId(),onehalfPA.get(0).getSignature().get(0).getId());
                        Assert.assertEquals(paIn.getSignature().get(0).getKeyInfo().getId(),onehalfPA.get(0).getSignature().get(0).getKeyInfo().getId());
                        Assert.assertEquals(paIn.getSignature().get(0).getKeyInfo().getContent().size(),onehalfPA.get(0).getSignature().get(0).getKeyInfo().getContent().size());
                        Assert.assertEquals(paIn.getSignature().get(0).getSignedInfo().getCanonicalizationMethod().getAlgorithm(),onehalfPA.get(0).getSignature().get(0).getSignedInfo().getCanonicalizationMethod().getAlgorithm());
                        Assert.assertEquals(paIn.getSignature().get(0).getSignedInfo().getId(),onehalfPA.get(0).getSignature().get(0).getSignedInfo().getId());
                        Assert.assertEquals(paIn.getSignature().get(0).getSignedInfo().getReference().size(),onehalfPA.get(0).getSignature().get(0).getSignedInfo().getReference().size());
                        // Cryptographic verification of the fetched entity.
                        AtomicReference<String> outmsg=new AtomicReference<String>();
                        boolean success=ds.verifySignedUddiEntity(onehalfPA.get(0), outmsg);
                        Assert.assertTrue(outmsg.get(), success);
                        //
                } finally {
                        //tckAssertion.deleteJoePublisherPublisherAssertion(authInfoJoe);
                        tckBusiness.deleteJoePublisherBusiness(authInfoJoe);
                        tckBusiness.deleteSamSyndicatorBusiness(authInfoSam);
                        tckTModel.deleteJoePublisherTmodel(authInfoJoe);
                        tckTModel.deleteSamSyndicatorTmodel(authInfoSam);
                }
        }
}
| |
/**
* Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.thirdeye.datasource;
import com.google.common.collect.ArrayListMultimap;
import com.linkedin.thirdeye.datasource.pinot.PinotThirdEyeDataSource;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.base.MoreObjects;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;
import com.linkedin.thirdeye.api.TimeGranularity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Request object containing all information for a {@link ThirdEyeDataSource} to retrieve data. Request
* objects can be constructed via {@link ThirdEyeRequestBuilder}.
*/
public class ThirdEyeRequest {
private final List<MetricFunction> metricFunctions;
private final DateTime startTime;
private final DateTime endTime;
private final Multimap<String, String> filterSet;
// TODO - what kind of advanced expressions do we want here? This could potentially force code to
// depend on a specific client implementation
private final String filterClause;
private final List<String> groupByDimensions;
private final TimeGranularity groupByTimeGranularity;
private final List<String> metricNames;
private final String dataSource;
private final String requestReference;
private final int limit;
private ThirdEyeRequest(String requestReference, ThirdEyeRequestBuilder builder) {
this.requestReference = requestReference;
this.metricFunctions = new ArrayList<>(builder.metricFunctions);
this.startTime = builder.startTime;
this.endTime = builder.endTime;
this.filterSet = ArrayListMultimap.create(builder.filterSet);
this.filterClause = builder.filterClause;
this.groupByDimensions = new ArrayList<>(builder.groupBy);
this.groupByTimeGranularity = builder.groupByTimeGranularity;
this.dataSource = builder.dataSource;
metricNames = new ArrayList<>();
for (MetricFunction metric : metricFunctions) {
metricNames.add(metric.toString());
}
this.limit = builder.limit;
}
public static ThirdEyeRequestBuilder newBuilder() {
return new ThirdEyeRequestBuilder();
}
public String getRequestReference() {
return requestReference;
}
public List<MetricFunction> getMetricFunctions() {
return metricFunctions;
}
public List<String> getMetricNames() {
return metricNames;
}
@JsonIgnore
public TimeGranularity getGroupByTimeGranularity() {
return groupByTimeGranularity;
}
public DateTime getStartTimeInclusive() {
return startTime;
}
public DateTime getEndTimeExclusive() {
return endTime;
}
public Multimap<String, String> getFilterSet() {
return filterSet;
}
public String getFilterClause() {
// TODO check if this is being used?
return filterClause;
}
public List<String> getGroupBy() {
return groupByDimensions;
}
public String getDataSource() {
return dataSource;
}
public int getLimit() {
return limit;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof ThirdEyeRequest)) {
return false;
}
ThirdEyeRequest that = (ThirdEyeRequest) o;
return Objects.equals(metricFunctions, that.metricFunctions) && Objects.equals(startTime, that.startTime) && Objects
.equals(endTime, that.endTime) && Objects.equals(filterSet, that.filterSet) && Objects.equals(filterClause,
that.filterClause) && Objects.equals(groupByDimensions, that.groupByDimensions) && Objects.equals(
groupByTimeGranularity, that.groupByTimeGranularity) && Objects.equals(metricNames, that.metricNames) && Objects
.equals(dataSource, that.dataSource) && Objects.equals(requestReference, that.requestReference) &&
Objects.equals(requestReference, that.requestReference);
}
@Override
public int hashCode() {
return Objects.hash(metricFunctions, startTime, endTime, filterSet, filterClause, groupByDimensions,
groupByTimeGranularity, metricNames, dataSource, requestReference, limit);
}
@Override
public String toString() {
return "ThirdEyeRequest{" + "metricFunctions=" + metricFunctions + ", startTime=" + startTime + ", endTime="
+ endTime + ", filterSet=" + filterSet + ", filterClause='" + filterClause + '\'' + ", groupByDimensions="
+ groupByDimensions + ", groupByTimeGranularity=" + groupByTimeGranularity + ", metricNames=" + metricNames
+ ", dataSource='" + dataSource + '\'' + ", requestReference='" + requestReference + '\'' +
", limit='" + limit + '\'' + '}';
}
public static class ThirdEyeRequestBuilder {
private static final Logger LOG = LoggerFactory.getLogger(ThirdEyeRequestBuilder.class);
private List<MetricFunction> metricFunctions;
private DateTime startTime;
private DateTime endTime;
private final Multimap<String, String> filterSet;
private String filterClause;
private final List<String> groupBy;
private TimeGranularity groupByTimeGranularity;
private String dataSource = PinotThirdEyeDataSource.DATA_SOURCE_NAME;
private int limit;
public ThirdEyeRequestBuilder() {
this.filterSet = LinkedListMultimap.create();
this.groupBy = new ArrayList<String>();
metricFunctions = new ArrayList<>();
}
public ThirdEyeRequestBuilder setDatasets(List<String> datasets) {
return this;
}
public ThirdEyeRequestBuilder addMetricFunction(MetricFunction metricFunction) {
metricFunctions.add(metricFunction);
return this;
}
public ThirdEyeRequestBuilder setStartTimeInclusive(long startTimeMillis) {
this.startTime = new DateTime(startTimeMillis, DateTimeZone.UTC);
return this;
}
public ThirdEyeRequestBuilder setStartTimeInclusive(DateTime startTime) {
this.startTime = startTime;
return this;
}
public ThirdEyeRequestBuilder setEndTimeExclusive(long endTimeMillis) {
this.endTime = new DateTime(endTimeMillis, DateTimeZone.UTC);
return this;
}
public ThirdEyeRequestBuilder setEndTimeExclusive(DateTime endTime) {
this.endTime = endTime;
return this;
}
public ThirdEyeRequestBuilder addFilterValue(String column, String... values) {
for (String value : values) {
this.filterSet.put(column, value);
}
return this;
}
public ThirdEyeRequestBuilder setFilterClause(String filterClause) {
this.filterClause = filterClause;
return this;
}
public ThirdEyeRequestBuilder setFilterSet(Multimap<String, String> filterSet) {
if (filterSet != null) {
this.filterSet.clear();
this.filterSet.putAll(filterSet);
}
return this;
}
/** Removes any existing groupings and adds the provided names. */
public ThirdEyeRequestBuilder setGroupBy(Collection<String> names) {
this.groupBy.clear();
addGroupBy(names);
return this;
}
/**
 * Varargs convenience overload; see {@link #setGroupBy(Collection)}.
 */
public ThirdEyeRequestBuilder setGroupBy(String... names) {
  return setGroupBy(Arrays.asList(names));
}
/**
 * Adds the provided names to the existing groupings. A null collection is a
 * no-op; null entries inside the collection are skipped.
 *
 * @param names group-by dimension names to append
 * @return this builder, for chaining
 */
public ThirdEyeRequestBuilder addGroupBy(Collection<String> names) {
  if (names == null) {
    return this;
  }
  for (String name : names) {
    if (name == null) {
      continue;
    }
    groupBy.add(name);
  }
  return this;
}
/**
 * Varargs convenience overload; see {@link ThirdEyeRequestBuilder#addGroupBy(Collection)}.
 */
public ThirdEyeRequestBuilder addGroupBy(String... names) {
  return addGroupBy(Arrays.asList(names));
}
/**
 * Sets the time bucketing applied to the group-by.
 *
 * @param timeGranularity bucket size, or null for no time bucketing
 * @return this builder, for chaining
 */
public ThirdEyeRequestBuilder setGroupByTimeGranularity(TimeGranularity timeGranularity) {
  this.groupByTimeGranularity = timeGranularity;
  return this;
}
/**
 * Replaces the list of metric functions.
 * NOTE(review): stores the caller's list reference directly (no defensive
 * copy), matching the pre-existing behavior.
 *
 * @param metricFunctions new aggregation list
 * @return this builder, for chaining
 */
public ThirdEyeRequestBuilder setMetricFunctions(List<MetricFunction> metricFunctions) {
  this.metricFunctions = metricFunctions;
  return this;
}
/**
 * Overrides the backing data source (defaults to the Pinot data source).
 *
 * @param dataSource data source name
 * @return this builder, for chaining
 */
public ThirdEyeRequestBuilder setDataSource(String dataSource) {
  this.dataSource = dataSource;
  return this;
}
/**
 * Caps the number of rows the query may return.
 *
 * @param limit maximum row count
 * @return this builder, for chaining
 */
public ThirdEyeRequestBuilder setLimit(int limit) {
  this.limit = limit;
  return this;
}
/**
 * Builds the request, tagging it with the given reference string.
 *
 * @param requestReference caller-chosen tag identifying the request
 * @return the assembled {@link ThirdEyeRequest}
 */
public ThirdEyeRequest build(String requestReference) {
  return new ThirdEyeRequest(requestReference, this);
}
}
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.gmf.esb.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.wso2.developerstudio.eclipse.gmf.esb.ConditionalRouteBranch;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.EvaluatorExpressionProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.RegistryKeyProperty;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Conditional Route Branch</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ConditionalRouteBranchImpl#isBreakAfterRoute <em>Break After Route</em>}</li>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ConditionalRouteBranchImpl#getEvaluatorExpression <em>Evaluator Expression</em>}</li>
* <li>{@link org.wso2.developerstudio.eclipse.gmf.esb.impl.ConditionalRouteBranchImpl#getTargetSequence <em>Target Sequence</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class ConditionalRouteBranchImpl extends EsbNodeImpl implements ConditionalRouteBranch {
// NOTE(review): EMF-generated implementation (see the @generated tags below).
// The EMF generator conventionally overwrites members still tagged @generated,
// so hand edits should stay inside user-doc regions or remove the tag.
/**
 * The default value of the '{@link #isBreakAfterRoute() <em>Break After Route</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #isBreakAfterRoute()
 * @generated
 * @ordered
 */
protected static final boolean BREAK_AFTER_ROUTE_EDEFAULT = false;
/**
 * The cached value of the '{@link #isBreakAfterRoute() <em>Break After Route</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #isBreakAfterRoute()
 * @generated
 * @ordered
 */
protected boolean breakAfterRoute = BREAK_AFTER_ROUTE_EDEFAULT;
/**
 * The cached value of the '{@link #getEvaluatorExpression() <em>Evaluator Expression</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getEvaluatorExpression()
 * @generated
 * @ordered
 */
protected EvaluatorExpressionProperty evaluatorExpression;
/**
 * The cached value of the '{@link #getTargetSequence() <em>Target Sequence</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getTargetSequence()
 * @generated
 * @ordered
 */
protected RegistryKeyProperty targetSequence;
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected ConditionalRouteBranchImpl() {
super();
}
/**
 * Returns the static EMF metaclass describing this model object.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected EClass eStaticClass() {
return EsbPackage.Literals.CONDITIONAL_ROUTE_BRANCH;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public boolean isBreakAfterRoute() {
return breakAfterRoute;
}
/**
 * Sets the attribute and, when adapters are attached, fires a single SET
 * notification carrying old and new values.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setBreakAfterRoute(boolean newBreakAfterRoute) {
boolean oldBreakAfterRoute = breakAfterRoute;
breakAfterRoute = newBreakAfterRoute;
if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.CONDITIONAL_ROUTE_BRANCH__BREAK_AFTER_ROUTE, oldBreakAfterRoute, breakAfterRoute));
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public EvaluatorExpressionProperty getEvaluatorExpression() {
return evaluatorExpression;
}
/**
 * Swaps the containment reference without touching the inverse side and
 * accumulates the SET notification on {@code msgs} so callers can batch-dispatch.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetEvaluatorExpression(EvaluatorExpressionProperty newEvaluatorExpression, NotificationChain msgs) {
EvaluatorExpressionProperty oldEvaluatorExpression = evaluatorExpression;
evaluatorExpression = newEvaluatorExpression;
if (eNotificationRequired()) {
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.CONDITIONAL_ROUTE_BRANCH__EVALUATOR_EXPRESSION, oldEvaluatorExpression, newEvaluatorExpression);
if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}
/**
 * Public containment setter: detaches the previous child, attaches the new
 * one, then dispatches the accumulated notification chain.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setEvaluatorExpression(EvaluatorExpressionProperty newEvaluatorExpression) {
if (newEvaluatorExpression != evaluatorExpression) {
NotificationChain msgs = null;
// Remove the old child from this container before installing the new one.
if (evaluatorExpression != null)
msgs = ((InternalEObject)evaluatorExpression).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.CONDITIONAL_ROUTE_BRANCH__EVALUATOR_EXPRESSION, null, msgs);
if (newEvaluatorExpression != null)
msgs = ((InternalEObject)newEvaluatorExpression).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.CONDITIONAL_ROUTE_BRANCH__EVALUATOR_EXPRESSION, null, msgs);
msgs = basicSetEvaluatorExpression(newEvaluatorExpression, msgs);
if (msgs != null) msgs.dispatch();
}
// Same-value set: still notify listeners (touch semantics).
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.CONDITIONAL_ROUTE_BRANCH__EVALUATOR_EXPRESSION, newEvaluatorExpression, newEvaluatorExpression));
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public RegistryKeyProperty getTargetSequence() {
return targetSequence;
}
/**
 * Mirror of {@code basicSetEvaluatorExpression} for the target-sequence child.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetTargetSequence(RegistryKeyProperty newTargetSequence, NotificationChain msgs) {
RegistryKeyProperty oldTargetSequence = targetSequence;
targetSequence = newTargetSequence;
if (eNotificationRequired()) {
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.CONDITIONAL_ROUTE_BRANCH__TARGET_SEQUENCE, oldTargetSequence, newTargetSequence);
if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}
/**
 * Public containment setter for the target sequence; same detach/attach/
 * dispatch protocol as {@link #setEvaluatorExpression(EvaluatorExpressionProperty)}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setTargetSequence(RegistryKeyProperty newTargetSequence) {
if (newTargetSequence != targetSequence) {
NotificationChain msgs = null;
if (targetSequence != null)
msgs = ((InternalEObject)targetSequence).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.CONDITIONAL_ROUTE_BRANCH__TARGET_SEQUENCE, null, msgs);
if (newTargetSequence != null)
msgs = ((InternalEObject)newTargetSequence).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.CONDITIONAL_ROUTE_BRANCH__TARGET_SEQUENCE, null, msgs);
msgs = basicSetTargetSequence(newTargetSequence, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.CONDITIONAL_ROUTE_BRANCH__TARGET_SEQUENCE, newTargetSequence, newTargetSequence));
}
/**
 * Routes containment-removal of the two child features to their basicSet*
 * helpers (clearing the reference); everything else goes to the superclass.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__EVALUATOR_EXPRESSION:
return basicSetEvaluatorExpression(null, msgs);
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__TARGET_SEQUENCE:
return basicSetTargetSequence(null, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
 * Reflective getter used by the EMF runtime.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__BREAK_AFTER_ROUTE:
return isBreakAfterRoute();
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__EVALUATOR_EXPRESSION:
return getEvaluatorExpression();
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__TARGET_SEQUENCE:
return getTargetSequence();
}
return super.eGet(featureID, resolve, coreType);
}
/**
 * Reflective setter used by the EMF runtime.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__BREAK_AFTER_ROUTE:
setBreakAfterRoute((Boolean)newValue);
return;
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__EVALUATOR_EXPRESSION:
setEvaluatorExpression((EvaluatorExpressionProperty)newValue);
return;
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__TARGET_SEQUENCE:
setTargetSequence((RegistryKeyProperty)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
 * Reflective unset: restores each feature to its default/empty value.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eUnset(int featureID) {
switch (featureID) {
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__BREAK_AFTER_ROUTE:
setBreakAfterRoute(BREAK_AFTER_ROUTE_EDEFAULT);
return;
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__EVALUATOR_EXPRESSION:
setEvaluatorExpression((EvaluatorExpressionProperty)null);
return;
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__TARGET_SEQUENCE:
setTargetSequence((RegistryKeyProperty)null);
return;
}
super.eUnset(featureID);
}
/**
 * Reflective "is set" check: true when a feature differs from its default.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__BREAK_AFTER_ROUTE:
return breakAfterRoute != BREAK_AFTER_ROUTE_EDEFAULT;
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__EVALUATOR_EXPRESSION:
return evaluatorExpression != null;
case EsbPackage.CONDITIONAL_ROUTE_BRANCH__TARGET_SEQUENCE:
return targetSequence != null;
}
return super.eIsSet(featureID);
}
/**
 * Debug string; only the plain attribute is included (the generator omits
 * containment references here).
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (breakAfterRoute: ");
result.append(breakAfterRoute);
result.append(')');
return result.toString();
}
} //ConditionalRouteBranchImpl
| |
package org.hl7.fhir.r4.hapi.ctx;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.*;
import org.apache.commons.io.Charsets;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.r4.model.CodeSystem.CodeSystemContentMode;
import org.hl7.fhir.r4.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.r4.model.ValueSet.*;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
import ca.uhn.fhir.context.FhirContext;
public class DefaultProfileValidationSupport implements IValidationSupport {

  private static final String URL_PREFIX_VALUE_SET = "http://hl7.org/fhir/ValueSet/";
  private static final String URL_PREFIX_STRUCTURE_DEFINITION = "http://hl7.org/fhir/StructureDefinition/";
  private static final String URL_PREFIX_STRUCTURE_DEFINITION_BASE = "http://hl7.org/fhir/";
  private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(DefaultProfileValidationSupport.class);

  // Lazily loaded caches of the built-in FHIR definitions. myCodeSystems and
  // myValueSets are always populated together under synchronized(this);
  // flush() resets all three so they reload on next access.
  private Map<String, CodeSystem> myCodeSystems;
  private Map<String, StructureDefinition> myStructureDefinitions;
  private Map<String, ValueSet> myValueSets;

  /**
   * Expands the given include by listing the requested codes (or every code
   * when none are named) from the referenced code system.
   *
   * @return the expansion; empty when the code system is unknown
   */
  @Override
  public ValueSetExpansionComponent expandValueSet(FhirContext theContext, ConceptSetComponent theInclude) {
    ValueSetExpansionComponent retVal = new ValueSetExpansionComponent();

    Set<String> wantCodes = new HashSet<String>();
    for (ConceptReferenceComponent next : theInclude.getConcept()) {
      wantCodes.add(next.getCode());
    }

    CodeSystem system = fetchCodeSystem(theContext, theInclude.getSystem());
    if (system == null) {
      // FIX: the previous code dereferenced a null CodeSystem (NPE) when the
      // include pointed at a system we don't ship; return an empty expansion.
      return retVal;
    }
    for (ConceptDefinitionComponent next : system.getConcept()) {
      if (wantCodes.isEmpty() || wantCodes.contains(next.getCode())) {
        retVal.addContains().setSystem(theInclude.getSystem()).setCode(next.getCode()).setDisplay(next.getDisplay());
      }
    }
    return retVal;
  }

  /** Returns all built-in structure definitions (loaded lazily). */
  @Override
  public List<StructureDefinition> fetchAllStructureDefinitions(FhirContext theContext) {
    return new ArrayList<StructureDefinition>(provideStructureDefinitionMap(theContext).values());
  }

  /** Returns the built-in CodeSystem with the given canonical URL, or null. */
  @Override
  public CodeSystem fetchCodeSystem(FhirContext theContext, String theSystem) {
    return (CodeSystem) fetchCodeSystemOrValueSet(theContext, theSystem, true);
  }

  /**
   * Shared lookup for code systems and value sets. Both caches are built in
   * one pass because the bundled resources mix the two resource types.
   */
  private DomainResource fetchCodeSystemOrValueSet(FhirContext theContext, String theSystem, boolean codeSystem) {
    synchronized (this) {
      Map<String, CodeSystem> codeSystems = myCodeSystems;
      Map<String, ValueSet> valueSets = myValueSets;
      if (codeSystems == null || valueSets == null) {
        codeSystems = new HashMap<String, CodeSystem>();
        valueSets = new HashMap<String, ValueSet>();

        loadCodeSystems(theContext, codeSystems, valueSets, "/org/hl7/fhir/r4/model/valueset/valuesets.xml");
        loadCodeSystems(theContext, codeSystems, valueSets, "/org/hl7/fhir/r4/model/valueset/v2-tables.xml");
        loadCodeSystems(theContext, codeSystems, valueSets, "/org/hl7/fhir/r4/model/valueset/v3-codesystems.xml");

        myCodeSystems = codeSystems;
        myValueSets = valueSets;
      }

      if (codeSystem) {
        return codeSystems.get(theSystem);
      } else {
        return valueSets.get(theSystem);
      }
    }
  }

  /**
   * Generic fetch dispatching on the requested resource class; value-set URLs
   * are also recognized by prefix.
   */
  @SuppressWarnings("unchecked")
  @Override
  public <T extends IBaseResource> T fetchResource(FhirContext theContext, Class<T> theClass, String theUri) {
    Validate.notBlank(theUri, "theUri must not be null or blank");

    if (theClass.equals(StructureDefinition.class)) {
      return (T) fetchStructureDefinition(theContext, theUri);
    }
    if (theClass.equals(ValueSet.class) || theUri.startsWith(URL_PREFIX_VALUE_SET)) {
      return (T) fetchValueSet(theContext, theUri);
    }
    return null;
  }

  /**
   * Fetches a structure definition, normalizing bare names ("Patient") and
   * single-segment paths to full canonical URLs first.
   */
  @Override
  public StructureDefinition fetchStructureDefinition(FhirContext theContext, String theUrl) {
    String url = theUrl;
    if (url.startsWith(URL_PREFIX_STRUCTURE_DEFINITION)) {
      // already canonical -- no change
    } else if (url.indexOf('/') == -1) {
      url = URL_PREFIX_STRUCTURE_DEFINITION + url;
    } else if (StringUtils.countMatches(url, '/') == 1) {
      url = URL_PREFIX_STRUCTURE_DEFINITION_BASE + url;
    }
    return provideStructureDefinitionMap(theContext).get(url);
  }

  /** Returns the built-in ValueSet with the given canonical URL, or null. */
  ValueSet fetchValueSet(FhirContext theContext, String theSystem) {
    return (ValueSet) fetchCodeSystemOrValueSet(theContext, theSystem, false);
  }

  /** Clears every cache; definitions reload lazily on next access. */
  public void flush() {
    myCodeSystems = null;
    myStructureDefinitions = null;
    // FIX: myValueSets was previously not cleared, leaving stale value sets
    // referenced after a flush even though code systems were reloaded.
    myValueSets = null;
  }

  @Override
  public boolean isCodeSystemSupported(FhirContext theContext, String theSystem) {
    CodeSystem cs = fetchCodeSystem(theContext, theSystem);
    return cs != null && cs.getContent() != CodeSystemContentMode.NOTPRESENT;
  }

  /**
   * Parses a bundled XML Bundle from the classpath and indexes each contained
   * CodeSystem/ValueSet by canonical URL. Narrative text is dropped to save memory.
   */
  private void loadCodeSystems(FhirContext theContext, Map<String, CodeSystem> theCodeSystems, Map<String, ValueSet> theValueSets, String theClasspath) {
    ourLog.info("Loading CodeSystem/ValueSet from classpath: {}", theClasspath);
    InputStream valuesetText = DefaultProfileValidationSupport.class.getResourceAsStream(theClasspath);
    if (valuesetText != null) {
      InputStreamReader reader = new InputStreamReader(valuesetText, Charsets.UTF_8);

      Bundle bundle = theContext.newXmlParser().parseResource(Bundle.class, reader);
      for (BundleEntryComponent next : bundle.getEntry()) {
        if (next.getResource() instanceof CodeSystem) {
          CodeSystem nextValueSet = (CodeSystem) next.getResource();
          nextValueSet.getText().setDivAsString("");
          String system = nextValueSet.getUrl();
          if (isNotBlank(system)) {
            theCodeSystems.put(system, nextValueSet);
          }
        } else if (next.getResource() instanceof ValueSet) {
          ValueSet nextValueSet = (ValueSet) next.getResource();
          nextValueSet.getText().setDivAsString("");
          String system = nextValueSet.getUrl();
          if (isNotBlank(system)) {
            theValueSets.put(system, nextValueSet);
          }
        }
      }
    } else {
      ourLog.warn("Unable to load resource: {}", theClasspath);
    }
  }

  /**
   * Parses a bundled XML Bundle from the classpath and indexes each contained
   * StructureDefinition by canonical URL.
   */
  private void loadStructureDefinitions(FhirContext theContext, Map<String, StructureDefinition> theCodeSystems, String theClasspath) {
    ourLog.info("Loading structure definitions from classpath: {}", theClasspath);
    InputStream valuesetText = DefaultProfileValidationSupport.class.getResourceAsStream(theClasspath);
    if (valuesetText != null) {
      InputStreamReader reader = new InputStreamReader(valuesetText, Charsets.UTF_8);

      Bundle bundle = theContext.newXmlParser().parseResource(Bundle.class, reader);
      for (BundleEntryComponent next : bundle.getEntry()) {
        if (next.getResource() instanceof StructureDefinition) {
          StructureDefinition nextSd = (StructureDefinition) next.getResource();
          nextSd.getText().setDivAsString("");
          String system = nextSd.getUrl();
          if (isNotBlank(system)) {
            theCodeSystems.put(system, nextSd);
          }
        }
      }
    } else {
      ourLog.warn("Unable to load resource: {}", theClasspath);
    }
  }

  /** Lazily builds the structure-definition cache. */
  private Map<String, StructureDefinition> provideStructureDefinitionMap(FhirContext theContext) {
    // FIX: synchronized for consistency with fetchCodeSystemOrValueSet(); the
    // old unsynchronized check-then-build could run the loaders concurrently.
    synchronized (this) {
      Map<String, StructureDefinition> structureDefinitions = myStructureDefinitions;
      if (structureDefinitions == null) {
        structureDefinitions = new HashMap<String, StructureDefinition>();

        loadStructureDefinitions(theContext, structureDefinitions, "/org/hl7/fhir/r4/model/profile/profiles-resources.xml");
        loadStructureDefinitions(theContext, structureDefinitions, "/org/hl7/fhir/r4/model/profile/profiles-types.xml");
        loadStructureDefinitions(theContext, structureDefinitions, "/org/hl7/fhir/r4/model/profile/profiles-others.xml");

        myStructureDefinitions = structureDefinitions;
      }
      return structureDefinitions;
    }
  }

  /**
   * Validates a code against a built-in code system; returns a WARNING result
   * when the system or code is unknown.
   */
  @Override
  public CodeValidationResult validateCode(FhirContext theContext, String theCodeSystem, String theCode, String theDisplay) {
    CodeSystem cs = fetchCodeSystem(theContext, theCodeSystem);
    if (cs != null) {
      boolean caseSensitive = true;
      if (cs.hasCaseSensitive()) {
        caseSensitive = cs.getCaseSensitive();
      }

      CodeValidationResult retVal = testIfConceptIsInList(theCode, cs.getConcept(), caseSensitive);
      if (retVal != null) {
        return retVal;
      }
    }
    return new CodeValidationResult(IssueSeverity.WARNING, "Unknown code: " + theCodeSystem + " / " + theCode);
  }

  /**
   * Case-normalizes the code (when the system is case-insensitive) and then
   * searches the concept tree.
   * NOTE(review): uses default-locale toUpperCase(), matching prior behavior.
   */
  private CodeValidationResult testIfConceptIsInList(String theCode, List<ConceptDefinitionComponent> conceptList, boolean theCaseSensitive) {
    String code = theCode;
    if (theCaseSensitive == false) {
      code = code.toUpperCase();
    }
    return testIfConceptIsInListInner(conceptList, theCaseSensitive, code);
  }

  /**
   * Depth-first search of a concept list: checks each concept, then recurses
   * into its children. Returns null when the code is not found.
   */
  private CodeValidationResult testIfConceptIsInListInner(List<ConceptDefinitionComponent> conceptList, boolean theCaseSensitive, String code) {
    CodeValidationResult retVal = null;
    for (ConceptDefinitionComponent next : conceptList) {
      String nextCandidate = next.getCode();
      if (theCaseSensitive == false) {
        nextCandidate = nextCandidate.toUpperCase();
      }
      if (nextCandidate.equals(code)) {
        retVal = new CodeValidationResult(next);
        break;
      }

      // recurse into child concepts (code is already case-normalized)
      retVal = testIfConceptIsInList(code, next.getConcept(), theCaseSensitive);
      if (retVal != null) {
        break;
      }
    }
    return retVal;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.apps.logstream;
import java.util.*;
import java.util.Map.Entry;
import javax.validation.constraints.NotNull;
import com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datatorrent.lib.algo.TopN;
import com.datatorrent.lib.codec.KryoSerializableStreamCodec;
import com.datatorrent.lib.logs.DimensionObject;
import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.DefaultPartition;
import com.datatorrent.api.Partitioner;
import com.datatorrent.api.StreamCodec;
import com.datatorrent.apps.logstream.PropertyRegistry.LogstreamPropertyRegistry;
import com.datatorrent.apps.logstream.PropertyRegistry.PropertyRegistry;
import com.datatorrent.netlet.util.DTThrowable;
/**
* Partitionable topN operator.
* Each partition serves specific filter as defined in the partition.
*
* @since 0.9.4
*/
/**
 * Partitionable topN operator.
 * Each partition serves a specific filter as defined in the partition.
 *
 * @since 0.9.4
 */
public class LogstreamTopN extends TopN<String, DimensionObject<String>> implements Partitioner<LogstreamTopN>
{
  // True until the first tuple has been seen; used to latch the expected filter type.
  private transient boolean firstTuple = true;
  // Meta information (currently only the filter id) latched from the first tuple.
  private final HashMap<String, Number> recordType = new HashMap<String, Number>();
  private static final Logger logger = LoggerFactory.getLogger(LogstreamTopN.class);
  @NotNull
  private PropertyRegistry<String> registry;

  /**
   * supply the registry object which is used to store and retrieve meta information about each tuple
   *
   * @param registry
   */
  public void setRegistry(PropertyRegistry<String> registry)
  {
    this.registry = registry;
  }

  @Override
  public void setup(OperatorContext context)
  {
    super.setup(context);
    LogstreamPropertyRegistry.setInstance(registry);
  }

  @Override
  public void processTuple(Map<String, DimensionObject<String>> tuple)
  {
    if (firstTuple) {
      extractType(tuple);
      firstTuple = false;
    }

    // FIX: the old code split a possibly-null key (NPE on an empty tuple);
    // validation is now skipped when no key is present.
    Number receivedFilter = extractFilterId(tuple);
    if (receivedFilter != null) {
      Number expectedFilter = recordType.get(LogstreamUtil.FILTER);
      if (!receivedFilter.equals(expectedFilter)) {
        logger.error("Unexpected tuple");
        logger.error("expected filter = {} received = {}", expectedFilter, receivedFilter);
      }
    }
    super.processTuple(tuple);
  }

  /**
   * Parses the filter id (4th '|'-separated token of the first key) out of a
   * tuple, or returns null when the tuple is empty.
   */
  private Number extractFilterId(Map<String, DimensionObject<String>> tuple)
  {
    Iterator<Entry<String, DimensionObject<String>>> iterator = tuple.entrySet().iterator();
    if (!iterator.hasNext()) {
      return null;
    }
    String key = iterator.next().getKey();
    String[] split = key.split("\\|");
    // Integer.valueOf instead of the deprecated new Integer(...) constructor.
    return Integer.valueOf(split[3]);
  }

  @Override
  protected StreamCodec<Map<String, DimensionObject<String>>> getStreamCodec()
  {
    return new LogstreamTopNStreamCodec();
  }

  /**
   * <b>Note:</b> This partitioner does not support parallel partitioning.<br/><br/>
   * {@inheritDoc}
   */
  @Override
  public Collection<Partition<LogstreamTopN>> definePartitions(Collection<Partition<LogstreamTopN>> partitions, PartitioningContext context)
  {
    ArrayList<Partition<LogstreamTopN>> newPartitions = new ArrayList<Partition<LogstreamTopN>>();
    String[] filters = registry.list(LogstreamUtil.FILTER);
    int partitionSize;

    if (partitions.size() == 1) {
      // initial partitions; one functional partition per filter
      partitionSize = filters.length;
    }
    else {
      // repartitioning: double the partition count
      partitionSize = partitions.size() * 2;
    }

    // Build fresh operator instances carrying over registry and N.
    for (int i = 0; i < partitionSize; i++) {
      try {
        LogstreamTopN logstreamTopN = new LogstreamTopN();
        logstreamTopN.registry = this.registry;
        logstreamTopN.setN(this.getN());
        Partition<LogstreamTopN> partition = new DefaultPartition<LogstreamTopN>(logstreamTopN);
        newPartitions.add(partition);
      }
      catch (Throwable ex) {
        DTThrowable.rethrow(ex);
      }
    }

    // Width (in bits) of the dynamic-partition counter above the 16 filter bits.
    int partitionBits = (Integer.numberOfLeadingZeros(0) - Integer.numberOfLeadingZeros(partitionSize / filters.length - 1));
    int partitionMask = 0;
    if (partitionBits > 0) {
      partitionMask = -1 >>> (Integer.numberOfLeadingZeros(-1)) - partitionBits;
    }
    partitionMask = (partitionMask << 16) | 0xffff; // right most 16 bits used for functional partitioning

    for (int i = 0; i < newPartitions.size(); i++) {
      Partition<LogstreamTopN> partition = newPartitions.get(i);
      String partitionVal = filters[i % filters.length];
      int bits = i / filters.length;
      int filterId = registry.getIndex(LogstreamUtil.FILTER, partitionVal);
      filterId = 0xffff & filterId; // clear out first 16 bits
      int partitionKey = (bits << 16) | filterId; // first 16 bits for dynamic partitioning, last 16 bits for functional partitioning
      logger.debug("partitionKey = {} partitionMask = {}", Integer.toBinaryString(partitionKey), Integer.toBinaryString(partitionMask));
      // 'data' is the input port inherited from TopN -- TODO confirm against TopN's declaration.
      partition.getPartitionKeys().put(data, new PartitionKeys(partitionMask, Sets.newHashSet(partitionKey)));
    }
    return newPartitions;
  }

  @Override
  public void partitioned(Map<Integer, Partition<LogstreamTopN>> partitions)
  {
  }

  /**
   * extracts the meta information about the tuple
   *
   * @param tuple
   */
  private void extractType(Map<String, DimensionObject<String>> tuple)
  {
    // FIX: guard against an empty tuple (the old code NPE'd on a null key).
    Number filterId = extractFilterId(tuple);
    if (filterId != null) {
      recordType.put(LogstreamUtil.FILTER, filterId);
    }
  }

  public static class LogstreamTopNStreamCodec extends KryoSerializableStreamCodec<Map<String, DimensionObject<String>>>
  {
    /**
     * Combines the map's hash (upper 16 bits) with the tuple's filter id
     * (lower 16 bits) so tuples of one filter land on the same partition family.
     */
    @Override
    public int getPartition(Map<String, DimensionObject<String>> t)
    {
      Iterator<String> iterator = t.keySet().iterator();
      String key = iterator.next();
      String[] split = key.split("\\|");
      int filterId = Integer.parseInt(split[3]); // filter id location in input record key
      int ret = 0;
      int hashCode = t.hashCode();
      filterId = 0xffff & filterId; // clear out first 16 bits
      ret = (hashCode << 16) | filterId; // first 16 bits represent hashcode, last 16 bits represent filter type
      return ret;
    }

    private static final long serialVersionUID = 201411031411L;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.metadata.model;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author yangli9
*
*/
/**
 * Canonical representation of a column/measure data type, e.g. "varchar(256)"
 * or "decimal(19,4)". Instances are interned through {@link #getInstance(String)}.
 *
 * @author yangli9
 */
public class DataType {

  public static final String VALID_TYPES_STRING = "any|char|varchar|boolean|binary" //
      + "|integer|tinyint|smallint|bigint|decimal|numeric|float|real|double" //
      + "|date|time|datetime|timestamp|byte|int|short|long|string|hllc" //
      + "|" + TblColRef.InnerDataTypeEnum.LITERAL.getDataType() //
      + "|" + TblColRef.InnerDataTypeEnum.DERIVED.getDataType();

  private static final Pattern TYPE_PATTERN = Pattern.compile(
      // standard sql types, ref:
      // http://www.w3schools.com/sql/sql_datatypes_general.asp
      "(" + VALID_TYPES_STRING + ")" + "\\s*" //
          + "(?:" + "[(]" + "([\\d\\s,]+)" + "[)]" + ")?", Pattern.CASE_INSENSITIVE);

  public static final Set<String> INTEGER_FAMILY = new HashSet<String>();
  public static final Set<String> NUMBER_FAMILY = new HashSet<String>();
  public static final Set<String> DATETIME_FAMILY = new HashSet<String>();
  public static final Set<String> STRING_FAMILY = new HashSet<String>();
  private static final Set<Integer> HLLC_PRECISIONS = new HashSet<Integer>();
  private static final Map<String, String> LEGACY_TYPE_MAP = new HashMap<String, String>();
  static {
    INTEGER_FAMILY.add("tinyint");
    INTEGER_FAMILY.add("smallint");
    INTEGER_FAMILY.add("integer");
    INTEGER_FAMILY.add("bigint");

    NUMBER_FAMILY.addAll(INTEGER_FAMILY);
    NUMBER_FAMILY.add("float");
    NUMBER_FAMILY.add("double");
    NUMBER_FAMILY.add("decimal");
    NUMBER_FAMILY.add("real");
    NUMBER_FAMILY.add("numeric");

    DATETIME_FAMILY.add("date");
    DATETIME_FAMILY.add("time");
    DATETIME_FAMILY.add("datetime");
    DATETIME_FAMILY.add("timestamp");

    STRING_FAMILY.add("varchar");
    STRING_FAMILY.add("char");

    // Aliases from older metadata are rewritten to their canonical names.
    LEGACY_TYPE_MAP.put("byte", "tinyint");
    LEGACY_TYPE_MAP.put("int", "integer");
    LEGACY_TYPE_MAP.put("short", "smallint");
    LEGACY_TYPE_MAP.put("long", "bigint");
    LEGACY_TYPE_MAP.put("string", "varchar");
    LEGACY_TYPE_MAP.put("hllc10", "hllc(10)");
    LEGACY_TYPE_MAP.put("hllc12", "hllc(12)");
    LEGACY_TYPE_MAP.put("hllc14", "hllc(14)");
    LEGACY_TYPE_MAP.put("hllc15", "hllc(15)");
    LEGACY_TYPE_MAP.put("hllc16", "hllc(16)");

    for (int i = 10; i <= 16; i++)
      HLLC_PRECISIONS.add(i);
  }

  private static final ConcurrentMap<DataType, DataType> CACHE = new ConcurrentHashMap<DataType, DataType>();

  public static final DataType ANY = DataType.getInstance("any");

  /**
   * Returns the interned instance for the given type string, or null for null
   * input.
   *
   * @throws IllegalArgumentException if the string is not a valid type
   */
  public static DataType getInstance(String type) {
    if (type == null)
      return null;

    DataType dataType = new DataType(type);
    // FIX: the previous get()-then-put() pair was a non-atomic check-then-act
    // on the ConcurrentMap; putIfAbsent makes interning race-free.
    DataType cached = CACHE.putIfAbsent(dataType, dataType);
    return cached == null ? dataType : cached;
  }

  // ============================================================================

  // Canonical lowercase type name, e.g. "varchar".
  private String name;
  // Declared precision, or -1 when unspecified (defaulted for char/decimal below).
  private int precision;
  // Declared scale, or -1 when unspecified.
  private int scale;

  DataType(String datatype) {
    parseDataType(datatype);
  }

  /**
   * Parses "name(p[,s])" into the name/precision/scale fields, rewriting
   * legacy aliases and applying defaults for unspecified char/decimal sizes.
   *
   * @throws IllegalArgumentException on any malformed input
   */
  private void parseDataType(String datatype) {
    // NOTE(review): default-locale toLowerCase(), preserved from the original.
    datatype = datatype.trim().toLowerCase();
    datatype = replaceLegacy(datatype);

    Matcher m = TYPE_PATTERN.matcher(datatype);
    if (!m.matches())
      throw new IllegalArgumentException("bad data type -- " + datatype + ", does not match " + TYPE_PATTERN);

    name = replaceLegacy(m.group(1));
    precision = -1;
    scale = -1;

    String leftover = m.group(2);
    if (leftover != null) {
      String[] parts = leftover.split("\\s*,\\s*");
      for (int i = 0; i < parts.length; i++) {
        int n;
        try {
          n = Integer.parseInt(parts[i]);
        } catch (NumberFormatException e) {
          throw new IllegalArgumentException("bad data type -- " + datatype + ", precision/scale not numeric");
        }
        if (i == 0)
          precision = n;
        else if (i == 1)
          scale = n;
        else
          throw new IllegalArgumentException("bad data type -- " + datatype + ", too many precision/scale parts");
      }
    }

    // FIXME 256 for unknown string precision
    if ((name.equals("char") || name.equals("varchar")) && precision == -1) {
      precision = 256; // to save memory at frontend, e.g. tableau will
                       // allocate memory according to this
    }

    // FIXME (19,4) for unknown decimal precision
    if ((name.equals("decimal") || name.equals("numeric")) && precision == -1) {
      precision = 39;
      scale = 16;
    }

    if (isHLLC() && !HLLC_PRECISIONS.contains(precision))
      throw new IllegalArgumentException("HLLC precision must be one of " + HLLC_PRECISIONS);
  }

  /** Maps a legacy alias to its canonical spelling, or returns the input unchanged. */
  private String replaceLegacy(String str) {
    String replace = LEGACY_TYPE_MAP.get(str);
    return replace == null ? str : replace;
  }

  /**
   * Rough per-value storage estimate in bytes.
   *
   * @throws IllegalStateException for types with no known fixed size
   */
  public int getSpaceEstimate() {
    if (isTinyInt()) {
      return 1;
    } else if (isSmallInt()) {
      return 2;
    } else if (isInt()) {
      return 4;
    } else if (isBigInt()) {
      return 8;
    } else if (isFloat()) {
      return 4;
    } else if (isDouble()) {
      return 8;
    } else if (isDecimal()) {
      return 8;
    } else if (isHLLC()) {
      return 1 << precision;
    }
    throw new IllegalStateException("The return type : " + name + " is not recognized;");
  }

  public boolean isStringFamily() {
    return STRING_FAMILY.contains(name);
  }

  public boolean isIntegerFamily() {
    return INTEGER_FAMILY.contains(name);
  }

  public boolean isNumberFamily() {
    return NUMBER_FAMILY.contains(name);
  }

  public boolean isDateTimeFamily() {
    return DATETIME_FAMILY.contains(name);
  }

  public boolean isTinyInt() {
    return name.equals("tinyint");
  }

  public boolean isSmallInt() {
    return name.equals("smallint");
  }

  public boolean isInt() {
    return name.equals("integer");
  }

  public boolean isBigInt() {
    return name.equals("bigint");
  }

  public boolean isFloat() {
    return name.equals("float");
  }

  public boolean isDouble() {
    return name.equals("double");
  }

  public boolean isDecimal() {
    return name.equals("decimal");
  }

  public boolean isHLLC() {
    return name.equals("hllc");
  }

  public String getName() {
    return name;
  }

  public int getPrecision() {
    return precision;
  }

  public int getScale() {
    return scale;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((name == null) ? 0 : name.hashCode());
    result = prime * result + precision;
    result = prime * result + scale;
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null)
      return false;
    if (getClass() != obj.getClass())
      return false;
    DataType other = (DataType) obj;
    if (name == null) {
      if (other.name != null)
        return false;
    } else if (!name.equals(other.name))
      return false;
    if (precision != other.precision)
      return false;
    if (scale != other.scale)
      return false;
    return true;
  }

  @Override
  public String toString() {
    if (precision < 0 && scale < 0)
      return name;
    else if (scale < 0)
      return name + "(" + precision + ")";
    else
      return name + "(" + precision + "," + scale + ")";
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.transport.mail;
import org.apache.axis2.format.MessageFormatterEx;
import org.apache.axis2.format.MessageFormatterExAdapter;
import org.apache.axis2.transport.base.*;
import org.apache.commons.logging.LogFactory;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.description.*;
import org.apache.axis2.AxisFault;
import org.apache.axis2.addressing.AddressingConstants;
import org.apache.axis2.transport.OutTransportInfo;
import org.apache.axis2.transport.MessageFormatter;
import org.apache.axiom.om.OMOutputFormat;
import org.apache.axiom.om.util.CommonUtils;
import javax.mail.*;
import javax.mail.internet.*;
import javax.activation.DataHandler;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.io.IOException;
/**
* The mail transport sender sends mail using an SMTP server configuration defined
* in the axis2.xml's transport sender definition
*/
public class MailTransportSender extends AbstractTransportSender
        implements ManagementSupport {
    /** SMTP user name read from the MAIL_SMTP_USERNAME transport parameter (null = no auth) */
    private String smtpUsername = null;
    /** SMTP password read from the MAIL_SMTP_PASSWORD transport parameter (null = no auth) */
    private String smtpPassword = null;
    /** Default from address for outgoing messages */
    private InternetAddress smtpFromAddress = null;
    /** A set of custom Bcc address for all outgoing messages */
    private InternetAddress[] smtpBccAddresses = null;
    /** Default mail format */
    private String defaultMailFormat = "Text";
    /** The default Session which can be safely shared */
    private Session session = null;
    /**
     * The public constructor
     */
    public MailTransportSender() {
        log = LogFactory.getLog(MailTransportSender.class);
    }
    /**
     * Initialize the Mail sender and be ready to send messages.
     * All transport-out parameters are copied verbatim into the JavaMail
     * {@link Properties}, so standard mail.smtp.* keys configure the session.
     * @param cfgCtx the axis2 configuration context
     * @param transportOut the transport-out description
     * @throws org.apache.axis2.AxisFault on error
     */
    public void init(ConfigurationContext cfgCtx, TransportOutDescription transportOut) throws AxisFault {
        super.init(cfgCtx, transportOut);
        // initialize SMTP session
        Properties props = new Properties();
        List<Parameter> params = transportOut.getParameters();
        for (Parameter p : params) {
            props.put(p.getName(), p.getValue());
        }
        // optional default From address for messages that carry none themselves
        if (props.containsKey(MailConstants.MAIL_SMTP_FROM)) {
            try {
                smtpFromAddress = new InternetAddress(
                        (String) props.get(MailConstants.MAIL_SMTP_FROM));
            } catch (AddressException e) {
                handleException("Invalid default 'From' address : " +
                        props.get(MailConstants.MAIL_SMTP_FROM), e);
            }
        }
        // optional Bcc list appended to every outgoing message
        if (props.containsKey(MailConstants.MAIL_SMTP_BCC)) {
            try {
                smtpBccAddresses = InternetAddress.parse(
                        (String) props.get(MailConstants.MAIL_SMTP_BCC));
            } catch (AddressException e) {
                handleException("Invalid default 'Bcc' address : " +
                        props.get(MailConstants.MAIL_SMTP_BCC), e);
            }
        }
        if (props.containsKey(MailConstants.TRANSPORT_MAIL_FORMAT)) {
            defaultMailFormat = (String) props.get(MailConstants.TRANSPORT_MAIL_FORMAT);
        }
        smtpUsername = (String) props.get(MailConstants.MAIL_SMTP_USERNAME);
        smtpPassword = (String) props.get(MailConstants.MAIL_SMTP_PASSWORD);
        // authenticate only when both credentials are configured
        if (smtpUsername != null && smtpPassword != null) {
            session = Session.getInstance(props, new Authenticator() {
                public PasswordAuthentication getPasswordAuthentication() {
                    return new PasswordAuthentication(smtpUsername, smtpPassword);
                }
            });
        } else {
            session = Session.getInstance(props, null);
        }
        MailUtils.setupLogging(session, log, transportOut);
        // set the synchronous-reply callback table used to correlate responses
        if (cfgCtx.getProperty(BaseConstants.CALLBACK_TABLE) == null){
            cfgCtx.setProperty(BaseConstants.CALLBACK_TABLE, new ConcurrentHashMap());
        }
    }
    /**
     * Send the given message over the Mail transport
     *
     * @param msgCtx the axis2 message context
     * @param targetAddress the destination, optionally prefixed with the
     *        transport scheme (e.g. "mailto:"); may be null when
     *        {@code outTransportInfo} is supplied instead
     * @param outTransportInfo out transport information for replies
     * @throws AxisFault on error
     */
    public void sendMessage(MessageContext msgCtx, String targetAddress,
                            OutTransportInfo outTransportInfo) throws AxisFault {
        MailOutTransportInfo mailOutInfo = null;
        if (targetAddress != null) {
            // strip the transport scheme prefix (e.g. "mailto:") if present
            if (targetAddress.startsWith(MailConstants.TRANSPORT_NAME)) {
                targetAddress = targetAddress.substring(MailConstants.TRANSPORT_NAME.length()+1);
            }
            // a concrete (non-none, non-anonymous) WS-A ReplyTo becomes the mail From
            if (msgCtx.getReplyTo() != null &&
                !AddressingConstants.Final.WSA_NONE_URI.equals(msgCtx.getReplyTo().getAddress()) &&
                !AddressingConstants.Final.WSA_ANONYMOUS_URL.equals(msgCtx.getReplyTo().getAddress())) {
                String replyTo = msgCtx.getReplyTo().getAddress();
                if (replyTo.startsWith(MailConstants.TRANSPORT_NAME)) {
                    replyTo = replyTo.substring(MailConstants.TRANSPORT_NAME.length()+1);
                }
                try {
                    mailOutInfo = new MailOutTransportInfo(new InternetAddress(replyTo));
                } catch (AddressException e) {
                    handleException("Invalid reply address/es : " + replyTo, e);
                }
            } else {
                mailOutInfo = new MailOutTransportInfo(smtpFromAddress);
            }
            try {
                mailOutInfo.setTargetAddresses(InternetAddress.parse(targetAddress));
            } catch (AddressException e) {
                handleException("Invalid target address/es : " + targetAddress, e);
            }
        } else if (outTransportInfo != null && outTransportInfo instanceof MailOutTransportInfo) {
            mailOutInfo = (MailOutTransportInfo) outTransportInfo;
        }
        if (mailOutInfo != null) {
            try {
                String messageID = sendMail(mailOutInfo, msgCtx);
                // this is important on the axis2 client side: if the mail transport uses
                // anonymous addressing, the sender has to wait until the response comes
                if (!msgCtx.getOptions().isUseSeparateListener() && !msgCtx.isServerSide()){
                    waitForReply(msgCtx, messageID);
                }
            } catch (MessagingException e) {
                handleException("Error generating mail message", e);
            } catch (IOException e) {
                handleException("Error generating mail message", e);
            }
        } else {
            handleException("Unable to determine out transport information to send message");
        }
    }
    /**
     * Blocks the sending thread until the response correlated to the given mail
     * message id arrives (synchronous in-out over mail), or the configured
     * timeout elapses. Starts the mail listener on demand if it is not running.
     * @param msgContext the outgoing message context
     * @param mailMessageID the SMTP message id used as correlation key
     * @throws AxisFault if the receiver is missing, the wait is interrupted,
     *         or the wait ends without a response (timeout)
     */
    private void waitForReply(MessageContext msgContext, String mailMessageID) throws AxisFault {
        // piggy back message constant is used to pass a piggy back
        // message context in asynchronous model
        if (!(msgContext.getAxisOperation() instanceof OutInAxisOperation) &&
                (msgContext.getProperty(org.apache.axis2.Constants.PIGGYBACK_MESSAGE) == null)) {
            return;
        }
        ConfigurationContext configContext = msgContext.getConfigurationContext();
        // if the mail message listener has not started we need to start it
        if (!configContext.getListenerManager().isListenerRunning(MailConstants.TRANSPORT_NAME)) {
            TransportInDescription mailTo =
                    configContext.getAxisConfiguration().getTransportIn(MailConstants.TRANSPORT_NAME);
            if (mailTo == null) {
                handleException("Could not find the transport receiver for " +
                        MailConstants.TRANSPORT_NAME);
            }
            configContext.getListenerManager().addListener(mailTo, false);
        }
        SynchronousCallback synchronousCallback = new SynchronousCallback(msgContext);
        Map callBackMap = (Map) msgContext.getConfigurationContext().
                getProperty(BaseConstants.CALLBACK_TABLE);
        callBackMap.put(mailMessageID, synchronousCallback);
        synchronized (synchronousCallback) {
            try {
                // NOTE(review): wait() is not wrapped in a loop re-checking
                // isComplete(), so a spurious wakeup before the timeout would be
                // reported as a timeout below — confirm and consider a timed loop.
                synchronousCallback.wait(msgContext.getOptions().getTimeOutInMilliSeconds());
            } catch (InterruptedException e) {
                // NOTE(review): the thread's interrupt status is not restored here
                handleException("Error occured while waiting ..", e);
            }
        }
        if (!synchronousCallback.isComplete()){
            // when timeout occurs remove this entry.
            callBackMap.remove(mailMessageID);
            handleException("Timeout while waiting for a response");
        }
    }
    /**
     * Populate email with a SOAP formatted message
     * @param outInfo the out transport information holder
     * @param msgContext the message context that holds the message to be written
     * @throws AxisFault on error
     * @return id of the send mail message
     */
    private String sendMail(MailOutTransportInfo outInfo, MessageContext msgContext)
            throws AxisFault, MessagingException, IOException {
        OMOutputFormat format = BaseUtils.getOMOutputFormat(msgContext);
        // Make sure that non textual attachments are sent with base64 transfer encoding
        // instead of binary.
        format.setProperty(OMOutputFormat.USE_CTE_BASE64_FOR_NON_TEXTUAL_ATTACHMENTS, true);
        MessageFormatter messageFormatter = BaseUtils.getMessageFormatter(msgContext);
        if (log.isDebugEnabled()) {
            log.debug("Creating MIME message using message formatter " +
                    messageFormatter.getClass().getSimpleName());
        }
        WSMimeMessage message = null;
        if (outInfo.getFromAddress() != null) {
            message = new WSMimeMessage(session, outInfo.getFromAddress().getAddress());
        } else {
            message = new WSMimeMessage(session, "");
        }
        Map trpHeaders = (Map) msgContext.getProperty(MessageContext.TRANSPORT_HEADERS);
        if (log.isDebugEnabled() && trpHeaders != null) {
            log.debug("Using transport headers: " + trpHeaders);
        }
        // set From address - first check if this is a reply, then use from address from the
        // transport out, else if any custom transport headers set on this message, or default
        // to the transport senders default From address
        if (outInfo.getTargetAddresses() != null && outInfo.getFromAddress() != null) {
            if (log.isDebugEnabled()) {
                log.debug("Setting From header to " + outInfo.getFromAddress().getAddress() +
                        " from OutTransportInfo");
            }
            message.setFrom(outInfo.getFromAddress());
            message.setReplyTo((new Address []{outInfo.getFromAddress()}));
        } else if (trpHeaders != null && trpHeaders.containsKey(MailConstants.MAIL_HEADER_FROM)) {
            InternetAddress from =
                    new InternetAddress((String) trpHeaders.get(MailConstants.MAIL_HEADER_FROM));
            if (log.isDebugEnabled()) {
                log.debug("Setting From header to " + from.getAddress() +
                        " from transport headers");
            }
            message.setFrom(from);
            message.setReplyTo(new Address[] { from });
        } else {
            if (smtpFromAddress != null) {
                if (log.isDebugEnabled()) {
                    log.debug("Setting From header to " + smtpFromAddress.getAddress() +
                            " from transport configuration");
                }
                message.setFrom(smtpFromAddress);
                message.setReplyTo(new Address[] {smtpFromAddress});
            } else {
                handleException("From address for outgoing message cannot be determined");
            }
        }
        // set To address/es to any custom transport header set on the message, else use the reply
        // address from the out transport information
        if (trpHeaders != null && trpHeaders.containsKey(MailConstants.MAIL_HEADER_TO)) {
            Address[] to =
                    InternetAddress.parse((String) trpHeaders.get(MailConstants.MAIL_HEADER_TO));
            if (log.isDebugEnabled()) {
                log.debug("Setting To header to " + InternetAddress.toString(to) +
                        " from transport headers");
            }
            message.setRecipients(Message.RecipientType.TO, to);
        } else if (outInfo.getTargetAddresses() != null) {
            if (log.isDebugEnabled()) {
                log.debug("Setting To header to " + InternetAddress.toString(
                        outInfo.getTargetAddresses()) + " from OutTransportInfo");
            }
            message.setRecipients(Message.RecipientType.TO, outInfo.getTargetAddresses());
        } else {
            handleException("To address for outgoing message cannot be determined");
        }
        // set Cc address/es to any custom transport header set on the message, else use the
        // Cc list from original request message
        if (trpHeaders != null && trpHeaders.containsKey(MailConstants.MAIL_HEADER_CC)) {
            Address[] cc =
                    InternetAddress.parse((String) trpHeaders.get(MailConstants.MAIL_HEADER_CC));
            if (log.isDebugEnabled()) {
                log.debug("Setting Cc header to " + InternetAddress.toString(cc) +
                        " from transport headers");
            }
            message.setRecipients(Message.RecipientType.CC, cc);
        } else if (outInfo.getCcAddresses() != null) {
            if (log.isDebugEnabled()) {
                log.debug("Setting Cc header to " + InternetAddress.toString(
                        outInfo.getCcAddresses()) + " from OutTransportInfo");
            }
            message.setRecipients(Message.RecipientType.CC, outInfo.getCcAddresses());
        }
        // set Bcc address/es to any custom addresses set at the transport sender level + any
        // custom transport header
        if (trpHeaders != null && trpHeaders.containsKey(MailConstants.MAIL_HEADER_BCC)) {
            InternetAddress[] bcc =
                    InternetAddress.parse((String) trpHeaders.get(MailConstants.MAIL_HEADER_BCC));
            if (log.isDebugEnabled()) {
                log.debug("Adding Bcc header values " + InternetAddress.toString(bcc) +
                        " from transport headers");
            }
            message.addRecipients(Message.RecipientType.BCC, bcc);
        }
        if (smtpBccAddresses != null) {
            if (log.isDebugEnabled()) {
                log.debug("Adding Bcc header values " + InternetAddress.toString(smtpBccAddresses) +
                        " from transport configuration");
            }
            message.addRecipients(Message.RecipientType.BCC, smtpBccAddresses);
        }
        // set subject
        if (trpHeaders != null && trpHeaders.containsKey(MailConstants.MAIL_HEADER_SUBJECT)) {
            if (log.isDebugEnabled()) {
                log.debug("Setting Subject header to '" + trpHeaders.get(
                        MailConstants.MAIL_HEADER_SUBJECT) + "' from transport headers");
            }
            message.setSubject((String) trpHeaders.get(MailConstants.MAIL_HEADER_SUBJECT));
        } else if (outInfo.getSubject() != null) {
            if (log.isDebugEnabled()) {
                log.debug("Setting Subject header to '" + outInfo.getSubject() +
                        "' from transport headers");
            }
            message.setSubject(outInfo.getSubject());
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Generating default Subject header from SOAP action");
            }
            message.setSubject(BaseConstants.SOAPACTION + ": " + msgContext.getSoapAction());
        }
        //TODO: use a combined message id for smtp so that it generates a unique id while
        // being able to support asynchronous communication.
        // if a custom message id is set, use it
//        if (msgContext.getMessageID() != null) {
//            message.setHeader(MailConstants.MAIL_HEADER_MESSAGE_ID, msgContext.getMessageID());
//            message.setHeader(MailConstants.MAIL_HEADER_X_MESSAGE_ID, msgContext.getMessageID());
//        }
        // if this is a reply, set reference to original message
        if (outInfo.getRequestMessageID() != null) {
            message.setHeader(MailConstants.MAIL_HEADER_IN_REPLY_TO, outInfo.getRequestMessageID());
            message.setHeader(MailConstants.MAIL_HEADER_REFERENCES, outInfo.getRequestMessageID());
        } else {
            if (trpHeaders != null &&
                    trpHeaders.containsKey(MailConstants.MAIL_HEADER_IN_REPLY_TO)) {
                message.setHeader(MailConstants.MAIL_HEADER_IN_REPLY_TO,
                        (String) trpHeaders.get(MailConstants.MAIL_HEADER_IN_REPLY_TO));
            }
            if (trpHeaders != null && trpHeaders.containsKey(MailConstants.MAIL_HEADER_REFERENCES)) {
                message.setHeader(MailConstants.MAIL_HEADER_REFERENCES,
                        (String) trpHeaders.get(MailConstants.MAIL_HEADER_REFERENCES));
            }
        }
        // set Date
        message.setSentDate(new Date());
        // set SOAPAction header
        message.setHeader(BaseConstants.SOAPACTION, msgContext.getSoapAction());
        // write body
        MessageFormatterEx messageFormatterEx;
        if (messageFormatter instanceof MessageFormatterEx) {
            messageFormatterEx = (MessageFormatterEx)messageFormatter;
        } else {
            messageFormatterEx = new MessageFormatterExAdapter(messageFormatter);
        }
        DataHandler dataHandler = new DataHandler(messageFormatterEx.getDataSource(msgContext, format, msgContext.getSoapAction()));
        MimeMultipart mimeMultiPart = null;
        String mFormat = (String) msgContext.getProperty(MailConstants.TRANSPORT_MAIL_FORMAT);
        if (mFormat == null) {
            mFormat = defaultMailFormat;
        }
        if (log.isDebugEnabled()) {
            log.debug("Using mail format '" + mFormat + "'");
        }
        MimePart mainPart;
        boolean isMultiPart = MailConstants.TRANSPORT_FORMAT_MP.equals(mFormat);
        boolean isAttachFile = MailConstants.TRANSPORT_FORMAT_ATTACHMENT.equals(mFormat);
        if (isMultiPart || isAttachFile) {
            // multipart/attachment format: part 1 is a human readable text body,
            // part 2 carries the actual payload (and becomes the "main" part below)
            mimeMultiPart = new MimeMultipart();
            MimeBodyPart mimeBodyPart1 = new MimeBodyPart();
            String body = (String) msgContext.getProperty(MailConstants.TRANSPORT_MAIL_BODY_WHEN_ATTACHED);
            if (body == null) {
                body = "Web Service Message Attached";
            }
            String bodyMime = (String) msgContext.getProperty(MailConstants.TRANSPORT_MAIL_BODY_MIME_WHEN_ATTACHED);
            if (bodyMime == null) {
                bodyMime = "text/plain";
            }
            mimeBodyPart1.setContent(body, bodyMime);
            MimeBodyPart mimeBodyPart2 = new MimeBodyPart();
            mimeMultiPart.addBodyPart(mimeBodyPart1);
            mimeMultiPart.addBodyPart(mimeBodyPart2);
            message.setContent(mimeMultiPart);
            if(isAttachFile){
                String fileName = (String) msgContext.getProperty(
                        MailConstants.TRANSPORT_FORMAT_ATTACHMENT_FILE);
                if (fileName != null) {
                    mimeBodyPart2.setFileName(fileName);
                } else {
                    mimeBodyPart2.setFileName("attachment");
                }
            }
            mainPart = mimeBodyPart2;
        } else {
            mainPart = message;
        }
        try {
            mainPart.setHeader(BaseConstants.SOAPACTION, msgContext.getSoapAction());
            mainPart.setDataHandler(dataHandler);
            // AXIOM's idea of what is textual also includes application/xml and
            // application/soap+xml (which JavaMail considers as binary). For these content types
            // always use quoted-printable transfer encoding. Note that JavaMail is a bit smarter
            // here because it can choose between 7bit and quoted-printable automatically, but it
            // needs to scan the entire content to determine this.
            if (msgContext.getOptions().getProperty("Content-Transfer-Encoding") != null) {
                mainPart.setHeader("Content-Transfer-Encoding",
                        (String) msgContext.getOptions().getProperty("Content-Transfer-Encoding"));
            } else {
                String contentType = dataHandler.getContentType().toLowerCase();
                if (!contentType.startsWith("multipart/") && CommonUtils.isTextualPart(contentType)) {
                    mainPart.setHeader("Content-Transfer-Encoding", "quoted-printable");
                }
            }
            //setting any custom headers defined by the user
            if (msgContext.getOptions().getProperty(MailConstants.TRANSPORT_MAIL_CUSTOM_HEADERS) != null) {
                Map customTransportHeaders = (Map)msgContext.getOptions().getProperty(MailConstants.TRANSPORT_MAIL_CUSTOM_HEADERS);
                for (Object header: customTransportHeaders.keySet()){
                    mainPart.setHeader((String)header,(String)customTransportHeaders.get(header));
                }
            }
            log.debug("Sending message");
            Transport.send(message);
            // update metrics
            metrics.incrementMessagesSent(msgContext);
            long bytesSent = message.getBytesSent();
            if (bytesSent != -1) {
                metrics.incrementBytesSent(msgContext, bytesSent);
            }
        } catch (MessagingException e) {
            metrics.incrementFaultsSending();
            handleException("Error creating mail message or sending it to the configured server", e);
        }
        return message.getMessageID();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.standard;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.lookup.LookupService;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.record.path.FieldValue;
import org.apache.nifi.record.path.RecordPath;
import org.apache.nifi.record.path.RecordPathResult;
import org.apache.nifi.record.path.util.RecordPathCache;
import org.apache.nifi.record.path.validation.RecordPathValidator;
import org.apache.nifi.serialization.record.DataType;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.serialization.record.util.DataTypeUtils;
import org.apache.nifi.util.Tuple;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
@EventDriven
@SideEffectFree
@SupportsBatching
@InputRequirement(Requirement.INPUT_REQUIRED)
@WritesAttributes({
@WritesAttribute(attribute = "mime.type", description = "Sets the mime.type attribute to the MIME Type specified by the Record Writer"),
@WritesAttribute(attribute = "record.count", description = "The number of records in the FlowFile")
})
@Tags({"lookup", "enrichment", "route", "record", "csv", "json", "avro", "database", "db", "logs", "convert", "filter"})
@CapabilityDescription("Extracts one or more fields from a Record and looks up a value for those fields in a LookupService. If a result is returned by the LookupService, "
+ "that result is optionally added to the Record. In this case, the processor functions as an Enrichment processor. Regardless, the Record is then "
+ "routed to either the 'matched' relationship or 'unmatched' relationship (if the 'Routing Strategy' property is configured to do so), "
+ "indicating whether or not a result was returned by the LookupService, allowing the processor to also function as a Routing processor. "
+ "The \"coordinates\" to use for looking up a value in the Lookup Service are defined by adding a user-defined property. Each property that is added will have an entry added "
+ "to a Map, where the name of the property becomes the Map Key and the value returned by the RecordPath becomes the value for that key. If multiple values are returned by the "
+ "RecordPath, then the Record will be routed to the 'unmatched' relationship (or 'success', depending on the 'Routing Strategy' property's configuration). "
+ "If one or more fields match the Result RecordPath, all fields "
+ "that match will be updated. If there is no match in the configured LookupService, then no fields will be updated. I.e., it will not overwrite an existing value in the Record "
+ "with a null value. Please note, however, that if the results returned by the LookupService are not accounted for in your schema (specifically, "
+ "the schema that is configured for your Record Writer) then the fields will not be written out to the FlowFile.")
@DynamicProperty(name = "Value To Lookup", value = "Valid Record Path", expressionLanguageScope = ExpressionLanguageScope.FLOWFILE_ATTRIBUTES,
description = "A RecordPath that points to the field whose value will be looked up in the configured Lookup Service")
@SeeAlso(value = {ConvertRecord.class, SplitRecord.class},
classNames = {"org.apache.nifi.lookup.SimpleKeyValueLookupService", "org.apache.nifi.lookup.maxmind.IPLookupService", "org.apache.nifi.lookup.db.DatabaseRecordLookupService"})
public class LookupRecord extends AbstractRouteRecord<Tuple<Map<String, RecordPath>, RecordPath>> {
private volatile RecordPathCache recordPathCache = new RecordPathCache(25);
private volatile LookupService<?> lookupService;
static final AllowableValue ROUTE_TO_SUCCESS = new AllowableValue("route-to-success", "Route to 'success'",
"Records will be routed to a 'success' Relationship regardless of whether or not there is a match in the configured Lookup Service");
static final AllowableValue ROUTE_TO_MATCHED_UNMATCHED = new AllowableValue("route-to-matched-unmatched", "Route to 'matched' or 'unmatched'",
"Records will be routed to either a 'matched' or an 'unmatched' Relationship depending on whether or not there was a match in the configured Lookup Service. "
+ "A single input FlowFile may result in two different output FlowFiles.");
static final AllowableValue RESULT_ENTIRE_RECORD = new AllowableValue("insert-entire-record", "Insert Entire Record",
"The entire Record that is retrieved from the Lookup Service will be inserted into the destination path.");
static final AllowableValue RESULT_RECORD_FIELDS = new AllowableValue("record-fields", "Insert Record Fields",
"All of the fields in the Record that is retrieved from the Lookup Service will be inserted into the destination path.");
static final AllowableValue USE_PROPERTY = new AllowableValue("use-property", "Use Property",
"The \"Result RecordPath\" property will be used to determine which part of the record should be updated with the value returned by the Lookup Service");
static final AllowableValue REPLACE_EXISTING_VALUES = new AllowableValue("replace-existing-values", "Replace Existing Values",
"The \"Result RecordPath\" property will be ignored and the lookup service must be a single simple key lookup service. Every dynamic property value should "
+ "be a record path. For each dynamic property, the value contained in the field corresponding to the record path will be used as the key in the Lookup "
+ "Service and the value returned by the Lookup Service will be used to replace the existing value. It is possible to configure multiple dynamic properties "
+ "to replace multiple values in one execution. This strategy only supports simple types replacements (strings, integers, etc).");
static final PropertyDescriptor LOOKUP_SERVICE = new PropertyDescriptor.Builder()
.name("lookup-service")
.displayName("Lookup Service")
.description("The Lookup Service to use in order to lookup a value in each Record")
.identifiesControllerService(LookupService.class)
.required(true)
.build();
static final PropertyDescriptor RESULT_RECORD_PATH = new PropertyDescriptor.Builder()
.name("result-record-path")
.displayName("Result RecordPath")
.description("A RecordPath that points to the field whose value should be updated with whatever value is returned from the Lookup Service. "
+ "If not specified, the value that is returned from the Lookup Service will be ignored, except for determining whether the FlowFile should "
+ "be routed to the 'matched' or 'unmatched' Relationship.")
.addValidator(new RecordPathValidator())
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.required(false)
.build();
static final PropertyDescriptor RESULT_CONTENTS = new PropertyDescriptor.Builder()
.name("result-contents")
.displayName("Record Result Contents")
.description("When a result is obtained that contains a Record, this property determines whether the Record itself is inserted at the configured "
+ "path or if the contents of the Record (i.e., the sub-fields) will be inserted at the configured path.")
.allowableValues(RESULT_ENTIRE_RECORD, RESULT_RECORD_FIELDS)
.defaultValue(RESULT_ENTIRE_RECORD.getValue())
.required(true)
.build();
static final PropertyDescriptor ROUTING_STRATEGY = new PropertyDescriptor.Builder()
.name("routing-strategy")
.displayName("Routing Strategy")
.description("Specifies how to route records after a Lookup has completed")
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.allowableValues(ROUTE_TO_SUCCESS, ROUTE_TO_MATCHED_UNMATCHED)
.defaultValue(ROUTE_TO_SUCCESS.getValue())
.required(true)
.build();
static final PropertyDescriptor REPLACEMENT_STRATEGY = new PropertyDescriptor.Builder()
.name("record-update-strategy")
.displayName("Record Update Strategy")
.description("This property defines the strategy to use when updating the record with the value returned by the Lookup Service.")
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.allowableValues(REPLACE_EXISTING_VALUES, USE_PROPERTY)
.defaultValue(USE_PROPERTY.getValue())
.required(true)
.build();
static final Relationship REL_MATCHED = new Relationship.Builder()
.name("matched")
.description("All records for which the lookup returns a value will be routed to this relationship")
.build();
static final Relationship REL_UNMATCHED = new Relationship.Builder()
.name("unmatched")
.description("All records for which the lookup does not have a matching value will be routed to this relationship")
.build();
static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("All records will be sent to this Relationship if configured to do so, unless a failure occurs")
.build();
private static final Set<Relationship> MATCHED_COLLECTION = Collections.singleton(REL_MATCHED);
private static final Set<Relationship> UNMATCHED_COLLECTION = Collections.singleton(REL_UNMATCHED);
private static final Set<Relationship> SUCCESS_COLLECTION = Collections.singleton(REL_SUCCESS);
private volatile Set<Relationship> relationships = new HashSet<>(Arrays.asList(REL_SUCCESS, REL_FAILURE));
private volatile boolean routeToMatchedUnmatched = false;
@OnScheduled
public void onScheduled(final ProcessContext context) {
this.lookupService = context.getProperty(LOOKUP_SERVICE).asControllerService(LookupService.class);
}
@Override
public Set<Relationship> getRelationships() {
return relationships;
}
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.addAll(super.getSupportedPropertyDescriptors());
properties.add(LOOKUP_SERVICE);
properties.add(RESULT_RECORD_PATH);
properties.add(ROUTING_STRATEGY);
properties.add(RESULT_CONTENTS);
properties.add(REPLACEMENT_STRATEGY);
return properties;
}
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
return new PropertyDescriptor.Builder()
.name(propertyDescriptorName)
.description("A RecordPath that points to the field whose value will be looked up in the configured Lookup Service")
.addValidator(new RecordPathValidator())
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.required(false)
.dynamic(true)
.build();
}
/**
 * Validates the processor configuration:
 * <ul>
 *   <li>at least one user-defined (dynamic) RecordPath property must be present;</li>
 *   <li>when replacing existing values in place, the configured Lookup Service
 *       must require exactly one key;</li>
 *   <li>otherwise, a user-defined property must exist for every key that the
 *       Lookup Service requires.</li>
 * </ul>
 *
 * @param validationContext context providing the configured property values
 * @return validation failures, or an empty collection if the configuration is valid
 */
@Override
@SuppressWarnings("unchecked")
protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
    // Names of all user-defined (dynamic) properties; each maps a lookup key to
    // the RecordPath that extracts its coordinate value from a record.
    final Set<String> dynamicPropNames = validationContext.getProperties().keySet().stream()
        .filter(PropertyDescriptor::isDynamic)
        .map(PropertyDescriptor::getName)
        .collect(Collectors.toSet());
    if (dynamicPropNames.isEmpty()) {
        return Collections.singleton(new ValidationResult.Builder()
            .subject("User-Defined Properties")
            .valid(false)
            .explanation("At least one user-defined property must be specified.")
            .build());
    }
    final Set<String> requiredKeys = validationContext.getProperty(LOOKUP_SERVICE).asControllerService(LookupService.class).getRequiredKeys();
    if (validationContext.getProperty(REPLACEMENT_STRATEGY).getValue().equals(REPLACE_EXISTING_VALUES.getValue())) {
        // In-place replacement looks up each matched field individually, so it
        // only works with a Lookup Service keyed by a single coordinate.
        if (requiredKeys.size() != 1) {
            return Collections.singleton(new ValidationResult.Builder()
                .subject(LOOKUP_SERVICE.getDisplayName())
                .valid(false)
                .explanation("When using \"" + REPLACE_EXISTING_VALUES.getDisplayName() + "\" as Record Update Strategy, "
                    + "only a Lookup Service requiring a single key can be used.")
                .build());
        }
    } else {
        // Every key the Lookup Service requires must be backed by a user-defined property.
        final Set<String> missingKeys = requiredKeys.stream()
            .filter(key -> !dynamicPropNames.contains(key))
            .collect(Collectors.toSet());
        if (!missingKeys.isEmpty()) {
            final List<ValidationResult> validationResults = new ArrayList<>();
            for (final String missingKey : missingKeys) {
                // Message grammar fixed: "Lookup Services requires" -> "Lookup Service requires",
                // "with a name" -> "with the name".
                final ValidationResult result = new ValidationResult.Builder()
                    .subject(missingKey)
                    .valid(false)
                    .explanation("The configured Lookup Service requires that a key be provided with the name '" + missingKey
                        + "'. Please add a new property to this Processor with the name '" + missingKey
                        + "' and provide a RecordPath that can be used to retrieve the appropriate value.")
                    .build();
                validationResults.add(result);
            }
            return validationResults;
        }
    }
    return Collections.emptyList();
}
/**
 * Reacts to changes of the Routing Strategy property by swapping the active
 * relationship set and remembering which strategy is in effect. Both branches
 * always include the failure relationship.
 */
@Override
public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
    if (!ROUTING_STRATEGY.equals(descriptor)) {
        return;
    }
    final boolean matchedUnmatched = ROUTE_TO_MATCHED_UNMATCHED.getValue().equalsIgnoreCase(newValue);
    final Set<Relationship> updatedRels = new HashSet<>();
    if (matchedUnmatched) {
        updatedRels.add(REL_MATCHED);
        updatedRels.add(REL_UNMATCHED);
    } else {
        updatedRels.add(REL_SUCCESS);
    }
    updatedRels.add(REL_FAILURE);
    this.relationships = updatedRels;
    this.routeToMatchedUnmatched = matchedUnmatched;
}
/**
 * Routes one record by delegating to either in-place replacement or
 * result-path replacement, depending on the configured Record Update Strategy.
 */
@Override
protected Set<Relationship> route(final Record record, final RecordSchema writeSchema, final FlowFile flowFile, final ProcessContext context,
    final Tuple<Map<String, RecordPath>, RecordPath> flowFileContext) {
    final String replacementStrategy = context.getProperty(REPLACEMENT_STRATEGY).getValue();
    return replacementStrategy.equals(REPLACE_EXISTING_VALUES.getValue())
        ? doInPlaceReplacement(record, flowFile, context, flowFileContext)
        : doResultPathReplacement(record, flowFile, context, flowFileContext);
}
/**
 * Replaces each matched field's value in place with the value the Lookup
 * Service returns for it. The service is invoked once per matched field, using
 * the service's single required key (customValidate enforces exactly one key
 * for this strategy).
 *
 * Returns the relationship set to route the record to: matched/unmatched when
 * that routing strategy is active, otherwise success. Note that the first
 * RecordPath matching no fields, or the first lookup miss, causes an immediate
 * return without evaluating the remaining paths/fields.
 */
private Set<Relationship> doInPlaceReplacement(Record record, FlowFile flowFile, ProcessContext context, Tuple<Map<String, RecordPath>, RecordPath> flowFileContext) {
// The single key required by the configured Lookup Service.
final String lookupKey = (String) context.getProperty(LOOKUP_SERVICE).asControllerService(LookupService.class).getRequiredKeys().iterator().next();
final Map<String, RecordPath> recordPaths = flowFileContext.getKey();
final Map<String, Object> lookupCoordinates = new HashMap<>(recordPaths.size());
for (final Map.Entry<String, RecordPath> entry : recordPaths.entrySet()) {
final String coordinateKey = entry.getKey();
final RecordPath recordPath = entry.getValue();
final RecordPathResult pathResult = recordPath.evaluate(record);
// Only non-null field values are candidates for replacement.
final List<FieldValue> lookupFieldValues = pathResult.getSelectedFields()
.filter(fieldVal -> fieldVal.getValue() != null)
.collect(Collectors.toList());
if (lookupFieldValues.isEmpty()) {
// No field matched this RecordPath: treat the record as unmatched.
final Set<Relationship> rels = routeToMatchedUnmatched ? UNMATCHED_COLLECTION : SUCCESS_COLLECTION;
getLogger().debug("RecordPath for property '{}' did not match any fields in a record for {}; routing record to {}", new Object[] {coordinateKey, flowFile, rels});
return rels;
}
for (FieldValue fieldValue : lookupFieldValues) {
// Numbers and Booleans pass through as-is; everything else is stringified
// so it can serve as a lookup coordinate.
final Object coordinateValue = (fieldValue.getValue() instanceof Number || fieldValue.getValue() instanceof Boolean)
? fieldValue.getValue() : DataTypeUtils.toString(fieldValue.getValue(), (String) null);
// The same map instance is reused; put() overwrites the previous field's coordinate.
lookupCoordinates.put(lookupKey, coordinateValue);
final Optional<?> lookupValueOption;
try {
lookupValueOption = lookupService.lookup(lookupCoordinates, flowFile.getAttributes());
} catch (final Exception e) {
throw new ProcessException("Failed to lookup coordinates " + lookupCoordinates + " in Lookup Service", e);
}
if (!lookupValueOption.isPresent()) {
// Lookup miss for this field: route the whole record as unmatched (or success).
final Set<Relationship> rels = routeToMatchedUnmatched ? UNMATCHED_COLLECTION : SUCCESS_COLLECTION;
return rels;
}
// Overwrite the field in place, inferring a data type for the new value.
final Object lookupValue = lookupValueOption.get();
final DataType inferredDataType = DataTypeUtils.inferDataType(lookupValue, RecordFieldType.STRING.getDataType());
fieldValue.updateValue(lookupValue, inferredDataType);
}
}
// Every path matched and every lookup succeeded.
final Set<Relationship> rels = routeToMatchedUnmatched ? MATCHED_COLLECTION : SUCCESS_COLLECTION;
return rels;
}
/**
 * Performs a single lookup per record: every user-defined RecordPath must
 * select exactly one non-null field, whose values together form the lookup
 * coordinates. The looked-up value is then written to the Result RecordPath
 * (if configured), either merged field-by-field into a destination Record or
 * assigned wholesale, depending on the Result Contents property.
 *
 * Returns the relationship set to route the record to: matched/unmatched when
 * that routing strategy is active, otherwise success.
 */
private Set<Relationship> doResultPathReplacement(Record record, FlowFile flowFile, ProcessContext context, Tuple<Map<String, RecordPath>, RecordPath> flowFileContext) {
final Map<String, RecordPath> recordPaths = flowFileContext.getKey();
final Map<String, Object> lookupCoordinates = new HashMap<>(recordPaths.size());
for (final Map.Entry<String, RecordPath> entry : recordPaths.entrySet()) {
final String coordinateKey = entry.getKey();
final RecordPath recordPath = entry.getValue();
final RecordPathResult pathResult = recordPath.evaluate(record);
// Only non-null field values can contribute a lookup coordinate.
final List<FieldValue> lookupFieldValues = pathResult.getSelectedFields()
.filter(fieldVal -> fieldVal.getValue() != null)
.collect(Collectors.toList());
if (lookupFieldValues.isEmpty()) {
// No field matched this RecordPath: treat the record as unmatched.
final Set<Relationship> rels = routeToMatchedUnmatched ? UNMATCHED_COLLECTION : SUCCESS_COLLECTION;
getLogger().debug("RecordPath for property '{}' did not match any fields in a record for {}; routing record to {}", new Object[] {coordinateKey, flowFile, rels});
return rels;
}
if (lookupFieldValues.size() > 1) {
// Ambiguous coordinate (multiple matches) is also treated as unmatched.
final Set<Relationship> rels = routeToMatchedUnmatched ? UNMATCHED_COLLECTION : SUCCESS_COLLECTION;
getLogger().debug("RecordPath for property '{}' matched {} fields in a record for {}; routing record to {}",
new Object[] {coordinateKey, lookupFieldValues.size(), flowFile, rels});
return rels;
}
// Numbers and Booleans pass through as-is; everything else is stringified.
final FieldValue fieldValue = lookupFieldValues.get(0);
final Object coordinateValue = (fieldValue.getValue() instanceof Number || fieldValue.getValue() instanceof Boolean)
? fieldValue.getValue() : DataTypeUtils.toString(fieldValue.getValue(), (String) null);
lookupCoordinates.put(coordinateKey, coordinateValue);
}
// One lookup for the whole record, using all coordinates at once.
final Optional<?> lookupValueOption;
try {
lookupValueOption = lookupService.lookup(lookupCoordinates, flowFile.getAttributes());
} catch (final Exception e) {
throw new ProcessException("Failed to lookup coordinates " + lookupCoordinates + " in Lookup Service", e);
}
if (!lookupValueOption.isPresent()) {
final Set<Relationship> rels = routeToMatchedUnmatched ? UNMATCHED_COLLECTION : SUCCESS_COLLECTION;
return rels;
}
// Ensure that the Record has the appropriate schema to account for the newly added values
final RecordPath resultPath = flowFileContext.getValue();
if (resultPath != null) {
final Object lookupValue = lookupValueOption.get();
final RecordPathResult resultPathResult = flowFileContext.getValue().evaluate(record);
final String resultContentsValue = context.getProperty(RESULT_CONTENTS).getValue();
if (RESULT_RECORD_FIELDS.getValue().equals(resultContentsValue) && lookupValue instanceof Record) {
final Record lookupRecord = (Record) lookupValue;
// User wants to add all fields of the resultant Record to the specified Record Path.
// If the destination Record Path returns to us a Record, then we will add all field values of
// the Lookup Record to the destination Record. However, if the destination Record Path returns
// something other than a Record, then we can't add the fields to it. We can only replace it,
// because it doesn't make sense to add fields to anything but a Record.
resultPathResult.getSelectedFields().forEach(fieldVal -> {
final Object destinationValue = fieldVal.getValue();
if (destinationValue instanceof Record) {
final Record destinationRecord = (Record) destinationValue;
for (final String fieldName : lookupRecord.getRawFieldNames()) {
final Object value = lookupRecord.getValue(fieldName);
final Optional<RecordField> recordFieldOption = lookupRecord.getSchema().getField(fieldName);
if (recordFieldOption.isPresent()) {
// Even if the looked up field is not nullable, if the lookup key didn't match with any record,
// and matched/unmatched records are written to the same FlowFile routed to 'success' relationship,
// then enriched fields should be nullable to support unmatched records whose enriched fields will be null.
RecordField field = recordFieldOption.get();
if (!routeToMatchedUnmatched && !field.isNullable()) {
field = new RecordField(field.getFieldName(), field.getDataType(), field.getDefaultValue(), field.getAliases(), true);
}
destinationRecord.setValue(field, value);
} else {
// No schema entry for this field name; set by name and let the record infer.
destinationRecord.setValue(fieldName, value);
}
}
} else {
// Destination is not a Record: replace it with the lookup Record wholesale.
final Optional<Record> parentOption = fieldVal.getParentRecord();
parentOption.ifPresent(parent -> parent.setValue(fieldVal.getField(), lookupRecord));
}
});
} else {
// Insert the looked-up value as a single value at the result path.
final DataType inferredDataType = DataTypeUtils.inferDataType(lookupValue, RecordFieldType.STRING.getDataType());
resultPathResult.getSelectedFields().forEach(fieldVal -> fieldVal.updateValue(lookupValue, inferredDataType));
}
record.incorporateInactiveFields();
}
final Set<Relationship> rels = routeToMatchedUnmatched ? MATCHED_COLLECTION : SUCCESS_COLLECTION;
return rels;
}
// Returning false here; presumably this tells the abstract base class not to
// transfer the original FlowFile to an "original" relationship — confirm
// against the base class's contract.
@Override
protected boolean isRouteOriginal() {
return false;
}
/**
 * Compiles, once per FlowFile, the RecordPaths for every user-defined
 * property (the lookup coordinates) and — when configured — the Result
 * RecordPath. Expression Language is evaluated against the FlowFile's
 * attributes before compilation.
 */
@Override
protected Tuple<Map<String, RecordPath>, RecordPath> getFlowFileContext(final FlowFile flowFile, final ProcessContext context) {
    final Map<String, RecordPath> coordinatePaths = new HashMap<>();
    for (final PropertyDescriptor property : context.getProperties().keySet()) {
        if (property.isDynamic()) {
            final String pathText = context.getProperty(property).evaluateAttributeExpressions(flowFile).getValue();
            coordinatePaths.put(property.getName(), recordPathCache.getCompiled(pathText));
        }
    }
    RecordPath resultPath = null;
    if (context.getProperty(RESULT_RECORD_PATH).isSet()) {
        final String resultPathText = context.getProperty(RESULT_RECORD_PATH).evaluateAttributeExpressions(flowFile).getValue();
        resultPath = recordPathCache.getCompiled(resultPathText);
    }
    return new Tuple<>(coordinatePaths, resultPath);
}
}
| |
/*
* Copyright (C) 2015 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.wire.schema;
import com.google.common.collect.ImmutableList;
import com.squareup.wire.schema.internal.parser.FieldElement;
import java.util.Collection;
import java.util.List;
import static com.squareup.wire.schema.Options.FIELD_OPTIONS;
/**
 * A single field of a protocol buffer message, or an extension field.
 *
 * <p>Parse-time state ({@code name}, {@code tag}, {@code label}, …) is immutable.
 * Link-time state ({@code type}, {@code deprecated}, {@code packed},
 * {@code redacted}) is populated by {@link #link} and {@link #linkOptions} after
 * all types are known.
 */
public final class Field {
  static final ProtoMember DEPRECATED = ProtoMember.get(FIELD_OPTIONS, "deprecated");
  static final ProtoMember PACKED = ProtoMember.get(FIELD_OPTIONS, "packed");

  private final String packageName;
  private final Location location;
  private final Label label;
  private final String name;
  private final String documentation;
  private final int tag;
  private final String defaultValue;
  private final String elementType;
  private final boolean extension;
  private final Options options;

  // Link-time state; null/false until link()/linkOptions() run.
  private ProtoType type;
  private Object deprecated;
  private Object packed;
  private boolean redacted;

  private Field(String packageName, Location location, Label label, String name,
      String documentation, int tag, String defaultValue, String elementType, Options options,
      boolean extension) {
    this.packageName = packageName;
    this.location = location;
    this.label = label;
    this.name = name;
    this.documentation = documentation;
    this.tag = tag;
    this.defaultValue = defaultValue;
    this.elementType = elementType;
    this.extension = extension;
    this.options = options;
  }

  /** Builds model fields from their parsed AST elements. */
  static ImmutableList<Field> fromElements(String packageName, List<FieldElement> fieldElements,
      boolean extension) {
    ImmutableList.Builder<Field> fields = ImmutableList.builder();
    for (FieldElement field : fieldElements) {
      // Use the statically imported FIELD_OPTIONS for consistency with the
      // constant declarations above.
      fields.add(new Field(packageName, field.getLocation(), field.getLabel(), field.getName(),
          field.getDocumentation(), field.getTag(), field.getDefaultValue(), field.getType(),
          new Options(FIELD_OPTIONS, field.getOptions()), extension));
    }
    return fields.build();
  }

  /** Converts model fields back to AST elements (the inverse of {@link #fromElements}). */
  static ImmutableList<FieldElement> toElements(List<Field> fields) {
    // ImmutableList.builder() for consistency with the other factory methods.
    ImmutableList.Builder<FieldElement> elements = ImmutableList.builder();
    for (Field field : fields) {
      elements.add(new FieldElement(
          field.location,
          field.label,
          field.elementType,
          field.name,
          field.defaultValue,
          field.tag,
          field.documentation,
          field.options.toElements()
      ));
    }
    return elements.build();
  }

  /** Returns the location in the .proto file where this field is declared. */
  public Location location() {
    return location;
  }

  /** Returns the declaring package name, or null if there is none. */
  public String packageName() {
    return packageName;
  }

  public Label label() {
    return label;
  }

  public boolean isRepeated() {
    return label() == Label.REPEATED;
  }

  public boolean isOptional() {
    return label() == Label.OPTIONAL;
  }

  public boolean isRequired() {
    return label() == Label.REQUIRED;
  }

  /** Returns the resolved type; null before {@link #link} has run. */
  public ProtoType type() {
    return type;
  }

  public String name() {
    return name;
  }

  /**
   * Returns this field's name, prefixed with its package name. Uniquely identifies extension
   * fields, such as in options.
   */
  public String qualifiedName() {
    return packageName != null
        ? packageName + '.' + name
        : name;
  }

  public int tag() {
    return tag;
  }

  public String documentation() {
    return documentation;
  }

  public Options options() {
    return options;
  }

  /** True once {@link #linkOptions} has observed {@code deprecated = true}. */
  public boolean isDeprecated() {
    return "true".equals(deprecated);
  }

  /** True once {@link #linkOptions} has observed {@code packed = true}. */
  public boolean isPacked() {
    return "true".equals(packed);
  }

  /** True when a {@code *.redacted} option is set to {@code true}. */
  public boolean isRedacted() {
    return redacted;
  }

  /** Returns the declared default value, or null if there is none. */
  public String getDefault() {
    return defaultValue;
  }

  /** Only scalar, non-string, non-bytes types may be packed. */
  private boolean isPackable(Linker linker, ProtoType type) {
    return !type.equals(ProtoType.STRING)
        && !type.equals(ProtoType.BYTES)
        && !(linker.get(type) instanceof MessageType);
  }

  public boolean isExtension() {
    return extension;
  }

  /** Resolves this field's declared type name against the linker's scope. */
  void link(Linker linker) {
    linker = linker.withContext(this);
    type = linker.resolveType(elementType);
  }

  /** Links this field's options and caches the well-known ones. */
  void linkOptions(Linker linker) {
    linker = linker.withContext(this);
    options.link(linker);
    deprecated = options().get(DEPRECATED);
    packed = options().get(PACKED);
    // We allow any package name to be used as long as it ends with '.redacted'.
    redacted = options().optionMatches(".*\\.redacted", "true");
  }

  /** Reports semantic errors: illegal packing, required extensions, bad imports. */
  void validate(Linker linker) {
    linker = linker.withContext(this);
    if (isPacked() && !isPackable(linker, type)) {
      linker.addError("packed=true not permitted on %s", type);
    }
    if (extension && isRequired()) {
      linker.addError("extension fields cannot be required", type);
    }
    linker.validateImport(location(), type);
  }

  /**
   * Returns a copy of this field containing only the options retained by
   * {@code markSet}, or null if the field's type was pruned entirely.
   */
  Field retainAll(Schema schema, MarkSet markSet) {
    // For map types only the value can participate in pruning as the key will always be scalar.
    if (type.isMap() && !markSet.contains(type.valueType())) return null;

    if (!markSet.contains(type)) return null;

    Field result = new Field(packageName, location, label, name, documentation, tag, defaultValue,
        elementType, options.retainAll(schema, markSet), extension);
    // Carry over link-time state so the pruned field stays fully linked.
    result.type = type;
    result.deprecated = deprecated;
    result.packed = packed;
    result.redacted = redacted;
    return result;
  }

  /** Retains only the fields of {@code enclosingType} that survive pruning by {@code markSet}. */
  static ImmutableList<Field> retainAll(
      Schema schema, MarkSet markSet, ProtoType enclosingType, Collection<Field> fields) {
    ImmutableList.Builder<Field> result = ImmutableList.builder();
    for (Field field : fields) {
      Field retainedField = field.retainAll(schema, markSet);
      if (retainedField != null && markSet.contains(ProtoMember.get(enclosingType, field.name()))) {
        result.add(retainedField);
      }
    }
    return result.build();
  }

  @Override public String toString() {
    return name();
  }

  public enum Label {
    OPTIONAL, REQUIRED, REPEATED,
    /** Indicates the field is a member of a {@code oneof} block. */
    ONE_OF
  }
}
| |
package eu.newsreader.eventcoreference.storyline;
import eu.newsreader.eventcoreference.input.EsoReader;
import eu.newsreader.eventcoreference.input.FrameNetReader;
import eu.newsreader.eventcoreference.input.TrigKSTripleReader;
import eu.newsreader.eventcoreference.util.EuroVoc;
import eu.newsreader.eventcoreference.util.Util;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.*;
/**
* Created by piek on 1/3/14.
*/
@Deprecated
public class QueryKnowledgeStoreToJsonStoryPerspectivesOld {
// Mapping loaded from the --ili file; keys/values come from Util.ReadFileToStringHashMap.
static HashMap<String, ArrayList<String>> iliMap = new HashMap<String, ArrayList<String>>();
// Event identifiers/labels to exclude, loaded from the --black-list file.
static ArrayList<String> blacklist = new ArrayList<String>();
static boolean ALL = false; /// if true we do not filter events
static boolean SKIPPEVENTS = false; /// if true we exclude perspective events from the stories
// When true (--merge), merged storylines are produced.
static boolean MERGE = false;
// Time granularity for grouping events (--time); defaults to day.
static String timeGran = "D";
// Ontology used for action similarity (--action-ont).
static String actionOnt = "";
// Minimum action similarity (--action-sim).
static int actionSim = 1;
// Minimum actor intersection between events (--actor-intersect).
static int interSect = 1;
static EsoReader esoReader = new EsoReader();
static FrameNetReader frameNetReader = new FrameNetReader();
// Top frames of the FrameNet hierarchy, filled when --frame-relations is given.
static ArrayList<String> topFrames = new ArrayList<String>();
// Flattening depth for FrameNet relations (--frame-level).
static int fnLevel = 0;
// Flattening depth for ESO relations (--eso-level).
static int esoLevel = 0;
// Minimum climax score for storylines (--climax-level).
static int climaxThreshold = 0;
// Optional filter applied to actors in the output.
static String entityFilter = "";
// Minimum actor mention count (--actor-cnt); -1 disables the filter.
static Integer actorThreshold = -1;
// Minimum topic score for storylines (--topic-level).
static int topicThreshold = 0;
// Result statistics, filled after processing.
static int nEvents = 0;
static int nActors = 0;
static int nMentions = 0;
static int nStories = 0;
// Publication year filter (--year).
static String year = "";
// KnowledgeStore connection settings.
static String KSSERVICE = ""; //https://knowledgestore2.fbk.eu";
static String KS = ""; //"nwr/wikinews-new";
static String KSuser = ""; //"nwr/wikinews-new";
static String KSpass = ""; //"nwr/wikinews-new";
// Event schema selection (--action-schema).
static String EVENTSCHEMA = "";
static EuroVoc euroVoc = new EuroVoc();
static EuroVoc euroVocBlackList = new EuroVoc();
// Accumulates a human-readable processing log for the run.
static String log = "";
static public void main (String[] args) {
String project = "NewsReader storyline";
String pathToILIfile = "";
String sparqlQuery = "";
String eventQuery = "";
String wordQuery = "";
String graspQuery = "";
String sourceQuery = "";
String entityQuery = "";
String kslimit = "500";
String pathToFtDataFile = "";
String blackListFile = "";
String fnFile = "";
String esoFile = "";
String euroVocFile = "";
String euroVocBlackListFile = "";
String pathToTokenIndex = "";
log = "";
fnLevel = 0;
esoLevel = 0;
for (int i = 0; i < args.length; i++) {
String arg = args[i];
if (arg.equals("--sparql") && args.length>(i+1)) {
sparqlQuery = args[i+1];
}
else if (arg.equals("--word") && args.length>(i+1)) {
wordQuery = args[i+1];
}
else if (arg.equals("--event") && args.length>(i+1)) {
eventQuery = args[i+1];
}
else if (arg.equals("--entity") && args.length>(i+1)) {
entityQuery = args[i+1];
}
else if (arg.equals("--tokens") && args.length>(i+1)) {
pathToTokenIndex = args[i+1];
}
else if (arg.equals("--source") && args.length>(i+1)) {
sourceQuery = args[i+1];
}
else if (arg.equals("--grasp") && args.length>(i+1)) {
graspQuery = args[i+1];
}
else if (arg.equals("--year") && args.length>(i+1)) {
year = args[i+1];
}
else if (arg.equals("--ft") && args.length>(i+1)) {
pathToFtDataFile = args[i+1];
}
else if (arg.equals("--time") && args.length>(i+1)) {
timeGran = args[i+1];
}
else if (arg.equals("--actor-intersect") && args.length>(i+1)) {
try {
interSect = Integer.parseInt(args[i+1]);
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
else if (arg.equals("--action-sim") && args.length>(i+1)) {
try {
actionSim = Integer.parseInt(args[i+1]);
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
else if (arg.equals("--action-ont") && args.length>(i+1)) {
actionOnt = args[i+1];
}
else if (arg.equals("--action-schema") && args.length>(i+1)) {
EVENTSCHEMA = args[i+1];
}
else if (arg.equals("--merge")) {
MERGE = true;
}
else if (arg.equals("--eurovoc") && args.length>(i+1)) {
euroVocFile = args[i+1];
euroVoc.readEuroVoc(euroVocFile,"en");
}
else if (arg.equals("--eurovoc-blacklist") && args.length>(i+1)) {
euroVocBlackListFile = args[i+1];
euroVocBlackList.readEuroVoc(euroVocBlackListFile, "en");
// System.out.println("euroVocBlackList = " + euroVocBlackList.uriLabelMap.size());
}
else if (arg.equals("--service") && args.length>(i+1)) {
KSSERVICE = args[i+1];
}
else if (arg.equals("--ks") && args.length>(i+1)) {
KS = args[i+1];
}
else if (arg.equals("--ks-user") && args.length>(i+1)) {
KSuser = args[i+1];
}
else if (arg.equals("--ks-pass") && args.length>(i+1)) {
KSpass = args[i+1];
}
else if (arg.equals("--ks-limit") && args.length>(i+1)) {
kslimit = args[i+1];
}
else if (arg.equals("--project") && args.length>(i+1)) {
project = args[i+1];
}
else if (arg.equals("--ili") && args.length>(i+1)) {
pathToILIfile = args[i+1];
}
else if (arg.equals("--black-list") && args.length>(i+1)) {
blackListFile = args[i+1];
}
else if (arg.equals("--actor-cnt") && args.length>(i+1)) {
actorThreshold = Integer.parseInt(args[i+1]);
}
else if (arg.equals("--all")){
ALL = true;
}
else if (arg.equals("--frame-relations") && args.length>(i+1)) {
fnFile = args[i+1];
}
else if (arg.equals("--frame-level") && args.length>(i+1)) {
try {
fnLevel = Integer.parseInt(args[i+1]);
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
else if (arg.equals("--eso-relations") && args.length>(i+1)) {
esoFile = args[i+1];
}
else if (arg.equals("--eso-level") && args.length>(i+1)) {
try {
esoLevel = Integer.parseInt(args[i+1]);
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
else if (arg.equals("--climax-level") && args.length>(i+1)) {
try {
climaxThreshold = Integer.parseInt(args[i+1]);
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
else if (arg.equals("--topic-level") && args.length>(i+1)) {
try {
topicThreshold = Integer.parseInt(args[i+1]);
} catch (NumberFormatException e) {
e.printStackTrace();
}
}
}
/*System.out.println("climaxThreshold = " + climaxThreshold);
System.out.println("topicThreshold = " + topicThreshold);
System.out.println("actionOnt = " + actionOnt);
System.out.println("actionSim = " + actionSim);
System.out.println("actorThreshold = " + actorThreshold);
System.out.println("actor interSect = " + interSect);
System.out.println("max results for KnowledgeStore = " + kslimit);
System.out.println("pathToRawTextIndexFile = " + pathToRawTextIndexFile);
System.out.println("MERGE = " + MERGE);*/
if (!blackListFile.isEmpty()) {
blacklist = Util.ReadFileToStringArrayList(blackListFile);
}
if (!fnFile.isEmpty()) {
frameNetReader.parseFile(fnFile);
topFrames = frameNetReader.getTopsFrameNetTree();
frameNetReader.flatRelations(fnLevel);
}
if (!esoFile.isEmpty()) {
esoReader.parseFile(esoFile);
}
iliMap = Util.ReadFileToStringHashMap(pathToILIfile);
if (!KSSERVICE.isEmpty()) {
if (KSuser.isEmpty()) {
TrigKSTripleReader.setServicePoint(KSSERVICE, KS);
}
else {
TrigKSTripleReader.setServicePoint(KSSERVICE, KS, KSuser, KSpass);
}
}
if (!kslimit.isEmpty()) {
TrigKSTripleReader.limit = kslimit;
}
long startTime = System.currentTimeMillis();
String ksQueryError = "";
if (!eventQuery.isEmpty()) {
log += " -- queried for event = " + eventQuery;
}
if (!entityQuery.isEmpty()) {
log += " -- queried for entity = " + entityQuery;
}
if (!wordQuery.isEmpty()) {
log += " -- queried for word = " + wordQuery;
}
if (!sourceQuery.isEmpty()) {
log += " -- queried for source = " + sourceQuery;
}
if (!year.isEmpty()) {
log += " -- queried for year = " + year;
}
if (!graspQuery.isEmpty()) {
log += " -- queried for perspective = " + graspQuery;
}
if (!Util.isSimpleQuery(args)) {
///// complex query so we combine identifiers
ArrayList<String> ids = new ArrayList<String>();
ArrayList<String> entityBasedIds = new ArrayList<String>();
ArrayList<String> yearBasedIds = new ArrayList<String>();
ArrayList<String> eventBasedIds = new ArrayList<String>();
ArrayList<String> stringBasedIds = new ArrayList<String>();
ArrayList<String> sourceBasedIds = new ArrayList<String>();
String sparql = "";
/*
System.out.println("entityQuery = " + entityQuery);
if (!entityQuery.isEmpty()) {
try {
//split query into types, instances and labels
//
String labels = TrigKSTripleReader.getLabelQueryforEntity(entityQuery);
String types = TrigKSTripleReader.getTypeQueryforEntity(entityQuery);
String instances = TrigKSTripleReader.getInstanceQueryforEntity(entityQuery);
if (!labels.isEmpty()) {
sparql = TrigKSTripleReader.makeQueryforEntityLabel(labels, ids);
entityBasedIds = TrigKSTripleReader.readEventIdsFromKs(sparql);
}
if (!instances.isEmpty()) {
sparql = TrigKSTripleReader.makeQueryforEntityInstance(instances, ids);
entityBasedIds.addAll(TrigKSTripleReader.readEventIdsFromKs(sparql));
}
if (!types.isEmpty()) {
sparql = TrigKSTripleReader.makeQueryforEntityType(types, ids);
entityBasedIds.addAll(TrigKSTripleReader.readEventIdsFromKs(sparql));
}
ids = entityBasedIds;
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
}
*/
if (!eventQuery.isEmpty()) {
System.out.println("ids = " + ids.size());
System.out.println("eventQuery = " + eventQuery);
try {
//@split query into labels and types
String labels = TrigKSTripleReader.getLabelQueryforEvent(eventQuery);
String types = TrigKSTripleReader.getTypeQueryforEvent(eventQuery);
if (!labels.isEmpty()) {
sparql = TrigKSTripleReader.makeQueryforEventLabel(labels, ids);
eventBasedIds = TrigKSTripleReader.readEventIdsFromKs(sparql);
}
if (!types.isEmpty()) {
sparql = TrigKSTripleReader.makeQueryforEventType(types, ids);
eventBasedIds.addAll(TrigKSTripleReader.readEventIdsFromKs(sparql));
}
ids = eventBasedIds;
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
}
if (!entityQuery.isEmpty()) {
System.out.println("ids = " + ids.size());
System.out.println("entityQuery = " + entityQuery);
try {
//split query into types, instances and labels
//
String labels = TrigKSTripleReader.getLabelQueryforEntity(entityQuery);
String types = TrigKSTripleReader.getTypeQueryforEntity(entityQuery);
String instances = TrigKSTripleReader.getInstanceQueryforEntity(entityQuery);
if (!labels.isEmpty()) {
sparql = TrigKSTripleReader.makeQueryforEntityLabel(labels, ids);
entityBasedIds = TrigKSTripleReader.readEventIdsFromKs(sparql);
}
if (!instances.isEmpty()) {
sparql = TrigKSTripleReader.makeQueryforEntityInstance(instances, ids);
entityBasedIds.addAll(TrigKSTripleReader.readEventIdsFromKs(sparql));
}
if (!types.isEmpty()) {
sparql = TrigKSTripleReader.makeQueryforEntityType(types, ids);
entityBasedIds.addAll(TrigKSTripleReader.readEventIdsFromKs(sparql));
}
ids = entityBasedIds;
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
}
if (!year.isEmpty()) {
System.out.println("ids = " + ids.size());
System.out.println("year = " + year);
try {
sparql = TrigKSTripleReader.makeQueryforYears(year, ids);
ids = TrigKSTripleReader.readEventIdsFromKs(sparql);
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
}
if (!sourceQuery.isEmpty()) {
System.out.println("ids = " + ids.size());
System.out.println("source = " + sourceQuery);
try {
String sources = TrigKSTripleReader.getsource(sourceQuery);
if (!sources.isEmpty()) {
sparql = TrigKSTripleReader.makeQueryforAuthorSurfaceForm(sources, ids);
sourceBasedIds = TrigKSTripleReader.readEventIdsFromKs(sparql);
sparql = TrigKSTripleReader.makeQueryforCitedSurfaceForm(sources, ids);
sourceBasedIds.addAll(TrigKSTripleReader.readEventIdsFromKs(sparql));
ids = sourceBasedIds;
}
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
}
if (!graspQuery.isEmpty()) {
System.out.println("ids = " + ids.size());
System.out.println("grasp = " + graspQuery);
try {
sparql = TrigKSTripleReader.makeQueryforGraspValue(graspQuery, ids);
ids = TrigKSTripleReader.readEventIdsFromKs(sparql);
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
}
if (!wordQuery.isEmpty()) {
System.out.println("ids = " + ids.size());
System.out.println("word = " + wordQuery);
try {
sparql = TrigKSTripleReader.makeQueryforEntityLabel(wordQuery, ids);
stringBasedIds = TrigKSTripleReader.readEventIdsFromKs(sparql);
sparql = TrigKSTripleReader.makeQueryforEventLabel(wordQuery, ids);
stringBasedIds.addAll(TrigKSTripleReader.readEventIdsFromKs(sparql));
ids = stringBasedIds;
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
}
/* if (entityBasedIds.size()>0) {
if (ids.size()==0) { ids = entityBasedIds; }
else { ids.retainAll(entityBasedIds);}
}
if (eventBasedIds.size()>0) {
if (ids.size()==0) { ids = eventBasedIds;}
else {ids.retainAll(eventBasedIds); }
}
if (sparqlBasedIds.size()>0) {
if (ids.size()==0) {ids = sparqlBasedIds; }
else {ids.retainAll(sparqlBasedIds); }
}
if (sourceBasedIds.size()>0) {
if (ids.size()==0) {ids = sourceBasedIds; }
else { ids.retainAll(sourceBasedIds);}
}
if (perspectiveBasedIds.size()>0) {
if (ids.size()==0) { ids = perspectiveBasedIds;}
else { ids.retainAll(perspectiveBasedIds);}
}
if (stringBasedIds.size()>0) {
if (ids.size()==0) { ids = stringBasedIds; }
else { ids.retainAll(stringBasedIds); }
}*/
System.out.println("final nr. of events = " + ids.size());
if (ids.size() > 0) {
String query = TrigKSTripleReader.makeSparqlQueryForEventArrayDataFromKs(ids);
try {
TrigKSTripleReader.getEventDataFromKs(query);
} catch (Exception e) {
e.printStackTrace();
}
}
}
else {
//// we have a simple query so we get the results directly
if (!sparqlQuery.isEmpty()) {
// System.out.println(" * queried with SPARQL = " + sparqlQuery);
try {
TrigKSTripleReader.readTriplesFromKs(sparqlQuery);
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
} else if (!entityQuery.isEmpty()) {
try {
TrigKSTripleReader.readTriplesFromKSforEntity(entityQuery);
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
} else if (!eventQuery.isEmpty()) {
try {
TrigKSTripleReader.readTriplesFromKSforEvents(eventQuery);
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
}
else if (!sourceQuery.isEmpty()) {
try {
TrigKSTripleReader.readTriplesFromKSforSource(sourceQuery);
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
} else if (!graspQuery.isEmpty()) {
try {
TrigKSTripleReader.readTriplesFromKSforGraspValue(graspQuery);
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
} else if (!wordQuery.isEmpty()) {
try {
TrigKSTripleReader.readTriplesFromKSforSurfaceString(wordQuery);
} catch (Exception e) {
ksQueryError = e.getMessage();
ksQueryError += e.getCause();
}
}
}
if (!ksQueryError.isEmpty()) {
log += " -- Error querying KnowledgeStore:"+ksQueryError;
}
else {
long estimatedTime = System.currentTimeMillis() - startTime;
log += " -- Time elapsed to get results from KS:" + estimatedTime / 1000.0;
try {
ArrayList<JSONObject> jsonObjects = JsonStoryUtil.getJSONObjectArray(TrigKSTripleReader.trigTripleData,
ALL, SKIPPEVENTS, EVENTSCHEMA, blacklist, iliMap, fnLevel, frameNetReader, topFrames, esoLevel, esoReader);
// System.out.println(" * Events in SEM-RDF = " + jsonObjects.size());
if (blacklist.size() > 0) {
jsonObjects = JsonStoryUtil.filterEventsForBlackList(jsonObjects, blacklist);
// System.out.println("Events after blacklist filter= " + jsonObjects.size());
}
if (actorThreshold > 0) {
jsonObjects = JsonStoryUtil.filterEventsForActors(jsonObjects, entityFilter, actorThreshold);
// System.out.println("Events after actor count filter = " + jsonObjects.size());
}
/*
jsonObjects = JsonStoryUtil.removePerspectiveEvents(trigTripleData, jsonObjects);
System.out.println("Events after removing perspective events = " + jsonObjects.size());
*/
jsonObjects = JsonStoryUtil.createStoryLinesForJSONArrayList(jsonObjects,
topicThreshold,
climaxThreshold,
entityFilter, MERGE,
timeGran,
actionOnt,
actionSim,
interSect);
// System.out.println("Events after storyline filter = " + jsonObjects.size());
//JsonStoryUtil.augmentEventLabelsWithArguments(jsonObjects);
JsonStoryUtil.minimalizeActors(jsonObjects);
// System.out.println("eurovoc = " + euroVoc.uriLabelMap.size());
if (euroVoc.uriLabelMap.size() > 0) {
JsonStoryUtil.renameStories(jsonObjects, euroVoc, euroVocBlackList);
}
ArrayList<JSONObject> rawTextArrayList = new ArrayList<JSONObject>();
ArrayList<JSONObject> structuredEvents = new ArrayList<JSONObject>();
if (jsonObjects.size() > 0) {
TrigKSTripleReader.integrateAttributionFromKs(jsonObjects);
}
if (!pathToFtDataFile.isEmpty()) {
HashMap<String, ArrayList<ReadFtData.DataFt>> dataFtMap = ReadFtData.readData(pathToFtDataFile);
structuredEvents = ReadFtData.convertFtDataToJsonEventArray(dataFtMap);
}
if (pathToTokenIndex.isEmpty()) {
MentionResolver.createSnippetIndexFromMentions(jsonObjects, KSSERVICE, KS, KSuser, KSpass);
}
else {
log += MentionResolver.createSnippetIndexFromMentions(jsonObjects, pathToTokenIndex);
}
nEvents = jsonObjects.size();
nActors = JsonStoryUtil.countActors(jsonObjects);
nMentions = JsonStoryUtil.countMentions(jsonObjects);
nStories = JsonStoryUtil.countGroups(jsonObjects);
JsonSerialization.writeJsonObjectArrayWithStructuredData("", "", project,
jsonObjects, rawTextArrayList, nEvents, nStories, nActors, nMentions, "polls", structuredEvents);
} catch (JSONException e) {
e.printStackTrace();
}
log += " -- story_cnt = " + nStories+ ", event_cnt = " + nEvents + ", mention_cnt = " + nMentions + ", actor_cnt = " + nActors;
}
System.out.print(log);
}
static void splitStories (ArrayList<JSONObject> events,
ArrayList<JSONObject> rawTextArrayList,
ArrayList<JSONObject> structuredEvents,
String project,
String trigFolder
) {
HashMap<String, ArrayList<JSONObject>> storyMap = new HashMap<String, ArrayList<JSONObject>>();
for (int i = 0; i < events.size(); i++) {
JSONObject event = events.get(i);
try {
String group = event.getString("group");
if (storyMap.containsKey(group)) {
ArrayList<JSONObject> groupEvents = storyMap.get(group);
groupEvents.add(event);
storyMap.put(group,groupEvents);
}
else {
ArrayList<JSONObject> groupEvents = new ArrayList<JSONObject>();
groupEvents.add(event);
storyMap.put(group,groupEvents);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
Set keySet = storyMap.keySet();
Iterator<String> keys = keySet.iterator();
while ((keys.hasNext())) {
String group = keys.next();
ArrayList<JSONObject> groupEvents = storyMap.get(group);
int nActors = JsonStoryUtil.countActors(groupEvents);
int nMentions = JsonStoryUtil.countMentions(groupEvents);
// System.out.println("group = " + group);
JsonSerialization.writeJsonObjectArrayWithStructuredData(trigFolder, group, project,
groupEvents, rawTextArrayList, groupEvents.size(), 1, nActors, nMentions, "polls", structuredEvents);
}
}
}
| |
package com.github.sonarperl.api;
import com.sonar.sslr.api.AstNode;
import com.sonar.sslr.api.TokenType;
/**
 * Perl keywords and named built-in functions recognized by the lexer.
 *
 * <p>Each constant pairs the token's enum name with the literal source text it
 * matches ({@link #getValue()}). Most values are lowercase; the special code
 * blocks ({@code BEGIN}, {@code CHECK}, {@code END}, {@code INIT},
 * {@code UNITCHECK}) are uppercase in Perl and therefore uppercase here too.
 *
 * <p>NOTE: the declaration order is part of the observable behavior — it fixes
 * {@code ordinal()} and the order of {@code values()}/{@link #keywordValues()}
 * — so constants must not be reordered.
 */
public enum PerlKeyword implements TokenType {
  ABS("abs"),
  ACCEPT("accept"),
  ALARM("alarm"),
  AND("and"),
  ATAN2("atan2"),
  BEGIN("BEGIN"),
  BIND("bind"),
  BINMODE("binmode"),
  BLESS("bless"),
  CALLER("caller"),
  CHDIR("chdir"),
  CHECK("CHECK"),
  CHMOD("chmod"),
  CHOMP("chomp"),
  CHOP("chop"),
  CHOWN("chown"),
  CHR("chr"),
  CHROOT("chroot"),
  CLOSE("close"),
  CLOSEDIR("closedir"),
  CMP("cmp"),
  CONNECT("connect"),
  CONTINUE("continue"),
  COS("cos"),
  CRYPT("crypt"),
  DBMCLOSE("dbmclose"),
  DBMOPEN("dbmopen"),
  DEFAULT("default"),
  DEFINED("defined"),
  DELETE("delete"),
  DIE("die"),
  DO("do"),
  DUMP("dump"),
  EACH("each"),
  ELSE("else"),
  ELSIF("elsif"),
  END("END"),
  ENDGRENT("endgrent"),
  ENDHOSTENT("endhostent"),
  ENDNETENT("endnetent"),
  ENDPROTOENT("endprotoent"),
  ENDPWENT("endpwent"),
  ENDSERVENT("endservent"),
  EOF("eof"),
  EQ("eq"),
  EVAL("eval"),
  EXEC("exec"),
  EXISTS("exists"),
  EXIT("exit"),
  EXP("exp"),
  FCNTL("fcntl"),
  FILENO("fileno"),
  FLOCK("flock"),
  FOR("for"),
  FOREACH("foreach"),
  FORK("fork"),
  FORMLINE("formline"),
  GETC("getc"),
  GETGRENT("getgrent"),
  GETGRGID("getgrgid"),
  GETGRNAM("getgrnam"),
  GETHOSTBYADDR("gethostbyaddr"),
  GETHOSTBYNAME("gethostbyname"),
  GETHOSTENT("gethostent"),
  GETLOGIN("getlogin"),
  GETNETBYADDR("getnetbyaddr"),
  GETNETBYNAME("getnetbyname"),
  GETNETENT("getnetent"),
  GETPEERNAME("getpeername"),
  GETPGRP("getpgrp"),
  GETPPID("getppid"),
  GETPRIORITY("getpriority"),
  GETPROTOBYNAME("getprotobyname"),
  GETPROTOBYNUMBER("getprotobynumber"),
  GETPROTOENT("getprotoent"),
  GETPWENT("getpwent"),
  GETPWNAM("getpwnam"),
  GETPWUID("getpwuid"),
  GETSERVBYNAME("getservbyname"),
  GETSERVBYPORT("getservbyport"),
  GETSERVENT("getservent"),
  GETSOCKNAME("getsockname"),
  GETSOCKOPT("getsockopt"),
  GIVEN("given"),
  GLOB("glob"),
  GMTIME("gmtime"),
  GOTO("goto"),
  GREP("grep"),
  GE("ge"),
  GT("gt"),
  HEX("hex"),
  IF("if"),
  IMPORT("import"),
  INDEX("index"),
  INIT("INIT"),
  INT("int"),
  IOCTL("ioctl"),
  JOIN("join"),
  KEYS("keys"),
  KILL("kill"),
  LAST("last"),
  LC("lc"),
  LCFIRST("lcfirst"),
  LENGTH("length"),
  LINK("link"),
  LISTEN("listen"),
  LOCAL("local"),
  LOCALTIME("localtime"),
  LOG("log"),
  LSTAT("lstat"),
  LE("le"),
  LT("lt"),
  MAP("map"),
  MKDIR("mkdir"),
  MSGCTL("msgctl"),
  MSGGET("msgget"),
  MSGRCV("msgrcv"),
  MSGSND("msgsnd"),
  MY("my"),
  NEW("new"),
  NEXT("next"),
  NO("no"),
  NOT("not"),
  OCT("oct"),
  OPEN("open"),
  OPENDIR("opendir"),
  OR("or"),
  ORD("ord"),
  OUR("our"),
  PACK("pack"),
  PACKAGE("package"),
  PIPE("pipe"),
  POP("pop"),
  POS("pos"),
  PRINT("print"),
  PRINTF("printf"),
  PUSH("push"),
  QUOTEMETA("quotemeta"),
  RAND("rand"),
  READ("read"),
  READDIR("readdir"),
  READLINK("readlink"),
  RECV("recv"),
  REDO("redo"),
  REF("ref"),
  RENAME("rename"),
  REQUIRE("require"),
  RESET("reset"),
  RETURN("return"),
  REVERSE("reverse"),
  REWINDDIR("rewinddir"),
  RINDEX("rindex"),
  RMDIR("rmdir"),
  SAY("say"),
  SCALAR("scalar"),
  SEEK("seek"),
  SEEKDIR("seekdir"),
  SELECT("select"),
  SEMCTL("semctl"),
  SEMGET("semget"),
  SEMOP("semop"),
  SEND("send"),
  SETGRENT("setgrent"),
  SETHOSTENT("sethostent"),
  SETNETENT("setnetent"),
  SETPGRP("setpgrp"),
  SETPRIORITY("setpriority"),
  SETPROTOENT("setprotoent"),
  SETPWENT("setpwent"),
  SETSERVENT("setservent"),
  SETSOCKOPT("setsockopt"),
  SHIFT("shift"),
  SHMCTL("shmctl"),
  SHMGET("shmget"),
  SHMREAD("shmread"),
  SHMWRITE("shmwrite"),
  SHUTDOWN("shutdown"),
  SIN("sin"),
  SLEEP("sleep"),
  SOCKET("socket"),
  SOCKETPAIR("socketpair"),
  SORT("sort"),
  SPLICE("splice"),
  SPLIT("split"),
  SPRINTF("sprintf"),
  SQRT("sqrt"),
  SRAND("srand"),
  STAT("stat"),
  STATE("state"),
  STUDY("study"),
  SUB("sub"),
  SUBSTR("substr"),
  SYMLINK("symlink"),
  SYSCALL("syscall"),
  SYSREAD("sysread"),
  SYSSEEK("sysseek"),
  SYSTEM("system"),
  SYSWRITE("syswrite"),
  TELL("tell"),
  TELLDIR("telldir"),
  TIE("tie"),
  TIED("tied"),
  TIME("time"),
  TIMES("times"),
  TRUNCATE("truncate"),
  UC("uc"),
  UCFIRST("ucfirst"),
  UMASK("umask"),
  UNDEF("undef"),
  UNITCHECK("UNITCHECK"),
  UNLESS("unless"),
  UNLINK("unlink"),
  UNPACK("unpack"),
  UNSHIFT("unshift"),
  UNTIE("untie"),
  UNTIL("until"),
  USE("use"),
  UTIME("utime"),
  VALUES("values"),
  VEC("vec"),
  WAIT("wait"),
  WAITPID("waitpid"),
  WANTARRAY("wantarray"),
  WARN("warn"),
  WHEN("when"),
  WHILE("while"),
  WRITE("write"),
  XOR("xor");
  // The literal Perl source text this keyword matches.
  private final String value;
  PerlKeyword(String value) {
    this.value = value;
  }
  /** Returns the enum constant's name, used as the token-type name. */
  @Override
  public String getName() {
    return name();
  }
  /** Returns the Perl source text matched by this keyword. */
  @Override
  public String getValue() {
    return value;
  }
  /** Keyword tokens are always kept in the AST. */
  @Override
  public boolean hasToBeSkippedFromAst(AstNode node) {
    return false;
  }
  /**
   * Returns all keyword source texts in declaration order, as a fresh array
   * (safe for callers to mutate).
   */
  public static String[] keywordValues() {
    PerlKeyword[] keywordsEnum = PerlKeyword.values();
    String[] keywords = new String[keywordsEnum.length];
    for (int i = 0; i < keywords.length; i++) {
      keywords[i] = keywordsEnum[i].getValue();
    }
    return keywords;
  }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/websecurityscanner/v1beta/finding_addon.proto
package com.google.cloud.websecurityscanner.v1beta;
/**
*
*
* <pre>
* Information reported for an outdated library.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1beta.OutdatedLibrary}
*/
public final class OutdatedLibrary extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.websecurityscanner.v1beta.OutdatedLibrary)
OutdatedLibraryOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use OutdatedLibrary.newBuilder() to construct.
  private OutdatedLibrary(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; initializes all fields
  // to their proto3 defaults (empty strings / empty list).
  private OutdatedLibrary() {
    libraryName_ = "";
    version_ = "";
    learnMoreUrls_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  }
  // Runtime hook used by the protobuf library to create new instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new OutdatedLibrary();
  }
  // Fields that were present on the wire but unknown to this schema version.
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor used by PARSER: reads tag/value pairs from
  // the CodedInputStream until end of stream, preserving unrecognized fields
  // in the unknown-field set.
  private OutdatedLibrary(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // tag 0 signals end of the input stream
            done = true;
            break;
          case 10: // field 1 (library_name), length-delimited
            {
              java.lang.String s = input.readStringRequireUtf8();
              libraryName_ = s;
              break;
            }
          case 18: // field 2 (version), length-delimited
            {
              java.lang.String s = input.readStringRequireUtf8();
              version_ = s;
              break;
            }
          case 26: // field 3 (learn_more_urls, repeated), length-delimited
            {
              java.lang.String s = input.readStringRequireUtf8();
              // Lazily allocate the mutable list on the first element only.
              if (!((mutable_bitField0_ & 0x00000001) != 0)) {
                learnMoreUrls_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000001;
              }
              learnMoreUrls_.add(s);
              break;
            }
          default:
            {
              // Unknown field: keep it in unknownFields unless it cannot be
              // parsed, in which case stop.
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Seal the repeated field and unknown fields even on failure so the
      // partially-built message attached to the exception is consistent.
      if (((mutable_bitField0_ & 0x00000001) != 0)) {
        learnMoreUrls_ = learnMoreUrls_.getUnmodifiableView();
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  // Proto descriptor for this message type, generated in FindingAddonProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.websecurityscanner.v1beta.FindingAddonProto
        .internal_static_google_cloud_websecurityscanner_v1beta_OutdatedLibrary_descriptor;
  }
  // Maps descriptor fields to the generated accessors via reflection.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.websecurityscanner.v1beta.FindingAddonProto
        .internal_static_google_cloud_websecurityscanner_v1beta_OutdatedLibrary_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary.class,
            com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary.Builder.class);
  }
  public static final int LIBRARY_NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded lazily (see getLibraryName).
  private volatile java.lang.Object libraryName_;
  /**
   *
   *
   * <pre>
   * The name of the outdated library.
   * </pre>
   *
   * <code>string library_name = 1;</code>
   *
   * @return The libraryName.
   */
  @java.lang.Override
  public java.lang.String getLibraryName() {
    java.lang.Object ref = libraryName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the wire ByteString once and cache the String in the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      libraryName_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The name of the outdated library.
   * </pre>
   *
   * <code>string library_name = 1;</code>
   *
   * @return The bytes for libraryName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getLibraryNameBytes() {
    java.lang.Object ref = libraryName_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String once and cache the ByteString in the field.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      libraryName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int VERSION_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; decoded lazily (see getVersion).
  private volatile java.lang.Object version_;
  /**
   *
   *
   * <pre>
   * The version number.
   * </pre>
   *
   * <code>string version = 2;</code>
   *
   * @return The version.
   */
  @java.lang.Override
  public java.lang.String getVersion() {
    java.lang.Object ref = version_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the wire ByteString once and cache the String in the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      version_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The version number.
   * </pre>
   *
   * <code>string version = 2;</code>
   *
   * @return The bytes for version.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getVersionBytes() {
    java.lang.Object ref = version_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String once and cache the ByteString in the field.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      version_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int LEARN_MORE_URLS_FIELD_NUMBER = 3;
  // Immutable view after parsing/building completes.
  private com.google.protobuf.LazyStringList learnMoreUrls_;
  /**
   *
   *
   * <pre>
   * URLs to learn more information about the vulnerabilities in the library.
   * </pre>
   *
   * <code>repeated string learn_more_urls = 3;</code>
   *
   * @return A list containing the learnMoreUrls.
   */
  public com.google.protobuf.ProtocolStringList getLearnMoreUrlsList() {
    return learnMoreUrls_;
  }
  /**
   *
   *
   * <pre>
   * URLs to learn more information about the vulnerabilities in the library.
   * </pre>
   *
   * <code>repeated string learn_more_urls = 3;</code>
   *
   * @return The count of learnMoreUrls.
   */
  public int getLearnMoreUrlsCount() {
    return learnMoreUrls_.size();
  }
  /**
   *
   *
   * <pre>
   * URLs to learn more information about the vulnerabilities in the library.
   * </pre>
   *
   * <code>repeated string learn_more_urls = 3;</code>
   *
   * @param index The index of the element to return.
   * @return The learnMoreUrls at the given index.
   */
  public java.lang.String getLearnMoreUrls(int index) {
    return learnMoreUrls_.get(index);
  }
  /**
   *
   *
   * <pre>
   * URLs to learn more information about the vulnerabilities in the library.
   * </pre>
   *
   * <code>repeated string learn_more_urls = 3;</code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the learnMoreUrls at the given index.
   */
  public com.google.protobuf.ByteString getLearnMoreUrlsBytes(int index) {
    return learnMoreUrls_.getByteString(index);
  }
  // Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  // This message has no required fields, so it is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the message to the wire format; proto3 default values (empty
  // strings) are skipped, then any unknown fields are appended.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(libraryName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, libraryName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, version_);
    }
    for (int i = 0; i < learnMoreUrls_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, learnMoreUrls_.getRaw(i));
    }
    unknownFields.writeTo(output);
  }
  // Computes (and memoizes) the serialized byte size, mirroring writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(libraryName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, libraryName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, version_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < learnMoreUrls_.size(); i++) {
        dataSize += computeStringSizeNoTag(learnMoreUrls_.getRaw(i));
      }
      size += dataSize;
      // One tag byte per repeated element (field number 3 fits in one byte).
      size += 1 * getLearnMoreUrlsList().size();
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field structural equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary)) {
      return super.equals(obj);
    }
    com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary other =
        (com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary) obj;
    if (!getLibraryName().equals(other.getLibraryName())) return false;
    if (!getVersion().equals(other.getVersion())) return false;
    if (!getLearnMoreUrlsList().equals(other.getLearnMoreUrlsList())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  // Memoized hash consistent with equals; folds in the descriptor and fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + LIBRARY_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getLibraryName().hashCode();
    hash = (37 * hash) + VERSION_FIELD_NUMBER;
    hash = (53 * hash) + getVersion().hashCode();
    if (getLearnMoreUrlsCount() > 0) {
      hash = (37 * hash) + LEARN_MORE_URLS_FIELD_NUMBER;
      hash = (53 * hash) + getLearnMoreUrlsList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads: one per input representation
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each
  // with and without an ExtensionRegistryLite.
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // The default instance yields a fresh empty Builder; any other instance is
  // copied into the new Builder.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Information reported for an outdated library.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1beta.OutdatedLibrary}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.websecurityscanner.v1beta.OutdatedLibrary)
com.google.cloud.websecurityscanner.v1beta.OutdatedLibraryOrBuilder {
    // Proto descriptor for the message type built by this Builder.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.websecurityscanner.v1beta.FindingAddonProto
          .internal_static_google_cloud_websecurityscanner_v1beta_OutdatedLibrary_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.websecurityscanner.v1beta.FindingAddonProto
          .internal_static_google_cloud_websecurityscanner_v1beta_OutdatedLibrary_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary.class,
              com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary.Builder.class);
    }
    // Construct using com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // No message-typed fields here, so nothing to eagerly initialize.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }
    // Resets every field to its proto3 default.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      libraryName_ = "";
      version_ = "";
      learnMoreUrls_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.websecurityscanner.v1beta.FindingAddonProto
          .internal_static_google_cloud_websecurityscanner_v1beta_OutdatedLibrary_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary getDefaultInstanceForType() {
      return com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary.getDefaultInstance();
    }
    // build() fails on uninitialized messages; buildPartial() never does.
    @java.lang.Override
    public com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary build() {
      com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary buildPartial() {
      com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary result =
          new com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary(this);
      int from_bitField0_ = bitField0_;
      result.libraryName_ = libraryName_;
      result.version_ = version_;
      // Freeze the repeated field; clearing the bit means later mutations on
      // this Builder will copy-on-write a fresh list.
      if (((bitField0_ & 0x00000001) != 0)) {
        learnMoreUrls_ = learnMoreUrls_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.learnMoreUrls_ = learnMoreUrls_;
      onBuilt();
      return result;
    }
    // Standard delegating overrides required by the generated-code contract.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the typed mergeFrom when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary) {
        return mergeFrom((com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merge semantics: non-empty scalar fields in `other` overwrite ours;
    // repeated fields are concatenated.
    public Builder mergeFrom(com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary other) {
      if (other == com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary.getDefaultInstance())
        return this;
      if (!other.getLibraryName().isEmpty()) {
        libraryName_ = other.libraryName_;
        onChanged();
      }
      if (!other.getVersion().isEmpty()) {
        version_ = other.version_;
        onChanged();
      }
      if (!other.learnMoreUrls_.isEmpty()) {
        if (learnMoreUrls_.isEmpty()) {
          // Share other's immutable list; copy-on-write protects it.
          learnMoreUrls_ = other.learnMoreUrls_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureLearnMoreUrlsIsMutable();
          learnMoreUrls_.addAll(other.learnMoreUrls_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    // No required fields, so a Builder is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses from a stream and merges; on parse failure, merges whatever was
    // read before rethrowing so partial data is not lost.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
private int bitField0_;
private java.lang.Object libraryName_ = "";
/**
*
*
* <pre>
* The name of the outdated library.
* </pre>
*
* <code>string library_name = 1;</code>
*
* @return The libraryName.
*/
public java.lang.String getLibraryName() {
java.lang.Object ref = libraryName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
libraryName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The name of the outdated library.
* </pre>
*
* <code>string library_name = 1;</code>
*
* @return The bytes for libraryName.
*/
public com.google.protobuf.ByteString getLibraryNameBytes() {
java.lang.Object ref = libraryName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
libraryName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of the outdated library.
* </pre>
*
* <code>string library_name = 1;</code>
*
* @param value The libraryName to set.
* @return This builder for chaining.
*/
public Builder setLibraryName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
libraryName_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the outdated library.
* </pre>
*
* <code>string library_name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearLibraryName() {
libraryName_ = getDefaultInstance().getLibraryName();
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the outdated library.
* </pre>
*
* <code>string library_name = 1;</code>
*
* @param value The bytes for libraryName to set.
* @return This builder for chaining.
*/
public Builder setLibraryNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
libraryName_ = value;
onChanged();
return this;
}
private java.lang.Object version_ = "";
/**
*
*
* <pre>
* The version number.
* </pre>
*
* <code>string version = 2;</code>
*
* @return The version.
*/
public java.lang.String getVersion() {
java.lang.Object ref = version_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
version_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The version number.
* </pre>
*
* <code>string version = 2;</code>
*
* @return The bytes for version.
*/
public com.google.protobuf.ByteString getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
version_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The version number.
* </pre>
*
* <code>string version = 2;</code>
*
* @param value The version to set.
* @return This builder for chaining.
*/
public Builder setVersion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
version_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The version number.
* </pre>
*
* <code>string version = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearVersion() {
version_ = getDefaultInstance().getVersion();
onChanged();
return this;
}
/**
*
*
* <pre>
* The version number.
* </pre>
*
* <code>string version = 2;</code>
*
* @param value The bytes for version to set.
* @return This builder for chaining.
*/
public Builder setVersionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
version_ = value;
onChanged();
return this;
}
private com.google.protobuf.LazyStringList learnMoreUrls_ =
com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureLearnMoreUrlsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
learnMoreUrls_ = new com.google.protobuf.LazyStringArrayList(learnMoreUrls_);
bitField0_ |= 0x00000001;
}
}
/**
 *
 *
 * <pre>
 * URLs to learn more information about the vulnerabilities in the library.
 * </pre>
 *
 * <code>repeated string learn_more_urls = 3;</code>
 *
 * @return A list containing the learnMoreUrls.
 */
public com.google.protobuf.ProtocolStringList getLearnMoreUrlsList() {
// Expose an unmodifiable view so callers cannot mutate builder state.
return learnMoreUrls_.getUnmodifiableView();
}
/**
 *
 *
 * <pre>
 * URLs to learn more information about the vulnerabilities in the library.
 * </pre>
 *
 * <code>repeated string learn_more_urls = 3;</code>
 *
 * @return The count of learnMoreUrls.
 */
public int getLearnMoreUrlsCount() {
// Size of the backing list, whether it is the shared EMPTY or a builder copy.
return learnMoreUrls_.size();
}
/**
 *
 *
 * <pre>
 * URLs to learn more information about the vulnerabilities in the library.
 * </pre>
 *
 * <code>repeated string learn_more_urls = 3;</code>
 *
 * @param index The index of the element to return.
 * @return The learnMoreUrls at the given index.
 */
public java.lang.String getLearnMoreUrls(int index) {
// Bounds checking is delegated to the backing list.
return learnMoreUrls_.get(index);
}
/**
 *
 *
 * <pre>
 * URLs to learn more information about the vulnerabilities in the library.
 * </pre>
 *
 * <code>repeated string learn_more_urls = 3;</code>
 *
 * @param index The index of the value to return.
 * @return The bytes of the learnMoreUrls at the given index.
 */
public com.google.protobuf.ByteString getLearnMoreUrlsBytes(int index) {
// Returns the byte representation stored by the backing LazyStringList.
return learnMoreUrls_.getByteString(index);
}
/**
 *
 *
 * <pre>
 * URLs to learn more information about the vulnerabilities in the library.
 * </pre>
 *
 * <code>repeated string learn_more_urls = 3;</code>
 *
 * @param index The index to set the value at.
 * @param value The learnMoreUrls to set.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null.
 */
public Builder setLearnMoreUrls(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
// Copy-on-write: take ownership of the list before mutating it.
ensureLearnMoreUrlsIsMutable();
learnMoreUrls_.set(index, value);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * URLs to learn more information about the vulnerabilities in the library.
 * </pre>
 *
 * <code>repeated string learn_more_urls = 3;</code>
 *
 * @param value The learnMoreUrls to add.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null.
 */
public Builder addLearnMoreUrls(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
// Copy-on-write: take ownership of the list before mutating it.
ensureLearnMoreUrlsIsMutable();
learnMoreUrls_.add(value);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * URLs to learn more information about the vulnerabilities in the library.
 * </pre>
 *
 * <code>repeated string learn_more_urls = 3;</code>
 *
 * @param values The learnMoreUrls to add.
 * @return This builder for chaining.
 */
public Builder addAllLearnMoreUrls(java.lang.Iterable<java.lang.String> values) {
ensureLearnMoreUrlsIsMutable();
// Element copying/validation is delegated to the protobuf addAll helper.
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, learnMoreUrls_);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * URLs to learn more information about the vulnerabilities in the library.
 * </pre>
 *
 * <code>repeated string learn_more_urls = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearLearnMoreUrls() {
// Drop the builder-owned copy and clear the ownership bit so the shared
// EMPTY instance is never mutated by later writes.
learnMoreUrls_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * URLs to learn more information about the vulnerabilities in the library.
 * </pre>
 *
 * <code>repeated string learn_more_urls = 3;</code>
 *
 * @param value The bytes of the learnMoreUrls to add.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null.
 */
public Builder addLearnMoreUrlsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 requires string fields to hold valid UTF-8; verify before storing.
checkByteStringIsUtf8(value);
ensureLearnMoreUrlsIsMutable();
learnMoreUrls_.add(value);
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
// Straight delegation to GeneratedMessageV3.Builder.
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
// Straight delegation to GeneratedMessageV3.Builder.
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.websecurityscanner.v1beta.OutdatedLibrary)
}
// @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.OutdatedLibrary)
// Singleton default (all-fields-unset) instance, created eagerly at class load.
private static final com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary();
}
// Static accessor for the shared default instance.
public static com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless shared parser; each parsePartialFrom call builds a new
// OutdatedLibrary via the stream-based constructor.
private static final com.google.protobuf.Parser<OutdatedLibrary> PARSER =
new com.google.protobuf.AbstractParser<OutdatedLibrary>() {
@java.lang.Override
public OutdatedLibrary parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new OutdatedLibrary(input, extensionRegistry);
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<OutdatedLibrary> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<OutdatedLibrary> getParserForType() {
// Instance-level hook required by the Message interface; uses the shared PARSER.
return PARSER;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.OutdatedLibrary getDefaultInstanceForType() {
// Instance-level hook required by the Message interface; uses the shared default.
return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ads.googleads.v8.services;
import com.google.ads.googleads.v8.resources.AccountBudgetProposal;
import com.google.ads.googleads.v8.services.stub.AccountBudgetProposalServiceStubSettings;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link AccountBudgetProposalServiceClient}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li> The default service address (googleads.googleapis.com) and default port (443) are used.
* <li> Credentials are acquired automatically through Application Default Credentials.
* <li> Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of getAccountBudgetProposal to 30 seconds:
*
* <pre>{@code
* AccountBudgetProposalServiceSettings.Builder accountBudgetProposalServiceSettingsBuilder =
* AccountBudgetProposalServiceSettings.newBuilder();
* accountBudgetProposalServiceSettingsBuilder
* .getAccountBudgetProposalSettings()
* .setRetrySettings(
* accountBudgetProposalServiceSettingsBuilder
* .getAccountBudgetProposalSettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* AccountBudgetProposalServiceSettings accountBudgetProposalServiceSettings =
* accountBudgetProposalServiceSettingsBuilder.build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class AccountBudgetProposalServiceSettings
extends ClientSettings<AccountBudgetProposalServiceSettings> {
/** Returns the object with the settings used for calls to getAccountBudgetProposal. */
public UnaryCallSettings<GetAccountBudgetProposalRequest, AccountBudgetProposal>
getAccountBudgetProposalSettings() {
return ((AccountBudgetProposalServiceStubSettings) getStubSettings())
.getAccountBudgetProposalSettings();
}
/** Returns the object with the settings used for calls to mutateAccountBudgetProposal. */
public UnaryCallSettings<MutateAccountBudgetProposalRequest, MutateAccountBudgetProposalResponse>
mutateAccountBudgetProposalSettings() {
return ((AccountBudgetProposalServiceStubSettings) getStubSettings())
.mutateAccountBudgetProposalSettings();
}
/** Creates a settings instance built from the given stub settings. */
public static final AccountBudgetProposalServiceSettings create(
AccountBudgetProposalServiceStubSettings stub) throws IOException {
return new AccountBudgetProposalServiceSettings.Builder(stub.toBuilder()).build();
}
/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
return AccountBudgetProposalServiceStubSettings.defaultExecutorProviderBuilder();
}
/** Returns the default service endpoint. */
public static String getDefaultEndpoint() {
return AccountBudgetProposalServiceStubSettings.getDefaultEndpoint();
}
/** Returns the default service scopes. */
public static List<String> getDefaultServiceScopes() {
return AccountBudgetProposalServiceStubSettings.getDefaultServiceScopes();
}
/** Returns a builder for the default credentials for this service. */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
return AccountBudgetProposalServiceStubSettings.defaultCredentialsProviderBuilder();
}
/** Returns a builder for the default ChannelProvider for this service. */
public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
return AccountBudgetProposalServiceStubSettings.defaultGrpcTransportProviderBuilder();
}
/** Returns the default transport channel provider for this service. */
public static TransportChannelProvider defaultTransportChannelProvider() {
return AccountBudgetProposalServiceStubSettings.defaultTransportChannelProvider();
}
@BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
return AccountBudgetProposalServiceStubSettings.defaultApiClientHeaderProviderBuilder();
}
/** Returns a new builder for this class. */
public static Builder newBuilder() {
return Builder.createDefault();
}
/** Returns a new builder for this class. */
public static Builder newBuilder(ClientContext clientContext) {
return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
return new Builder(this);
}
/** Constructs a settings instance from a builder; invoked by {@link Builder#build()}. */
protected AccountBudgetProposalServiceSettings(Builder settingsBuilder) throws IOException {
super(settingsBuilder);
}
/** Builder for AccountBudgetProposalServiceSettings. */
public static class Builder
extends ClientSettings.Builder<AccountBudgetProposalServiceSettings, Builder> {
/** Creates a builder with default stub settings (delegates to the ClientContext overload). */
protected Builder() throws IOException {
this(((ClientContext) null));
}
/** Creates a builder using stub settings derived from the given client context. */
protected Builder(ClientContext clientContext) {
super(AccountBudgetProposalServiceStubSettings.newBuilder(clientContext));
}
/** Creates a builder seeded from an existing settings instance. */
protected Builder(AccountBudgetProposalServiceSettings settings) {
super(settings.getStubSettings().toBuilder());
}
/** Creates a builder wrapping the given stub settings builder. */
protected Builder(AccountBudgetProposalServiceStubSettings.Builder stubSettings) {
super(stubSettings);
}
/** Returns a builder initialized with default stub settings. */
private static Builder createDefault() {
return new Builder(AccountBudgetProposalServiceStubSettings.newBuilder());
}
/** Returns the underlying stub settings builder backing this builder. */
public AccountBudgetProposalServiceStubSettings.Builder getStubSettingsBuilder() {
return ((AccountBudgetProposalServiceStubSettings.Builder) getStubSettings());
}
/**
 * Applies the given settings updater function to all of the unary API methods in this service.
 *
 * <p>Note: This method does not support applying settings to streaming methods.
 */
public Builder applyToAllUnaryMethods(
ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
super.applyToAllUnaryMethods(
getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
return this;
}
/** Returns the builder for the settings used for calls to getAccountBudgetProposal. */
public UnaryCallSettings.Builder<GetAccountBudgetProposalRequest, AccountBudgetProposal>
getAccountBudgetProposalSettings() {
return getStubSettingsBuilder().getAccountBudgetProposalSettings();
}
/** Returns the builder for the settings used for calls to mutateAccountBudgetProposal. */
public UnaryCallSettings.Builder<
MutateAccountBudgetProposalRequest, MutateAccountBudgetProposalResponse>
mutateAccountBudgetProposalSettings() {
return getStubSettingsBuilder().mutateAccountBudgetProposalSettings();
}
/** Builds the immutable settings instance from this builder. */
@Override
public AccountBudgetProposalServiceSettings build() throws IOException {
return new AccountBudgetProposalServiceSettings(this);
}
}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticsearch.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Container for the parameters to the <code> <a>ListElasticsearchInstanceTypes</a> </code> operation.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListElasticsearchInstanceTypesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
 * <p>
 * Version of Elasticsearch for which list of supported elasticsearch instance types are needed.
 * </p>
 */
private String elasticsearchVersion;
/**
 * <p>
 * DomainName represents the name of the Domain that we are trying to modify. This should be present only if we are
 * querying for list of available Elasticsearch instance types when modifying existing domain.
 * </p>
 */
private String domainName;
/**
 * <p>
 * Set this value to limit the number of results returned. Value provided must be greater than 30 else it won't be
 * honored.
 * </p>
 */
private Integer maxResults;
/**
 * <p>
 * NextToken should be sent in case if earlier API call produced result containing NextToken. It is used for
 * pagination.
 * </p>
 */
private String nextToken;
/**
 * <p>
 * Version of Elasticsearch for which list of supported elasticsearch instance types are needed.
 * </p>
 *
 * @param elasticsearchVersion
 *        Version of Elasticsearch for which list of supported elasticsearch instance types are needed.
 */
public void setElasticsearchVersion(String elasticsearchVersion) {
this.elasticsearchVersion = elasticsearchVersion;
}
/**
 * <p>
 * Version of Elasticsearch for which list of supported elasticsearch instance types are needed.
 * </p>
 *
 * @return Version of Elasticsearch for which list of supported elasticsearch instance types are needed.
 */
public String getElasticsearchVersion() {
return this.elasticsearchVersion;
}
/**
 * <p>
 * Version of Elasticsearch for which list of supported elasticsearch instance types are needed.
 * </p>
 *
 * @param elasticsearchVersion
 *        Version of Elasticsearch for which list of supported elasticsearch instance types are needed.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ListElasticsearchInstanceTypesRequest withElasticsearchVersion(String elasticsearchVersion) {
setElasticsearchVersion(elasticsearchVersion);
return this;
}
/**
 * <p>
 * DomainName represents the name of the Domain that we are trying to modify. This should be present only if we are
 * querying for list of available Elasticsearch instance types when modifying existing domain.
 * </p>
 *
 * @param domainName
 *        DomainName represents the name of the Domain that we are trying to modify. This should be present only if
 *        we are querying for list of available Elasticsearch instance types when modifying existing domain.
 */
public void setDomainName(String domainName) {
this.domainName = domainName;
}
/**
 * <p>
 * DomainName represents the name of the Domain that we are trying to modify. This should be present only if we are
 * querying for list of available Elasticsearch instance types when modifying existing domain.
 * </p>
 *
 * @return DomainName represents the name of the Domain that we are trying to modify. This should be present only if
 *         we are querying for list of available Elasticsearch instance types when modifying existing domain.
 */
public String getDomainName() {
return this.domainName;
}
/**
 * <p>
 * DomainName represents the name of the Domain that we are trying to modify. This should be present only if we are
 * querying for list of available Elasticsearch instance types when modifying existing domain.
 * </p>
 *
 * @param domainName
 *        DomainName represents the name of the Domain that we are trying to modify. This should be present only if
 *        we are querying for list of available Elasticsearch instance types when modifying existing domain.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ListElasticsearchInstanceTypesRequest withDomainName(String domainName) {
setDomainName(domainName);
return this;
}
/**
 * <p>
 * Set this value to limit the number of results returned. Value provided must be greater than 30 else it won't be
 * honored.
 * </p>
 *
 * @param maxResults
 *        Set this value to limit the number of results returned. Value provided must be greater than 30 else it
 *        won't be honored.
 */
public void setMaxResults(Integer maxResults) {
this.maxResults = maxResults;
}
/**
 * <p>
 * Set this value to limit the number of results returned. Value provided must be greater than 30 else it won't be
 * honored.
 * </p>
 *
 * @return Set this value to limit the number of results returned. Value provided must be greater than 30 else it
 *         won't be honored.
 */
public Integer getMaxResults() {
return this.maxResults;
}
/**
 * <p>
 * Set this value to limit the number of results returned. Value provided must be greater than 30 else it won't be
 * honored.
 * </p>
 *
 * @param maxResults
 *        Set this value to limit the number of results returned. Value provided must be greater than 30 else it
 *        won't be honored.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ListElasticsearchInstanceTypesRequest withMaxResults(Integer maxResults) {
setMaxResults(maxResults);
return this;
}
/**
 * <p>
 * NextToken should be sent in case if earlier API call produced result containing NextToken. It is used for
 * pagination.
 * </p>
 *
 * @param nextToken
 *        NextToken should be sent in case if earlier API call produced result containing NextToken. It is used for
 *        pagination.
 */
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
 * <p>
 * NextToken should be sent in case if earlier API call produced result containing NextToken. It is used for
 * pagination.
 * </p>
 *
 * @return NextToken should be sent in case if earlier API call produced result containing NextToken. It is used for
 *         pagination.
 */
public String getNextToken() {
return this.nextToken;
}
/**
 * <p>
 * NextToken should be sent in case if earlier API call produced result containing NextToken. It is used for
 * pagination.
 * </p>
 *
 * @param nextToken
 *        NextToken should be sent in case if earlier API call produced result containing NextToken. It is used for
 *        pagination.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public ListElasticsearchInstanceTypesRequest withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
 * Returns a string representation of this object; useful for testing and debugging.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
// Only non-null fields are rendered; field order matches the declaration order.
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getElasticsearchVersion() != null)
sb.append("ElasticsearchVersion: ").append(getElasticsearchVersion()).append(",");
if (getDomainName() != null)
sb.append("DomainName: ").append(getDomainName()).append(",");
if (getMaxResults() != null)
sb.append("MaxResults: ").append(getMaxResults()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListElasticsearchInstanceTypesRequest == false)
return false;
ListElasticsearchInstanceTypesRequest other = (ListElasticsearchInstanceTypesRequest) obj;
// For each field: the XOR detects exactly-one-side-null; the second check
// compares the values when both are non-null.
if (other.getElasticsearchVersion() == null ^ this.getElasticsearchVersion() == null)
return false;
if (other.getElasticsearchVersion() != null && other.getElasticsearchVersion().equals(this.getElasticsearchVersion()) == false)
return false;
if (other.getDomainName() == null ^ this.getDomainName() == null)
return false;
if (other.getDomainName() != null && other.getDomainName().equals(this.getDomainName()) == false)
return false;
if (other.getMaxResults() == null ^ this.getMaxResults() == null)
return false;
if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
// Standard 31-based polynomial hash over the four fields (null contributes 0);
// kept consistent with equals() above.
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getElasticsearchVersion() == null) ? 0 : getElasticsearchVersion().hashCode());
hashCode = prime * hashCode + ((getDomainName() == null) ? 0 : getDomainName().hashCode());
hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
return hashCode;
}
/** Returns a shallow copy of this request (all fields are immutable references). */
@Override
public ListElasticsearchInstanceTypesRequest clone() {
return (ListElasticsearchInstanceTypesRequest) super.clone();
}
}
| |
/*******************************************************************************
*
* Pentaho Data Profiling
*
* Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.profiling.core.integration.tests;
import com.jayway.restassured.RestAssured;
import com.jayway.restassured.http.ContentType;
import com.jayway.restassured.mapper.ObjectMapperDeserializationContext;
import com.jayway.restassured.mapper.ObjectMapperSerializationContext;
import org.pentaho.model.metrics.contributor.metricManager.impl.NumericMetricContributor;
import org.pentaho.profiling.api.ProfileStatus;
import org.pentaho.profiling.api.action.ProfileActionException;
import org.pentaho.profiling.api.configuration.ProfileConfiguration;
import org.pentaho.profiling.api.configuration.core.AggregateProfileMetadata;
import org.pentaho.profiling.api.configuration.core.StreamingProfileMetadata;
import org.pentaho.profiling.api.dto.ProfileStatusDTO;
import org.pentaho.profiling.api.json.ObjectMapperFactory;
import org.pentaho.profiling.api.metrics.MetricContributor;
import org.pentaho.profiling.api.metrics.MetricContributors;
import org.pentaho.profiling.api.metrics.MetricManagerContributor;
import org.pentaho.profiling.api.metrics.field.DataSourceFieldValue;
import org.pentaho.profiling.api.metrics.univariate.MetricManagerBasedMetricContributor;
import org.pentaho.profiling.model.ProfileStatusImpl;
import org.pentaho.profiling.core.integration.tests.utils.DataProfilingService;
import org.pentaho.profiling.core.integration.tests.utils.ProfileStatusValidationUtil;
import org.pentaho.profiling.services.AggregateAddChildWrapper;
import org.pentaho.profiling.services.AggregateProfileDTO;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static com.jayway.restassured.RestAssured.given;
import static org.pentaho.profiling.core.integration.tests.utils.DataSourceFieldValueUtils.createDataSourceFieldValues;
import static org.pentaho.profiling.core.integration.tests.utils.DataSourceFieldValueUtils.createRecordList;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Created by bryan on 3/27/15.
*/
public class AggregateProfileTest {
// Embedded service under test; started once per class in startup(), stopped in shutdown().
private static DataProfilingService dataProfilingService;
// RestAssured port in effect before this suite ran; restored in shutdown().
private static int origPort;
/**
 * Starts the embedded data-profiling service, points REST-assured at it
 * (saving the previous port for shutdown()), and installs an object mapper
 * that uses the profiling API's own Jackson configuration.
 */
@BeforeClass
public static void startup() throws Exception {
dataProfilingService = new DataProfilingService();
dataProfilingService.start();
origPort = RestAssured.port;
RestAssured.port = 8181;
// Configure object mapper
final ObjectMapper typeObjectMapper =
new ObjectMapperFactory( AggregateProfileTest.class.getClassLoader() ).createMapper();
RestAssured.objectMapper( new com.jayway.restassured.mapper.ObjectMapper() {
@Override public Object deserialize( ObjectMapperDeserializationContext context ) {
try {
return typeObjectMapper.readValue( context.getDataToDeserialize().asByteArray(), context.getType() );
} catch ( IOException e ) {
// Fail fast instead of returning null: a swallowed IOException here used
// to surface later as an opaque NullPointerException far from the cause.
throw new RuntimeException( "Failed to deserialize REST-assured response", e );
}
}
@Override public Object serialize( ObjectMapperSerializationContext context ) {
try {
return typeObjectMapper.writeValueAsString( context.getObjectToSerialize() );
} catch ( IOException e ) {
throw new RuntimeException( "Failed to serialize REST-assured request body", e );
}
}
} );
}
/** Stops the embedded service, then restores the RestAssured port saved in startup(). */
@AfterClass
public static void shutdown() throws IOException {
dataProfilingService.stop();
RestAssured.port = origPort;
}
@Test
public void testAggregate() throws IOException, InterruptedException, ProfileActionException {
// Configure metric contributors
List<MetricContributor> metricContributorList = new ArrayList<MetricContributor>();
List<MetricManagerContributor> metricManagerContributorList = new ArrayList<MetricManagerContributor>();
metricManagerContributorList.add( new NumericMetricContributor() );
metricContributorList.add( new MetricManagerBasedMetricContributor( metricManagerContributorList ) );
MetricContributors metricContributors = new MetricContributors();
metricContributors.setMetricManagerContributors( metricManagerContributorList );
// Create aggregate profiles
ProfileConfiguration profileCreateRequest = new ProfileConfiguration();
String topAggregateName = "test-top-aggregate";
profileCreateRequest.setDataSourceMetadata( new AggregateProfileMetadata( topAggregateName ) );
profileCreateRequest.setMetricContributors( metricContributors );
String topAggregateId =
given().contentType( ContentType.JSON ).body( profileCreateRequest ).post(
"/cxf/profile" ).then().contentType( ContentType.JSON ).extract().path( "id" );
String subAggregateName = "test-sub-aggregate";
profileCreateRequest.setDataSourceMetadata( new AggregateProfileMetadata( subAggregateName ) );
String subAggregateId =
given().contentType( ContentType.JSON ).body( profileCreateRequest ).post(
"/cxf/profile" ).then().contentType( ContentType.JSON ).extract().path( "id" );
// Create streaming profiles
profileCreateRequest = new ProfileConfiguration();
profileCreateRequest.setMetricContributors( metricContributors );
profileCreateRequest.setDataSourceMetadata( new StreamingProfileMetadata( "stream1" ) );
String stream1Id =
given().contentType( ContentType.JSON ).body( profileCreateRequest ).post(
"/cxf/profile" ).then().contentType( ContentType.JSON ).extract().path( "id" );
profileCreateRequest.setDataSourceMetadata( new StreamingProfileMetadata( "stream2" ) );
String stream2Id =
given().contentType( ContentType.JSON ).body( profileCreateRequest ).post(
"/cxf/profile" ).then().contentType( ContentType.JSON ).extract().path( "id" );
profileCreateRequest.setDataSourceMetadata( new StreamingProfileMetadata( "stream3" ) );
String stream3Id =
given().contentType( ContentType.JSON ).body( profileCreateRequest ).post(
"/cxf/profile" ).then().contentType( ContentType.JSON ).extract().path( "id" );
profileCreateRequest.setDataSourceMetadata( new StreamingProfileMetadata( "stream4" ) );
String stream4Id =
given().contentType( ContentType.JSON ).body( profileCreateRequest ).post(
"/cxf/profile" ).then().contentType( ContentType.JSON ).extract().path( "id" );
profileCreateRequest.setDataSourceMetadata( new StreamingProfileMetadata( "stream5" ) );
String stream5Id =
given().contentType( ContentType.JSON ).body( profileCreateRequest ).post(
"/cxf/profile" ).then().contentType( ContentType.JSON ).extract().path( "id" );
// Add subAggregate to top level aggregate
given().contentType( ContentType.JSON ).body( new AggregateAddChildWrapper( topAggregateId, subAggregateId ) ).post(
"/cxf/aggregate/add" ).then().assertThat().statusCode( 204 );
// Add stream 1 and 2 to top level aggregate
given().contentType( ContentType.JSON ).body( new AggregateAddChildWrapper( topAggregateId, stream1Id ) ).post(
"/cxf/aggregate/add" ).then().assertThat().statusCode( 204 );
given().contentType( ContentType.JSON ).body( new AggregateAddChildWrapper( topAggregateId, stream2Id ) ).post(
"/cxf/aggregate/add" ).then().assertThat().statusCode( 204 );
// Add stream 3 and 4 to sub aggregate (stream 5 will be an orphan)
given().contentType( ContentType.JSON ).body( new AggregateAddChildWrapper( subAggregateId, stream3Id ) ).post(
"/cxf/aggregate/add" ).then().assertThat().statusCode( 204 );
given().contentType( ContentType.JSON ).body( new AggregateAddChildWrapper( subAggregateId, stream4Id ) ).post(
"/cxf/aggregate/add" ).then().assertThat().statusCode( 204 );
// Check aggregate list, we expect top, sub aggregate, and they should have their children
List<AggregateProfileDTO> aggregates =
given().contentType( ContentType.JSON ).get( "/cxf/aggregate" ).then().contentType( ContentType.JSON ).extract()
.as( List.class );
Set<String> aggregateIds = new HashSet<String>();
for ( AggregateProfileDTO aggregate : aggregates ) {
String id = aggregate.getId();
aggregateIds.add( id );
List<AggregateProfileDTO> aggregateChildren = aggregate.getChildProfiles();
Set<String> children = new HashSet<String>( aggregateChildren.size() );
for ( AggregateProfileDTO aggregateChild : aggregateChildren ) {
children.add( aggregateChild.getId() );
}
if ( topAggregateId.equals( id ) ) {
assertEquals( topAggregateName, aggregate.getName() );
assertTrue( children.contains( subAggregateId ) );
assertTrue( children.contains( stream1Id ) );
assertTrue( children.contains( stream2Id ) );
assertFalse( children.contains( stream3Id ) );
assertFalse( children.contains( stream4Id ) );
assertFalse( children.contains( stream5Id ) );
} else if ( subAggregateId.equals( id ) ) {
assertEquals( subAggregateName, aggregate.getName() );
assertFalse( children.contains( stream1Id ) );
assertFalse( children.contains( stream2Id ) );
assertTrue( children.contains( stream3Id ) );
assertTrue( children.contains( stream4Id ) );
assertFalse( children.contains( stream5Id ) );
}
}
assertTrue( aggregateIds.contains( topAggregateId ) );
assertFalse( aggregateIds.contains( subAggregateId ) );
assertFalse( aggregateIds.contains( stream1Id ) );
assertFalse( aggregateIds.contains( stream2Id ) );
assertFalse( aggregateIds.contains( stream3Id ) );
assertFalse( aggregateIds.contains( stream4Id ) );
assertFalse( aggregateIds.contains( stream5Id ) );
// Ensure that the aggregate returned for any children is that of the top level
given().contentType( ContentType.JSON ).get( "/cxf/aggregate/" + topAggregateId ).then()
.contentType( ContentType.JSON )
.body( "id", equalTo( topAggregateId ) );
given().contentType( ContentType.JSON ).get( "/cxf/aggregate/" + subAggregateId ).then()
.contentType( ContentType.JSON )
.body( "id", equalTo( topAggregateId ) );
given().contentType( ContentType.JSON ).get( "/cxf/aggregate/" + stream1Id ).then().contentType( ContentType.JSON )
.body( "id", equalTo( topAggregateId ) );
given().contentType( ContentType.JSON ).get( "/cxf/aggregate/" + stream2Id ).then().contentType( ContentType.JSON )
.body( "id", equalTo( topAggregateId ) );
given().contentType( ContentType.JSON ).get( "/cxf/aggregate/" + stream3Id ).then().contentType( ContentType.JSON )
.body( "id", equalTo( topAggregateId ) );
given().contentType( ContentType.JSON ).get( "/cxf/aggregate/" + stream4Id ).then().contentType( ContentType.JSON )
.body( "id", equalTo( topAggregateId ) );
// Stream 5 should have no content for aggregate profile
given().contentType( ContentType.JSON ).get( "/cxf/aggregate/" + stream5Id ).then().assertThat().statusCode( 204 );
// Send data into the streams
String numberPhysicalName = "test1";
String numberLogicalName = "test2";
List<List<DataSourceFieldValue>> stream1Records =
createRecordList( createDataSourceFieldValues( numberPhysicalName, numberLogicalName, 1, 3, 5, 7, 9, 11 ) );
given().contentType( ContentType.JSON ).body( stream1Records ).post( "/cxf/streaming/processRecords/" + stream1Id )
.then().assertThat().statusCode( 204 );
given().contentType( ContentType.JSON ).put( "/cxf/profile/stop/" + stream1Id ).then().assertThat()
.statusCode( 204 );
List<List<DataSourceFieldValue>> stream2Records =
createRecordList( createDataSourceFieldValues( numberPhysicalName, numberLogicalName, 2, 4, 6, 8, 10, 12 ) );
given().contentType( ContentType.JSON ).body( stream2Records ).post( "/cxf/streaming/processRecords/" + stream2Id )
.then().assertThat().statusCode( 204 );
given().contentType( ContentType.JSON ).put( "/cxf/profile/stop/" + stream2Id ).then().assertThat().statusCode(
204 );
List<List<DataSourceFieldValue>> stream3Records =
createRecordList( createDataSourceFieldValues( numberPhysicalName, numberLogicalName, 13, 14 ) );
given().contentType( ContentType.JSON ).body( stream3Records ).post( "/cxf/streaming/processRecords/" + stream3Id )
.then().assertThat().statusCode( 204 );
given().contentType( ContentType.JSON ).put( "/cxf/profile/stop/" + stream3Id ).then().assertThat().statusCode(
204 );
List<List<DataSourceFieldValue>> stream4Records =
createRecordList( createDataSourceFieldValues( numberPhysicalName, numberLogicalName, 15, 16 ) );
given().contentType( ContentType.JSON ).body( stream4Records ).post( "/cxf/streaming/processRecords/" + stream4Id )
.then().assertThat().statusCode( 204 );
given().contentType( ContentType.JSON ).put( "/cxf/profile/stop/" + stream4Id ).then().assertThat().statusCode(
204 );
// Stream 5 shouldn't affect our aggregate results
List<List<DataSourceFieldValue>> stream5Records =
createRecordList( createDataSourceFieldValues( numberPhysicalName, numberLogicalName, 1516 ) );
given().contentType( ContentType.JSON ).body( stream5Records ).post( "/cxf/streaming/processRecords/" + stream5Id )
.then().assertThat().statusCode( 204 );
given().contentType( ContentType.JSON ).put( "/cxf/profile/stop/" + stream5Id ).then().assertThat()
.statusCode( 204 );
ProfileStatus stream1Status = new ProfileStatusImpl(
given().contentType( ContentType.JSON ).get( "/cxf/profile/" + stream1Id ).then().contentType( ContentType.JSON )
.extract().as( ProfileStatusDTO.class ) );
ProfileStatus stream2Status = new ProfileStatusImpl(
given().contentType( ContentType.JSON ).get( "/cxf/profile/" + stream2Id ).then().contentType( ContentType.JSON )
.extract().as( ProfileStatusDTO.class ) );
ProfileStatus stream3Status = new ProfileStatusImpl(
given().contentType( ContentType.JSON ).get( "/cxf/profile/" + stream3Id ).then().contentType( ContentType.JSON )
.extract().as( ProfileStatusDTO.class ) );
ProfileStatus stream4Status = new ProfileStatusImpl(
given().contentType( ContentType.JSON ).get( "/cxf/profile/" + stream4Id ).then().contentType( ContentType.JSON )
.extract().as( ProfileStatusDTO.class ) );
ProfileStatus stream5Status = new ProfileStatusImpl(
given().contentType( ContentType.JSON ).get( "/cxf/profile/" + stream5Id ).then().contentType( ContentType.JSON )
.extract().as( ProfileStatusDTO.class ) );
ProfileStatusValidationUtil
.validateProfileFieldsAgainstRecords( stream1Status, metricContributorList, stream1Records );
ProfileStatusValidationUtil
.validateProfileFieldsAgainstRecords( stream2Status, metricContributorList, stream2Records );
ProfileStatusValidationUtil
.validateProfileFieldsAgainstRecords( stream3Status, metricContributorList, stream3Records );
ProfileStatusValidationUtil
.validateProfileFieldsAgainstRecords( stream4Status, metricContributorList, stream4Records );
ProfileStatusValidationUtil
.validateProfileFieldsAgainstRecords( stream5Status, metricContributorList, stream5Records );
Thread.sleep( 1100 );
List<List<DataSourceFieldValue>> subAggregateRecords = new ArrayList<List<DataSourceFieldValue>>( stream3Records );
subAggregateRecords.addAll( stream4Records );
ProfileStatus subAggregateStatus = new ProfileStatusImpl(
given().contentType( ContentType.JSON ).get( "/cxf/profile/" + subAggregateId ).then()
.contentType( ContentType.JSON )
.extract().as( ProfileStatusDTO.class ) );
ProfileStatusValidationUtil
.validateProfileFieldsAgainstRecords( subAggregateStatus, metricContributorList, subAggregateRecords );
Thread.sleep( 1100 );
List<List<DataSourceFieldValue>> topAggregateRecords = new ArrayList<List<DataSourceFieldValue>>( stream1Records );
topAggregateRecords.addAll( stream2Records );
topAggregateRecords.addAll( subAggregateRecords );
ProfileStatus topAggregateStatus = new ProfileStatusImpl(
given().contentType( ContentType.JSON ).get( "/cxf/profile/" + topAggregateId ).then()
.contentType( ContentType.JSON )
.extract().as( ProfileStatusDTO.class ) );
ProfileStatusValidationUtil
.validateProfileFieldsAgainstRecords( topAggregateStatus, metricContributorList, topAggregateRecords );
}
}
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2017, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2009/07/01 Martin D. Flynn
// -Initial release
// ----------------------------------------------------------------------------
package org.opengts.db;
import java.lang.*;
import java.util.*;
import java.io.*;
import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.db.tables.*;
public class ReportURL
{

    // ------------------------------------------------------------------------

    // -- URL argument keys.  Two-element arrays are {longName, shortName};
    // -- the short form (index 1) is the key actually written into the URL.
    public static final String RPTARG_ACCOUNT      = "account"; // Constants.PARM_ACCOUNT
    public static final String RPTARG_USER         = "user";    // Constants.PARM_USER
    public static final String RPTARG_ENCPASS      = "encpass"; // Constants.PARM_ENCPASS
    public static final String RPTARG_DEVICE       = "device";  // Constants.PARM_DEVICE
    public static final String RPTARG_GROUP        = "group";   // Constants.PARM_GROUP

    public static final String RPTARG_DATE_FR[]    = new String[] { "date_fr"     , "fr"    }; // Calendar.PARM_RANGE_FR
    public static final String RPTARG_DATE_TO[]    = new String[] { "date_to"     , "to"    }; // Calendar.PARM_RANGE_TO
    public static final String RPTARG_DATE_TZ[]    = new String[] { "date_tz"     , "tz"    }; // Calendar.PARM_TIMEZONE

    public static final String RPTARG_REPORT[]     = new String[] { "r_report"    , "rpt"   };
    public static final String RPTARG_LIMIT[]      = new String[] { "r_limit"     , "lim"   };
    public static final String RPTARG_LIMIT_TYPE[] = new String[] { "r_limType"   , "ltp"   };
    public static final String RPTARG_FORMAT[]     = new String[] { "r_format"    , "fmt"   };
    public static final String RPTARG_EMAIL[]      = new String[] { "r_emailAddr" , "email" };

    // -- prefix under which the report properties are RTP-encoded into the URL
    public static final String URLARG_RTP          = "rtp_";

    // ------------------------------------------------------------------------
    // -- report output formats

    public static final String FORMAT_HTML         = "html";
    public static final String FORMAT_XML          = "xml";
    public static final String FORMAT_CSV          = "csv";
    public static final String FORMAT_PDF          = "pdf";
    public static final String FORMAT_XLS          = "xls";
    public static final String FORMAT_XLSX         = "xlsx";
    public static final String FORMAT_TXT          = "txt";
    public static final String FORMAT_SOAPXML      = "soapxml";
    public static final String FORMAT_EHTML        = "ehtml"; // embedded HTML (no external links)
    public static final String FORMAT_CUSTOM       = "custom";
    public static final String FORMAT_SCHEDULE     = "sched";
    public static final String FORMAT_URL          = "url";
    public static final String FORMAT_EMAIL        = "email";
    public static final String FORMAT_CALLBACK     = "callback";

    // -- ACL sub-names used to gate access to each non-HTML format
    public static final String _ACL_FORMAT_NON_HTML = "nonHtml"; // non-html formats
    public static final String _ACL_FORMAT_HTML     = "html";    // always enabled
    public static final String _ACL_FORMAT_EMAIL    = "email";   // ReportURL.FORMAT_EHTML
    public static final String _ACL_FORMAT_CSV      = "csv";     // ReportURL.FORMAT_CSV
    public static final String _ACL_FORMAT_XML      = "xml";     // ReportURL.FORMAT_XML
    public static final String _ACL_FORMAT_XLS      = "xls";     // ReportURL.FORMAT_XLS
    public static final String _ACL_FORMAT_SCHEDULE = "sched";   // ReportURL.FORMAT_SCHEDULE (not yet supported)
    public static final String _ACL_FORMAT_PDF      = "pdf";     // ReportURL.FORMAT_PDF (not yet supported)
    public static final String _ACL_FORMAT_CUSTOM   = "custom";  // ReportURL.FORMAT_CUSTOM (not yet supported)

    public static final String _ACL_LIST[] = new String[] {
        //_ACL_FORMAT_HTML,     <-- always enabled
        _ACL_FORMAT_EMAIL,
        _ACL_FORMAT_CSV,
        _ACL_FORMAT_XML,
        _ACL_FORMAT_XLS,
        //_ACL_FORMAT_SCHEDULE, <-- not yet supported
        //_ACL_FORMAT_PDF,      <-- not yet supported
        //_ACL_FORMAT_CUSTOM,   <-- not yet supported
    };

    /**
    *** Enumeration of the supported report output formats.
    *** Each value carries an integer id, the URL format keyword, and the
    *** ACL sub-name controlling access to it (null == unrestricted).
    **/
    public enum Format implements EnumTools.IntValue, EnumTools.StringValue {
        HTML     (  0, ReportURL.FORMAT_HTML    , null                ), // MIME: "text/html" (default)
        XML      (  1, ReportURL.FORMAT_XML     , _ACL_FORMAT_XML     ), // MIME: "text/xml"
        CSV      (  2, ReportURL.FORMAT_CSV     , _ACL_FORMAT_CSV     ), // MIME: "text/csv"
        XLS      (  3, ReportURL.FORMAT_XLS     , _ACL_FORMAT_XLS     ), // MIME: "application/vnd.ms-excel"
        XLSX     (  4, ReportURL.FORMAT_XLSX    , _ACL_FORMAT_XLS     ), // MIME: "application/vnd.ms-excel"
        TXT      (  5, ReportURL.FORMAT_TXT     , _ACL_FORMAT_CSV     ), // MIME: "text/plain" (csv format)
        SOAP     (  6, ReportURL.FORMAT_SOAPXML , _ACL_FORMAT_XML     ), //
        EHTML    (  7, ReportURL.FORMAT_EHTML   , _ACL_FORMAT_EMAIL   ), //
        CUSTOM   (  8, ReportURL.FORMAT_CUSTOM  , _ACL_FORMAT_CUSTOM  ), //
        SCHEDULE (  9, ReportURL.FORMAT_SCHEDULE, _ACL_FORMAT_SCHEDULE), //
        URL      ( 10, ReportURL.FORMAT_URL     , null                ), //
        EMAIL    ( 11, ReportURL.FORMAT_EMAIL   , _ACL_FORMAT_EMAIL   ), //
        CALLBACK ( 12, ReportURL.FORMAT_CALLBACK, null                ), //
        PDF      ( 13, ReportURL.FORMAT_PDF     , _ACL_FORMAT_PDF     ); //
        // ---
        private int    vv = 0;    // integer value
        private String ff = null; // format keyword
        private String aa = null; // ACL sub-name (may be null)
        Format(int v, String f, String a)  { vv = v; ff = f; aa = a; }
        public int    getIntValue()        { return vv; }
        public String getACL()             { return aa; }
        public String getFormat()          { return ff; }
        public String getStringValue()     { return this.getFormat(); }
        public String toString()           { return this.getFormat(); }
    }

    /**
    *** Gets the Format enum value for the specified name
    *** @param name  The name of the Format
    *** @return The Format, or null if the name is invalid
    **/
    public static Format getFormat(String name)
    {
        return EnumTools.getValueOf(Format.class, name, (Format)null);
    }

    /**
    *** Gets the ACL sub-name for the specified format name
    *** @param name  The format name
    *** @return The ACL sub-name, or null if the format is unknown or unrestricted
    **/
    public static String getFormatACL(String name)
    {
        Format fmt = ReportURL.getFormat(name);
        return (fmt != null)? fmt.getACL() : null;
    }

    /**
    *** Return true if the specified user has read-access to the specified report format.
    *** HTML is always allowed; all other formats require both the "nonHtml" ACL and
    *** the format-specific sub-ACL (if one is defined).
    *** @param user      The user to check (may be null, per BasicPrivateLabel semantics)
    *** @param privLabel The BasicPrivateLabel providing the ACL definitions (required)
    *** @param aclName   The base ACL name (required)
    *** @param fmtName   The report format name
    *** @return True if read-access is granted
    **/
    public static boolean hasFormatReadAccess(User user, BasicPrivateLabel privLabel, String aclName, String fmtName)
    {

        /* pre-checks */
        if (privLabel == null) {
            // -- BasicPrivateLabel is required
            return false;
        } else
        if (StringTools.isBlank(aclName)) {
            // -- no ACL name to check
            return false;
        }

        /* get format */
        Format fmt = ReportURL.getFormat(fmtName);
        if (fmt == null) {
            // -- invalid format
            return false;
        }

        /* is HTML? */
        if (fmt.equals(ReportURL.Format.HTML)) {
            // -- always has access to HTML format
            return true;
        }

        /* non-html formats allowed? */
        if (!privLabel.hasReadAccess(user, AclEntry.CreateAclName(aclName,_ACL_FORMAT_NON_HTML))) {
            // -- non-html format not allowed
            return false;
        }

        /* check ACL */
        String subACL = fmt.getACL();
        if (StringTools.isBlank(subACL)) {
            // -- no sub ACL, assume true
            return true;
        }

        /* hasReadAccess? */
        return privLabel.hasReadAccess(user, AclEntry.CreateAclName(aclName,subACL));

    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    public static final String PARM_PAGE        = "page";        // org.opengts.war.tools.CommonServlet.PARM_PAGE;
    public static final String PAGE_REPORT_SHOW = "report.show"; // org.opengts.war.track.Constants.PAGE_REPORT_SHOW;

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
    *** Return a URL for the EventDetail report, for the specified Device.
    *** @param userID     The user on whose behalf the report runs (blank selects the
    ***                   account default user, falling back to the "admin" user)
    *** @param inclPass   If true, the decoded password is included in the URL
    ***                   (note: this places a clear-text password in the URL)
    *** @param dev        The Device (required; null returns null)
    *** @param timestamp  Event timestamp; if &gt; 0, a +/-2 second range around it is used
    *** @param baseURL    The base URL to which report arguments are added
    *** @return The report URL, or null if the Device/Account is unavailable
    **/
    public static URIArg createEventDetailReportURL(
        String userID, boolean inclPass,
        Device dev, long timestamp,
        String baseURL)
    {

        /* no Device? */
        if (dev == null) {
            return null;
        }
        String deviceID = dev.getDeviceID();
        String groupID  = null;

        /* get Account */
        String accountID = dev.getAccountID();
        Account account  = dev.getAccount();
        if (account == null) {
            // -- (fix) guard against a Device with no resolvable Account
            // -- (previously this fell through to an NPE on "account.getTimeZone")
            return null;
        }
        TimeZone acctTZ  = account.getTimeZone(null/*default*/);

        /* get User */
        User user = null;
        try {
            if (StringTools.isBlank(userID)) {
                // -- fall back to the account default user, then "admin"
                String uid = account.getDefaultUser();
                if (!StringTools.isBlank(uid)) {
                    userID = uid;
                } else {
                    userID = User.getAdminUserID();
                }
            }
            user = User.getUser(account, userID); // may return null
        } catch (DBException dbe) {
            Print.logException("Reading User: " + accountID + "/" + userID, dbe);
            user = null;
        }
        TimeZone TZ = (user != null)? user.getTimeZone(acctTZ) : acctTZ;

        /* get password */
        String encPass = null;
        if (inclPass) {
            if (!StringTools.isBlank(userID) && (user != null)) {
                encPass = user.getDecodedPassword(null/*BasicPrivateLabel*/);
            } else {
                encPass = account.getDecodedPassword(null/*BasicPrivateLabel*/);
            }
        }

        /* Event dates */
        String date_tz = ""; // (user != null)? user.getTimeZone() : account.getTimeZone(); // String name
        String date_fr = null;
        String date_to = null;
        if (timestamp > 0L) {
            // -- +/- 2 seconds around the specified event timestamp
            date_fr = String.valueOf(timestamp - 2L);
            date_to = String.valueOf(timestamp + 2L);
        } else {
            // -- NOTE(review): "last"/"from" look like keyword placeholders understood
            // -- by the report date parser; confirm against the report engine
            date_fr = "last";
            date_to = "from";
        }

        /* report */
        String r_report  = ""; // default ("EventDetail")
        String r_limit   = ""; // default (1000)
        String r_limType = ""; // default ("last")
        String r_format  = ""; // default ("http")

        /* create URL */
        boolean rtpEncode = true;
        return ReportURL.createReportURL(
            baseURL, rtpEncode,
            accountID, userID, encPass,
            deviceID, groupID,
            date_fr, date_to, date_tz,
            r_report,
            r_limit, r_limType,
            r_format);

    }

    // ------------------------------------------------------------------------
    //  account=<account> user=<user>
    //  r_report=<report>
    //  device=<device> | group=<group>
    //  date_fr=<ts> date_to=<ts> date_tz=<tz>
    //  r_limit=<limit> r_limType=last|first
    //  format=html|csv|xml

    /**
    *** Re-create a report URL from an existing URIArg, extracting the known
    *** report arguments from its query string.
    *** @param rptURL    The source URL (null returns null)
    *** @param rtpEncode If true, report properties are RTP-encoded under "rtp_"
    *** @return The rebuilt report URL
    **/
    public static URIArg createReportURL(URIArg rptURL, boolean rtpEncode)
    {
        if (rptURL == null) { return null; }
        String baseURL = rptURL.getURI();
        RTProperties rtp = rptURL.getArgProperties();
        // authorization
        String accountID = rtp.getString(RPTARG_ACCOUNT    , "");
        String userID    = rtp.getString(RPTARG_USER       , "");
        String encPass   = rtp.getString(RPTARG_ENCPASS    , "");
        // device/group report
        String deviceID  = rtp.getString(RPTARG_DEVICE     , "");
        String groupID   = rtp.getString(RPTARG_GROUP      , "");
        // date range
        String date_fr   = rtp.getString(RPTARG_DATE_FR    , "");
        String date_to   = rtp.getString(RPTARG_DATE_TO    , "");
        String date_tz   = rtp.getString(RPTARG_DATE_TZ    , "");
        // report attributes
        String r_report  = rtp.getString(RPTARG_REPORT     , "");
        String r_limit   = rtp.getString(RPTARG_LIMIT      , "");
        String r_limType = rtp.getString(RPTARG_LIMIT_TYPE , "");
        String r_format  = rtp.getString(RPTARG_FORMAT     , "");
        // create report url
        return ReportURL.createReportURL(
            baseURL, rtpEncode,
            accountID, userID, encPass,
            deviceID, groupID,
            date_fr, date_to, date_tz,
            r_report,
            r_limit, r_limType,
            r_format);
    }

    /**
    *** Assemble a report URL from the individual report arguments.
    *** Account/user/password are added as plain URL arguments; the remaining
    *** report properties are either RTP-encoded under "rtp_" or added as
    *** individual URL arguments.
    *** @param baseURL   The base URL
    *** @param rtpEncode If true, report properties are RTP-encoded
    *** @param accountID The account id ("account=")
    *** @param userID    The user id ("user=")
    *** @param encPass   The password ("encpass="); skipped if blank or the blank-password sentinel
    *** @param deviceID  The device id ("device="), or blank
    *** @param groupID   The group id ("group="), or blank
    *** @param date_fr   The "from" date/timestamp
    *** @param date_to   The "to" date/timestamp
    *** @param date_tz   The timezone name
    *** @param r_report  The report name
    *** @param r_limit   The record limit (optional)
    *** @param r_limType The limit type: "last"|"first" (optional)
    *** @param r_format  The output format (optional)
    *** @return The assembled report URL
    **/
    public static URIArg createReportURL(
        String baseURL, boolean rtpEncode,
        String accountID, String userID, String encPass,
        String deviceID, String groupID,
        String date_fr, String date_to, String date_tz,
        String r_report,
        // remaining args are optional
        String r_limit, String r_limType,
        String r_format)
    {

        /* URL */
        URIArg url = new URIArg(baseURL);
        if (!StringTools.isBlank(accountID)) {
            url.addArg(RPTARG_ACCOUNT, accountID);
        }
        if (!StringTools.isBlank(userID)) {
            url.addArg(RPTARG_USER, userID);
        }
        if (!StringTools.isBlank(encPass) && !encPass.equals(Account.BLANK_PASSWORD)) {
            url.addArg(RPTARG_ENCPASS, encPass);
        }

        /* create RTP */
        RTProperties rtp = new RTProperties();
        rtp.setString(PARM_PAGE, PAGE_REPORT_SHOW);
        // device=
        if (!StringTools.isBlank(deviceID)) {
            rtp.removeProperties(RPTARG_DEVICE);
            rtp.setString(RPTARG_DEVICE, deviceID);
        }
        // group=
        if (!StringTools.isBlank(groupID)) {
            rtp.removeProperties(RPTARG_GROUP);
            rtp.setString(RPTARG_GROUP, groupID);
        }
        // date_fr=
        if (!StringTools.isBlank(date_fr)) {
            rtp.removeProperties(RPTARG_DATE_FR);
            rtp.setString(RPTARG_DATE_FR[1], date_fr);
        }
        // date_to=
        if (!StringTools.isBlank(date_to)) {
            rtp.removeProperties(RPTARG_DATE_TO);
            rtp.setString(RPTARG_DATE_TO[1], date_to);
        }
        // date_tz=
        if (!StringTools.isBlank(date_tz)) {
            // -- (fix) was "removeProperties(RPTARG_DATE_TO)", which deleted the
            // -- "date_to" value set just above whenever a timezone was specified
            rtp.removeProperties(RPTARG_DATE_TZ);
            rtp.setString(RPTARG_DATE_TZ[1], date_tz);
        }
        // r_report=
        if (!StringTools.isBlank(r_report)) {
            rtp.removeProperties(RPTARG_REPORT);
            rtp.setString(RPTARG_REPORT[1], r_report);
        }
        // r_limit=
        if (!StringTools.isBlank(r_limit)) {
            rtp.removeProperties(RPTARG_LIMIT);
            rtp.setString(RPTARG_LIMIT[1], r_limit);
        }
        // r_limType=
        if (!StringTools.isBlank(r_limType)) {
            rtp.removeProperties(RPTARG_LIMIT_TYPE);
            rtp.setString(RPTARG_LIMIT_TYPE[1], r_limType);
        }
        // r_format=
        if (!StringTools.isBlank(r_format)) {
            rtp.removeProperties(RPTARG_FORMAT);
            rtp.setString(RPTARG_FORMAT[1], r_format);
        }
        Print.logInfo("Report RPT: " + rtp);

        /* remaining arguments */
        if (rtpEncode) {
            // -- single "rtp_=<encoded>" argument
            url.addArg(URLARG_RTP, rtp);
        } else {
            // -- individual arguments
            Map<Object,Object> props = rtp.getProperties();
            for (Object rtk : props.keySet()) {
                Object rtv = props.get(rtk);
                url.addArg((String)rtk, StringTools.trim(rtv));
            }
        }

        /* URL */
        return url;

    }

    // ------------------------------------------------------------------------
    // ------------------------------------------------------------------------

    /**
    *** Command-line entry point (debug/testing).
    *** Options: -urld=&lt;url&gt; (decode), -urle=&lt;url&gt; (encode),
    *** -rpturl=&lt;base&gt; (build EventDetail URL), otherwise builds a URL from
    *** the individual -account/-user/-device/... options.
    **/
    public static void main(String args[])
    {
        RTConfig.setCommandLineArgs(args);

        /* decode an RTP-encoded URL */
        if (RTConfig.hasProperty("urld")) {
            String urld = RTConfig.getString("urld","");
            URIArg rtpUrl = new URIArg(urld);
            URIArg decUrl = rtpUrl.rtpDecode(URLARG_RTP);
            Print.sysPrintln("Decoded URL: " + decUrl.toString());
            System.exit(0);
        }

        /* RTP-encode a plain URL (account/user remain plain args) */
        if (RTConfig.hasProperty("urle")) {
            String urle = RTConfig.getString("urle","");
            URIArg decUrl = new URIArg(urle);
            URIArg rtpUrl = decUrl.rtpEncode(URLARG_RTP, RPTARG_ACCOUNT, RPTARG_USER);
            Print.sysPrintln("Encoded URL: " + rtpUrl.toString());
            System.exit(0);
        }

        /* build an EventDetail report URL for a specific device */
        if (RTConfig.hasProperty("rpturl")) {
            String baseURL   = StringTools.blankDefault(RTConfig.getString("rpturl",null), ".");
            String accountID = RTConfig.getString("account","demo");
            String userID    = RTConfig.getString("user"   ,null);
            String deviceID  = RTConfig.getString("device" ,"demo");
            Account account  = null;
            Device  device   = null;
            try {
                account = Account.getAccount(accountID);               // may throw DBException
                device  = Device.getDevice(account, deviceID, false);  // may throw DBException
                if (device == null) {
                    Print.logError("Account/Device does not exist: " + accountID + "/" + deviceID);
                    System.exit(99);
                }
            } catch (DBException dbe) {
                Print.logError("Error getting Device: " + accountID + "/" + deviceID);
                dbe.printException();
                System.exit(99);
            }
            URIArg url = ReportURL.createEventDetailReportURL(
                userID, true/*inclPass*/,
                device, 0L/*timestamp*/,
                baseURL);
            Print.sysPrintln("Report URL : " + url);
            URIArg decUrl = url.rtpDecode(URLARG_RTP);
            Print.sysPrintln("Decoded URL: " + decUrl.toString());
            System.exit(0);
        }

        /* build a report URL from the individual command-line options */
        String url       = RTConfig.getString("url"              ,"");
        String account   = RTConfig.getString(RPTARG_ACCOUNT     ,"");
        String user      = RTConfig.getString(RPTARG_USER        ,"");
        String encPass   = RTConfig.getString(RPTARG_ENCPASS     ,"");
        String device    = RTConfig.getString(RPTARG_DEVICE      ,"");
        String group     = RTConfig.getString(RPTARG_GROUP       ,"");
        String date_fr   = RTConfig.getString(RPTARG_DATE_FR     ,"");
        String date_to   = RTConfig.getString(RPTARG_DATE_TO     ,"");
        String date_tz   = RTConfig.getString(RPTARG_DATE_TZ     ,"");
        String r_report  = RTConfig.getString(RPTARG_REPORT      ,"");
        String r_limit   = RTConfig.getString(RPTARG_LIMIT       ,"");
        String r_limType = RTConfig.getString(RPTARG_LIMIT_TYPE  ,"");
        String format    = RTConfig.getString(RPTARG_FORMAT      ,"");

        /* URL */
        // -- NOTE(review): "encPass" is read above but "" is passed below, so the
        // -- "-encpass" option is currently ignored; confirm whether intentional
        URIArg rptURL = ReportURL.createReportURL(
            url, false,
            account, user, "",
            device, group,
            date_fr, date_to, date_tz,
            r_report, r_limit, r_limType,
            format);
        Print.logInfo("URL: " + rptURL);

    }

}
| |
/*
* Copyright 2000-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.bcel.generic;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.bcel.Constants;
import org.apache.bcel.classfile.Attribute;
import org.apache.bcel.classfile.Constant;
import org.apache.bcel.classfile.ConstantObject;
import org.apache.bcel.classfile.ConstantPool;
import org.apache.bcel.classfile.ConstantValue;
import org.apache.bcel.classfile.Field;
import org.apache.bcel.classfile.Utility;
import org.apache.bcel.util.BCELComparator;
/**
* Template class for building up a field. The only extraordinary thing
* one can do is to add a constant value attribute to a field (which must of
* course be compatible with to the declared type).
*
* @version $Id: FieldGen.java 386056 2006-03-15 11:31:56Z tcurdt $
* @author <A HREF="mailto:m.dahm@gmx.de">M. Dahm</A>
* @see Field
*/
public class FieldGen extends FieldGenOrMethodGen {

    // Optional initial value (boxed primitive or String); null means the
    // generated field carries no ConstantValue attribute.
    private Object value = null;

    // Pluggable comparison strategy; by default two FieldGen objects are
    // equal iff their name and signature match (see equals()/hashCode()).
    private static BCELComparator _cmp = new BCELComparator() {

        public boolean equals( Object o1, Object o2 ) {
            FieldGen THIS = (FieldGen) o1;
            FieldGen THAT = (FieldGen) o2;
            return THIS.getName().equals(THAT.getName())
                    && THIS.getSignature().equals(THAT.getSignature());
        }

        public int hashCode( Object o ) {
            FieldGen THIS = (FieldGen) o;
            return THIS.getSignature().hashCode() ^ THIS.getName().hashCode();
        }
    };


    /**
     * Declare a field. If it is static (isStatic() == true) and has a
     * basic type like int or String it may have an initial value
     * associated with it as defined by setInitValue().
     *
     * @param access_flags access qualifiers
     * @param type  field type
     * @param name field name
     * @param cp constant pool
     */
    public FieldGen(int access_flags, Type type, String name, ConstantPoolGen cp) {
        setAccessFlags(access_flags);
        setType(type);
        setName(name);
        setConstantPool(cp);
    }


    /**
     * Instantiate from existing field.
     *
     * @param field Field object
     * @param cp constant pool (must contain the same entries as the field's constant pool)
     */
    public FieldGen(Field field, ConstantPoolGen cp) {
        this(field.getAccessFlags(), Type.getType(field.getSignature()), field.getName(), cp);
        Attribute[] attrs = field.getAttributes();
        // ConstantValue attributes become the initial value; everything else
        // is carried over verbatim.
        for (int i = 0; i < attrs.length; i++) {
            if (attrs[i] instanceof ConstantValue) {
                setValue(((ConstantValue) attrs[i]).getConstantValueIndex());
            } else {
                addAttribute(attrs[i]);
            }
        }
    }


    // Resolve a constant-pool index to its constant value and remember it
    // as this field's initial value.
    private void setValue( int index ) {
        ConstantPool cp = this.cp.getConstantPool();
        Constant c = cp.getConstant(index);
        value = ((ConstantObject) c).getConstantValue(cp);
    }


    /**
     * Set (optional) initial value of field, otherwise it will be set to null/0/false
     * by the JVM automatically.
     */
    public void setInitValue( String str ) {
        checkType(new ObjectType("java.lang.String"));
        if (str != null) {
            value = str;
        }
    }


    /** Set (optional) initial long value; 0 is the JVM default and is not stored. */
    public void setInitValue( long l ) {
        checkType(Type.LONG);
        if (l != 0L) {
            value = new Long(l);
        }
    }


    /** Set (optional) initial int value; 0 is the JVM default and is not stored. */
    public void setInitValue( int i ) {
        checkType(Type.INT);
        if (i != 0) {
            value = new Integer(i);
        }
    }


    /** Set (optional) initial short value; 0 is the JVM default and is not stored. */
    public void setInitValue( short s ) {
        checkType(Type.SHORT);
        if (s != 0) {
            value = new Integer(s);
        }
    }


    /** Set (optional) initial char value; '\u005Cu0000' is the JVM default and is not stored. */
    public void setInitValue( char c ) {
        checkType(Type.CHAR);
        if (c != 0) {
            value = new Integer(c);
        }
    }


    /** Set (optional) initial byte value; 0 is the JVM default and is not stored. */
    public void setInitValue( byte b ) {
        checkType(Type.BYTE);
        if (b != 0) {
            value = new Integer(b);
        }
    }


    /** Set (optional) initial boolean value; booleans are encoded as int 0/1. */
    public void setInitValue( boolean b ) {
        checkType(Type.BOOLEAN);
        if (b) {
            value = new Integer(1);
        }
    }


    /** Set (optional) initial float value; 0.0 is the JVM default and is not stored. */
    public void setInitValue( float f ) {
        checkType(Type.FLOAT);
        if (f != 0.0) {
            value = new Float(f);
        }
    }


    /** Set (optional) initial double value; 0.0 is the JVM default and is not stored. */
    public void setInitValue( double d ) {
        checkType(Type.DOUBLE);
        if (d != 0.0) {
            value = new Double(d);
        }
    }


    /** Remove any initial value.
     */
    public void cancelInitValue() {
        value = null;
    }


    // Validate that an initial value may be attached: the field type must
    // already be set, the field must be final, and the declared type must
    // match the type of the supplied value.
    private void checkType( Type atype ) {
        if (type == null) {
            throw new ClassGenException("You haven't defined the type of the field yet");
        }
        if (!isFinal()) {
            throw new ClassGenException("Only final fields may have an initial value!");
        }
        if (!type.equals(atype)) {
            throw new ClassGenException("Types are not compatible: " + type + " vs. " + atype);
        }
    }


    /**
     * Get field object after having set up all necessary values.
     *
     * NOTE(review): this method is not idempotent — when an initial value is
     * set, each call appends another ConstantValue attribute via
     * addAttribute(); call it once per configured field.
     */
    public Field getField() {
        String signature = getSignature();
        int name_index = cp.addUtf8(name);
        int signature_index = cp.addUtf8(signature);
        if (value != null) {
            checkType(type);
            int index = addConstant();
            // ConstantValue attribute payload is always 2 bytes (the cp index)
            addAttribute(new ConstantValue(cp.addUtf8("ConstantValue"), 2, index, cp
                    .getConstantPool()));
        }
        return new Field(access_flags, name_index, signature_index, getAttributes(), cp
                .getConstantPool());
    }


    // Add this field's initial value to the constant pool and return its
    // index; the pool entry kind is selected by the declared field type.
    private int addConstant() {
        switch (type.getType()) {
            case Constants.T_INT:
            case Constants.T_CHAR:
            case Constants.T_BYTE:
            case Constants.T_BOOLEAN:
            case Constants.T_SHORT:
                // -- all small integral types share a CONSTANT_Integer entry
                return cp.addInteger(((Integer) value).intValue());
            case Constants.T_FLOAT:
                return cp.addFloat(((Float) value).floatValue());
            case Constants.T_DOUBLE:
                return cp.addDouble(((Double) value).doubleValue());
            case Constants.T_LONG:
                return cp.addLong(((Long) value).longValue());
            case Constants.T_REFERENCE:
                // -- only String reference constants are supported
                return cp.addString(((String) value));
            default:
                throw new RuntimeException("Oops: Unhandled : " + type.getType());
        }
    }


    /** @return the JVM type descriptor of this field, e.g. "Ljava/lang/String;" */
    public String getSignature() {
        return type.getSignature();
    }

    // Lazily-created list of FieldObserver callbacks (raw List, pre-generics code).
    private List observers;


    /** Add observer for this object.
     */
    public void addObserver( FieldObserver o ) {
        if (observers == null) {
            observers = new ArrayList();
        }
        observers.add(o);
    }


    /** Remove observer for this object.
     */
    public void removeObserver( FieldObserver o ) {
        if (observers != null) {
            observers.remove(o);
        }
    }


    /** Call notify() method on all observers. This method is not called
     * automatically whenever the state has changed, but has to be
     * called by the user after he has finished editing the object.
     */
    public void update() {
        if (observers != null) {
            for (Iterator e = observers.iterator(); e.hasNext();) {
                ((FieldObserver) e.next()).notify(this);
            }
        }
    }


    /** @return string form of the initial value, or null when none is set */
    public String getInitValue() {
        if (value != null) {
            return value.toString();
        } else {
            return null;
        }
    }


    /**
     * Return string representation close to declaration format,
     * `public static final short MAX = 100', e.g..
     *
     * @return String representation of field
     */
    public final String toString() {
        String name, signature, access; // Short cuts to constant pool
        access = Utility.accessToString(access_flags);
        access = access.equals("") ? "" : (access + " ");
        signature = type.toString();
        name = getName();
        StringBuffer buf = new StringBuffer(32);
        buf.append(access).append(signature).append(" ").append(name);
        // note: this local "value" intentionally shadows the field of the same name
        String value = getInitValue();
        if (value != null) {
            buf.append(" = ").append(value);
        }
        return buf.toString();
    }


    /** @return deep copy of this field
     */
    public FieldGen copy( ConstantPoolGen cp ) {
        FieldGen fg = (FieldGen) clone();
        fg.setConstantPool(cp);
        return fg;
    }


    /**
     * @return Comparison strategy object
     */
    public static BCELComparator getComparator() {
        return _cmp;
    }


    /**
     * @param comparator Comparison strategy object
     */
    public static void setComparator( BCELComparator comparator ) {
        _cmp = comparator;
    }


    /**
     * Return value as defined by given BCELComparator strategy.
     * By default two FieldGen objects are said to be equal when
     * their names and signatures are equal.
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    public boolean equals( Object obj ) {
        return _cmp.equals(this, obj);
    }


    /**
     * Return value as defined by given BCELComparator strategy.
     * By default return the hashcode of the field's name XOR signature.
     *
     * @see java.lang.Object#hashCode()
     */
    public int hashCode() {
        return _cmp.hashCode(this);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClientWithLocalCache;
import org.apache.hadoop.hive.metastore.messaging.json.gzip.GzipJSONMessageEncoder;
import org.apache.hadoop.hive.shims.Utils;
import static org.apache.hadoop.hive.metastore.ReplChangeManager.SOURCE_OF_REPLICATION;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.BeforeClass;
import org.junit.AfterClass;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
/**
* TestReplicationScenariosAcidTables - test replication for ACID tables
*/
public class TestReplicationScenariosIncrementalLoadAcidTables {
// JUnit rule exposing the currently-running test's name.
@Rule
public final TestName testName = new TestName();

protected static final Logger LOG = LoggerFactory.getLogger(TestReplicationScenariosIncrementalLoadAcidTables.class);
// Source warehouse for replication dumps.
static WarehouseInstance primary;
// Replication targets; "replicaNonAcid" presumably runs without ACID support — confirm in setup.
private static WarehouseInstance replica, replicaNonAcid;
private static HiveConf conf;
// Per-test database names (initialized elsewhere in this class).
private String primaryDbName, replicatedDbName, primaryDbNameExtra;
@BeforeClass
public static void classLevelSetup() throws Exception {
HashMap<String, String> overrides = new HashMap<>();
overrides.put(MetastoreConf.ConfVars.EVENT_MESSAGE_FACTORY.getHiveName(),
GzipJSONMessageEncoder.class.getCanonicalName());
internalBeforeClassSetup(overrides, TestReplicationScenariosIncrementalLoadAcidTables.class);
}
static void internalBeforeClassSetup(Map<String, String> overrides, Class clazz)
throws Exception {
conf = new HiveConf(clazz);
conf.set("dfs.client.use.datanode.hostname", "true");
conf.set("hadoop.proxyuser." + Utils.getUGI().getShortUserName() + ".hosts", "*");
MiniDFSCluster miniDFSCluster =
new MiniDFSCluster.Builder(conf).numDataNodes(2).format(true).build();
HashMap<String, String> acidConfs = new HashMap<String, String>() {{
put("fs.defaultFS", miniDFSCluster.getFileSystem().getUri().toString());
put("hive.support.concurrency", "true");
put("hive.txn.manager", "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
put("hive.metastore.client.capability.check", "false");
put("hive.repl.bootstrap.dump.open.txn.timeout", "1s");
put("hive.strict.checks.bucketing", "false");
put("hive.mapred.mode", "nonstrict");
put("mapred.input.dir.recursive", "true");
put("hive.metastore.disallow.incompatible.col.type.changes", "false");
put("hive.stats.autogather", "false");
}};
acidConfs.putAll(overrides);
primary = new WarehouseInstance(LOG, miniDFSCluster, acidConfs);
acidConfs.put(MetastoreConf.ConfVars.REPLDIR.getHiveName(), primary.repldDir);
replica = new WarehouseInstance(LOG, miniDFSCluster, acidConfs);
Map<String, String> overridesForHiveConf1 = new HashMap<String, String>() {{
put("fs.defaultFS", miniDFSCluster.getFileSystem().getUri().toString());
put("hive.support.concurrency", "false");
put("hive.txn.manager", "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
put("hive.metastore.client.capability.check", "false");
put("hive.stats.autogather", "false");
}};
overridesForHiveConf1.put(MetastoreConf.ConfVars.REPLDIR.getHiveName(), primary.repldDir);
replicaNonAcid = new WarehouseInstance(LOG, miniDFSCluster, overridesForHiveConf1);
}
@AfterClass
public static void classLevelTearDown() throws IOException {
primary.close();
replica.close();
}
@Before
public void setup() throws Throwable {
// set up metastore client cache
if (conf.getBoolVar(HiveConf.ConfVars.MSC_CACHE_ENABLED)) {
HiveMetaStoreClientWithLocalCache.init(conf);
}
primaryDbName = testName.getMethodName() + "_" + +System.currentTimeMillis();
replicatedDbName = "replicated_" + primaryDbName;
primary.run("create database " + primaryDbName + " WITH DBPROPERTIES ( '" +
SOURCE_OF_REPLICATION + "' = '1,2,3')");
primaryDbNameExtra = primaryDbName+"_extra";
primary.run("create database " + primaryDbNameExtra + " WITH DBPROPERTIES ( '" +
SOURCE_OF_REPLICATION + "' = '1,2,3')");
}
@After
public void tearDown() throws Throwable {
primary.run("drop database if exists " + primaryDbName + " cascade");
replica.run("drop database if exists " + replicatedDbName + " cascade");
replicaNonAcid.run("drop database if exists " + replicatedDbName + " cascade");
primary.run("drop database if exists " + primaryDbName + "_extra cascade");
}
@Test
public void testAcidTableIncrementalReplication() throws Throwable {
WarehouseInstance.Tuple bootStrapDump = primary.dump(primaryDbName);
replica.load(replicatedDbName, primaryDbName)
.run("REPL STATUS " + replicatedDbName)
.verifyResult(bootStrapDump.lastReplicationId);
List<String> selectStmtList = new ArrayList<>();
List<String[]> expectedValues = new ArrayList<>();
String tableName = testName.getMethodName() + "testInsert";
String tableNameMM = tableName + "_MM";
ReplicationTestUtils.appendInsert(primary, primaryDbName, primaryDbNameExtra, tableName,
tableNameMM, selectStmtList, expectedValues);
appendDelete(primary, primaryDbName, primaryDbNameExtra, selectStmtList, expectedValues);
appendUpdate(primary, primaryDbName, primaryDbNameExtra, selectStmtList, expectedValues);
ReplicationTestUtils.appendTruncate(primary, primaryDbName, primaryDbNameExtra,
selectStmtList, expectedValues);
ReplicationTestUtils.appendInsertIntoFromSelect(primary, primaryDbName, primaryDbNameExtra,
tableName, tableNameMM, selectStmtList, expectedValues);
ReplicationTestUtils.appendMerge(primary, primaryDbName, primaryDbNameExtra, selectStmtList, expectedValues);
ReplicationTestUtils.appendCreateAsSelect(primary, primaryDbName, primaryDbNameExtra, tableName,
tableNameMM, selectStmtList, expectedValues);
ReplicationTestUtils.appendImport(primary, primaryDbName, primaryDbNameExtra, tableName,
tableNameMM, selectStmtList, expectedValues);
ReplicationTestUtils.appendInsertOverwrite(primary, primaryDbName, primaryDbNameExtra, tableName,
tableNameMM, selectStmtList, expectedValues);
ReplicationTestUtils.appendLoadLocal(primary, primaryDbName, primaryDbNameExtra, tableName,
tableNameMM, selectStmtList, expectedValues);
ReplicationTestUtils.appendInsertUnion(primary, primaryDbName, primaryDbNameExtra, tableName,
tableNameMM, selectStmtList, expectedValues);
ReplicationTestUtils.appendMultiStatementTxn(primary, primaryDbName, primaryDbNameExtra,
selectStmtList, expectedValues);
appendMultiStatementTxnUpdateDelete(primary, primaryDbName, primaryDbNameExtra, selectStmtList, expectedValues);
ReplicationTestUtils.appendAlterTable(primary, primaryDbName, primaryDbNameExtra, selectStmtList, expectedValues);
verifyIncrementalLoadInt(selectStmtList, expectedValues, bootStrapDump.lastReplicationId);
}
private void appendDelete(WarehouseInstance primary, String primaryDbName, String primaryDbNameExtra,
List<String> selectStmtList, List<String[]> expectedValues) throws Throwable {
String tableName = "testDelete";
ReplicationTestUtils.insertRecords(primary, primaryDbName, primaryDbNameExtra,
tableName, null, false, ReplicationTestUtils.OperationType.REPL_TEST_ACID_INSERT);
deleteRecords(tableName);
selectStmtList.add("select count(*) from " + tableName);
expectedValues.add(new String[] {"0"});
}
private void appendUpdate(WarehouseInstance primary, String primaryDbName, String primaryDbNameExtra,
List<String> selectStmtList, List<String[]> expectedValues) throws Throwable {
String tableName = "testUpdate";
ReplicationTestUtils.insertRecords(primary, primaryDbName, primaryDbNameExtra,
tableName, null, false, ReplicationTestUtils.OperationType.REPL_TEST_ACID_INSERT);
updateRecords(tableName);
selectStmtList.add("select value from " + tableName + " order by value");
expectedValues.add(new String[] {"1", "100", "100", "100", "100"});
}
private void appendMultiStatementTxnUpdateDelete(WarehouseInstance primary, String primaryDbName, String primaryDbNameExtra,
List<String> selectStmtList, List<String[]> expectedValues)
throws Throwable {
String tableName = "testMultiStatementTxnUpdate";
String tableNameDelete = "testMultiStatementTxnDelete";
String[] resultArray = new String[]{"1", "2", "3", "4", "5"};
String tableProperty = "'transactional'='true'";
String tableStorage = "STORED AS ORC";
ReplicationTestUtils.insertIntoDB(primary, primaryDbName, tableName, tableProperty,
tableStorage, resultArray, true);
updateRecords(tableName);
selectStmtList.add("select value from " + tableName + " order by value");
expectedValues.add(new String[] {"1", "100", "100", "100", "100"});
ReplicationTestUtils.insertIntoDB(primary, primaryDbName, tableNameDelete, tableProperty,
tableStorage, resultArray, true);
deleteRecords(tableNameDelete);
selectStmtList.add("select count(*) from " + tableNameDelete);
expectedValues.add(new String[] {"0"});
}
@Test
public void testReplCM() throws Throwable {
String tableName = "testcm";
String tableNameMM = tableName + "_MM";
String[] result = new String[]{"5"};
WarehouseInstance.Tuple incrementalDump;
WarehouseInstance.Tuple bootStrapDump = primary.dump(primaryDbName);
replica.load(replicatedDbName, primaryDbName)
.run("REPL STATUS " + replicatedDbName)
.verifyResult(bootStrapDump.lastReplicationId);
ReplicationTestUtils.insertRecords(primary, primaryDbName, primaryDbNameExtra,
tableName, null, false, ReplicationTestUtils.OperationType.REPL_TEST_ACID_INSERT);
incrementalDump = primary.dump(primaryDbName);
primary.run("drop table " + primaryDbName + "." + tableName);
replica.loadWithoutExplain(replicatedDbName, primaryDbName)
.run("REPL STATUS " + replicatedDbName).verifyResult(incrementalDump.lastReplicationId);
verifyResultsInReplicaInt(Lists.newArrayList("select count(*) from " + tableName,
"select count(*) from " + tableName + "_nopart"),
Lists.newArrayList(result, result));
ReplicationTestUtils.insertRecords(primary, primaryDbName, primaryDbNameExtra,
tableNameMM, null, true, ReplicationTestUtils.OperationType.REPL_TEST_ACID_INSERT);
incrementalDump = primary.dump(primaryDbName);
primary.run("drop table " + primaryDbName + "." + tableNameMM);
replica.loadWithoutExplain(replicatedDbName, primaryDbName)
.run("REPL STATUS " + replicatedDbName).verifyResult(incrementalDump.lastReplicationId);
verifyResultsInReplicaInt(Lists.newArrayList("select count(*) from " + tableNameMM,
"select count(*) from " + tableNameMM + "_nopart"),
Lists.newArrayList(result, result));
}
private void verifyResultsInReplicaInt(List<String> selectStmtList, List<String[]> expectedValues) throws Throwable {
ReplicationTestUtils.verifyResultsInReplica(replica, replicatedDbName, selectStmtList, expectedValues);
}
private WarehouseInstance.Tuple verifyIncrementalLoadInt(List<String> selectStmtList,
List<String[]> expectedValues, String lastReplId) throws Throwable {
return ReplicationTestUtils.verifyIncrementalLoad(primary, replica, primaryDbName,
replicatedDbName, selectStmtList, expectedValues, lastReplId);
}
private void deleteRecords(String tableName) throws Throwable {
primary.run("use " + primaryDbName)
.run("delete from " + tableName)
.run("select count(*) from " + tableName)
.verifyResult("0");
}
private void updateRecords(String tableName) throws Throwable {
primary.run("use " + primaryDbName)
.run("update " + tableName + " set value = 100 where key >= 2")
.run("select value from " + tableName + " order by value")
.verifyResults(new String[] {"1", "100", "100", "100", "100"});
}
}
| |
/**
* Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.basics.date;
import static com.opengamma.strata.basics.date.LocalDateUtils.plusDays;
import java.io.Serializable;
import java.time.DateTimeException;
import java.time.LocalDate;
import java.time.Period;
import java.time.format.DateTimeParseException;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAmount;
import java.time.temporal.TemporalUnit;
import java.time.temporal.UnsupportedTemporalTypeException;
import java.util.List;
import org.joda.convert.FromString;
import org.joda.convert.ToString;
import com.opengamma.strata.collect.ArgChecker;
/**
* A tenor indicating how long it will take for a financial instrument to reach maturity.
* <p>
* A tenor is allowed to be any non-negative non-zero period of days, weeks, month or years.
* This class provides constants for common tenors which are best used by static import.
* <p>
* Each tenor is based on a {@link Period}. The months and years of the period are not normalized,
* thus it is possible to have a tenor of 12 months and a different one of 1 year.
* When used, standard date addition rules apply, thus there is no difference between them.
* Call {@link #normalized()} to apply normalization.
*
* <h4>Usage</h4>
* {@code Tenor} implements {@code TemporalAmount} allowing it to be directly added to a date:
* <pre>
* LocalDate later = baseDate.plus(tenor);
* </pre>
*/
public final class Tenor
    implements TemporalAmount, Serializable {
  /**
   * Serialization version.
   */
  private static final long serialVersionUID = 1;
  /**
   * A tenor of one day.
   */
  public static final Tenor TENOR_1D = ofDays(1);
  /**
   * A tenor of two days.
   */
  public static final Tenor TENOR_2D = ofDays(2);
  /**
   * A tenor of three days.
   */
  public static final Tenor TENOR_3D = ofDays(3);
  /**
   * A tenor of 1 week.
   */
  public static final Tenor TENOR_1W = ofWeeks(1);
  /**
   * A tenor of 2 weeks.
   */
  public static final Tenor TENOR_2W = ofWeeks(2);
  /**
   * A tenor of 3 weeks.
   */
  public static final Tenor TENOR_3W = ofWeeks(3);
  /**
   * A tenor of 6 weeks.
   */
  public static final Tenor TENOR_6W = ofWeeks(6);
  /**
   * A tenor of 1 month.
   */
  public static final Tenor TENOR_1M = ofMonths(1);
  /**
   * A tenor of 2 months.
   */
  public static final Tenor TENOR_2M = ofMonths(2);
  /**
   * A tenor of 3 months.
   */
  public static final Tenor TENOR_3M = ofMonths(3);
  /**
   * A tenor of 4 months.
   */
  public static final Tenor TENOR_4M = ofMonths(4);
  /**
   * A tenor of 5 months.
   */
  public static final Tenor TENOR_5M = ofMonths(5);
  /**
   * A tenor of 6 months.
   */
  public static final Tenor TENOR_6M = ofMonths(6);
  /**
   * A tenor of 7 months.
   */
  public static final Tenor TENOR_7M = ofMonths(7);
  /**
   * A tenor of 8 months.
   */
  public static final Tenor TENOR_8M = ofMonths(8);
  /**
   * A tenor of 9 months.
   */
  public static final Tenor TENOR_9M = ofMonths(9);
  /**
   * A tenor of 10 months.
   */
  public static final Tenor TENOR_10M = ofMonths(10);
  /**
   * A tenor of 11 months.
   */
  public static final Tenor TENOR_11M = ofMonths(11);
  /**
   * A tenor of 12 months.
   */
  public static final Tenor TENOR_12M = ofMonths(12);
  /**
   * A tenor of 18 months.
   */
  public static final Tenor TENOR_18M = ofMonths(18);
  /**
   * A tenor of 1 year.
   */
  public static final Tenor TENOR_1Y = ofYears(1);
  /**
   * A tenor of 2 years.
   */
  public static final Tenor TENOR_2Y = ofYears(2);
  /**
   * A tenor of 3 years.
   */
  public static final Tenor TENOR_3Y = ofYears(3);
  /**
   * A tenor of 4 years.
   */
  public static final Tenor TENOR_4Y = ofYears(4);
  /**
   * A tenor of 5 years.
   */
  public static final Tenor TENOR_5Y = ofYears(5);
  /**
   * A tenor of 6 years.
   */
  public static final Tenor TENOR_6Y = ofYears(6);
  /**
   * A tenor of 7 years.
   */
  public static final Tenor TENOR_7Y = ofYears(7);
  /**
   * A tenor of 8 years.
   */
  public static final Tenor TENOR_8Y = ofYears(8);
  /**
   * A tenor of 9 years.
   */
  public static final Tenor TENOR_9Y = ofYears(9);
  /**
   * A tenor of 10 years.
   */
  public static final Tenor TENOR_10Y = ofYears(10);
  /**
   * A tenor of 12 years.
   */
  public static final Tenor TENOR_12Y = ofYears(12);
  /**
   * A tenor of 15 years.
   */
  public static final Tenor TENOR_15Y = ofYears(15);
  /**
   * A tenor of 20 years.
   */
  public static final Tenor TENOR_20Y = ofYears(20);
  /**
   * A tenor of 25 years.
   */
  public static final Tenor TENOR_25Y = ofYears(25);
  /**
   * A tenor of 30 years.
   */
  public static final Tenor TENOR_30Y = ofYears(30);
  /**
   * The period of the tenor.
   */
  private final Period period;
  /**
   * The name of the tenor, used by {@link #toString()} and the joda-convert
   * string round-trip. Not part of {@link #equals(Object)}.
   */
  private final String name;
  //-------------------------------------------------------------------------
  /**
   * Obtains a {@code Tenor} from a {@code Period}.
   * <p>
   * The period normally consists of either days and weeks, or months and years.
   * It must also be positive and non-zero.
   * <p>
   * If the number of days is an exact multiple of 7 it will be converted to weeks.
   * Months are not normalized into years.
   *
   * @param period  the period to convert to a tenor
   * @return the tenor
   * @throws IllegalArgumentException if the period is negative or zero
   */
  public static Tenor of(Period period) {
    ArgChecker.notNull(period, "period");
    int days = period.getDays();
    long months = period.toTotalMonths();
    // pure day-based periods go through ofDays to pick up day-to-week conversion
    if (months == 0 && days != 0) {
      return ofDays(days);
    }
    // name is the ISO-8601 form with the leading 'P' dropped, e.g. P3M -> "3M"
    return new Tenor(period, period.toString().substring(1));
  }
  /**
   * Returns a tenor backed by a period of days.
   * <p>
   * If the number of days is an exact multiple of 7 it will be converted to weeks.
   *
   * @param days  the number of days
   * @return the tenor
   * @throws IllegalArgumentException if days is negative or zero
   */
  public static Tenor ofDays(int days) {
    // an exact multiple of 7 is normalized to weeks, e.g. 14 days -> "2W"
    if (days % 7 == 0) {
      return ofWeeks(days / 7);
    }
    return new Tenor(Period.ofDays(days), days + "D");
  }
  /**
   * Returns a tenor backed by a period of weeks.
   *
   * @param weeks  the number of weeks
   * @return the tenor
   * @throws IllegalArgumentException if weeks is negative or zero
   */
  public static Tenor ofWeeks(int weeks) {
    return new Tenor(Period.ofWeeks(weeks), weeks + "W");
  }
  /**
   * Returns a tenor backed by a period of months.
   * <p>
   * Months are not normalized into years.
   *
   * @param months  the number of months
   * @return the tenor
   * @throws IllegalArgumentException if months is negative or zero
   */
  public static Tenor ofMonths(int months) {
    return new Tenor(Period.ofMonths(months), months + "M");
  }
  /**
   * Returns a tenor backed by a period of years.
   *
   * @param years  the number of years
   * @return the tenor
   * @throws IllegalArgumentException if years is negative or zero
   */
  public static Tenor ofYears(int years) {
    return new Tenor(Period.ofYears(years), years + "Y");
  }
  //-------------------------------------------------------------------------
  /**
   * Parses a formatted string representing the tenor.
   * <p>
   * The format can either be based on ISO-8601, such as 'P3M'
   * or without the 'P' prefix e.g. '2W'.
   *
   * @param toParse  the string representing the tenor
   * @return the tenor
   * @throws IllegalArgumentException if the tenor cannot be parsed
   */
  @FromString
  public static Tenor parse(String toParse) {
    ArgChecker.notNull(toParse, "toParse");
    // accept both 'P3M' and '3M' by ensuring the ISO 'P' prefix is present
    String prefixed = toParse.startsWith("P") ? toParse : "P" + toParse;
    try {
      return Tenor.of(Period.parse(prefixed));
    } catch (DateTimeParseException ex) {
      throw new IllegalArgumentException(ex);
    }
  }
  //-------------------------------------------------------------------------
  /**
   * Creates a tenor.
   *
   * @param period  the period to represent
   * @param name  the name
   * @throws IllegalArgumentException if the period is zero or negative
   */
  private Tenor(Period period, String name) {
    ArgChecker.notNull(period, "period");
    ArgChecker.isFalse(period.isZero(), "Period must not be zero");
    ArgChecker.isFalse(period.isNegative(), "Period must not be negative");
    this.period = period;
    this.name = name;
  }
  // safe deserialization: reconstructing via the constructor re-runs the
  // zero/negative validation on untrusted serialized data
  private Object readResolve() {
    return new Tenor(period, name);
  }
  //-------------------------------------------------------------------------
  /**
   * Gets the underlying period of the tenor.
   *
   * @return the period
   */
  public Period getPeriod() {
    return period;
  }
  //-------------------------------------------------------------------------
  /**
   * Normalizes the months and years of this tenor.
   * <p>
   * This method returns a tenor of an equivalent length but with any number
   * of months greater than 12 normalized into a combination of months and years.
   *
   * @return the normalized tenor
   */
  public Tenor normalized() {
    Period norm = period.normalized();
    // Period.normalized() returns the same instance when already normal;
    // reuse this tenor in that case
    return (norm != period ? Tenor.of(norm) : this);
  }
  //-------------------------------------------------------------------------
  /**
   * Checks if the tenor is week-based.
   * <p>
   * A week-based tenor consists of an integral number of weeks.
   * There must be no day, month or year element.
   *
   * @return true if this is week-based
   */
  public boolean isWeekBased() {
    return period.toTotalMonths() == 0 && period.getDays() % 7 == 0;
  }
  /**
   * Checks if the tenor is month-based.
   * <p>
   * A month-based tenor consists of an integral number of months.
   * Any year-based tenor is also counted as month-based.
   * There must be no day or week element.
   *
   * @return true if this is month-based
   */
  public boolean isMonthBased() {
    return period.toTotalMonths() > 0 && period.getDays() == 0;
  }
  //-------------------------------------------------------------------------
  /**
   * Gets the value of the specified unit.
   * <p>
   * This will return a value for the years, months and days units.
   * Note that weeks are not included.
   * All other units throw an exception.
   * <p>
   * This method implements {@link TemporalAmount}.
   * It is not intended to be called directly.
   *
   * @param unit  the unit to query
   * @return the value of the unit
   * @throws UnsupportedTemporalTypeException if the unit is not supported
   */
  @Override
  public long get(TemporalUnit unit) {
    return period.get(unit);
  }
  /**
   * Gets the units supported by a tenor.
   * <p>
   * This returns a list containing years, months and days.
   * Note that weeks are not included.
   * <p>
   * This method implements {@link TemporalAmount}.
   * It is not intended to be called directly.
   *
   * @return a list containing the years, months and days units
   */
  @Override
  public List<TemporalUnit> getUnits() {
    return period.getUnits();
  }
  /**
   * Adds this tenor to the specified date.
   * <p>
   * This method implements {@link TemporalAmount}.
   * It is not intended to be called directly.
   * Use {@link LocalDate#plus(TemporalAmount)} instead.
   *
   * @param temporal  the temporal object to add to
   * @return the result with this tenor added
   * @throws DateTimeException if unable to add
   * @throws ArithmeticException if numeric overflow occurs
   */
  @Override
  public Temporal addTo(Temporal temporal) {
    // special case for performance: months first, then days, matching
    // the order used by Period.addTo
    if (temporal instanceof LocalDate) {
      LocalDate date = (LocalDate) temporal;
      return plusDays(date.plusMonths(period.toTotalMonths()), period.getDays());
    }
    return period.addTo(temporal);
  }
  /**
   * Subtracts this tenor from the specified date.
   * <p>
   * This method implements {@link TemporalAmount}.
   * It is not intended to be called directly.
   * Use {@link LocalDate#minus(TemporalAmount)} instead.
   *
   * @param temporal  the temporal object to subtract from
   * @return the result with this tenor subtracted
   * @throws DateTimeException if unable to subtract
   * @throws ArithmeticException if numeric overflow occurs
   */
  @Override
  public Temporal subtractFrom(Temporal temporal) {
    // special case for performance: mirror of addTo with negated days
    if (temporal instanceof LocalDate) {
      LocalDate date = (LocalDate) temporal;
      return plusDays(date.minusMonths(period.toTotalMonths()), -period.getDays());
    }
    return period.subtractFrom(temporal);
  }
  //-------------------------------------------------------------------------
  /**
   * Checks if this tenor equals another tenor.
   * <p>
   * The comparison checks the tenor period.
   *
   * @param obj  the other tenor, null returns false
   * @return true if equal
   */
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    Tenor other = (Tenor) obj;
    // deliberately compares period only, not name, so "12M" != "1Y"
    // (different toTotalMonths is impossible here; P12M and P1Y differ as Periods)
    return period.equals(other.period);
  }
  /**
   * Returns a suitable hash code for the tenor.
   *
   * @return the hash code
   */
  @Override
  public int hashCode() {
    return period.hashCode();
  }
  /**
   * Returns a formatted string representing the tenor.
   * <p>
   * The format is a combination of the quantity and unit, such as 1D, 2W, 3M, 4Y.
   *
   * @return the formatted tenor
   */
  @ToString
  @Override
  public String toString() {
    return name;
  }
}
| |
/*
* Copyright (c) 2008-2013, Heng Yuan
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* Neither the name of the Heng Yuan nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY Heng Yuan ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL Heng Yuan BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.yuanheng.cookcc.dfa;
import java.util.Arrays;
import java.util.TreeMap;
import java.util.Vector;
import org.yuanheng.cookcc.lexer.ECS;
/**
 * Compresses a DFA transition table into the classic next/check/base/default
 * packed representation.
 *
 * @author Heng Yuan
 * @version $Id$
 */
class TableCompressor
{
	// sentinel marking "don't care" table slots during compression;
	// assumes Short.MIN_VALUE is never a real DFA state id
	private final static short SHORT_MIN = Short.MIN_VALUE;
	//
	// A per-row "error vector": the row reduced so only entries equal to the
	// chosen default value survive; every other entry becomes 0.  Shared
	// between states via m_errorMap so equal vectors are stored once.
	//
	private static class ErrorVector implements Comparable<ErrorVector>
	{
		private final short m_defaultValue;
		private short[] m_error;
		public ErrorVector (DFARow row, short defaultValue)
		{
			m_error = row.getStates ().clone ();
			m_defaultValue = defaultValue;
			// We can't use SHORT_MIN as a flag since we could potentially
			// have values that collide with it in the table; instead keep
			// only 0 and defaultValue entries, mapping everything else to 0.
			for (int i = 0; i < m_error.length; i++)
				m_error[i] = (m_error[i] == 0) ? 0 : (m_error[i] == defaultValue) ? defaultValue : 0;
		}
		public short[] getError ()
		{
			return m_error;
		}
		public void setError (short[] error)
		{
			m_error = error;
		}
		public short getDefaultValue ()
		{
			return m_defaultValue;
		}
		// NOTE(review): a SHORT_MIN entry in the OTHER vector is treated as a
		// wildcard match, which makes the ordering asymmetric.  This appears
		// safe because the constructor never produces SHORT_MIN entries
		// (only setError could introduce them) — confirm before relying on
		// TreeMap ordering semantics.
		public int compareTo (ErrorVector other)
		{
			int size = m_error.length;
			for (int i = 0; i < size; ++i)
			{
				if (m_error[i] == other.m_error[i] ||
//					m_error[i] == SHORT_MIN ||
					other.m_error[i] == SHORT_MIN)
					continue;
				return m_error[i] - other.m_error[i];
			}
			return 0;
		}
	}
	// the original DFA table (read-only reference)
	private final DFATable m_dfa;
	// working copy; compression passes overwrite its cells with SHORT_MIN
	private final DFATable m_dfaCopy;
	// the minimum repeat percentage a default value has to have, other
	// wise, we won't use the error state
	private final int MINREPEAT = 50;
	// the number is used to tip the balance between choosing the
	// state diff or using error state
	private final int BALANCE;
	// minimum absolute repeat count (m_rowSize * MINREPEAT / 100)
	private final int GOODREPEAT;
	// number of columns per DFA row
	private final int m_rowSize;
	// distinct error vectors, in discovery order; row id = m_dfaCopy.size() + index
	private Vector<ErrorVector> m_errors = new Vector<ErrorVector> ();
	// maps an error vector to its assigned row id for de-duplication
	private TreeMap<ErrorVector, Short> m_errorMap = new TreeMap<ErrorVector, Short> ();
	// states grouped by hole count, so denser blocks can be packed first
	private TreeMap<Integer, Vector<Short>> m_fillMap = new TreeMap<Integer, Vector<Short>> ();
	// outputs of the compression: packed next/check arrays plus per-state
	// base offsets and default (fallback) states
	private short[] m_next;
	private short[] m_check;
	private short[] m_base;
	private short[] m_default;
	// equivalent classes over error-vector columns
	private ECS m_ecsError;
	// feature toggles for the generated table layout
	private boolean m_useDefault = true;
	private boolean m_useMeta = true;
	private boolean m_useError = true;
	private boolean m_useStateDiff;
	// Builds the compressor for the given DFA.  A private copy of the table is
	// made because the compression passes destroy row contents in place (see
	// cleanStateRepeat / cleanStateDiff).  m_rowSize must be computed before
	// the BALANCE / GOODREPEAT thresholds that derive from it.
	public TableCompressor (DFATable dfa)
	{
		m_dfa = dfa;
		m_dfaCopy = dfa.clone ();
		// all rows share the same width; row 0 is representative
		m_rowSize = dfa.getRow (0).getStates ().length;
		BALANCE = m_rowSize / 10;
		GOODREPEAT = m_rowSize * MINREPEAT / 100;
		m_ecsError = new ECS (m_rowSize - 1);
		m_default = resize (null, m_dfaCopy.size (), SHORT_MIN);
		m_base = new short[m_dfaCopy.size ()];
	}
static short[] resize (short[] src, int newSize, short fill)
{
if (src != null)
{
if (src.length == newSize) // no need to do anything in this case.
return src;
}
short[] newArray = new short[newSize];
int start;
if (src != null)
{
System.arraycopy (src, 0, newArray, 0, src.length);
start = src.length;
}
else
start = 0;
for (int i = start; i < newSize; ++i)
newArray[i] = fill;
return newArray;
}
//
// Statistics gathering routines
//
// find out the # of zeros (i.e. errors) in the state
private int getErrorCount (int state)
{
int count = 0;
short[] cols = m_dfa.getRow (state).getStates ();
for (short c : cols)
if (c == 0)
++count;
return count;
}
// find out the # of values that are neither the given
// repeat value nor 0
private int getNonDefaultDiff (int state, short repeatValue)
{
int diff = 0;
short[] cols = m_dfa.getRow (state).getStates ();
for (short c : cols)
if (c != repeatValue && c != 0)
++diff;
return diff;
}
// get the difference between two states
private int getStateDiff (int state1, int state2)
{
int diff = 0;
short[] cols1 = m_dfa.getRow (state1).getStates ();
short[] cols2 = m_dfa.getRow (state2).getStates ();
for (int i = 0; i < cols1.length; ++i)
if (cols1[i] != cols2[i])
++diff;
return diff;
}
//
// process the DFA state such that it can be used for compression
// note, it destroys the DFA in the process!
//
private void cleanStateRepeat (int state, int repeatValue)
{
short[] cols = m_dfaCopy.getRow (state).getStates ();
for (int i = 0; i < cols.length; ++i)
if (cols[i] == repeatValue || cols[i] == 0)
cols[i] = SHORT_MIN;
}
//
// process the DFA state such that it can be used for compression
// note, it destroys the DFA in the process!
//
private void cleanStateDiff (int state1, int state2)
{
short[] cols1 = m_dfaCopy.getRow (state1).getStates ();
short[] cols2 = m_dfa.getRow (state2).getStates ();
for (int i = 0; i < cols1.length; ++i)
if (cols1[i] == cols2[i])
cols1[i] = SHORT_MIN;
}
	//
	// Get the difference between two states, and also retrieves
	// the block min and max index.
	//
	// minMax[0] / minMax[1] receive the first and last differing column
	// (both set to 0 when the rows are identical); the return value is the
	// width of that span, or 0 for identical rows.
	//
	private int getStateDiffBlock (int state1, int state2, int[] minMax)
	{
		short[] cols1 = m_dfa.getRow (state1).getStates ();
		short[] cols2 = m_dfa.getRow (state2).getStates ();
		int size = cols1.length;
		int i;
		// scan from the left side
		for (i = 0; i < size; i++)
		{
			if (cols1[i] != cols2[i])
				break;
		}
		if (i == size)
		{
			// the rows are identical
			minMax[0] = 0;
			minMax[1] = 0;
			return 0;
		}
		minMax[0] = i;
		// scan from the right side; guaranteed to stop at or after minMax[0]
		for (i = size - 1; i >= 0; i--)
		{
			if (cols1[i] != cols2[i])
				break;
		}
		minMax[1] = i;
		return minMax[1] - minMax[0] + 1;
	}
	//
	// obtain the min max of the processed DFA block
	// and return the block size
	//
	// The "processed" row has SHORT_MIN in every don't-care slot; the block
	// is the span [minMax[0], minMax[1]] of remaining significant entries.
	// Returns 0 (with minMax zeroed) when the whole row is don't-care.
	//
	private int getBlockSize (int state, int[] minMax)
	{
		short[] cols = m_dfaCopy.getRow (state).getStates ();
		int size = cols.length;
		int i;
		// scan from left
		for (i = 0; i < size; ++i)
			if (cols[i] != SHORT_MIN)
				break;
		if (i == size)
		{
			minMax[0] = 0;
			minMax[1] = 0;
			return 0;
		}
		minMax[0] = i;
		// scan from right; the i > 0 bound is safe because the left scan
		// proved a significant entry exists, so if indices 1..size-1 are all
		// don't-care the answer is exactly index 0
		for (i = size - 1; i > 0; --i)
			if (cols[i] != SHORT_MIN)
				break;
		minMax[1] = i;
		return minMax[1] - minMax[0] + 1;
	}
private int getHoleSize (int state, int min, int max)
{
int holes = 0;
short[] cols = m_dfaCopy.getRow (state).getStates ();
for (int i = min; i <= max; ++i)
if (cols[i] == SHORT_MIN)
++holes;
return holes;
}
	//
	// obtain the min max of the processed error block
	// and return the block size
	//
	// Mirrors getBlockSize but operates on an error-vector row; error rows
	// are addressed by ids starting at m_dfaCopy.size (), hence the offset.
	//
	private int getErrorBlockSize (int state, int[] minMax)
	{
		short[] cols = m_errors.get (state - m_dfaCopy.size ()).getError ();
		int size = cols.length;
		int i;
		// scan from left for the first significant entry
		for (i = 0; i < size; ++i)
			if (cols[i] != SHORT_MIN)
				break;
		if (i == size)
		{
			// the whole row is don't-care
			minMax[0] = 0;
			minMax[1] = 0;
			return 0;
		}
		minMax[0] = i;
		// scan from right; i > 0 bound is safe, see getBlockSize
		for (i = size - 1; i > 0; --i)
			if (cols[i] != SHORT_MIN)
				break;
		minMax[1] = i;
		return minMax[1] - minMax[0] + 1;
	}
//
// determine # of holes needs to be filled in thisState
//
private int getErrorHoleSize (int state, int min, int max)
{
int holes = 0;
short[] cols = m_errors.get (state - m_dfaCopy.size ()).getError ();
for (int i = min; i <= max; ++i)
if (cols[i] == SHORT_MIN)
++holes;
return holes;
}
	//
	// findRepeat finds the most repeated value in a DFA row other
	// than zero. If all values are zeros, then just count of zero
	// is reported
	//
	// Works on a sorted copy of the row: equal values form contiguous runs,
	// so the longest non-zero run is the most frequent value.  repeatValue
	// is a 1-element out-parameter holding the winning value; the return
	// value is its count.
	//
	private static int findRepeat (DFARow row, short[] repeatValue)
	{
		short lastRepeat;
		int i;
		int repeatCount;
		int lastRepeatCount;
		// first duplicate the column (sorting must not disturb the real row)
		short[] cols = row.getStates ().clone ();
		Arrays.sort (cols);
		int size = cols.length;
		// then find the highest repeated value
		lastRepeat = 0;
		lastRepeatCount = 0;
		repeatCount = 1;
		// count the error states first; zeros sort to the front since real
		// state ids are positive
		for (i = 0; i < size; i++)
		{
			if (cols[i] != 0)
				break;
		}
		if (i == size) // hmm, all transitions are error transitions
		{
			repeatValue[0] = 0;
			return i;
		}
		// skip past the first non-zero value; the loop below compares each
		// element with its predecessor to count runs
		i++;
		for (; i < size; i++)
		{
			if (cols[i] == cols[i - 1])
				repeatCount++;
			else
			{
				if (repeatCount > lastRepeatCount)
				{
					lastRepeatCount = repeatCount;
					lastRepeat = cols[i - 1];
				}
				repeatCount = 1;
			}
		}
		// flush the final run (the loop only records on a value change)
		if (repeatCount > lastRepeatCount)
		{
			lastRepeatCount = repeatCount;
			lastRepeat = cols[i - 1];
		}
		repeatValue[0] = lastRepeat;
		return lastRepeatCount;
	}
//
// figure out the error states
//
// Builds (or looks up) the error vector for the given DFA row relative
// to defaultValue.  Returns SHORT_MIN when the row consists solely of
// error transitions, the id of an already-known identical error vector,
// or a freshly assigned id (numbered after all DFA states) otherwise.
//
private short addErrorState (int thisState, short defaultValue)
{
    ErrorVector ev = new ErrorVector (m_dfaCopy.getRow (thisState), defaultValue);
    //
    // Check if the state is all errors. If so, we could simply
    // make the default state as SHRT_MIN
    //
    int i;
    short[] cols = ev.getError ();
    for (i = 0; i < cols.length; i++)
        if (cols[i] != 0 && cols[i] != SHORT_MIN)
            break;
    if (i == cols.length)
        return SHORT_MIN;
    //
    // then check if the error state is already in the error array
    //
    Short errorId = m_errorMap.get (ev);
    if (errorId != null)
        return errorId.shortValue ();
    //
    // so the error is new
    //
    m_errors.add (ev);
    //
    // check if it creates new equivalent classes
    //
    m_ecsError.add (ev.getError ());
    //
    // then add the array iterator to the search set
    //
    // Short.valueOf replaces the deprecated new Short(...) boxing
    // constructor and allows cached instances to be reused
    errorId = Short.valueOf ((short)(m_dfaCopy.size () + m_errors.size () - 1));
    m_errorMap.put (ev, errorId);
    return errorId.shortValue ();
}
//
// Record the fill work for a DFA state: compute its block span and,
// when the block is non-empty, file the state under its hole count in
// m_fillMap so states with more holes get filled first.
//
private void addBlock (short thisState)
{
    int[] minMax = new int[2];
    int blockSize = getBlockSize (thisState, minMax);
    // don't have to do fill for block size of 0
    if (blockSize > 0)
    {
        // autoboxing replaces the deprecated new Integer(...) and
        // new Short(...) constructors
        Integer holes = getHoleSize (thisState, minMax[0], minMax[1]);
        Vector<Short> v = m_fillMap.get (holes);
        if (v == null)
        {
            v = new Vector<Short> ();
            m_fillMap.put (holes, v);
        }
        v.add (thisState);
    }
}
//
// create block/hole information of state with a repeatValue
//
private void processStateRepeat (short state, short repeatValue, int repeatCount)
{
    //
    // Only keep the repeat value when it occurs often enough; a rare
    // repeat is not worth storing an error state for, and dropping it
    // greatly reduces the number of error-state equivalent classes.
    //
    boolean keepRepeat = repeatCount != 1 && repeatCount >= GOODREPEAT;
    if (keepRepeat)
    {
        // compute the matching error state to act as the default
        m_default[state] = addErrorState (state, repeatValue);
    }
    else
    {
        repeatValue = 0;
        m_default[state] = SHORT_MIN;
    }
    cleanStateRepeat (state, repeatValue);
    addBlock (state);
}
//
// create block/hole information of thisState using cmpState as
// the template.
//
// thisState is encoded as a difference against cmpState: its default
// transition points at cmpState and only the differing columns remain
// after cleanStateDiff.
//
private void processStateDiff (short thisState, short cmpState)
{
    // remember that at least one state uses another state as its default
    m_useStateDiff = true;
    m_default[thisState] = cmpState;
    cleanStateDiff (thisState, cmpState);
    addBlock (thisState);
}
//
// shrink ErrorVector's in _errorArray and error vector's
// default
//
// Each error vector is reduced to one entry per error equivalent-class
// group, then its block/hole fill work is registered in m_fillMap.
//
private void processErrorStates ()
{
    int size = m_errors.size ();
    int[] minMax = new int[2];
    // the equivalent classes are final at this point, so the group
    // count and lookup table are loop-invariant and fetched once
    int errorGroups = m_ecsError.getGroupCount ();
    int[] groups = m_ecsError.getLookup ();
    for (int i = 0; i < size; ++i)
    {
        ErrorVector ev = m_errors.get (i);
        short[] cols = ev.getError ();
        short[] newArray = new short[errorGroups];
        for (int j = 0; j < errorGroups; ++j)
        {
            // a zero (error) entry is insignificant; mark it SHORT_MIN
            // so the filler may overlap it with other states
            if (cols[groups[j]] == 0)
                newArray[j] = SHORT_MIN;
            else
                newArray[j] = cols[groups[j]];
        }
        ev.setError (newArray);
        // error states are numbered after all DFA states
        int stateNum = i + m_dfaCopy.size ();
        int blockSize = getErrorBlockSize (stateNum, minMax);
        // don't have to do fill for block size of 0
        if (blockSize > 0)
        {
            // autoboxing replaces the deprecated new Integer(...) and
            // new Short(...) constructors
            Integer holes = getErrorHoleSize (stateNum, minMax[0], minMax[1]);
            Vector<Short> v = m_fillMap.get (holes);
            if (v == null)
            {
                v = new Vector<Short> ();
                m_fillMap.put (holes, v);
            }
            v.add ((short)stateNum);
        }
    }
}
//
// 1st parse each DFA states into _noCompressList, _fullCompressList
// and _blockListMap. For states that contain holes, also put it into
// _holeSet.
//
// 2nd step basically fills all holes in the _holeSet
//
// For every DFA state this chooses between two encodings: repeat/default
// (processStateRepeat) or difference against an earlier state
// (processStateDiff), picking whichever leaves fewer explicit entries.
//
private void processDFAStates ()
{
    short i;
    int repeatCount;
    short[] repeatValue = new short[1];
    //
    // process each state to see the repeats, holes, etc
    //
    int diff;
    short cmpState;
    for (i = 0; i < m_dfaCopy.size (); i++)
    {
        repeatCount = findRepeat (m_dfaCopy.getRow (i), repeatValue);
        int hardDiff = getErrorCount (i);
        diff = getNonDefaultDiff (i, repeatValue[0]);
        // cmpState == i means "encode against itself", i.e. repeat/default
        cmpState = i;
        // a weak repeat cannot serve as the default, so fall back to
        // counting against the error transitions instead
        if (repeatCount < GOODREPEAT)
            diff = hardDiff;
        // looking for a state that minimizes the
        // difference
        // (skipped when the row is one solid repeat -- nothing can beat it)
        if (repeatCount != m_rowSize)
        {
            int stateDiff = m_rowSize;
            short stateCmp = 0;
            // find the minimal difference state
            for (short j = 0; j < i; ++j)
            {
                int d = getStateDiff (i, j);
                if (d < stateDiff)
                {
                    stateCmp = j;
                    stateDiff = d;
                }
                else if (d == stateDiff)
                {
                    // for two same diff's, pick the smaller block size
                    int[] minMax = new int[2];
                    int b1, b2;
                    b1 = getStateDiffBlock (i, stateCmp, minMax);
                    b2 = getStateDiffBlock (i, j, minMax);
                    if (b2 < b1)
                    {
                        stateCmp = j;
                        stateDiff = d;
                    }
                }
            }
            // BALANCE biases the choice toward the repeat encoding;
            // only use another state as template when clearly better
            if (stateDiff < diff + BALANCE)
            {
                cmpState = stateCmp;
                diff = stateDiff;
            }
        }
        // process the DFA state and add it to error state where
        // applicable
        if (i == cmpState)
            processStateRepeat (i, repeatValue[0], repeatCount);
        // processStateRepeat (i, (short)0, 0);
        else
            processStateDiff (i, cmpState);
    }
}
//
// Check whether the meaningful columns [min, max] of the given state
// can be placed into m_next at offset pos without colliding with
// entries already filled in.  Positions past the current end of m_next
// are always free (the arrays grow on demand in doFill).
//
// The two original branches were identical except for where the column
// array came from, so they are merged with a single column selection.
//
private boolean canFill (int state, int min, int max, int pos)
{
    // DFA states read their row directly; error states (numbered after
    // the DFA rows) read their compressed error vector
    short[] column = (state < m_dfaCopy.size ())
        ? m_dfaCopy.getRow (state).getStates ()
        : m_errors.get (state - m_dfaCopy.size ()).getError ();
    int bound = m_next.length;
    for (pos = pos + min; pos < bound && min <= max; ++pos, ++min)
    {
        if (column[min] != SHORT_MIN &&
            m_next[pos] != SHORT_MIN)
            return false;
    }
    return true;
}
//
// do the actual filling
//
// Places the meaningful columns [min, max] of the given state into
// m_next/m_check at offset pos, growing the arrays when necessary, and
// records the offset in m_base.
//
// The two original copy loops were identical except for the column
// source, so they are merged with a single column selection.
//
private void doFill (short state, int min, int max, int pos)
{
    int bound = pos + max + 1;
    // allocate space if necessary
    if (bound > m_next.length)
    {
        m_next = resize (m_next, bound, SHORT_MIN);
        m_check = resize (m_check, bound, SHORT_MIN);
    }
    m_base[state] = (short)pos;
    // DFA states copy from their row; error states from their vector
    short[] column = (state < m_dfaCopy.size ())
        ? m_dfaCopy.getRow (state).getStates ()
        : m_errors.get (state - m_dfaCopy.size ()).getError ();
    // now do the fill; m_check remembers which state owns each slot
    for (pos = pos + min; min <= max; ++pos, ++min)
    {
        if (column[min] != SHORT_MIN)
        {
            m_next[pos] = column[min];
            m_check[pos] = state;
        }
    }
}
//
// Find a home in m_next for the given state.  This is a simple
// first-fit scan: try every offset in the current table and, when none
// works, append at the end.
//
private void doFillState (short state)
{
    int[] span = new int[2];
    if (state < m_dfaCopy.size ())
        getBlockSize (state, span);
    else
        getErrorBlockSize (state, span);
    int limit = m_next.length;
    for (int offset = 0; offset < limit; ++offset)
    {
        if (canFill (state, span[0], span[1], offset))
        {
            doFill (state, span[0], span[1], offset);
            return;
        }
    }
    // no existing hole accepts this block; extend the table instead
    doFill (state, span[0], span[1], limit);
}
//
// Fill all pending states, visiting larger hole counts before smaller
// ones so the awkward rows get first pick of table positions.
//
private void doFillStates ()
{
    Integer[] holeSizes = m_fillMap.keySet ().toArray (new Integer[m_fillMap.size ()]);
    // walk the key array from back to front
    for (int idx = holeSizes.length - 1; idx >= 0; --idx)
    {
        for (Short state : m_fillMap.get (holeSizes[idx]))
            doFillState (state.shortValue ());
    }
}
//
// Build the compressed tables: choose per-state encodings, fill
// m_next/m_check, then post-process the default array and pad the
// table tail so every base+column index stays in bounds.
//
void compute ()
{
    m_next = new short[0];
    m_check = new short[0];
    m_default = resize (m_default, m_dfaCopy.size (), SHORT_MIN);
    m_base = resize (m_base, m_dfaCopy.size (), (short)0);
    processDFAStates ();
    //
    // expand the base/default array to count error vectors
    //
    m_base = resize (m_base, m_dfaCopy.size () + m_errors.size (), (short)0);
    m_default = resize (m_default, m_dfaCopy.size () + m_errors.size (), SHORT_MIN);
    //
    // determine if _yy_meta is necessary
    // (a single error equivalent class needs no meta lookup)
    //
    if (m_ecsError.getGroupCount () <= 1)
        m_useMeta = false;
    else
        m_useMeta = true;
    //
    // determine if we really needs error states
    //
    if (!m_useMeta && !m_useStateDiff)
        m_useError = false;
    else
        m_useError = true;
    //
    // process the error states
    //
    if (m_useError)
        processErrorStates ();
    //
    // fill the table
    //
    doFillStates ();
    //
    // check if we really needs m_default
    // (only when at least one entry was actually assigned)
    //
    m_useDefault = false;
    for (int i = 0; i < m_default.length; ++i)
        if (m_default[i] != SHORT_MIN)
        {
            m_useDefault = true;
            break;
        }
    if (!m_useDefault)
        m_default = new short[0];
    //
    // check if the indices at the end are all valid
    // (any state based near the end may index past m_next; compute how
    // much padding is required)
    //
    int expand;
    if (m_useDefault)
        expand = m_ecsError.getGroupCount (); // reserve space for default error state
    else
        expand = 0;
    for (int i = m_next.length - m_rowSize; i < m_next.length; ++i)
    {
        if (i < 0)
            i = 0;
        if (m_check[i] == SHORT_MIN)
            continue;
        if (m_check[i] < m_dfaCopy.size ())
        {
            // DFA rows span m_rowSize columns
            if (m_base[m_check[i]] + m_rowSize > m_next.length + expand)
            {
                expand = m_base[m_check[i]] + m_rowSize - m_next.length;
            }
        }
        else
        {
            // error vectors span one column per equivalent-class group
            if (m_base[m_check[i]] + m_ecsError.getGroupCount () >
                m_next.length + expand)
            {
                expand = m_base[m_check[i]] + m_ecsError.getGroupCount () - m_next.length;
            }
        }
    }
    // id of the synthetic catch-all error state appended below
    short defaultError = (short)m_base.length;
    if (m_useDefault)
    {
        m_default = resize (m_default, m_default.length + 1, defaultError);
        m_base = resize (m_base, m_base.length + 1, (short)m_next.length);
    }
    m_next = resize (m_next, m_next.length + expand, (short)0);
    m_check = resize (m_check, m_next.length, defaultError);
    //
    // now processm_default
    // (unresolved defaults either point at the catch-all error state or
    // collapse back to their stored default values)
    //
    if (m_useDefault)
    {
        if (m_useError)
            for (int i = 0; i < m_default.length; ++i)
            {
                if (m_default[i] == SHORT_MIN)
                    m_default[i] = defaultError;
            }
        else
            for (int i = 0; i < m_default.length; ++i)
            {
                if (m_default[i] == SHORT_MIN)
                    m_default[i] = 0;
                else
                    m_default[i] = m_errors.get (m_default[i] - m_dfaCopy.size ()).getDefaultValue ();
            }
    }
    else
        m_default = null;
    //
    // process all SHRT_MIN in m_check and m_next to 0
    //
    for (int i = 0; i < m_check.length; ++i)
    {
        if (m_check[i] == SHORT_MIN)
            m_check[i] = defaultError;
        if (m_next[i] == SHORT_MIN)
            m_next[i] = 0;
    }
}
// compressed transition table
short[] getNext ()
{
    return m_next;
}
// ownership table parallel to getNext(); entry i holds the state that
// filled slot i
short[] getCheck ()
{
    return m_check;
}
// per-state base offsets into the next/check tables
short[] getBase ()
{
    return m_base;
}
// per-state default transitions; null when defaults turned out unused
short[] getDefault ()
{
    return m_default;
}
// meta table mapping input columns to error equivalent-class groups,
// or null when defaults or meta lookup are not in use
short[] getMeta ()
{
    if (m_useDefault && m_useMeta)
    {
        int[] groups = m_ecsError.getGroups ();
        short[] meta = new short[groups.length];
        for (int i = 0; i < groups.length; ++i)
            meta[i] = (short)groups[i];
        return meta;
    }
    return null;
}
// whether error states are present in the generated tables
boolean getError ()
{
    return m_useError;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.util;
import org.apache.flink.core.memory.MemorySegment;
import org.apache.flink.core.memory.MemorySegmentFactory;
import org.apache.flink.table.dataformat.BinaryRow;
import org.apache.flink.table.dataformat.BinaryRowTest;
import org.apache.flink.table.dataformat.DataFormatTestUtil;
import org.apache.flink.table.dataformat.util.BinaryRowUtil;
import org.junit.Assert;
import org.junit.Test;
import static org.apache.flink.table.dataformat.util.BinaryRowUtil.BYTE_ARRAY_BASE_OFFSET;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
/**
 * Test for {@link SegmentsUtil}, most is covered by {@link BinaryRowTest},
 * this just test some boundary scenarios testing.
 */
public class SegmentsUtilTest {

    @Test
    public void testCopy() {
        // test copy the content of the latter Seg
        MemorySegment[] segments = new MemorySegment[2];
        segments[0] = MemorySegmentFactory.wrap(new byte[]{0, 2, 5});
        segments[1] = MemorySegmentFactory.wrap(new byte[]{6, 12, 15});
        // offset 4 falls inside the second 3-byte segment
        byte[] bytes = SegmentsUtil.copyToBytes(segments, 4, 2);
        Assert.assertArrayEquals(new byte[] {12, 15}, bytes);
    }

    @Test
    public void testEquals() {
        // test copy the content of the latter Seg
        // segments1 and segments2 share the bytes {0,2,5,6,12,15} at
        // different offsets; comparisons cross segment boundaries
        MemorySegment[] segments1 = new MemorySegment[3];
        segments1[0] = MemorySegmentFactory.wrap(new byte[]{0, 2, 5});
        segments1[1] = MemorySegmentFactory.wrap(new byte[]{6, 12, 15});
        segments1[2] = MemorySegmentFactory.wrap(new byte[]{1, 1, 1});
        MemorySegment[] segments2 = new MemorySegment[2];
        segments2[0] = MemorySegmentFactory.wrap(new byte[]{6, 0, 2, 5});
        segments2[1] = MemorySegmentFactory.wrap(new byte[]{6, 12, 15, 18});
        // zero-length comparison is trivially equal
        assertTrue(SegmentsUtil.equalsMultiSegments(segments1, 0, segments2, 0, 0));
        assertTrue(SegmentsUtil.equals(segments1, 0, segments2, 1, 3));
        assertTrue(SegmentsUtil.equals(segments1, 0, segments2, 1, 6));
        // the 7th byte differs (1 vs 18)
        assertFalse(SegmentsUtil.equals(segments1, 0, segments2, 1, 7));
    }

    @Test
    public void testBoundaryByteArrayEquals() {
        byte[] bytes1 = new byte[5];
        bytes1[3] = 81;
        byte[] bytes2 = new byte[100];
        bytes2[3] = 81;
        bytes2[4] = 81;
        // arrays agree on the first 4 bytes but differ at index 4
        assertTrue(BinaryRowUtil.byteArrayEquals(bytes1, bytes2, 4));
        assertFalse(BinaryRowUtil.byteArrayEquals(bytes1, bytes2, 5));
        // zero-length comparison is trivially equal
        assertTrue(BinaryRowUtil.byteArrayEquals(bytes1, bytes2, 0));
    }

    @Test
    public void testBoundaryEquals() {
        // the same 160-byte row materialized in one and in multiple
        // segments must compare equal in every pairing
        BinaryRow row24 = DataFormatTestUtil.get24BytesBinaryRow();
        BinaryRow row160 = DataFormatTestUtil.get160BytesBinaryRow();
        BinaryRow varRow160 = DataFormatTestUtil.getMultiSeg160BytesBinaryRow(row160);
        BinaryRow varRow160InOne = DataFormatTestUtil.getMultiSeg160BytesInOneSegRow(row160);
        assertEquals(row160, varRow160InOne);
        assertEquals(varRow160, varRow160InOne);
        assertEquals(row160, varRow160);
        assertEquals(varRow160InOne, varRow160);
        assertNotEquals(row24, row160);
        assertNotEquals(row24, varRow160);
        assertNotEquals(row24, varRow160InOne);
        assertTrue(SegmentsUtil.equals(row24.getSegments(), 0, row160.getSegments(), 0, 0));
        assertTrue(SegmentsUtil.equals(row24.getSegments(), 0, varRow160.getSegments(), 0, 0));
        // test var segs
        // (2 x 32-byte segments vs 3 x 16-byte segments; matching bytes
        // are planted at equivalent absolute offsets on both sides)
        MemorySegment[] segments1 = new MemorySegment[2];
        segments1[0] = MemorySegmentFactory.wrap(new byte[32]);
        segments1[1] = MemorySegmentFactory.wrap(new byte[32]);
        MemorySegment[] segments2 = new MemorySegment[3];
        segments2[0] = MemorySegmentFactory.wrap(new byte[16]);
        segments2[1] = MemorySegmentFactory.wrap(new byte[16]);
        segments2[2] = MemorySegmentFactory.wrap(new byte[16]);
        segments1[0].put(9, (byte) 1);
        assertFalse(SegmentsUtil.equals(segments1, 0, segments2, 14, 14));
        segments2[1].put(7, (byte) 1);
        assertTrue(SegmentsUtil.equals(segments1, 0, segments2, 14, 14));
        assertTrue(SegmentsUtil.equals(segments1, 2, segments2, 16, 14));
        assertTrue(SegmentsUtil.equals(segments1, 2, segments2, 16, 16));
        segments2[2].put(7, (byte) 1);
        assertTrue(SegmentsUtil.equals(segments1, 2, segments2, 32, 14));
    }

    @Test
    public void testBoundaryCopy() {
        // two 32-byte segments with markers at offsets 15 and 47
        MemorySegment[] segments1 = new MemorySegment[2];
        segments1[0] = MemorySegmentFactory.wrap(new byte[32]);
        segments1[1] = MemorySegmentFactory.wrap(new byte[32]);
        segments1[0].put(15, (byte) 5);
        segments1[1].put(15, (byte) 6);
        {
            // full copy across the segment boundary
            byte[] bytes = new byte[64];
            MemorySegment[] segments2 = new MemorySegment[]{MemorySegmentFactory.wrap(bytes)};
            SegmentsUtil.copyToBytes(segments1, 0, bytes, 0, 64);
            assertTrue(SegmentsUtil.equals(segments1, 0, segments2, 0, 64));
        }
        {
            // copy starting exactly at the second segment
            byte[] bytes = new byte[64];
            MemorySegment[] segments2 = new MemorySegment[]{MemorySegmentFactory.wrap(bytes)};
            SegmentsUtil.copyToBytes(segments1, 32, bytes, 0, 14);
            assertTrue(SegmentsUtil.equals(segments1, 32, segments2, 0, 14));
        }
        {
            // copy starting inside the second segment
            byte[] bytes = new byte[64];
            MemorySegment[] segments2 = new MemorySegment[]{MemorySegmentFactory.wrap(bytes)};
            SegmentsUtil.copyToBytes(segments1, 34, bytes, 0, 14);
            assertTrue(SegmentsUtil.equals(segments1, 34, segments2, 0, 14));
        }
    }

    @Test
    public void testCopyToUnsafe() {
        // same scenarios as testBoundaryCopy but through the
        // unsafe-offset copy path
        MemorySegment[] segments1 = new MemorySegment[2];
        segments1[0] = MemorySegmentFactory.wrap(new byte[32]);
        segments1[1] = MemorySegmentFactory.wrap(new byte[32]);
        segments1[0].put(15, (byte) 5);
        segments1[1].put(15, (byte) 6);
        {
            byte[] bytes = new byte[64];
            MemorySegment[] segments2 = new MemorySegment[]{MemorySegmentFactory.wrap(bytes)};
            SegmentsUtil.copyToUnsafe(segments1, 0, bytes, BYTE_ARRAY_BASE_OFFSET, 64);
            assertTrue(SegmentsUtil.equals(segments1, 0, segments2, 0, 64));
        }
        {
            byte[] bytes = new byte[64];
            MemorySegment[] segments2 = new MemorySegment[]{MemorySegmentFactory.wrap(bytes)};
            SegmentsUtil.copyToUnsafe(segments1, 32, bytes, BYTE_ARRAY_BASE_OFFSET, 14);
            assertTrue(SegmentsUtil.equals(segments1, 32, segments2, 0, 14));
        }
        {
            byte[] bytes = new byte[64];
            MemorySegment[] segments2 = new MemorySegment[]{MemorySegmentFactory.wrap(bytes)};
            SegmentsUtil.copyToUnsafe(segments1, 34, bytes, BYTE_ARRAY_BASE_OFFSET, 14);
            assertTrue(SegmentsUtil.equals(segments1, 34, segments2, 0, 14));
        }
    }

    @Test
    public void testFind() {
        // all-zero segments: an empty needle is found at the search
        // start, a non-empty needle of zeros still mismatches on size
        MemorySegment[] segments1 = new MemorySegment[2];
        segments1[0] = MemorySegmentFactory.wrap(new byte[32]);
        segments1[1] = MemorySegmentFactory.wrap(new byte[32]);
        MemorySegment[] segments2 = new MemorySegment[3];
        segments2[0] = MemorySegmentFactory.wrap(new byte[16]);
        segments2[1] = MemorySegmentFactory.wrap(new byte[16]);
        segments2[2] = MemorySegmentFactory.wrap(new byte[16]);
        assertEquals(34, SegmentsUtil.find(segments1, 34, 0, segments2, 0, 0));
        assertEquals(-1, SegmentsUtil.find(segments1, 34, 0, segments2, 0, 15));
    }
}
| |
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openehealth.ipf.platform.camel.ihe.ws;
import static org.apache.cxf.message.Message.PROTOCOL_HEADERS;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.namespace.QName;
import org.apache.camel.Message;
import org.apache.cxf.headers.Header;
import org.apache.cxf.helpers.CastUtils;
import org.apache.cxf.jaxws.context.WrappedMessageContext;
/**
 * Utilities for handling HTTP and SOAP headers in Web Service interactions.
 * @author Dmytro Rud
 */
abstract public class HeaderUtils {

    private HeaderUtils() {
        throw new IllegalStateException("Cannot instantiate utility class");
    }

    /** Factory producing an empty headers container on demand. */
    private static interface DefaultValueFactory<T> {
        T createDefaultValue();
    }

    private static final DefaultValueFactory<Map<String, List<String>>> HTTP_HEADERS_CONTAINER_FACTORY =
        new DefaultValueFactory<Map<String, List<String>>>() {
            @Override
            public Map<String, List<String>> createDefaultValue() {
                return new HashMap<String, List<String>>();
            }
        };

    private static final DefaultValueFactory<List<Header>> SOAP_HEADERS_CONTAINER_FACTORY =
        new DefaultValueFactory<List<Header>>() {
            @Override
            public List<Header> createDefaultValue() {
                return new ArrayList<Header>();
            }
        };

    /**
     * Copies incoming HTTP and SOAP headers from the given Web Service
     * message context into headers of the given Camel message.
     */
    public static void processIncomingHeaders(
            Map<String, Object> messageContext,
            Message message)
    {
        processIncomingHttpHeaders(messageContext, message);
        processIncomingSoapHeaders(messageContext, message);
    }

    /**
     * Injects user-defined outgoing HTTP and SOAP headers from the given
     * Camel message into the given Web Service message context.
     */
    public static void processUserDefinedOutgoingHeaders(
            Map<String, Object> messageContext,
            Message message,
            boolean isRequest)
    {
        processUserDefinedOutgoingHttpHeaders(messageContext, message, isRequest);
        processUserDefinedOutgoingSoapHeaders(messageContext, message, isRequest);
    }

    /**
     * Returns headers of the message represented by the given context.
     *
     * @param <T>
     *      type of headers' container.
     * @param messageContext
     *      Web Service message context.
     * @param key
     *      key under which the headers reside in the message context.
     * @param useInputMessage
     *      whether input message should the used.
     * @param needCreateWhenNotExist
     *      whether the headers' map should be created when it does
     *      not exist.
     * @param defaultValueFactory
     *      factory for producing default values.
     * @return
     *      either the map of HTTP headers as found in the message context,
     *      or a newly created map when none found, or <code>null</code>
     *      when creation of a new map is not allowed.
     */
    @SuppressWarnings("unchecked")
    private static <T> T getHeaders(
            Map<String, Object> messageContext,
            String key,
            boolean useInputMessage,
            boolean needCreateWhenNotExist,
            DefaultValueFactory<T> defaultValueFactory)
    {
        WrappedMessageContext wrappedContext = (WrappedMessageContext) messageContext;
        // input headers live in the wrapped map itself; output headers
        // live in the exchange's outgoing CXF message
        Map<String, Object> headersContainer = useInputMessage
            ? wrappedContext.getWrappedMap()
            : wrappedContext.getWrappedMessage().getExchange().getOutMessage();
        T headers = (T) headersContainer.get(key);
        if ((headers == null) && needCreateWhenNotExist) {
            headers = defaultValueFactory.createDefaultValue();
            headersContainer.put(key, headers);
        }
        return headers;
    }

    /**
     * Stores a map of incoming SOAP headers from the given
     * Web Service message context into the Camel header
     * {@link AbstractWsEndpoint#INCOMING_SOAP_HEADERS}
     * of the given Camel message.
     *
     * @param messageContext
     *      Web Service message contents.
     * @param message
     *      Camel message in whose headers the
     *      SOAP headers should be stored.
     */
    private static void processIncomingSoapHeaders(
            Map<String, Object> messageContext,
            Message message)
    {
        HashMap<QName, Header> userHeaders = new HashMap<QName, Header>();
        List<Header> soapHeaders = getHeaders(
                messageContext, Header.HEADER_LIST, true, false, null);
        if (soapHeaders != null) {
            for (Header soapHeader : soapHeaders) {
                userHeaders.put(soapHeader.getName(), soapHeader);
            }
        }
        // always set the Camel header, even when it is an empty map
        message.setHeader(AbstractWsEndpoint.INCOMING_SOAP_HEADERS, userHeaders);
    }

    /**
     * Injects user-defined SOAP headers from the header
     * {@link AbstractWsEndpoint#OUTGOING_SOAP_HEADERS}
     * of the given Camel message into the given Web Service
     * message context.
     *
     * @param messageContext
     *      Web Service message contents.
     * @param message
     *      Camel message from whose headers the
     *      SOAP headers should be taken.
     * @param isRequest
     *      whether the Web Service message under consideration
     *      is a request one (<code>false</code> on server side,
     *      <code>true</code> on client side).
     */
    private static void processUserDefinedOutgoingSoapHeaders(
            Map<String, Object> messageContext,
            Message message,
            boolean isRequest)
    {
        List<Header> userHeaders = CastUtils.cast(
                message.getHeader(AbstractWsEndpoint.OUTGOING_SOAP_HEADERS, List.class));
        if ((userHeaders != null) && ! userHeaders.isEmpty()) {
            List<Header> soapHeaders = getHeaders(
                    messageContext, Header.HEADER_LIST, isRequest, true, SOAP_HEADERS_CONTAINER_FACTORY);
            soapHeaders.addAll(userHeaders);
        }
    }

    /**
     * Stores a map of incoming HTTP headers from the given
     * Web Service message context into the Camel header
     * {@link AbstractWsEndpoint#INCOMING_HTTP_HEADERS}
     * of the given Camel message.
     *
     * @param messageContext
     *      Web Service message contents.
     * @param message
     *      Camel message in whose headers the
     *      HTTP headers should be stored.
     */
    private static void processIncomingHttpHeaders(
            Map<String, Object> messageContext,
            Message message)
    {
        Map<String, String> userHeaders = new HashMap<String, String>();
        Map<String, List<String>> httpHeaders = getHeaders(
                messageContext, PROTOCOL_HEADERS, true, false, null);
        // getHeaders() returns null when the context contains no HTTP
        // headers and creation is not requested -- guard against the NPE
        // (the SOAP counterpart above already performs this check)
        if (httpHeaders != null) {
            for (Map.Entry<String, List<String>> entry : httpHeaders.entrySet()) {
                // only the first value of each multi-valued header is kept
                userHeaders.put(entry.getKey(), entry.getValue().get(0));
            }
        }
        message.setHeader(AbstractWsEndpoint.INCOMING_HTTP_HEADERS, userHeaders);
    }

    /**
     * Injects user-defined HTTP headers from the header
     * {@link AbstractWsEndpoint#OUTGOING_HTTP_HEADERS}
     * of the given Camel message into the given Web Service
     * message context.
     *
     * @param messageContext
     *      Web Service message contents.
     * @param message
     *      Camel message from whose headers the
     *      HTTP headers should be taken.
     * @param isRequest
     *      whether the Web Service message under consideration
     *      is a request one (<code>false</code> on server side,
     *      <code>true</code> on client side).
     */
    private static void processUserDefinedOutgoingHttpHeaders(
            Map<String, Object> messageContext,
            Message message,
            boolean isRequest)
    {
        Map<String, String> headers = CastUtils.cast(
                message.getHeader(AbstractWsEndpoint.OUTGOING_HTTP_HEADERS, Map.class));
        if ((headers != null) && ! headers.isEmpty()) {
            Map<String, List<String>> httpHeaders = getHeaders(
                    messageContext, PROTOCOL_HEADERS, isRequest, true, HTTP_HEADERS_CONTAINER_FACTORY);
            for (Map.Entry<String, String> entry : headers.entrySet()) {
                httpHeaders.put(entry.getKey(), Collections.singletonList(entry.getValue()));
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.process.traversal.step.map;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.tinkerpop.gremlin.LoadGraphWith;
import org.apache.tinkerpop.gremlin.process.AbstractGremlinProcessTest;
import org.apache.tinkerpop.gremlin.process.GremlinProcessRunner;
import org.apache.tinkerpop.gremlin.process.IgnoreEngine;
import org.apache.tinkerpop.gremlin.process.computer.traversal.step.map.PageRank;
import org.apache.tinkerpop.gremlin.process.computer.traversal.step.map.TraversalVertexProgramStep;
import org.apache.tinkerpop.gremlin.process.traversal.P;
import org.apache.tinkerpop.gremlin.process.traversal.Step;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalEngine;
import org.apache.tinkerpop.gremlin.process.traversal.Traverser;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.process.traversal.step.Profiling;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.ProfileStep;
import org.apache.tinkerpop.gremlin.process.traversal.strategy.optimization.CountStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.strategy.optimization.LazyBarrierStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.strategy.optimization.RepeatUnrollStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ComputerVerificationStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.VerificationException;
import org.apache.tinkerpop.gremlin.process.traversal.util.Metrics;
import org.apache.tinkerpop.gremlin.process.traversal.util.MutableMetrics;
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalHelper;
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalMetrics;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.apache.tinkerpop.gremlin.LoadGraphWith.GraphData.GRATEFUL;
import static org.apache.tinkerpop.gremlin.LoadGraphWith.GraphData.MODERN;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.both;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeThat;
/**
* @author Bob Briody (http://bobbriody.com)
* @author Marko A. Rodriguez (http://markorodriguez.com)
*/
@RunWith(GremlinProcessRunner.class)
public abstract class ProfileTest extends AbstractGremlinProcessTest {
// side-effect key under which profile(METRICS_KEY) stores its metrics
private static final String METRICS_KEY = "metrics";

// traversals returning TraversalMetrics as their result
public abstract Traversal<Vertex, TraversalMetrics> get_g_V_out_out_profile();
public abstract Traversal<Vertex, TraversalMetrics> get_g_V_repeatXbothX_timesX3X_profile();
public abstract Traversal<Vertex, TraversalMetrics> get_g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profile();
public abstract Traversal<Vertex, TraversalMetrics> get_g_V_whereXinXcreatedX_count_isX1XX_name_profile();
public abstract Traversal<Vertex, TraversalMetrics> get_g_V_matchXa_created_b__b_in_count_isXeqX1XXX_selectXa_bX_profile();

// variants collecting metrics into the METRICS_KEY side-effect instead
public abstract Traversal<Vertex, Vertex> get_g_V_out_out_profileXmetricsX();
public abstract Traversal<Vertex, Vertex> get_g_V_repeatXbothX_timesX3X_profileXmetricsX();
public abstract Traversal<Vertex, Vertex> get_g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profileXmetricsX();
public abstract Traversal<Vertex, String> get_g_V_whereXinXcreatedX_count_isX1XX_name_profileXmetricsX();
public abstract Traversal<Vertex, Map<String, String>> get_g_V_matchXa_created_b__b_in_count_isXeqX1XXX_selectXa_bX_profileXmetricsX();

public abstract Traversal<Vertex, TraversalMetrics> get_g_V_hasLabelXpersonX_pageRank_withXpropertyName_rankX_withXedges_bothEX_rank_profile();

public abstract Traversal<Vertex, TraversalMetrics> get_g_V_groupXmX_profile();

@Override
protected void afterLoadGraphWith(final Graph graph) throws Exception {
    // profile() does some explicit counting which goes off with LazyBarrierStrategy in place.
    g = g.withoutStrategies(LazyBarrierStrategy.class);
}
@Test
@LoadGraphWith(MODERN)
public void modern_V_out_out_profile() {
    // run the profiled traversal and validate its resulting metrics
    final Traversal<Vertex, TraversalMetrics> t = get_g_V_out_out_profile();
    printTraversalForm(t);
    final TraversalMetrics metrics = t.next();
    validate_g_V_out_out_profile_modern(t, metrics);
}
@Test
@LoadGraphWith(MODERN)
public void modern_V_out_out_profileXmetricsX() {
    // here the metrics land in the "metrics" side-effect rather than
    // in the traversal's result
    final Traversal<Vertex, Vertex> t = get_g_V_out_out_profileXmetricsX();
    printTraversalForm(t);
    t.iterate();
    final TraversalMetrics metrics = t.asAdmin().getSideEffects().get(METRICS_KEY);
    validate_g_V_out_out_profile_modern(t, metrics);
}
// Shared assertions for the modern-graph g.V().out().out() profile:
// per-step element/traverser counts plus ProfileStep interleaving.
private void validate_g_V_out_out_profile_modern(final Traversal traversal, final TraversalMetrics traversalMetrics) {
    traversalMetrics.toString(); // ensure no exceptions are thrown
    assumeThat("The following assertions apply to TinkerGraph only as provider strategies can alter the steps to not comply with expectations",
            graph.getClass().getSimpleName(), equalTo("TinkerGraph"));
    // step 0: V() emits all 6 modern vertices
    Metrics metrics = traversalMetrics.getMetrics(0);
    assertEquals(6, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
    assertEquals(6, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
    // step 1: first out() yields 6 elements (traverser count may be
    // bulked, so only assert it is non-zero)
    metrics = traversalMetrics.getMetrics(1);
    assertEquals(6, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
    assertNotEquals(0, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
    // step 2: second out() yields 2 elements
    metrics = traversalMetrics.getMetrics(2);
    assertEquals(2, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
    assertNotEquals(0, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
    if (!onGraphComputer(traversal.asAdmin())) {
        // Every other step should be a Profile step
        List<Step> steps = traversal.asAdmin().getSteps();
        for (int ii = 1; ii <= 6; ii += 2) {
            assertEquals("Every other Step should be a ProfileStep.", ProfileStep.class, steps.get(ii).getClass());
        }
    }
}
////////////////////
@Test
@LoadGraphWith(GRATEFUL)
public void grateful_V_out_out_profile() {
    // same traversal, exercised against the larger grateful-dead data set
    final Traversal<Vertex, TraversalMetrics> t = get_g_V_out_out_profile();
    printTraversalForm(t);
    validate_g_V_out_out_profile_grateful(t.next());
}
@Test
@LoadGraphWith(GRATEFUL)
public void grateful_V_out_out_profileXmetricsX() {
    // side-effect variant of the grateful-dead profiling test
    final Traversal<Vertex, Vertex> t = get_g_V_out_out_profileXmetricsX();
    printTraversalForm(t);
    t.iterate();
    validate_g_V_out_out_profile_grateful(t.asAdmin().getSideEffects().get(METRICS_KEY));
}
private void validate_g_V_out_out_profile_grateful(final TraversalMetrics traversalMetrics) {
traversalMetrics.toString(); // ensure no exceptions are thrown
assumeThat("The following assertions apply to TinkerGraph only as provider strategies can alter the steps to not comply with expectations",
graph.getClass().getSimpleName(), equalTo("TinkerGraph"));
Metrics metrics = traversalMetrics.getMetrics(0);
assertEquals(808, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
assertEquals(808, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
assertTrue("Percent duration should be positive.", (Double) metrics.getAnnotation(TraversalMetrics.PERCENT_DURATION_KEY) >= 0);
assertTrue("Times should be positive.", metrics.getDuration(TimeUnit.MICROSECONDS) >= 0);
metrics = traversalMetrics.getMetrics(1);
assertEquals(8049, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
assertNotEquals(0, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
assertTrue("Percent duration should be positive.", (Double) metrics.getAnnotation(TraversalMetrics.PERCENT_DURATION_KEY) >= 0);
assertTrue("Times should be positive.", metrics.getDuration(TimeUnit.MICROSECONDS) >= 0);
metrics = traversalMetrics.getMetrics(2);
assertEquals(327370, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
assertNotEquals(0, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
assertTrue("Percent duration should be positive.", (Double) metrics.getAnnotation(TraversalMetrics.PERCENT_DURATION_KEY) >= 0);
assertTrue("Times should be positive.", metrics.getDuration(TimeUnit.MICROSECONDS) >= 0);
double totalPercentDuration = 0;
for (Metrics m : traversalMetrics.getMetrics()) {
totalPercentDuration += (Double) m.getAnnotation(TraversalMetrics.PERCENT_DURATION_KEY);
}
assertEquals(100, totalPercentDuration, 0.000001);
}
///////////////////
    @Test
    @LoadGraphWith(MODERN)
    @IgnoreEngine(TraversalEngine.Type.COMPUTER)
    public void g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profile() {
        // The sleeps inside sideEffect() should be attributed to those steps' durations.
        final Traversal<Vertex, TraversalMetrics> traversal = get_g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profile();
        printTraversalForm(traversal);
        // This assertion is really only meant for tinkergraph
        if (graph.getClass().getSimpleName().equals("TinkerGraph"))
            assertEquals("There should be 8 steps in this traversal (counting injected profile steps).", 8, traversal.asAdmin().getSteps().size());
        final TraversalMetrics traversalMetrics = traversal.next();
        validate_g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profile(traversalMetrics);
    }
    @Test
    @LoadGraphWith(MODERN)
    @IgnoreEngine(TraversalEngine.Type.COMPUTER)
    public void g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profileXmetricsX() {
        // Side-effect variant of the sleep test; metrics are fetched by METRICS_KEY.
        final Traversal<Vertex, Vertex> traversal = get_g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profileXmetricsX();
        printTraversalForm(traversal);
        traversal.iterate();
        // This assertion is really only meant for tinkergraph
        if (graph.getClass().getSimpleName().equals("TinkerGraph"))
            assertEquals("There should be 7 steps in this traversal (counting injected profile steps).", 7, traversal.asAdmin().getSteps().size());
        final TraversalMetrics traversalMetrics = traversal.asAdmin().getSideEffects().get(METRICS_KEY);
        validate_g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profile(traversalMetrics);
    }
private void validate_g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profile(final TraversalMetrics traversalMetrics) {
traversalMetrics.toString(); // ensure no exceptions are thrown
assumeThat("The following assertions apply to TinkerGraph only as provider strategies can alter the steps to not comply with expectations",
graph.getClass().getSimpleName(), equalTo("TinkerGraph"));
// Grab the second (sideEffect{sleep}) step and check the times.
Metrics metrics = traversalMetrics.getMetrics(1);
// 6 elements w/ a 10ms sleep each = 60ms with 10ms for other computation.
assertTrue("Duration should be at least the length of the sleep (59ms): " + metrics.getDuration(TimeUnit.MILLISECONDS),
metrics.getDuration(TimeUnit.MILLISECONDS) >= 59);
// 6 elements w/ a 5ms sleep each = 30ms plus 20ms for other computation
metrics = traversalMetrics.getMetrics(2);
assertTrue("Duration should be at least the length of the sleep (29ms): " + metrics.getDuration(TimeUnit.MILLISECONDS),
metrics.getDuration(TimeUnit.MILLISECONDS) >= 29);
double totalPercentDuration = 0;
for (Metrics m : traversalMetrics.getMetrics()) {
totalPercentDuration += (Double) m.getAnnotation(TraversalMetrics.PERCENT_DURATION_KEY);
}
assertEquals(100, totalPercentDuration, 0.000001);
}
///////////////////
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_repeat_both_profile() {
        final Traversal<Vertex, TraversalMetrics> traversal = get_g_V_repeatXbothX_timesX3X_profile();
        printTraversalForm(traversal);
        final TraversalMetrics traversalMetrics = traversal.next();
        // Expected counts differ depending on whether RepeatUnrollStrategy rewrote the
        // repeat() (only possible when ComputerVerificationStrategy is absent).
        validate_g_V_repeat_both_modern_profile(traversalMetrics,
            traversal.asAdmin().getStrategies().getStrategy(RepeatUnrollStrategy.class).isPresent() &&
            !traversal.asAdmin().getStrategies().getStrategy(ComputerVerificationStrategy.class).isPresent());
    }
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_repeat_both_profileXmetricsX() {
        // Side-effect variant of the repeat test; metrics are fetched by METRICS_KEY.
        final Traversal<Vertex, Vertex> traversal = get_g_V_repeatXbothX_timesX3X_profileXmetricsX();
        printTraversalForm(traversal);
        traversal.iterate();
        final TraversalMetrics traversalMetrics = traversal.asAdmin().getSideEffects().get(METRICS_KEY);
        // Expected counts differ depending on whether RepeatUnrollStrategy rewrote the repeat().
        validate_g_V_repeat_both_modern_profile(traversalMetrics,
            traversal.asAdmin().getStrategies().getStrategy(RepeatUnrollStrategy.class).isPresent() &&
            !traversal.asAdmin().getStrategies().getStrategy(ComputerVerificationStrategy.class).isPresent());
    }
private void validate_g_V_repeat_both_modern_profile(final TraversalMetrics traversalMetrics, final boolean withRepeatUnrollStrategy) {
traversalMetrics.toString(); // ensure no exceptions are thrown
assumeThat("The following assertions apply to TinkerGraph only as provider strategies can alter the steps to not comply with expectations",
graph.getClass().getSimpleName(), equalTo("TinkerGraph"));
Metrics metrics = traversalMetrics.getMetrics(0);
assertEquals(6, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
assertEquals(6, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
metrics = traversalMetrics.getMetrics(1);
assertEquals(withRepeatUnrollStrategy ? 12 : 72, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
assertNotEquals(0, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
if (!withRepeatUnrollStrategy)
assertTrue("Count should be greater than traversers.", metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID) > metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
assertTrue("Percent duration should be positive.", (Double) metrics.getAnnotation(TraversalMetrics.PERCENT_DURATION_KEY) >= 0);
assertTrue("Times should be positive.", metrics.getDuration(TimeUnit.MICROSECONDS) >= 0);
// Test the nested global metrics of the repeat step
if (!withRepeatUnrollStrategy) {
final Metrics vertexStepNestedInRepeat = (Metrics) metrics.getNested().toArray()[0];
assertEquals(114, vertexStepNestedInRepeat.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
assertNotEquals(0, vertexStepNestedInRepeat.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
assertTrue("Count should be greater than traversers.", vertexStepNestedInRepeat.getCount(TraversalMetrics.ELEMENT_COUNT_ID) > vertexStepNestedInRepeat.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
assertTrue("Times should be positive.", vertexStepNestedInRepeat.getDuration(TimeUnit.MICROSECONDS) >= 0);
}
double totalPercentDuration = 0;
for (Metrics m : traversalMetrics.getMetrics()) {
totalPercentDuration += (Double) m.getAnnotation(TraversalMetrics.PERCENT_DURATION_KEY);
}
assertEquals(100, totalPercentDuration, 0.000001);
}
/////////////
private void validate_g_V_whereXinXcreatedX_count_isX1XX_name_profile(final Traversal traversal, final TraversalMetrics traversalMetrics) {
traversalMetrics.toString(); // ensure no exceptions are thrown
assumeThat("The following assertions apply to TinkerGraph only as provider strategies can alter the steps to not comply with expectations",
graph.getClass().getSimpleName(), equalTo("TinkerGraph"));
assertEquals("There should be 3 top-level metrics.", 3, traversalMetrics.getMetrics().size());
Metrics metrics = traversalMetrics.getMetrics(0);
assertEquals(6, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
assertEquals(6, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
metrics = traversalMetrics.getMetrics(1);
assertEquals(1, metrics.getCount(TraversalMetrics.TRAVERSER_COUNT_ID).longValue());
assertEquals(1, metrics.getCount(TraversalMetrics.ELEMENT_COUNT_ID).longValue());
if (traversal.asAdmin().getStrategies().getStrategy(CountStrategy.class).isPresent()) {
assertEquals("Metrics 1 should have 4 nested metrics.", 4, metrics.getNested().size());
} else {
assertEquals("Metrics 1 should have 3 nested metrics.", 3, metrics.getNested().size());
}
}
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_whereXinXcreatedX_count_isX1XX_name_profile() {
        // The traversal itself is passed along so the validator can inspect its strategies.
        final Traversal<Vertex, TraversalMetrics> traversal = get_g_V_whereXinXcreatedX_count_isX1XX_name_profile();
        printTraversalForm(traversal);
        final TraversalMetrics traversalMetrics = traversal.next();
        validate_g_V_whereXinXcreatedX_count_isX1XX_name_profile(traversal, traversalMetrics);
    }
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_whereXinXcreatedX_count_isX1XX_name_profileXmetricsX() {
        // Side-effect variant; metrics are fetched by METRICS_KEY after full iteration.
        final Traversal<Vertex, String> traversal = get_g_V_whereXinXcreatedX_count_isX1XX_name_profileXmetricsX();
        printTraversalForm(traversal);
        traversal.iterate();
        final TraversalMetrics traversalMetrics = traversal.asAdmin().getSideEffects().get(METRICS_KEY);
        validate_g_V_whereXinXcreatedX_count_isX1XX_name_profile(traversal, traversalMetrics);
    }
/**
* ProfileStrategy callback test. Goal: ensure that a step that implements Profileable gets a callback to setMetrics
*/
// Setup a "mock" step to test the strategy
static public class MockStep extends FlatMapStep<Vertex, Vertex> implements Profiling {
public static boolean callbackCalled = false;
public MockStep(final Traversal.Admin traversal) {
super(traversal);
}
@Override
protected Iterator<Vertex> flatMap(final Traverser.Admin<Vertex> traverser) {
List<Vertex> l = new ArrayList<>();
l.add(traverser.get());
return l.iterator();
}
@Override
public void setMetrics(final MutableMetrics parentMetrics) {
if (parentMetrics != null) {
callbackCalled = true;
parentMetrics.setCount("bogusCount", 100);
}
}
}
    @Test
    @LoadGraphWith(MODERN)
    public void testProfileStrategyCallback() {
        final Traversal<Vertex, TraversalMetrics> t = get_g_V_out_out_profile();
        // Inject the mock step so ProfileStrategy must hand it a MutableMetrics instance.
        MockStep mockStep = new MockStep(t.asAdmin());
        t.asAdmin().addStep(3, mockStep);
        TraversalMetrics traversalMetrics = t.next();
        assertTrue(mockStep.callbackCalled);
        if (!onGraphComputer(t.asAdmin())) {
            // OLTP only: the marker count set by the callback must surface in the metrics.
            assertEquals(100, traversalMetrics.getMetrics(3).getCount("bogusCount").longValue());
        }
    }
    @Test
    @LoadGraphWith(MODERN)
    public void testProfileStrategyCallbackSideEffect() {
        final Traversal<Vertex, Vertex> t = get_g_V_out_out_profileXmetricsX();
        // Inject the mock step so ProfileStrategy must hand it a MutableMetrics instance.
        MockStep mockStep = new MockStep(t.asAdmin());
        t.asAdmin().addStep(3, mockStep);
        t.iterate();
        assertTrue(mockStep.callbackCalled);
        if (!onGraphComputer(t.asAdmin())) {
            // OLTP only: the marker count set by the callback must surface in the metrics.
            final TraversalMetrics traversalMetrics = t.asAdmin().getSideEffects().get(METRICS_KEY);
            assertEquals(100, traversalMetrics.getMetrics(3).getCount("bogusCount").longValue());
        }
    }
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_matchXa_created_b__b_in_count_isXeqX1XXX_selectXa_bX_profile() {
        // Smoke test: profiling a match() traversal must simply complete without error.
        final Traversal<Vertex, TraversalMetrics> traversal = get_g_V_matchXa_created_b__b_in_count_isXeqX1XXX_selectXa_bX_profile();
        printTraversalForm(traversal);
        traversal.iterate();
    }
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_matchXa_created_b__b_in_count_isXeqX1XXX_selectXa_bX_profileXmetricsX() {
        // Smoke test for the side-effect profile variant of the match() traversal.
        final Traversal<Vertex, Map<String, String>> traversal = get_g_V_matchXa_created_b__b_in_count_isXeqX1XXX_selectXa_bX_profileXmetricsX();
        printTraversalForm(traversal);
        traversal.iterate();
    }
    @Test
    @LoadGraphWith(MODERN)
    @IgnoreEngine(TraversalEngine.Type.STANDARD)
    public void g_V_hasLabelXpersonX_pageRank_withXpropertyName_rankX_withXedges_bothEX_rank_profile() {
        // profile() after pageRank() requires two OLAP jobs in one traversal, which is unsolvable.
        final Traversal<Vertex, TraversalMetrics> traversal = get_g_V_hasLabelXpersonX_pageRank_withXpropertyName_rankX_withXedges_bothEX_rank_profile();
        //printTraversalForm(traversal);
        try {
            traversal.iterate();
            fail("Should have tossed an exception because multi-OLAP is unsolvable");
        } catch (Exception ex) {
            // The VerificationException may arrive wrapped, depending on the provider.
            assertTrue(ex instanceof VerificationException || ExceptionUtils.getRootCause(ex) instanceof VerificationException);
        }
    }
    @Test
    @LoadGraphWith(MODERN)
    public void g_V_groupXmX_profile() {
        // group("m") is a side-effect step; the profiled traversal must still produce
        // exactly one TraversalMetrics result and then be exhausted.
        final Traversal<Vertex, TraversalMetrics> traversal = get_g_V_groupXmX_profile();
        printTraversalForm(traversal);
        traversal.next();
        assertFalse(traversal.hasNext());
    }
private static boolean onGraphComputer(final Traversal.Admin<?, ?> traversal) {
return !TraversalHelper.getStepsOfClass(TraversalVertexProgramStep.class, TraversalHelper.getRootTraversal(traversal)).isEmpty();
}
/**
* Traversals
*/
public static class Traversals extends ProfileTest {
@Override
public Traversal<Vertex, TraversalMetrics> get_g_V_out_out_profile() {
return g.V().out().out().profile();
}
@Override
public Traversal<Vertex, TraversalMetrics> get_g_V_repeatXbothX_timesX3X_profile() {
return g.V().repeat(both()).times(3).profile();
}
@Override
public Traversal<Vertex, TraversalMetrics> get_g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profile() {
return g.V().sideEffect(v -> {
try {
Thread.sleep(10);
} catch (final InterruptedException e) {
e.printStackTrace();
}
}).sideEffect(v -> {
try {
Thread.sleep(5);
} catch (final InterruptedException e) {
e.printStackTrace();
}
}).profile();
}
@Override
public Traversal<Vertex, TraversalMetrics> get_g_V_whereXinXcreatedX_count_isX1XX_name_profile() {
return g.V().where(__.in("created").count().is(1l)).<String>values("name").profile();
}
@Override
public Traversal<Vertex, TraversalMetrics> get_g_V_matchXa_created_b__b_in_count_isXeqX1XXX_selectXa_bX_profile() {
return g.V().match(__.as("a").out("created").as("b"), __.as("b").in().count().is(P.eq(1))).<String>select("a", "b").profile();
}
@Override
public Traversal<Vertex, Vertex> get_g_V_out_out_profileXmetricsX() {
return g.V().out().out().profile(METRICS_KEY);
}
@Override
public Traversal<Vertex, Vertex> get_g_V_repeatXbothX_timesX3X_profileXmetricsX() {
return g.V().repeat(both()).times(3).profile(METRICS_KEY);
}
@Override
public Traversal<Vertex, Vertex> get_g_V_sideEffectXThread_sleepX10XX_sideEffectXThread_sleepX5XX_profileXmetricsX() {
return g.V().sideEffect(v -> {
try {
Thread.sleep(10);
} catch (final InterruptedException e) {
e.printStackTrace();
}
}).sideEffect(v -> {
try {
Thread.sleep(5);
} catch (final InterruptedException e) {
e.printStackTrace();
}
}).profile(METRICS_KEY);
}
@Override
public Traversal<Vertex, String> get_g_V_whereXinXcreatedX_count_isX1XX_name_profileXmetricsX() {
return g.V().where(__.in("created").count().is(1l)).<String>values("name").profile(METRICS_KEY);
}
@Override
public Traversal<Vertex, Map<String, String>> get_g_V_matchXa_created_b__b_in_count_isXeqX1XXX_selectXa_bX_profileXmetricsX() {
return g.V().match(__.as("a").out("created").as("b"), __.as("b").in().count().is(P.eq(1))).<String>select("a", "b").profile(METRICS_KEY);
}
@Override
public Traversal<Vertex, TraversalMetrics> get_g_V_hasLabelXpersonX_pageRank_withXpropertyName_rankX_withXedges_bothEX_rank_profile() {
return g.V().hasLabel("person").pageRank().with(PageRank.propertyName, "rank").with(PageRank.edges, __.bothE()).values("rank").profile();
}
@Override
public Traversal<Vertex, TraversalMetrics> get_g_V_groupXmX_profile() {
return g.V().group("m").profile();
}
}
}
| |
/*******************************************************************************
*
* Copyright 2015 Walmart, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*******************************************************************************/
package com.oneops.inductor;
import static com.oneops.cms.util.CmsConstants.QUEUE_TIME;
import static com.oneops.cms.util.CmsConstants.REQUEST_DEQUE_TS;
import static com.oneops.cms.util.CmsConstants.REQUEST_ENQUE_TS;
import static com.oneops.cms.util.CmsConstants.SEARCH_TS_PATTERN;
import static com.oneops.inductor.InductorConstants.ACTION_ORDER_TYPE;
import static com.oneops.inductor.InductorConstants.SEARCH_TS_FORMATS;
import static com.oneops.inductor.InductorConstants.WORK_ORDER_TYPE;
import static org.apache.commons.httpclient.util.DateUtil.formatDate;
import static org.apache.commons.httpclient.util.DateUtil.parseDate;
import com.codahale.metrics.MetricRegistry;
import com.google.gson.Gson;
import com.google.gson.stream.JsonReader;
import com.oneops.cms.domain.CmsWorkOrderSimpleBase;
import com.oneops.cms.execution.Response;
import com.oneops.cms.execution.Result;
import com.oneops.cms.simple.domain.CmsActionOrderSimple;
import com.oneops.cms.simple.domain.CmsWorkOrderSimple;
import com.oneops.cms.util.CmsConstants;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Date;
import java.util.Map;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicInteger;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;
import org.apache.commons.httpclient.util.DateParseException;
import org.apache.commons.httpclient.util.DateUtil;
import org.apache.log4j.Logger;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import org.springframework.stereotype.Component;
/**
 * Listener - consumes from queue by cloud to execute local or remote puppet modules or chef recipes
 * for work or action orders <p> onMessage is mapped to a Spring ListenerContainer.messageListener
 */
@Component
public class Listener implements MessageListener, ApplicationContextAware {

  private static final Logger logger = Logger.getLogger(Listener.class);

  private final Gson gson = new Gson();
  private ApplicationContext applicationContext = null;
  // Number of active work orders currently being processed.
  private AtomicInteger activeThreads = new AtomicInteger(0);
  private Semaphore semaphore = null;
  private MessagePublisher messagePublisher = null;
  private Config config = null;
  private File dataDir = null;
  private WorkOrderExecutor workOrderExecutor;
  private ActionOrderExecutor actionOrderExecutor;
  private MetricRegistry registry;

  @Autowired
  ClassMatchingWoExecutor classMatchingWoExecutor;

  /**
   * allow it to run via cmdline
   */
  public static void main(String[] args) {
    // Bootstrapping the Spring context wires this listener into the JMS container.
    ApplicationContext context = new ClassPathXmlApplicationContext("application-context.xml");
  }

  /**
   * init - configuration / defaults
   */
  public void init() {
    dataDir = new File(config.getDataDir());
    checkFreeSpace();
    logger.info(this);
    // Graceful shutdown: stop consuming new messages, then wait for in-flight
    // work/action orders to drain before letting the JVM exit.
    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
      DefaultMessageListenerContainer listenerContainer = (DefaultMessageListenerContainer) applicationContext
          .getBean("listenerContainer");
      logger.info("Stopping listener container...");
      listenerContainer.stop();
      while (activeThreads.get() > 0) {
        logger.info("Shutdown in progress. sleeping for 10sec. activeThreads: " + activeThreads);
        try {
          Thread.sleep(10000);
        } catch (InterruptedException e) {
          // Deliberately swallowed (no re-interrupt): we keep waiting until all
          // active orders complete, and a set interrupt flag would make every
          // subsequent sleep() throw immediately and busy-spin this loop.
          logger.info("Got InterruptedException, but will still let the activeThreads complete.");
        }
      }
      logger.info("Shutdown done.");
    }));
    File testDir = new File(dataDir, "../test");
    logger.info("Verification test directory created: " + testDir.mkdirs());
  }

  /**
   * check for free space - shutdown listener and gracefully exit if full
   */
  private void checkFreeSpace() {
    long freeMB = dataDir.getFreeSpace() / 1024 / 1024;
    if (freeMB < config.getMinFreeSpaceMB()) {
      DefaultMessageListenerContainer listenerContainer = (DefaultMessageListenerContainer)
          applicationContext.getBean("listenerContainer");
      logger.info("Stopping listener container due to "
          + config.getDataDir() + " free space mb: "
          + freeMB + " ... min_free_space_mb: " + config.getMinFreeSpaceMB());
      listenerContainer.stop();
      while (activeThreads.get() > 0) {
        logger
            .error("Shutdown in progress due " + config.getDataDir() + " free space mb: "
                + freeMB + " ... min_free_space_mb: " + config.getMinFreeSpaceMB()
                + ". sleeping for 10sec. activeThreads: " + activeThreads);
        try {
          // Thread.sleep is static; the old Thread.currentThread().sleep(...) call was
          // misleading and required @SuppressWarnings("static-access").
          Thread.sleep(10000);
        } catch (InterruptedException e) {
          // Deliberately swallowed: keep draining active orders (see init()).
          logger.info(
              "Got InterruptedException, but will still let the activeThreads complete.");
        }
      }
      Runtime.getRuntime().exit(1);
    } else {
      logger.info(config.getDataDir() + " free space mb: " + freeMB);
    }
  }

  /**
   * for unit test setup
   */
  public void setConfig(Config config) {
    this.config = config;
  }

  /**
   * MessageListener mapped in application-context.xml - will deserialize to a WorkOrder
   * (iaas/swdist) or ActionOrder (procedure)
   *
   * @param msg Message
   * @see javax.jms.MessageListener#onMessage(javax.jms.Message)
   */
  public void onMessage(Message msg) {
    try {
      checkFreeSpace();
      activeThreads.getAndIncrement();
      if (msg instanceof TextMessage) {
        String msgText = ((TextMessage) msg).getText();
        final String correlationID = msg.getJMSCorrelationID();
        Map<String, String> responseMsgMap;
        String type = msg.getStringProperty("type");
        CmsWorkOrderSimpleBase wo;
        switch (type) {
          // WorkOrder
          case WORK_ORDER_TYPE: {
            long t = System.currentTimeMillis();
            wo = getWorkOrderOf(msgText, CmsWorkOrderSimple.class);
            wo.putSearchTag("iWoCrtTime", Long.toString(System.currentTimeMillis() - t));
            String logKey = workOrderExecutor.getLogKey(wo);
            logger.info(logKey + " Inductor: " + config.getIpAddr());
            preProcess(wo);
            wo.putSearchTag("rfcAction", wo.getAction());
            // Try the class-matching executor first; fall back to the standard
            // work-order executor when nothing matched.
            Response response = runWithMatchingExecutor(wo);
            if (response == null || response.getResult() == Result.NOT_MATCHED) {
              responseMsgMap = workOrderExecutor.processAndVerify(wo, correlationID);
            } else {
              responseMsgMap = response.getResponseMap();
              postExecTags(wo);
            }
            break;
          }
          // ActionOrder
          case ACTION_ORDER_TYPE: {
            long t = System.currentTimeMillis();
            wo = getWorkOrderOf(msgText, CmsActionOrderSimple.class);
            wo.putSearchTag("iAoCrtTime", Long.toString(System.currentTimeMillis() - t));
            preProcess(wo);
            responseMsgMap = actionOrderExecutor.processAndVerify(wo, correlationID);
            break;
          }
          default:
            logger.error(new IllegalArgumentException("Unknown msg type - " + type));
            msg.acknowledge();
            return;
        }

        // Controller will process this message. The correlation id is set once here
        // for every branch (it was previously also set redundantly above).
        responseMsgMap.put("correlationID", correlationID);
        responseMsgMap.put("type", type);

        long startTime = System.currentTimeMillis();
        if (!correlationID.equals("test")) {
          messagePublisher.publishMessage(responseMsgMap);
        }
        long duration = System.currentTimeMillis() - startTime;
        logger.debug("Send message took:" + duration + "ms");
        // ack message
        msg.acknowledge();
      }
    } catch (JMSException | SecurityException | IOException | IllegalArgumentException e) {
      logger.error("Error occurred in processing message", e);
    } finally {
      // Decrement the total number of active threads consumed by 1
      activeThreads.getAndDecrement();
      clearStateFile();
    }
  }

  /**
   * Runs the work order through the class-matching executor (verify-mode aware).
   *
   * @return the executor response; Result.NOT_MATCHED means no executor claimed it
   */
  private Response runWithMatchingExecutor(CmsWorkOrderSimpleBase wo) {
    preExecTags(wo);
    Response response;
    if (config.isVerifyMode()) {
      response = classMatchingWoExecutor.executeAndVerify((CmsWorkOrderSimple) wo, config.getDataDir());
    } else {
      response = classMatchingWoExecutor.execute((CmsWorkOrderSimple) wo, config.getDataDir());
    }
    return response;
  }

  /** Common pre-processing for both work and action orders. */
  private void preProcess(CmsWorkOrderSimpleBase wo) {
    setStateFile(wo);
    setQueueTime(wo);
  }

  /** Tags the order with this inductor's ip before execution. */
  private void preExecTags(CmsWorkOrderSimpleBase wo) {
    wo.putSearchTag("inductor", config.getIpAddr());
  }

  /** Tags the order with the response enqueue timestamp after execution. */
  private void postExecTags(CmsWorkOrderSimpleBase wo) {
    wo.putSearchTag(
        CmsConstants.RESPONSE_ENQUE_TS, DateUtil.formatDate(new Date(), SEARCH_TS_PATTERN));
  }

  /** Deserializes the JMS text payload into the given order class. */
  private CmsWorkOrderSimpleBase getWorkOrderOf(String msgText, Class c) {
    CmsWorkOrderSimpleBase wo;
    JsonReader reader = new JsonReader(new StringReader(msgText));
    // Lenient parsing: tolerate minor JSON deviations in the payload.
    reader.setLenient(true);
    wo = gson.fromJson(reader, c);
    return wo;
  }

  /**
   * set state file by thread id
   */
  private void setStateFile(CmsWorkOrderSimpleBase wo) {
    String filename = getStateFileName();
    String content = System.currentTimeMillis() + " "
        + wo.getClassName() + "::"
        + wo.getAction() + " "
        + wo.getNsPath() + System.lineSeparator();
    writeStateFile(filename, content);
  }

  /** State file path is per consumer thread. */
  private String getStateFileName() {
    return config.getDataDir() + "/state-" + Thread.currentThread().getId();
  }

  /**
   * clear state file by thread id
   */
  private void clearStateFile() {
    String filename = getStateFileName();
    String content = "idle" + System.lineSeparator();
    content += System.currentTimeMillis();
    writeStateFile(filename, content);
  }

  /** Writes (overwrites) the given state file; IO failures are logged, not thrown. */
  private void writeStateFile(String filename, String content) {
    // try-with-resources closes the writer; the old explicit close() was redundant.
    try (BufferedWriter bw = Files.newBufferedWriter(Paths.get(filename))) {
      bw.write(content);
      logger.debug("wrote state file: " + filename);
    } catch (IOException e) {
      logger.error("could not write file: " + filename + " msg:" + e.getMessage());
    }
  }

  /**
   * Set the queue time in the wo/ao for search/analytics
   */
  private <T> void setQueueTime(CmsWorkOrderSimpleBase<T> wo) {
    String totalTime, requestDequeTs;
    try {
      requestDequeTs = formatDate(new Date(), SEARCH_TS_PATTERN);
      wo.putSearchTag(REQUEST_DEQUE_TS, requestDequeTs);
      totalTime = String.valueOf(getTimeDiff(wo) / 1000.0);
      wo.getSearchTags().put(QUEUE_TIME, totalTime);
    } catch (Exception e) {
      // Best effort only - search tagging must never fail an order.
      logger.error("Exception occurred while setting queue time " + e);
    }
  }

  /** Milliseconds between the request's enqueue timestamp and now. */
  private <T> long getTimeDiff(CmsWorkOrderSimpleBase<T> wo) throws DateParseException {
    String currentDate = formatDate(new Date(), SEARCH_TS_PATTERN);
    long currentTime = parseDate(currentDate, SEARCH_TS_FORMATS).getTime();
    long requestEnqueTime = parseDate(wo.getSearchTags().get(REQUEST_ENQUE_TS), SEARCH_TS_FORMATS)
        .getTime();
    return currentTime - requestEnqueTime;
  }

  /**
   * setter for spring to wire the MessagePublisher
   */
  public void setMessagePublisher(MessagePublisher mp) {
    this.messagePublisher = mp;
  }

  public InductorStatus getStatus() {
    InductorStatus stat = new InductorStatus();
    stat.setQueueBacklog(0);
    stat.setQueueName(config.getInQueue());
    return stat;
  }

  @Override
  public void setApplicationContext(ApplicationContext ac)
      throws BeansException {
    this.applicationContext = ac;
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder("Inductor{ ");
    sb.append(config);
    sb.append(", semaphore=").append(semaphore);
    sb.append('}');
    return sb.toString();
  }

  public void setWorkOrderExecutor(WorkOrderExecutor workOrderExecutor) {
    this.workOrderExecutor = workOrderExecutor;
  }

  public void setActionOrderExecutor(ActionOrderExecutor actionOrderExecutor) {
    this.actionOrderExecutor = actionOrderExecutor;
  }

  public void setRegistry(MetricRegistry registry) {
    this.registry = registry;
  }

  public MetricRegistry getRegistry() {
    return registry;
  }
}
| |
/*
* Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
*******************************************************************************
* Copyright (C) 1996-2014, International Business Machines Corporation and *
* others. All Rights Reserved. *
*******************************************************************************
*/
package jdk.internal.icu.text;
import jdk.internal.icu.impl.CharacterIteratorWrapper;
import jdk.internal.icu.impl.ReplaceableUCharacterIterator;
import jdk.internal.icu.impl.UCharacterProperty;
import java.text.CharacterIterator;
/**
* Abstract class that defines an API for iteration on text objects.This is an
* interface for forward and backward iteration and random access into a text
* object. Forward iteration is done with post-increment and backward iteration
* is done with pre-decrement semantics, while the
* <code>java.text.CharacterIterator</code> interface methods provided forward
* iteration with "pre-increment" and backward iteration with pre-decrement
* semantics. This API is more efficient for forward iteration over code points.
* The other major difference is that this API can do both code unit and code point
* iteration, <code>java.text.CharacterIterator</code> can only iterate over
* code units and is limited to BMP (0 - 0xFFFF)
* @author Ram
* @stable ICU 2.4
*/
public abstract class UCharacterIterator
implements Cloneable {
    /**
     * Protected default constructor for the subclasses
     * @stable ICU 2.4
     */
    protected UCharacterIterator(){
    }

    /**
     * Indicator that we have reached the ends of the UTF16 text.
     * Returned by the iteration methods (e.g. <code>next()</code>) once the
     * index moves past the limit of the text.
     * Moved from UForwardCharacterIterator.java
     * @stable ICU 2.4
     */
    public static final int DONE = -1;
    // static final methods ----------------------------------------------------
    /**
     * Returns a <code>UCharacterIterator</code> object given a
     * source string.
     * @param source a string
     * @return UCharacterIterator object
     * @exception IllegalArgumentException if the argument is null
     * @stable ICU 2.4
     */
    public static final UCharacterIterator getInstance(String source){
        // NOTE(review): the null check promised above is delegated to the wrapped
        // implementation - confirm it throws IllegalArgumentException for null.
        return new ReplaceableUCharacterIterator(source);
    }
    /**
     * Returns a <code>UCharacterIterator</code> object given a
     * source StringBuffer.
     * @param source a string buffer of UTF-16 code units
     * @return UCharacterIterator object
     * @exception IllegalArgumentException if the argument is null
     * @stable ICU 2.4
     */
    public static final UCharacterIterator getInstance(StringBuffer source){
        // NOTE(review): null handling delegated to the wrapped implementation.
        return new ReplaceableUCharacterIterator(source);
    }
    /**
     * Returns a <code>UCharacterIterator</code> object given a
     * CharacterIterator.
     * @param source a valid CharacterIterator object.
     * @return UCharacterIterator object
     * @exception IllegalArgumentException if the argument is null
     * @stable ICU 2.4
     */
    public static final UCharacterIterator getInstance(CharacterIterator source){
        // NOTE(review): null handling delegated to the wrapper implementation.
        return new CharacterIteratorWrapper(source);
    }
// public methods ----------------------------------------------------------
/**
* Returns the length of the text
* @return length of the text
* @stable ICU 2.4
*/
public abstract int getLength();
/**
* Gets the current index in text.
* @return current index in text.
* @stable ICU 2.4
*/
public abstract int getIndex();
/**
* Returns the UTF16 code unit at index, and increments to the next
* code unit (post-increment semantics). If index is out of
* range, DONE is returned, and the iterator is reset to the limit
* of the text.
* @return the next UTF16 code unit, or DONE if the index is at the limit
* of the text.
* @stable ICU 2.4
*/
public abstract int next();
/**
* Returns the code point at index, and increments to the next code
* point (post-increment semantics). If index does not point to a
* valid surrogate pair, the behavior is the same as
* <code>next()</code>. Otherwise the iterator is incremented past
* the surrogate pair, and the code point represented by the pair
* is returned.
* @return the next codepoint in text, or DONE if the index is at
* the limit of the text.
* @stable ICU 2.4
*/
public int nextCodePoint(){
int ch1 = next();
if(UTF16.isLeadSurrogate((char)ch1)){
int ch2 = next();
if(UTF16.isTrailSurrogate((char)ch2)){
return UCharacterProperty.getRawSupplementary((char)ch1,
(char)ch2);
}else if (ch2 != DONE) {
// unmatched surrogate so back out
previous();
}
}
return ch1;
}
/**
* Decrement to the position of the previous code unit in the
* text, and return it (pre-decrement semantics). If the
* resulting index is less than 0, the index is reset to 0 and
* DONE is returned.
* @return the previous code unit in the text, or DONE if the new
* index is before the start of the text.
* @stable ICU 2.4
*/
public abstract int previous();
/**
* Retreat to the start of the previous code point in the text,
* and return it (pre-decrement semantics). If the index is not
* preceeded by a valid surrogate pair, the behavior is the same
* as <code>previous()</code>. Otherwise the iterator is
* decremented to the start of the surrogate pair, and the code
* point represented by the pair is returned.
* @return the previous code point in the text, or DONE if the new
* index is before the start of the text.
* @stable ICU 2.4
*/
public int previousCodePoint(){
int ch1 = previous();
if(UTF16.isTrailSurrogate((char)ch1)){
int ch2 = previous();
if(UTF16.isLeadSurrogate((char)ch2)){
return UCharacterProperty.getRawSupplementary((char)ch2,
(char)ch1);
}else if (ch2 != DONE) {
//unmatched trail surrogate so back out
next();
}
}
return ch1;
}
/**
* Sets the index to the specified index in the text.
* @param index the index within the text.
* @exception IndexOutOfBoundsException is thrown if an invalid index is
* supplied
* @stable ICU 2.4
*/
public abstract void setIndex(int index);
/**
* Sets the current index to the start.
* @stable ICU 2.4
*/
public void setToStart() {
setIndex(0);
}
/**
* Fills the buffer with the underlying text storage of the iterator
* If the buffer capacity is not enough a exception is thrown. The capacity
* of the fill in buffer should at least be equal to length of text in the
* iterator obtained by calling <code>getLength()</code>.
* <b>Usage:</b>
*
* <pre>{@code
* UChacterIterator iter = new UCharacterIterator.getInstance(text);
* char[] buf = new char[iter.getLength()];
* iter.getText(buf);
*
* OR
* char[] buf= new char[1];
* int len = 0;
* for(;;){
* try{
* len = iter.getText(buf);
* break;
* }catch(IndexOutOfBoundsException e){
* buf = new char[iter.getLength()];
* }
* }
* }</pre>
*
* @param fillIn an array of chars to fill with the underlying UTF-16 code
* units.
* @param offset the position within the array to start putting the data.
* @return the number of code units added to fillIn, as a convenience
* @exception IndexOutOfBoundsException exception if there is not enough
* room after offset in the array, or if offset < 0.
* @stable ICU 2.4
*/
public abstract int getText(char[] fillIn, int offset);
/**
* Convenience override for <code>getText(char[], int)</code> that provides
* an offset of 0.
* @param fillIn an array of chars to fill with the underlying UTF-16 code
* units.
* @return the number of code units added to fillIn, as a convenience
* @exception IndexOutOfBoundsException exception if there is not enough
* room in the array.
* @stable ICU 2.4
*/
public final int getText(char[] fillIn) {
return getText(fillIn, 0);
}
/**
* Convenience method for returning the underlying text storage as a string
* @return the underlying text storage in the iterator as a string
* @stable ICU 2.4
*/
public String getText() {
char[] text = new char[getLength()];
getText(text);
return new String(text);
}
/**
* Moves the current position by the number of code points
* specified, either forward or backward depending on the sign of
* delta (positive or negative respectively). If the current index
* is at a trail surrogate then the first adjustment is by code
* unit, and the remaining adjustments are by code points. If the
* resulting index would be less than zero, the index is set to
* zero, and if the resulting index would be greater than limit,
* the index is set to limit.
* @param delta the number of code units to move the current index.
* @return the new index
* @exception IndexOutOfBoundsException is thrown if an invalid delta is
* supplied
* @stable ICU 2.4
*
*/
public int moveCodePointIndex(int delta){
if(delta>0){
while(delta>0 && nextCodePoint() != DONE){delta--;}
}else{
while(delta<0 && previousCodePoint() != DONE){delta++;}
}
if(delta!=0){
throw new IndexOutOfBoundsException();
}
return getIndex();
}
/**
* Creates a copy of this iterator, independent from other iterators.
* If it is not possible to clone the iterator, returns null.
* @return copy of this iterator
* @stable ICU 2.4
*/
public Object clone() throws CloneNotSupportedException{
return super.clone();
}
}
| |
package net.pbrennan.Lander_2009;
// --------------------------------------------------------------------
//
// TestLMRunner2
//
// invoked as:
// java [-classpath <classpath>] TestLMRunner2 [-mission=n] [-level=m]
//
// --------------------------------------------------------------------
import javax.swing.JFrame;
import net.pbrennan.Lander_2009.LunarSpacecraft2D.Status;
import java.awt.AWTEvent;
import java.awt.event.WindowEvent;
import java.awt.Color;
import java.awt.FlowLayout;
public class TestLMRunner2 extends JFrame implements Runnable
{
    /** Serialization identifier required by the JFrame superclass contract. */
    private static final long serialVersionUID = 2959138309093055826L;

    /**
     * Desired frame rate
     *
     * NOTE: These two MUST be kept in sync with each other!
     */
    public static final int TARGET_FRAME_RATE = 60;

    // Integer division already truncates, so the previous
    // (int)(Math.floor(...)) wrapper was redundant; value is unchanged (16).
    private static final long gSleepTimeMS = 1000L / TARGET_FRAME_RATE;
    private static final double gTickTimeSeconds = gSleepTimeMS / 1000.0;

    /**
     * Entry point: parses arguments, constructs the frame, and starts the
     * game loop thread.
     * @param arg command line arguments (see parseArguments for syntax)
     */
    public static void main(String[] arg)
    {
        TestLMRunner2 instance = new TestLMRunner2(arg);
        System.out.println("gSleepTimeMS = " + gSleepTimeMS);
        System.out.println("gTickTimeSeconds = " + gTickTimeSeconds);
        instance.start();
    }

    // Set when any argument fails to parse; triggers usage output in
    // parseArguments().
    private boolean printHelpAndExit = false;

    /**
     * Parses an argument of the form "-name=value" and returns the integer
     * value. On any parse failure, sets printHelpAndExit and returns -1.
     * @param s the full argument string, e.g. "-mission=3"
     * @return the parsed integer, or -1 on failure
     */
    public int parseIntegerArgument(String s)
    {
        int rv = -1;
        String[] sarray = s.split("=");
        if (sarray.length != 2)
        {
            printHelpAndExit = true;
            return -1;
        }
        try
        {
            rv = Integer.parseInt(sarray[1]);
        }
        catch (NumberFormatException e)
        {
            printHelpAndExit = true;
            return -1;
        }
        return rv;
    }

    // Window geometry requested on the command line; -1 means "unset".
    private int userWidth = -1;
    private int userHeight = -1;

    /**
     * Parses an argument of the form "-geometry=WIDTHxHEIGHT", clamping both
     * dimensions to the limits defined by SideView4.
     * @param s the full argument string
     * @return true if the argument parsed successfully, false otherwise
     */
    private boolean parseGeometryArgument(String s)
    {
        String[] sarray1 = s.split("=");
        if (sarray1.length != 2)
        {
            return false;
        }
        String[] sarray2 = sarray1[1].split("x");
        if (sarray2.length != 2)
        {
            return false;
        }
        try
        {
            userWidth = Integer.parseInt(sarray2[0]);
            if (userWidth < SideView4.MIN_WIDTH)
                userWidth = SideView4.MIN_WIDTH;
            else if (userWidth > SideView4.MAX_WIDTH)
                userWidth = SideView4.MAX_WIDTH;
            userHeight = Integer.parseInt(sarray2[1]);
            if (userHeight < SideView4.MIN_HEIGHT)
                userHeight = SideView4.MIN_HEIGHT;
            else if (userHeight > SideView4.MAX_HEIGHT)
                userHeight = SideView4.MAX_HEIGHT;
            return true;
        }
        catch (NumberFormatException e)
        {
            printHelpAndExit = true;
            return false;
        }
    }

    /**
     * Parses all command line arguments, filling in m_missionNum, m_level,
     * and the requested geometry. Prints usage and exits the JVM if any
     * argument is malformed.
     * @param args the command line arguments
     */
    public void parseArguments(String[] args)
    {
        // Set defaults here
        m_missionNum = 1;
        m_level = 0;
        for (int i=0 ; i<args.length ; ++i)
        {
            if (args[i].startsWith("-mission="))
            {
                m_missionNum = parseIntegerArgument(args[i]);
                System.out.println("Mission Number set to " + m_missionNum);
            }
            else if (args[i].startsWith("-level="))
            {
                m_level = parseIntegerArgument(args[i]);
                System.out.println("Level set to " + m_level);
            }
            else if (args[i].startsWith("-geometry="))
            {
                if (!parseGeometryArgument(args[i]))
                    printHelpAndExit = true;
            }
            else
            {
                printHelpAndExit = true;
            }
        }
        if (printHelpAndExit)
        {
            System.out.println("Usage:\njava TestLMRunner2 [-mission=n] [-level=m] [-geometry=<width>x<height>]");
            System.out.println("   width must be >= " + SideView4.MIN_WIDTH + " and <= " + SideView4.MAX_WIDTH);
            System.out.println("   height must be >= " + SideView4.MIN_HEIGHT + " and <= " + SideView4.MAX_HEIGHT);
            System.exit(0);
        }
    }

    /**
     * Resets the simulation to the start of the selected mission and clears
     * any particle effects.
     */
    public void resetScenario()
    {
        m_lm.Initialize(m_missions.getMission(m_missionNum));
        m_sideview.resetParticles();
    }

    /**
     * Builds the frame, the simulation objects, the view, the controls, and
     * the sound manager, and wires them together via listeners.
     * @param arg command line arguments
     */
    public TestLMRunner2(String [] arg)
    {
        parseArguments(arg);
        setLayout(new FlowLayout(FlowLayout.LEFT,3,3));
        setBackground(Color.darkGray);
        m_thread = new Thread(this);
        m_thread.setPriority(Thread.MAX_PRIORITY);
        m_data = new LMInstrumentData();
        m_sideview = new SideView4(userWidth,userHeight);
        m_data.addListener(m_sideview);
        LMEventSource.getInstance().addListener(m_sideview);
        System.out.println("creating a LMRunner...");
        m_runner = new LMRunner();
        m_runner.setTimeFactor(1);
        System.out.println("creating a LunarSpacecraft2D...");
        m_lm = new LunarSpacecraft2D();
        m_runner.setLM(m_lm);
        System.out.println("creating a Terrain...");
        m_lm.SetTerrain(new Terrain());
        m_sideview.SetTerrain(m_lm.GetTerrain());
        m_missions = new MissionCollection();
        // TODO: Allow the user to select a target interactively.
        // TODO: The setting of the target also drives setting the
        //       DOI cue and the PDI cue.
        //m_lm.SetTargetLong(Math.toRadians(252.0));
        m_controls = new LMControls();
        addKeyListener(m_controls);
        m_sideview.addKeyListener(m_controls);
        m_sideview.addKeyBindingsString(m_controls.getKeyBindingsDescription2(false));
        m_soundMgr = new SoundManager();
        m_data.addListener(m_soundMgr);
        LMEventSource.getInstance().addListener(m_soundMgr);
        enableEvents(AWTEvent.WINDOW_EVENT_MASK);
        setTitle("LM Runner Test");
        setResizable(false);
    }

    /**
     * Exits the application when the window is closing.
     *
     * Fix: the previous version did not call super.processWindowEvent(),
     * which silently suppressed delivery to any WindowListeners registered
     * on this frame — the superclass call is required when events are
     * enabled via enableEvents().
     */
    @Override
    public void processWindowEvent(WindowEvent event)
    {
        super.processWindowEvent(event);
        if (event.getID() == WindowEvent.WINDOW_CLOSING)
        {
            System.exit(0);
        }
    }

    /**
     * Initializes the scenario, makes the window visible, and starts the
     * game loop thread.
     */
    public void start()
    {
        resetScenario();
        add(m_sideview);
        pack();
        setVisible(true);
        if (m_thread != null)
            m_thread.start();
    }

    /**
     * The game loop: polls the controls, advances the physics model by one
     * fixed tick, pushes instrument data to the listeners, and repaints the
     * view, sleeping between frames to approximate TARGET_FRAME_RATE.
     */
    @Override
    public void run()
    {
        try
        {
            boolean help_lastFrame = false;
            boolean help_thisFrame = false;
            boolean paused_lastFrame = false;
            boolean paused_thisFrame = false;
            while (true)
            {
                // Feed the current control inputs into the spacecraft model.
                m_lm.SetRotationAccel(Math.toRadians(m_controls.getCommandedYawRate()));
                m_lm.SetTranslationAccel(m_controls.getCommandedRCSRate());
                m_lm.SetThrottleCommand(m_controls.getThrottleCommand());
                m_lm.SetThrottleRate(m_controls.getCommandedThrottleRate());
                if (m_controls.GetToggleRCSRotationMode())
                {
                    m_lm.ToggleRCSRotationMode();
                }
                if (m_controls.GetCycleAutopilotMode())
                {
                    m_lm.CycleAutopilotMode();
                }
                if (m_controls.GetToggleAutopilot())
                {
                    m_lm.SetAutopilotOn(!(m_lm.GetAutopilotOn()));
                }
                if (m_controls.getPrintStatus())
                {
                    System.out.println(m_lm.MissionState());
                }
                m_soundMgr.setMute(m_controls.getPaused());
                paused_lastFrame = paused_thisFrame;
                paused_thisFrame = m_controls.getPaused();
                help_lastFrame = help_thisFrame;
                help_thisFrame = m_controls.getHelp();
                m_runner.setPaused(paused_thisFrame);
                m_runner.setTimeFactor(m_controls.getTimeAcceleration());
                if (m_controls.getResetInterfaceState())
                {
                    resetScenario();
                }
                m_runner.tick(gTickTimeSeconds); // TODO: Configurable frame rate
                if (!paused_lastFrame)
                {
                    m_data = m_runner.getInstrumentData(m_data);
                    m_data.SetRadarOn(m_controls.GetRadarOn());
                    m_data.send();
                    m_sideview.setScalingMode(m_controls.GetScalingMode());
                    if (m_lm.GetStatus() == Status.Exploding)
                    {
                        // Spawn the explosion particles biased along the
                        // terrain normal, expressed in the ship's frame.
                        double terrainNormalAngle = m_lm.GetTerrainNormalAngle();
                        double currentTheta = m_lm.GetTheta();
                        double offset = Math.PI * 0.5;
                        System.out.println("Terrain Normal = " + terrainNormalAngle);
                        System.out.println("Current Theta = " + currentTheta);
                        System.out.println("offset = " + offset);
                        m_sideview.m_particles.spawnWCentralAngle(
                                terrainNormalAngle - currentTheta + offset ,
                                0.0, 0.0, 1.0,
                                0.0, 0.0,
                                300.0, 30.0);
                        // Transition to Dead so the explosion fires once.
                        m_lm.SetStatus(Status.Dead);
                    }
                    m_sideview.setHelpDisplay(help_thisFrame);
                    m_sideview.repaint();
                }
                Thread.sleep(gSleepTimeMS); // TODO: Configurable frame rate
            }
        }
        catch (InterruptedException e)
        {
            // Restore the interrupt flag so callers can observe it; the
            // previous version swallowed the interruption entirely.
            Thread.currentThread().interrupt();
        }
        catch (Exception e)
        {
            // Previously any exception ended the loop silently; report it so
            // failures are at least diagnosable.
            System.err.println("Game loop terminated: " + e);
        }
    }

    // Game options
    private int m_missionNum;            // The scenario to load
    private int m_level;                 // The difficulty level.

    // Game objects to manage.
    private Thread m_thread;             // The main thread.
    private SideView4 m_sideview;        // The side view of the action, the "main window"
    private SoundManager m_soundMgr;
    private MissionCollection m_missions; // The missions.
    private LMInstrumentData m_data;     // The data which feeds the instruments
    private LMRunner m_runner;           // The object which runs the physics loop in its own thread.
    private LMControls m_controls;       // The control state, gets input and puts it into a form to be used.
    private LunarSpacecraft2D m_lm;      // The player's ship.
}
| |
package io.github.LonelyNeptune.HorizonShips;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.configuration.file.FileConfiguration;
import com.sk89q.worldedit.data.DataException;
// This class contains all the information pertaining to a ship. It is controlled by a ShipHandler.
// This class contains all the information pertaining to a ship. It is controlled by a ShipHandler.
// All mutators write through to the backing FileConfiguration so the in-memory state and the
// ship data file stay in sync.
class Ship
{
    private FileConfiguration data;    // Backing configuration the ship persists itself into.
    private String name;
    private Destination destination;   // Destination the ship is currently located at.
    private int dock;                  // ID of the dock currently occupied, within 'destination'.
    private int fuel;
    private boolean broken;
    private String repairItem;         // Item name required to repair the ship.
    private boolean consumePart;       // Whether repairItem is consumed on repair.
    private List<UUID> pilots = new ArrayList<>();
    private int length;
    private int width;
    private int height;
    private UUID owner;                // May be null if the ship has no owner on file.

    // Constructor for fetching an existing ship from the data file.
    // Throws IllegalArgumentException if no ship with the given name exists.
    Ship(FileConfiguration data, String name)
    {
        String path = "ships." + name + ".";
        if (data.getConfigurationSection("ships." + name) == null) //Ship doesn't exist
            throw new IllegalArgumentException("Ship does not exist.");
        this.data = data;
        this.name = name;
        destination = new Destination(data, data.getString(path + "dock.destination"), true);
        dock = data.getInt(path + "dock.id");
        fuel = data.getInt(path + "fuel");
        broken = data.getBoolean(path + "broken");
        repairItem = data.getString(path + "repairItem");
        consumePart = data.getBoolean(path + "consumePart");
        List<String> pilotsString = data.getStringList(path + "pilots");
        for (String s: pilotsString)
            pilots.add(UUID.fromString(s));
        length = data.getInt(path + "length");
        width = data.getInt(path + "width");
        height = data.getInt(path + "height");
        if (data.getString(path + "owner") != null)
            owner = UUID.fromString(data.getString(path + "owner"));
    }

    // Constructor for creating an entirely new ship. Measures the ship's bounding box, parks it
    // in a temporary dock, and saves its schematic to disk.
    Ship(FileConfiguration data, String name, Location min, Location max) throws DataException, IOException
    {
        this.data = data;
        String path = "ships." + name + ".";
        setName(name);
        setFuel(10);
        setBroken(false);
        setLength(max.getBlockX() - min.getBlockX());
        setWidth(max.getBlockZ() - min.getBlockZ());
        setHeight(max.getBlockY() - min.getBlockY());
        //Try to make a new temp destination. If it already exists that's fine.
        try { destination = new Destination(data, "temp", false); }
        catch (IllegalArgumentException e) { destination = new Destination(data, "temp", true); }
        Dock tempDock = destination.addDock(min, length, height, width);
        tempDock.updateShipName(name);
        // BUGFIX: was 'path + ".dock.destination"', but 'path' already ends with a dot, so the
        // value was written under "ships.<name>..dock.destination" and the loading constructor
        // (which reads "ships.<name>.dock.destination") could never find it.
        data.set(path + "dock.destination", destination.getName());
        data.set(path + "dock.id", tempDock.getID());
        SchematicManager sm = new SchematicManager(min.getWorld());
        sm.saveSchematic(min, max, name + "\\ship");
    }

    // deleteShip() removes all the information about the ship and its destinations. The instance or its destinations
    // should NOT continue to be used after this is called.
    void deleteShip()
    {
        data.set("ships." + name, null);
        //Delete ship schematic
        SchematicManager sm = new SchematicManager(Bukkit.getWorld("world"));
        sm.deleteSchematic(name + "\\");
    }

    // getDock() returns the dock that the ship is currently inhabiting.
    Dock getDock()
    {
        return destination.getDock(dock);
    }

    // setDock() sets the dock that the ship is currently inhabiting, releasing the old dock and
    // removing it entirely if it was a temporary one.
    void setDock(Dock dock) throws IllegalArgumentException
    {
        Dock oldDock = destination.getDock(this.dock);

        //Keep the ship name updated on the dock so the ship belonging to it can easily be found.
        oldDock.updateShipName(null);
        dock.updateShipName(name);

        //If the old dock was temporary it should be removed.
        if (oldDock.getDestination().equalsIgnoreCase("temp"))
        {
            Destination destination;
            try { destination = new Destination(data, "temp", false); }
            catch (IllegalArgumentException e) { destination = new Destination(data, "temp", true); }
            destination.getDocks().remove(oldDock);
            oldDock.delete();
        }

        //Set the dock
        this.dock = dock.getID();
        this.destination = new Destination(data, dock.getDestination(), true);
        data.set("ships." + name + ".dock.destination", this.destination.getName());
        data.set("ships." + name + ".dock.id", this.dock);
    }

    // getDestination() returns the destination at which the ship is currently located.
    Destination getDestination()
    {
        return destination;
    }

    // setDestination() sets the name of the destination at which the ship is currently located.
    // NOTE(review): unlike the other setters, this does not write through to 'data'; callers are
    // presumably expected to follow up with setDock() — confirm before relying on it alone.
    void setDestination(Destination destination)
    {
        this.destination = destination;
    }

    // isBroken() returns true if the ship is broken, false otherwise.
    boolean isBroken()
    {
        return broken;
    }

    // setBroken() sets the broken status of the ship.
    void setBroken(boolean broken)
    {
        this.broken = broken;
        data.set("ships." + name + ".broken", broken);
    }

    // getLength() returns the length of the ship.
    int getLength()
    {
        return length;
    }

    // setLength() sets the length of the ship.
    private void setLength(int length)
    {
        this.length = length;
        data.set("ships." + name + ".length", length);
    }

    // getWidth() returns the width of the ship.
    int getWidth()
    {
        return width;
    }

    // setWidth() sets the width of the ship.
    private void setWidth(int width)
    {
        this.width = width;
        data.set("ships." + name + ".width", width);
    }

    // getHeight() returns the height of the ship.
    int getHeight()
    {
        return height;
    }

    // setHeight() sets the height of the ship.
    private void setHeight(int height)
    {
        this.height = height;
        data.set("ships." + name + ".height", height);
    }

    // getFuel() returns the fuel level of the ship.
    int getFuel()
    {
        return fuel;
    }

    // setFuel() sets the fuel level of the ship.
    void setFuel(int fuel)
    {
        this.fuel = fuel;
        data.set("ships." + name + ".fuel", fuel);
    }

    // reduceFuel() reduces the fuel level by one.
    void reduceFuel()
    {
        data.set("ships." + name + ".fuel", --fuel);
    }

    // getName() returns the name of the ship.
    String getName()
    {
        return name;
    }

    // setName() sets the name of the ship. To be used only while renaming ships. Setting the name without transferring
    // all data will result in lost ship data.
    private void setName(String newName)
    {
        name = newName;
    }

    // getRepairItem() returns the item required to repair the ship.
    String getRepairItem()
    {
        return repairItem;
    }

    // setRepairItem() sets the item required to repair the ship.
    void setRepairItem(String item)
    {
        this.repairItem = item;
        data.set("ships." + name + ".repairItem", item);
    }

    // getConsumePart() returns whether the item required to repair the ship is consumed upon repair
    boolean getConsumePart()
    {
        return consumePart;
    }

    // setConsumePart() sets whether the item required to repair the ship is consumed upon repair.
    void setConsumePart(boolean consumePart)
    {
        this.consumePart = consumePart;
        data.set("ships." + name + ".consumePart", consumePart);
    }

    // getPilots() returns a list of the UUIDs of the players who are permitted to fly the ship.
    List<UUID> getPilots()
    {
        return pilots;
    }

    // isPilot() checks if the Player provided is allowed to pilot the ship.
    // NOTE(review): this re-reads the pilot list from 'data' rather than using the cached
    // 'pilots' field the other pilot methods maintain; the two should stay in sync via
    // addPilot()/removePilot(), but confirm before consolidating.
    boolean isPilot(UUID player)
    {
        List <String> pilots = data.getStringList("ships." + name + ".pilots");

        for (String p : pilots)
            if (player.toString().equalsIgnoreCase(p))
                return true;
        return false;
    }

    // addPilot() adds the uuid given to the list of permitted pilots for the ship.
    void addPilot(UUID uuid)
    {
        pilots.add(uuid);
        List<String> pilotStrings = new ArrayList<>();
        for (UUID u: pilots)
            pilotStrings.add(u.toString());
        data.set("ships." + name + ".pilots", pilotStrings);
    }

    // removePilot() removes the uuid given from the list of permitted pilots for the ship.
    void removePilot(UUID uuid)
    {
        pilots.remove(uuid);
        List<String> pilotStrings = new ArrayList<>();
        for (UUID u: pilots)
            pilotStrings.add(u.toString());
        data.set("ships." + name + ".pilots", pilotStrings);
    }

    // getOwner() returns the UUID of the player who is considered to own the ship.
    UUID getOwner()
    {
        return owner;
    }

    // setOwner() sets the UUID of the player who is considered to own the ship.
    void setOwner(UUID uuid)
    {
        owner = uuid;
        data.set("ships." + name + ".owner", uuid.toString());
    }

    // rename() takes all of the data under the old ship name and transfers it to a new ship name.
    void rename(String newName)
    {
        String path = "ships." + newName + ".";
        data.set(path + "fuel", fuel);
        data.set(path + "broken", broken);
        data.set(path + "length", length);
        data.set(path + "width", width);
        data.set(path + "height", height);
        data.set(path + "consumePart", consumePart);
        data.set(path + "repairItem", repairItem);
        data.set(path + "dock.destination", destination.getName());
        data.set(path + "dock.id", dock);
        if (owner != null)
            data.set(path + "owner", owner.toString());
        List<String> pilotStrings = new ArrayList<>();
        for (UUID p: pilots)
            pilotStrings.add(p.toString());
        data.set(path + "pilots", pilotStrings);
        // Remove the old entry.
        // NOTE(review): the trailing separator in "ships." looks suspicious for
        // getConfigurationSection(); verify it resolves to the "ships" section as intended.
        data.getConfigurationSection("ships.").set(name, null);
        name = newName;
    }

    // exists() returns true if the ship exists on file, false otherwise.
    boolean exists()
    {
        return name != null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.datastreamer;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import javax.cache.CacheException;
import javax.cache.expiry.ExpiryPolicy;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteInterruptedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.cluster.ClusterTopologyException;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.events.DiscoveryEvent;
import org.apache.ignite.events.Event;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteClientDisconnectedCheckedException;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException;
import org.apache.ignite.internal.cluster.ClusterTopologyServerNotFoundException;
import org.apache.ignite.internal.managers.communication.GridMessageListener;
import org.apache.ignite.internal.managers.deployment.GridDeployment;
import org.apache.ignite.internal.managers.eventstorage.GridLocalEventListener;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.affinity.GridAffinityProcessor;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.CacheObjectContext;
import org.apache.ignite.internal.processors.cache.GridCacheAdapter;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheEntryEx;
import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException;
import org.apache.ignite.internal.processors.cache.GridCacheUtils;
import org.apache.ignite.internal.processors.cache.IgniteCacheFutureImpl;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtInvalidPartitionException;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.processors.cacheobject.IgniteCacheObjectProcessor;
import org.apache.ignite.internal.processors.dr.GridDrType;
import org.apache.ignite.internal.util.GridConcurrentHashSet;
import org.apache.ignite.internal.util.GridSpinBusyLock;
import org.apache.ignite.internal.util.future.GridCompoundFuture;
import org.apache.ignite.internal.util.future.GridFutureAdapter;
import org.apache.ignite.internal.util.future.IgniteFinishedFutureImpl;
import org.apache.ignite.internal.util.lang.GridPeerDeployAware;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.C1;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.A;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.security.SecurityPermission;
import org.apache.ignite.stream.StreamReceiver;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentHashMap8;
import static org.apache.ignite.events.EventType.EVT_NODE_FAILED;
import static org.apache.ignite.events.EventType.EVT_NODE_LEFT;
import static org.apache.ignite.internal.GridTopic.TOPIC_DATASTREAM;
import static org.apache.ignite.internal.managers.communication.GridIoPolicy.PUBLIC_POOL;
/**
* Data streamer implementation.
*/
@SuppressWarnings("unchecked")
public class DataStreamerImpl<K, V> implements IgniteDataStreamer<K, V>, Delayed {
    /** Isolated receiver: default receiver that updates cache entries directly. */
    private static final StreamReceiver ISOLATED_UPDATER = new IsolatedUpdater();

    /** Cache receiver applied to streamed entries; defaults to {@link #ISOLATED_UPDATER}. */
    private StreamReceiver<K, V> rcvr = ISOLATED_UPDATER;

    /** Marshalled form of the receiver (populated outside this excerpt — presumably lazily on first send; confirm). */
    private byte[] updaterBytes;

    /** Max remap count before issuing an error. */
    private static final int DFLT_MAX_REMAP_CNT = 32;

    /** Log reference used to initialize the shared static logger exactly once. */
    private static final AtomicReference<IgniteLogger> logRef = new AtomicReference<>();

    /** Logger (shared across all streamer instances; assigned in the constructor if unset). */
    private static IgniteLogger log;

    /** Cache name ({@code null} for default cache). */
    private final String cacheName;

    /** Per-node buffer size. */
    @SuppressWarnings("FieldAccessedSynchronizedAndUnsynchronized")
    private int bufSize = DFLT_PER_NODE_BUFFER_SIZE;

    /** Maximum number of parallel operations; defaults to {@code DFLT_MAX_PARALLEL_OPS}. */
    private int parallelOps = DFLT_MAX_PARALLEL_OPS;

    /** Automatic flush frequency in milliseconds (semantics of 0 not visible here — confirm against IgniteDataStreamer docs). */
    private long autoFlushFreq;

    /** Mapping. */
    @GridToStringInclude
    private ConcurrentMap<UUID, Buffer> bufMappings = new ConcurrentHashMap8<>();

    /** Discovery listener; remaps buffered data when a node fails or leaves. */
    private final GridLocalEventListener discoLsnr;

    /** Context. */
    private final GridKernalContext ctx;

    /** Cache object processor, taken from {@code ctx.cacheObjects()}. */
    private final IgniteCacheObjectProcessor cacheObjProc;

    /** Cache object context for the target cache. */
    private final CacheObjectContext cacheObjCtx;

    /** Communication topic for responses. */
    private final Object topic;

    /** Serialized form of {@link #topic} (populated outside this excerpt). */
    private byte[] topicBytes;

    /** {@code True} if data loader has been cancelled. */
    private volatile boolean cancelled;

    /** Active futures of this data loader. */
    @GridToStringInclude
    private final Collection<IgniteInternalFuture<?>> activeFuts = new GridConcurrentHashSet<>();

    /** Closure to remove from active futures. */
    @GridToStringExclude
    private final IgniteInClosure<IgniteInternalFuture<?>> rmvActiveFut = new IgniteInClosure<IgniteInternalFuture<?>>() {
        @Override public void apply(IgniteInternalFuture<?> t) {
            boolean rmv = activeFuts.remove(t);

            // Every completed future must still be tracked; a miss indicates a bookkeeping bug.
            assert rmv;
        }
    };

    /** Job peer deploy aware. */
    private volatile GridPeerDeployAware jobPda;

    /** Deployment class. */
    private Class<?> depCls;

    /** Future to track loading finish. */
    private final GridFutureAdapter<?> fut;

    /** Public API future to track loading finish. */
    private final IgniteFuture<?> publicFut;

    /** Busy lock. */
    private final GridSpinBusyLock busyLock = new GridSpinBusyLock();

    /** Error to rethrow when the client has disconnected (set outside this excerpt — confirm). */
    private CacheException disconnectErr;

    /** Closed flag. */
    private final AtomicBoolean closed = new AtomicBoolean();

    /** Timestamp of the last flush, in milliseconds. */
    private volatile long lastFlushTime = U.currentTimeMillis();

    /** Shared delay queue used to schedule automatic flushes; supplied via the constructor. */
    private final DelayQueue<DataStreamerImpl<K, V>> flushQ;

    /** Skip-store flag (see {@code IgniteDataStreamer#skipStore} — confirm). */
    private boolean skipStore;

    /** Maximum remap attempts before an error is raised; defaults to {@link #DFLT_MAX_REMAP_CNT}. */
    private int maxRemapCnt = DFLT_MAX_REMAP_CNT;

    /** Whether a warning at {@link DataStreamerImpl#allowOverwrite()} printed */
    private static boolean isWarningPrinted;
/**
 * Creates the streamer: resolves the cache object context, subscribes to node-failure
 * discovery events (to remap buffered data) and registers a per-streamer response topic.
 *
 * @param ctx Grid kernal context.
 * @param cacheName Cache name.
 * @param flushQ Flush queue.
 * @throws IgniteException If the cache context cannot be initialized.
 */
public DataStreamerImpl(
    final GridKernalContext ctx,
    @Nullable final String cacheName,
    DelayQueue<DataStreamerImpl<K, V>> flushQ
) {
    assert ctx != null;

    this.ctx = ctx;
    this.cacheObjProc = ctx.cacheObjects();

    if (log == null)
        log = U.logger(ctx, logRef, DataStreamerImpl.class);

    CacheConfiguration ccfg = ctx.cache().cacheConfiguration(cacheName);

    try {
        this.cacheObjCtx = ctx.cacheObjects().contextForCache(ccfg);
    }
    catch (IgniteCheckedException e) {
        throw new IgniteException("Failed to initialize cache context.", e);
    }

    this.cacheName = cacheName;
    this.flushQ = flushQ;

    discoLsnr = new GridLocalEventListener() {
        @Override public void onEvent(Event evt) {
            assert evt.type() == EVT_NODE_FAILED || evt.type() == EVT_NODE_LEFT;

            DiscoveryEvent discoEvt = (DiscoveryEvent)evt;

            UUID id = discoEvt.eventNode().id();

            // Remap regular mappings.
            final Buffer buf = bufMappings.remove(id);

            // Only async notification is possible since
            // discovery thread may be trapped otherwise.
            if (buf != null) {
                waitAffinityAndRun(new Runnable() {
                    @Override public void run() {
                        buf.onNodeLeft();
                    }
                }, discoEvt.topologyVersion(), true);
            }
        }
    };

    ctx.event().addLocalEventListener(discoLsnr, EVT_NODE_FAILED, EVT_NODE_LEFT);

    // Generate unique topic for this loader.
    topic = TOPIC_DATASTREAM.topic(IgniteUuid.fromUuid(ctx.localNodeId()));

    ctx.io().addMessageListener(topic, new GridMessageListener() {
        @Override public void onMessage(UUID nodeId, Object msg) {
            assert msg instanceof DataStreamerResponse;

            DataStreamerResponse res = (DataStreamerResponse)msg;

            if (log.isDebugEnabled())
                log.debug("Received data load response: " + res);

            Buffer buf = bufMappings.get(nodeId);

            if (buf != null)
                buf.onResponse(res);
            else if (log.isDebugEnabled())
                // Fixed truncated message: it used to end with a dangling ", ".
                log.debug("Ignoring response since node has left [nodeId=" + nodeId + ']');
        }
    });

    if (log.isDebugEnabled())
        log.debug("Added response listener within topic: " + topic);

    fut = new DataStreamerFuture(this);

    publicFut = new IgniteCacheFutureImpl<>(fut);
}
/**
 * Waits until affinity for the given topology version is ready, then runs the closure.
 * <p>
 * Note: if the affinity future is not yet done, the closure always runs asynchronously
 * from the future's completion callback regardless of {@code async}; the flag is honored
 * only when affinity is already ready.
 *
 * @param c Closure to run.
 * @param topVer Topology version to wait for.
 * @param async Async flag.
 */
private void waitAffinityAndRun(final Runnable c, long topVer, boolean async) {
    AffinityTopologyVersion topVer0 = new AffinityTopologyVersion(topVer, 0);

    IgniteInternalFuture<?> fut = ctx.cache().context().exchange().affinityReadyFuture(topVer0);

    if (fut != null && !fut.isDone()) {
        fut.listen(new CI1<IgniteInternalFuture<?>>() {
            @Override public void apply(IgniteInternalFuture<?> fut) {
                ctx.closure().runLocalSafe(c, true);
            }
        });
    }
    else {
        if (async)
            ctx.closure().runLocalSafe(c, true);
        else
            c.run();
    }
}
/**
 * Gets the context used to marshal cache keys and values.
 *
 * @return Cache object context.
 */
public CacheObjectContext cacheObjectContext() {
    return cacheObjCtx;
}
/**
 * Enters the busy lock, failing fast when the streamer is no longer usable.
 *
 * @throws CacheException If the client node has disconnected.
 * @throws IllegalStateException If the streamer has been closed.
 */
private void enterBusy() {
    if (busyLock.enterBusy())
        return;

    // Lock is blocked: either the streamer was closed or the client disconnected.
    if (disconnectErr != null)
        throw disconnectErr;

    throw new IllegalStateException("Data streamer has been closed.");
}
/**
 * Leaves the busy lock entered by {@link #enterBusy()}.
 */
private void leaveBusy() {
    busyLock.leaveBusy();
}
/** {@inheritDoc} */
@Override public IgniteFuture<?> future() {
    // Public wrapper around the internal finish future.
    return publicFut;
}
/**
 * Gets the internal future that completes when the streamer finishes.
 *
 * @return Internal future.
 */
public IgniteInternalFuture<?> internalFuture() {
    return fut;
}
/** {@inheritDoc} */
@Override public void deployClass(Class<?> depCls) {
    // Overrides automatic deploy-class detection in DataStreamerPda.
    this.depCls = depCls;
}
/** {@inheritDoc} */
@Override public void receiver(StreamReceiver<K, V> rcvr) {
    A.notNull(rcvr, "rcvr");

    this.rcvr = rcvr;
}
/** {@inheritDoc} */
@Override public boolean allowOverwrite() {
    // The isolated updater is the only receiver that never overwrites existing entries.
    return rcvr != ISOLATED_UPDATER;
}
/** {@inheritDoc} */
@Override public void allowOverwrite(boolean allow) {
    // No-op when the mode is unchanged.
    if (allow == allowOverwrite())
        return;

    ClusterNode node = F.first(ctx.grid().cluster().forCacheNodes(cacheName).nodes());

    // Validate that the cache is present in topology before switching receivers.
    if (node == null)
        throw new CacheException("Failed to get node for cache: " + cacheName);

    if (allow)
        rcvr = DataStreamerCacheUpdaters.<K, V>individual();
    else
        rcvr = ISOLATED_UPDATER;
}
/** {@inheritDoc} */
@Override public boolean skipStore() {
    return skipStore;
}
/** {@inheritDoc} */
@Override public void skipStore(boolean skipStore) {
    this.skipStore = skipStore;
}
/** {@inheritDoc} */
@Override @Nullable public String cacheName() {
    return cacheName;
}
/** {@inheritDoc} */
@Override public int perNodeBufferSize() {
    return bufSize;
}
/** {@inheritDoc} */
@Override public void perNodeBufferSize(int bufSize) {
    // Buffer size must be positive.
    A.ensure(bufSize > 0, "bufSize > 0");

    this.bufSize = bufSize;
}
/** {@inheritDoc} */
@Override public int perNodeParallelOperations() {
    return parallelOps;
}
/** {@inheritDoc} */
@Override public void perNodeParallelOperations(int parallelOps) {
    // Validate the argument for consistency with perNodeBufferSize(int):
    // a non-positive value would make Buffer's semaphore unusable.
    A.ensure(parallelOps > 0, "parallelOps > 0");

    this.parallelOps = parallelOps;
}
/** {@inheritDoc} */
@Override public long autoFlushFrequency() {
    return autoFlushFreq;
}
/** {@inheritDoc} */
@Override public void autoFlushFrequency(long autoFlushFreq) {
    A.ensure(autoFlushFreq >= 0, "autoFlushFreq >= 0");

    long prev = this.autoFlushFreq;

    if (prev == autoFlushFreq)
        return;

    this.autoFlushFreq = autoFlushFreq;

    // Register with (or deregister from) the shared flusher queue.
    if (autoFlushFreq == 0)
        flushQ.remove(this);
    else if (prev == 0)
        flushQ.add(this);
}
/** {@inheritDoc} */
@Override public IgniteFuture<?> addData(Map<K, V> entries) throws IllegalStateException {
    A.notNull(entries, "entries");

    // Delegate to the collection overload.
    return addData(entries.entrySet());
}
/** {@inheritDoc} */
@Override public IgniteFuture<?> addData(Collection<? extends Map.Entry<K, V>> entries) {
    A.notEmpty(entries, "entries");

    checkSecurityPermission(SecurityPermission.CACHE_PUT);

    enterBusy();

    // Created outside try so that the catch block can complete it on failure.
    GridFutureAdapter<Object> resFut = new GridFutureAdapter<>();

    try {
        // Listener removes the future from activeFuts once it completes.
        resFut.listen(rmvActiveFut);

        activeFuts.add(resFut);

        Collection<KeyCacheObject> keys = null;

        if (entries.size() > 1) {
            keys = new GridConcurrentHashSet<>(entries.size(), U.capacity(entries.size()), 1);

            for (Map.Entry<K, V> entry : entries)
                keys.add(cacheObjProc.toCacheKeyObject(cacheObjCtx, entry.getKey(), true));
        }

        // Lazy view: entries are converted to cache objects on iteration.
        Collection<? extends DataStreamerEntry> entries0 = F.viewReadOnly(entries, new C1<Entry<K, V>, DataStreamerEntry>() {
            @Override public DataStreamerEntry apply(Entry<K, V> e) {
                KeyCacheObject key = cacheObjProc.toCacheKeyObject(cacheObjCtx, e.getKey(), true);
                CacheObject val = cacheObjProc.toCacheObject(cacheObjCtx, e.getValue(), true);

                return new DataStreamerEntry(key, val);
            }
        });

        load0(entries0, resFut, keys, 0);

        return new IgniteCacheFutureImpl<>(resFut);
    }
    catch (IgniteException e) {
        // Complete the tracking future; otherwise it would remain in activeFuts
        // forever and doFlush()/close() would hang on it (mirrors addDataInternal).
        resFut.onDone(e);

        return new IgniteFinishedFutureImpl<>(e);
    }
    finally {
        leaveBusy();
    }
}
/**
 * Adds a single pre-marshalled key/value pair.
 *
 * @param key Key.
 * @param val Value.
 * @return Future.
 */
public IgniteFuture<?> addDataInternal(KeyCacheObject key, CacheObject val) {
    DataStreamerEntry entry = new DataStreamerEntry(key, val);

    return addDataInternal(Collections.singleton(entry));
}
/**
 * Schedules removal for a pre-marshalled key (a {@code null} value means removal).
 *
 * @param key Key.
 * @return Future.
 */
public IgniteFuture<?> removeDataInternal(KeyCacheObject key) {
    DataStreamerEntry entry = new DataStreamerEntry(key, null);

    return addDataInternal(Collections.singleton(entry));
}
/**
 * Adds pre-marshalled entries, bypassing the public-API argument checks.
 * The returned future completes once all entries have been processed or remapping fails.
 *
 * @param entries Entries.
 * @return Future.
 */
public IgniteFuture<?> addDataInternal(Collection<? extends DataStreamerEntry> entries) {
    enterBusy();

    GridFutureAdapter<Object> resFut = new GridFutureAdapter<>();

    try {
        // Listener removes the future from activeFuts once it completes.
        resFut.listen(rmvActiveFut);

        activeFuts.add(resFut);

        Collection<KeyCacheObject> keys = null;

        // For multiple entries, track remaining keys so the result future
        // completes only after every key has been acknowledged.
        if (entries.size() > 1) {
            keys = new GridConcurrentHashSet<>(entries.size(), U.capacity(entries.size()), 1);

            for (DataStreamerEntry entry : entries)
                keys.add(entry.getKey());
        }

        load0(entries, resFut, keys, 0);

        return new IgniteCacheFutureImpl<>(resFut);
    }
    catch (Throwable e) {
        // Complete the tracking future so it leaves activeFuts.
        resFut.onDone(e);

        // Errors must never be swallowed.
        if (e instanceof Error)
            throw e;

        return new IgniteFinishedFutureImpl<>(e);
    }
    finally {
        leaveBusy();
    }
}
/** {@inheritDoc} */
@Override public IgniteFuture<?> addData(Map.Entry<K, V> entry) {
    A.notNull(entry, "entry");

    // Wrap into a single-element list and reuse the collection overload.
    return addData(F.asList(entry));
}
/** {@inheritDoc} */
@Override public IgniteFuture<?> addData(K key, V val) {
    A.notNull(key, "key");

    // A null value means removal, which requires a different permission.
    checkSecurityPermission(val == null ? SecurityPermission.CACHE_REMOVE : SecurityPermission.CACHE_PUT);

    KeyCacheObject key0 = cacheObjProc.toCacheKeyObject(cacheObjCtx, key, true);
    CacheObject val0 = cacheObjProc.toCacheObject(cacheObjCtx, val, true);

    return addDataInternal(Collections.singleton(new DataStreamerEntry(key0, val0)));
}
/** {@inheritDoc} */
@Override public IgniteFuture<?> removeData(K key) {
    // Removal is streamed as a put with a null value.
    return addData(key, null);
}
/**
 * Maps entries to affinity nodes and submits them to per-node buffers.
 * On a per-node failure the failed portion is re-mapped and re-submitted up to
 * {@code maxRemapCnt} times before the result future is failed.
 *
 * @param entries Entries.
 * @param resFut Result future.
 * @param activeKeys Active keys ({@code null} for a single-entry submission).
 * @param remaps Remaps count.
 */
private void load0(
    Collection<? extends DataStreamerEntry> entries,
    final GridFutureAdapter<Object> resFut,
    @Nullable final Collection<KeyCacheObject> activeKeys,
    final int remaps
) {
    assert entries != null;

    if (!isWarningPrinted) {
        // Guard the STATIC flag with a class-wide lock: synchronizing on 'this'
        // (as before) could not prevent two streamer instances from racing here.
        synchronized (DataStreamerImpl.class) {
            if (!allowOverwrite() && !isWarningPrinted) {
                U.warn(log, "Data streamer will not overwrite existing cache entries for better performance " +
                    "(to change, set allowOverwrite to true)");
            }

            isWarningPrinted = true;
        }
    }

    Map<ClusterNode, Collection<DataStreamerEntry>> mappings = new HashMap<>();

    boolean initPda = ctx.deploy().enabled() && jobPda == null;

    AffinityTopologyVersion topVer = ctx.cache().context().exchange().readyAffinityVersion();

    for (DataStreamerEntry entry : entries) {
        List<ClusterNode> nodes;

        try {
            KeyCacheObject key = entry.getKey();

            assert key != null;

            // Lazily initialize peer-deploy info from the first entry.
            if (initPda) {
                jobPda = new DataStreamerPda(key.value(cacheObjCtx, false),
                    entry.getValue() != null ? entry.getValue().value(cacheObjCtx, false) : null,
                    rcvr);

                initPda = false;
            }

            nodes = nodes(key, topVer);
        }
        catch (IgniteCheckedException e) {
            resFut.onDone(e);

            return;
        }

        if (F.isEmpty(nodes)) {
            resFut.onDone(new ClusterTopologyException("Failed to map key to node " +
                "(no nodes with cache found in topology) [infos=" + entries.size() +
                ", cacheName=" + cacheName + ']'));

            return;
        }

        for (ClusterNode node : nodes) {
            Collection<DataStreamerEntry> col = mappings.get(node);

            if (col == null)
                mappings.put(node, col = new ArrayList<>());

            col.add(entry);
        }
    }

    for (final Map.Entry<ClusterNode, Collection<DataStreamerEntry>> e : mappings.entrySet()) {
        final UUID nodeId = e.getKey().id();

        Buffer buf = bufMappings.get(nodeId);

        if (buf == null) {
            Buffer old = bufMappings.putIfAbsent(nodeId, buf = new Buffer(e.getKey()));

            if (old != null)
                buf = old;
        }

        final Collection<DataStreamerEntry> entriesForNode = e.getValue();

        IgniteInClosure<IgniteInternalFuture<?>> lsnr = new IgniteInClosure<IgniteInternalFuture<?>>() {
            @Override public void apply(IgniteInternalFuture<?> t) {
                try {
                    t.get();

                    if (activeKeys != null) {
                        for (DataStreamerEntry e : entriesForNode)
                            activeKeys.remove(e.getKey());

                        if (activeKeys.isEmpty())
                            resFut.onDone();
                    }
                    else {
                        assert entriesForNode.size() == 1;

                        // That has been a single key,
                        // so complete result future right away.
                        resFut.onDone();
                    }
                }
                catch (IgniteClientDisconnectedCheckedException e1) {
                    if (log.isDebugEnabled())
                        log.debug("Future finished with disconnect error [nodeId=" + nodeId + ", err=" + e1 + ']');

                    resFut.onDone(e1);
                }
                catch (IgniteCheckedException e1) {
                    if (log.isDebugEnabled())
                        log.debug("Future finished with error [nodeId=" + nodeId + ", err=" + e1 + ']');

                    if (cancelled) {
                        resFut.onDone(new IgniteCheckedException("Data streamer has been cancelled: " +
                            DataStreamerImpl.this, e1));
                    }
                    else if (remaps + 1 > maxRemapCnt) {
                        // Fixed a misplaced parenthesis: e1 must be the CAUSE of the new
                        // exception; previously it was passed as a separate onDone() argument,
                        // which made the "too many remaps" exception the future's result value.
                        resFut.onDone(new IgniteCheckedException("Failed to finish operation (too many remaps): "
                            + remaps, e1));
                    }
                    else
                        load0(entriesForNode, resFut, activeKeys, remaps + 1);
                }
            }
        };

        final GridFutureAdapter<?> f;

        try {
            f = buf.update(entriesForNode, topVer, lsnr);
        }
        catch (IgniteInterruptedCheckedException e1) {
            resFut.onDone(e1);

            return;
        }

        // If the target left while we were buffering, fail the batch and remap.
        if (ctx.discovery().node(nodeId) == null) {
            if (bufMappings.remove(nodeId, buf)) {
                final Buffer buf0 = buf;

                waitAffinityAndRun(new Runnable() {
                    @Override public void run() {
                        buf0.onNodeLeft();

                        if (f != null)
                            f.onDone(new ClusterTopologyCheckedException("Failed to wait for request completion " +
                                "(node has left): " + nodeId));
                    }
                }, ctx.discovery().topologyVersion(), false);
            }
        }
    }
}
/**
 * Resolves the nodes a key must be sent to: with overwrite disabled the isolated
 * updater writes directly to primary and backups, otherwise only the primary is used.
 *
 * @param key Key to map.
 * @param topVer Topology version.
 * @return Nodes to send requests to.
 * @throws IgniteCheckedException If failed.
 */
private List<ClusterNode> nodes(KeyCacheObject key, AffinityTopologyVersion topVer) throws IgniteCheckedException {
    GridAffinityProcessor aff = ctx.affinity();

    List<ClusterNode> res;

    if (allowOverwrite()) {
        ClusterNode node = aff.mapKeyToNode(cacheName, key, topVer);

        res = node == null ? null : Collections.singletonList(node);
    }
    else
        res = aff.mapKeyToPrimaryAndBackups(cacheName, key, topVer);

    if (F.isEmpty(res))
        throw new ClusterTopologyServerNotFoundException("Failed to find server node for cache (all affinity " +
            "nodes have left the grid or cache was stopped): " + cacheName);

    return res;
}
/**
 * Performs flush: repeatedly flushes every per-node buffer and waits until all futures
 * that were active at the start of the call complete. Buffers are flushed in a loop
 * because a failed batch may be remapped and buffered again.
 *
 * @throws IgniteCheckedException If failed.
 */
private void doFlush() throws IgniteCheckedException {
    lastFlushTime = U.currentTimeMillis();

    List<IgniteInternalFuture> activeFuts0 = null;

    int doneCnt = 0;

    // Snapshot in-flight futures; rethrow errors from already-completed ones.
    for (IgniteInternalFuture<?> f : activeFuts) {
        if (!f.isDone()) {
            if (activeFuts0 == null)
                activeFuts0 = new ArrayList<>((int)(activeFuts.size() * 1.2));

            activeFuts0.add(f);
        }
        else {
            f.get();

            doneCnt++;
        }
    }

    // Nothing in flight - flush is trivially complete.
    if (activeFuts0 == null || activeFuts0.isEmpty())
        return;

    while (true) {
        Queue<IgniteInternalFuture<?>> q = null;

        // Force-submit partially filled batches on every buffer.
        for (Buffer buf : bufMappings.values()) {
            IgniteInternalFuture<?> flushFut = buf.flush();

            if (flushFut != null) {
                if (q == null)
                    q = new ArrayDeque<>(bufMappings.size() * 2);

                q.add(flushFut);
            }
        }

        if (q != null) {
            assert !q.isEmpty();

            boolean err = false;

            for (IgniteInternalFuture fut = q.poll(); fut != null; fut = q.poll()) {
                try {
                    fut.get();
                }
                catch (IgniteClientDisconnectedCheckedException e) {
                    if (log.isDebugEnabled())
                        log.debug("Failed to flush buffer: " + e);

                    // Disconnect is fatal - no point in retrying.
                    throw CU.convertToCacheException(e);
                }
                catch (IgniteCheckedException e) {
                    if (log.isDebugEnabled())
                        log.debug("Failed to flush buffer: " + e);

                    err = true;
                }
            }

            if (err)
                // Remaps needed - flush buffers.
                continue;
        }

        doneCnt = 0;

        // Count completed futures from the head; null out finished slots and
        // stop at the first still-pending future.
        for (int i = 0; i < activeFuts0.size(); i++) {
            IgniteInternalFuture f = activeFuts0.get(i);

            if (f == null)
                doneCnt++;
            else if (f.isDone()) {
                f.get();

                doneCnt++;

                activeFuts0.set(i, null);
            }
            else
                break;
        }

        if (doneCnt == activeFuts0.size())
            return;
    }
}
/** {@inheritDoc} */
// Removed stale @SuppressWarnings("ForLoopReplaceableByForEach"): this method contains no loop.
@Override public void flush() throws CacheException {
    enterBusy();

    try {
        doFlush();
    }
    catch (IgniteCheckedException e) {
        throw CU.convertToCacheException(e);
    }
    finally {
        leaveBusy();
    }
}
/**
 * Flushes every internal buffer if buffer was flushed before passed in
 * threshold.
 * <p>
 * Does not wait for result and does not fail on errors assuming that this method
 * should be called periodically.
 */
@Override public void tryFlush() throws IgniteInterruptedException {
    // Best-effort: silently skip if the streamer is closing.
    if (!busyLock.enterBusy())
        return;

    try {
        for (Buffer buf : bufMappings.values())
            buf.flush();

        lastFlushTime = U.currentTimeMillis();
    }
    catch (IgniteInterruptedCheckedException e) {
        // Use the CU typedef for consistency with the rest of this class.
        throw CU.convertToCacheException(e);
    }
    finally {
        leaveBusy();
    }
}
/**
 * @param cancel {@code True} to close with cancellation.
 * @throws CacheException If failed.
 */
@Override public void close(boolean cancel) throws CacheException {
    try {
        closeEx(cancel);
    }
    catch (IgniteCheckedException e) {
        // Translate internal checked exceptions to the public cache exception type.
        throw CU.convertToCacheException(e);
    }
}
/**
 * Closes the streamer without an explicit error.
 *
 * @param cancel {@code True} to close with cancellation.
 * @throws IgniteCheckedException If failed.
 */
public void closeEx(boolean cancel) throws IgniteCheckedException {
    closeEx(cancel, null);
}
/**
 * Closes the streamer, optionally cancelling buffered operations.
 * Idempotent: only the first invocation has any effect.
 *
 * @param cancel {@code True} to close with cancellation.
 * @param err Error to complete the finish future with (may be {@code null}).
 * @throws IgniteCheckedException If failed.
 */
public void closeEx(boolean cancel, IgniteCheckedException err) throws IgniteCheckedException {
    if (!closed.compareAndSet(false, true))
        return;

    // Block new operations and wait for in-progress ones to leave the busy lock.
    busyLock.block();

    if (log.isDebugEnabled())
        log.debug("Closing data streamer [ldr=" + this + ", cancel=" + cancel + ']');

    IgniteCheckedException e = null;

    try {
        // Assuming that no methods are called on this loader after this method is called.
        if (cancel) {
            cancelled = true;

            for (Buffer buf : bufMappings.values())
                buf.cancelAll(err);
        }
        else
            doFlush();

        // Unsubscribe from discovery events and the response topic.
        ctx.event().removeLocalEventListener(discoLsnr);

        ctx.io().removeMessageListener(topic);
    }
    catch (IgniteCheckedException e0) {
        e = e0;
    }

    // A flush failure takes precedence over the supplied error.
    fut.onDone(null, e != null ? e : err);

    if (e != null)
        throw e;
}
/**
 * Handles client node disconnect: fails all buffered operations and closes the
 * streamer with a disconnect error that later API calls rethrow from enterBusy().
 *
 * @param reconnectFut Reconnect future.
 * @throws IgniteCheckedException If failed.
 */
public void onDisconnected(IgniteFuture<?> reconnectFut) throws IgniteCheckedException {
    IgniteClientDisconnectedCheckedException err = new IgniteClientDisconnectedCheckedException(reconnectFut,
        "Data streamer has been closed, client node disconnected.");

    // Remember the error so subsequent API calls fail with it.
    disconnectErr = (CacheException)CU.convertToCacheException(err);

    for (Buffer buf : bufMappings.values())
        buf.cancelAll(err);

    closeEx(true, err);
}
/**
 * @return {@code true} If the loader is closed.
 */
boolean isClosed() {
    // The finish future completes only when close finishes.
    return fut.isDone();
}
/** {@inheritDoc} */
@Override public void close() throws CacheException {
    // Default close flushes instead of cancelling.
    close(false);
}
/**
 * @return Max remap count.
 */
public int maxRemapCount() {
    return maxRemapCnt;
}
/**
 * @param maxRemapCnt New max remap count.
 */
public void maxRemapCount(int maxRemapCnt) {
    this.maxRemapCnt = maxRemapCnt;
}
/** {@inheritDoc} */
@Override public String toString() {
    // Renders fields annotated with @GridToStringInclude.
    return S.toString(DataStreamerImpl.class, this);
}
/** {@inheritDoc} */
@Override public long getDelay(TimeUnit unit) {
    // Remaining time until the next auto-flush deadline, in the requested unit.
    long remainingMs = nextFlushTime() - U.currentTimeMillis();

    return unit.convert(remainingMs, TimeUnit.MILLISECONDS);
}
/**
 * @return Next flush time.
 */
private long nextFlushTime() {
    return autoFlushFreq + lastFlushTime;
}
/** {@inheritDoc} */
@Override public int compareTo(Delayed o) {
    // The previous ternary never returned 0, so x.compareTo(x) was -1 and
    // sgn(a.compareTo(b)) != -sgn(b.compareTo(a)) for equal deadlines, violating
    // the Comparable contract relied upon by DelayQueue. Long.compare fixes both.
    return Long.compare(nextFlushTime(), ((DataStreamerImpl)o).nextFlushTime());
}
/**
 * Check permissions for streaming.
 *
 * @param perm Security permission.
 * @throws org.apache.ignite.plugin.security.SecurityException If permissions are not enough for streaming.
 */
private void checkSecurityPermission(SecurityPermission perm)
    throws org.apache.ignite.plugin.security.SecurityException {
    // Skip the check entirely when security is disabled.
    if (ctx.security().enabled())
        ctx.security().authorize(cacheName, perm, null);
}
/**
 * Per-node buffer: accumulates entries bound for a single node and submits them in
 * batches of {@code bufSize}, either via a local closure or a DataStreamerRequest.
 * At most {@code parallelOps} batches are in flight per node at any time.
 */
private class Buffer {
    /** Node. */
    private final ClusterNode node;

    /** Futures of batches currently executing on the local node. */
    private final Collection<IgniteInternalFuture<Object>> locFuts;

    /** Buffered entries. */
    private List<DataStreamerEntry> entries;

    /** Future completed when the currently accumulating batch is processed. */
    @GridToStringExclude
    private GridFutureAdapter<Object> curFut;

    /** Local node flag. */
    private final boolean isLocNode;

    /** ID generator for remote request IDs. */
    private final AtomicLong idGen = new AtomicLong();

    /** Futures of batches currently awaiting a remote response, by request ID. */
    private final ConcurrentMap<Long, GridFutureAdapter<Object>> reqs;

    /** Limits concurrently executing batches to parallelOps. */
    private final Semaphore sem;

    /** Closure to signal on task finish. */
    @GridToStringExclude
    private final IgniteInClosure<IgniteInternalFuture<Object>> signalC = new IgniteInClosure<IgniteInternalFuture<Object>>() {
        @Override public void apply(IgniteInternalFuture<Object> t) {
            signalTaskFinished(t);
        }
    };

    /**
     * @param node Node.
     */
    Buffer(ClusterNode node) {
        assert node != null;

        this.node = node;

        locFuts = new GridConcurrentHashSet<>();
        reqs = new ConcurrentHashMap8<>();

        // Cache local node flag.
        isLocNode = node.equals(ctx.discovery().localNode());

        entries = newEntries();
        curFut = new GridFutureAdapter<>();
        curFut.listen(signalC);

        sem = new Semaphore(parallelOps);
    }

    /**
     * Appends entries to the current batch and submits it once it reaches bufSize.
     *
     * @param newEntries Infos.
     * @param topVer Topology version.
     * @param lsnr Listener for the operation future.
     * @throws IgniteInterruptedCheckedException If failed.
     * @return Future for operation.
     */
    @Nullable GridFutureAdapter<?> update(Iterable<DataStreamerEntry> newEntries,
        AffinityTopologyVersion topVer,
        IgniteInClosure<IgniteInternalFuture<?>> lsnr) throws IgniteInterruptedCheckedException {
        List<DataStreamerEntry> entries0 = null;
        GridFutureAdapter<Object> curFut0;

        // Mutate the batch and swap it out under the buffer lock.
        synchronized (this) {
            curFut0 = curFut;

            curFut0.listen(lsnr);

            for (DataStreamerEntry entry : newEntries)
                entries.add(entry);

            if (entries.size() >= bufSize) {
                entries0 = entries;

                entries = newEntries();
                curFut = new GridFutureAdapter<>();
                curFut.listen(signalC);
            }
        }

        // Submit the full batch outside the lock.
        if (entries0 != null) {
            submit(entries0, topVer, curFut0);

            if (cancelled)
                curFut0.onDone(new IgniteCheckedException("Data streamer has been cancelled: " +
                    DataStreamerImpl.this));
            else if (ctx.clientDisconnected())
                curFut0.onDone(new IgniteClientDisconnectedCheckedException(ctx.cluster().clientReconnectFuture(),
                    "Client node disconnected."));
        }

        return curFut0;
    }

    /**
     * @return Fresh collection with some space for outgrowth.
     */
    private List<DataStreamerEntry> newEntries() {
        return new ArrayList<>((int)(bufSize * 1.2));
    }

    /**
     * Submits the partially filled batch (if any) and returns a compound future
     * over everything currently in flight for this node.
     *
     * @return Future if any submitted.
     *
     * @throws IgniteInterruptedCheckedException If thread has been interrupted.
     */
    @Nullable IgniteInternalFuture<?> flush() throws IgniteInterruptedCheckedException {
        List<DataStreamerEntry> entries0 = null;
        GridFutureAdapter<Object> curFut0 = null;

        synchronized (this) {
            if (!entries.isEmpty()) {
                entries0 = entries;
                curFut0 = curFut;

                entries = newEntries();
                curFut = new GridFutureAdapter<>();
                curFut.listen(signalC);
            }
        }

        if (entries0 != null)
            submit(entries0, null, curFut0);

        // Create compound future for this flush.
        GridCompoundFuture<Object, Object> res = null;

        for (IgniteInternalFuture<Object> f : locFuts) {
            if (res == null)
                res = new GridCompoundFuture<>();

            res.add(f);
        }

        for (IgniteInternalFuture<Object> f : reqs.values()) {
            if (res == null)
                res = new GridCompoundFuture<>();

            res.add(f);
        }

        if (res != null)
            res.markInitialized();

        return res;
    }

    /**
     * Increments active tasks count.
     *
     * @throws IgniteInterruptedCheckedException If thread has been interrupted.
     */
    private void incrementActiveTasks() throws IgniteInterruptedCheckedException {
        // Blocks when parallelOps batches are already in flight.
        U.acquire(sem);
    }

    /**
     * @param f Future that finished.
     */
    private void signalTaskFinished(IgniteInternalFuture<Object> f) {
        assert f != null;

        sem.release();
    }

    /**
     * Executes a batch: locally via a closure, or remotely via a DataStreamerRequest.
     *
     * @param entries Entries to submit.
     * @param topVer Topology version.
     * @param curFut Current future.
     * @throws IgniteInterruptedCheckedException If interrupted.
     */
    private void submit(final Collection<DataStreamerEntry> entries,
        @Nullable AffinityTopologyVersion topVer,
        final GridFutureAdapter<Object> curFut)
        throws IgniteInterruptedCheckedException {
        assert entries != null;
        assert !entries.isEmpty();
        assert curFut != null;

        incrementActiveTasks();

        IgniteInternalFuture<Object> fut;

        if (isLocNode) {
            fut = ctx.closure().callLocalSafe(
                new DataStreamerUpdateJob(ctx, log, cacheName, entries, false, skipStore, rcvr), false);

            locFuts.add(fut);

            fut.listen(new IgniteInClosure<IgniteInternalFuture<Object>>() {
                @Override public void apply(IgniteInternalFuture<Object> t) {
                    try {
                        boolean rmv = locFuts.remove(t);

                        assert rmv;

                        curFut.onDone(t.get());
                    }
                    catch (IgniteCheckedException e) {
                        curFut.onDone(e);
                    }
                }
            });
        }
        else {
            try {
                // Pre-marshal keys/values and lazily cache marshalled receiver and topic.
                for (DataStreamerEntry e : entries) {
                    e.getKey().prepareMarshal(cacheObjCtx);

                    CacheObject val = e.getValue();

                    if (val != null)
                        val.prepareMarshal(cacheObjCtx);
                }

                if (updaterBytes == null) {
                    assert rcvr != null;

                    updaterBytes = ctx.config().getMarshaller().marshal(rcvr);
                }

                if (topicBytes == null)
                    topicBytes = ctx.config().getMarshaller().marshal(topic);
            }
            catch (IgniteCheckedException e) {
                U.error(log, "Failed to marshal (request will not be sent).", e);

                return;
            }

            GridDeployment dep = null;
            GridPeerDeployAware jobPda0 = null;

            if (ctx.deploy().enabled()) {
                try {
                    jobPda0 = jobPda;

                    assert jobPda0 != null;

                    dep = ctx.deploy().deploy(jobPda0.deployClass(), jobPda0.classLoader());

                    GridCacheAdapter<Object, Object> cache = ctx.cache().internalCache(cacheName);

                    if (cache != null)
                        cache.context().deploy().onEnter();
                }
                catch (IgniteCheckedException e) {
                    U.error(log, "Failed to deploy class (request will not be sent): " + jobPda0.deployClass(), e);

                    return;
                }

                if (dep == null)
                    U.warn(log, "Failed to deploy class (request will be sent): " + jobPda0.deployClass());
            }

            long reqId = idGen.incrementAndGet();

            fut = curFut;

            // Register the future before sending so the response listener can find it.
            reqs.put(reqId, (GridFutureAdapter<Object>)fut);

            if (topVer == null)
                topVer = ctx.cache().context().exchange().readyAffinityVersion();

            DataStreamerRequest req = new DataStreamerRequest(
                reqId,
                topicBytes,
                cacheName,
                updaterBytes,
                entries,
                true,
                skipStore,
                dep != null ? dep.deployMode() : null,
                dep != null ? jobPda0.deployClass().getName() : null,
                dep != null ? dep.userVersion() : null,
                dep != null ? dep.participants() : null,
                dep != null ? dep.classLoaderId() : null,
                dep == null,
                topVer);

            try {
                ctx.io().send(node, TOPIC_DATASTREAM, req, PUBLIC_POOL);

                if (log.isDebugEnabled())
                    log.debug("Sent request to node [nodeId=" + node.id() + ", req=" + req + ']');
            }
            catch (IgniteCheckedException e) {
                GridFutureAdapter<Object> fut0 = ((GridFutureAdapter<Object>)fut);

                try {
                    // Distinguish a send failure from the node having left.
                    if (ctx.discovery().alive(node) && ctx.discovery().pingNode(node.id()))
                        fut0.onDone(e);
                    else
                        fut0.onDone(new ClusterTopologyCheckedException("Failed to send request (node has left): "
                            + node.id()));
                }
                catch (IgniteClientDisconnectedCheckedException e0) {
                    fut0.onDone(e0);
                }
            }
        }
    }

    /**
     * Fails every pending future after the target node left topology.
     */
    void onNodeLeft() {
        assert !isLocNode;
        assert bufMappings.get(node.id()) != this;

        if (log.isDebugEnabled())
            log.debug("Forcibly completing futures (node has left): " + node.id());

        Exception e = new ClusterTopologyCheckedException("Failed to wait for request completion " +
            "(node has left): " + node.id());

        for (GridFutureAdapter<Object> f : reqs.values())
            f.onDone(e);

        // Make sure to complete current future.
        GridFutureAdapter<Object> curFut0;

        synchronized (this) {
            curFut0 = curFut;
        }

        curFut0.onDone(e);
    }

    /**
     * Completes the matching request future with the (possibly failed) response.
     *
     * @param res Response.
     */
    void onResponse(DataStreamerResponse res) {
        if (log.isDebugEnabled())
            log.debug("Received data load response: " + res);

        GridFutureAdapter<?> f = reqs.remove(res.requestId());

        if (f == null) {
            if (log.isDebugEnabled())
                log.debug("Future for request has not been found: " + res.requestId());

            return;
        }

        Throwable err = null;

        byte[] errBytes = res.errorBytes();

        if (errBytes != null) {
            try {
                GridPeerDeployAware jobPda0 = jobPda;

                err = ctx.config().getMarshaller().unmarshal(
                    errBytes,
                    jobPda0 != null ? jobPda0.classLoader() : U.gridClassLoader());
            }
            catch (IgniteCheckedException e) {
                f.onDone(null, new IgniteCheckedException("Failed to unmarshal response.", e));

                return;
            }
        }

        f.onDone(null, err);

        if (log.isDebugEnabled())
            log.debug("Finished future [fut=" + f + ", reqId=" + res.requestId() + ", err=" + err + ']');
    }

    /**
     * Cancels local batches and fails all pending remote request futures.
     *
     * @param err Error.
     */
    void cancelAll(@Nullable IgniteCheckedException err) {
        if (err == null)
            err = new IgniteCheckedException("Data streamer has been cancelled: " + DataStreamerImpl.this);

        for (IgniteInternalFuture<?> f : locFuts) {
            try {
                f.cancel();
            }
            catch (IgniteCheckedException e) {
                U.error(log, "Failed to cancel mini-future.", e);
            }
        }

        for (GridFutureAdapter<?> f : reqs.values())
            f.onDone(err);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        int size;

        // entries is guarded by the buffer lock.
        synchronized (this) {
            size = entries.size();
        }

        return S.toString(Buffer.class, this,
            "entriesCnt", size,
            "locFutsSize", locFuts.size(),
            "reqsSize", reqs.size());
    }
}
/**
 * Data streamer peer-deploy aware: detects the deploy class and class loader
 * from the streamed key, value and receiver objects.
 */
private class DataStreamerPda implements GridPeerDeployAware {
    /** */
    private static final long serialVersionUID = 0L;

    /** Deploy class (lazily detected and cached). */
    private Class<?> cls;

    /** Class loader (lazily detected and cached). */
    private ClassLoader ldr;

    /** Collection of objects to detect deploy class and class loader. */
    private Collection<Object> objs;

    /**
     * Constructs data streamer peer-deploy aware.
     *
     * @param objs Collection of objects to detect deploy class and class loader.
     */
    private DataStreamerPda(Object... objs) {
        this.objs = Arrays.asList(objs);
    }

    /** {@inheritDoc} */
    @Override public Class<?> deployClass() {
        if (cls == null) {
            Class<?> cls0 = null;

            // An explicitly configured deploy class takes precedence.
            if (depCls != null)
                cls0 = depCls;
            else {
                // Scan candidates, skipping JDK classes which need no deployment.
                for (Iterator<Object> it = objs.iterator(); (cls0 == null || U.isJdk(cls0)) && it.hasNext();) {
                    Object o = it.next();

                    if (o != null)
                        cls0 = U.detectClass(o);
                }

                if (cls0 == null || U.isJdk(cls0))
                    cls0 = DataStreamerImpl.class;
            }

            assert cls0 != null : "Failed to detect deploy class [objs=" + objs + ']';

            cls = cls0;
        }

        return cls;
    }

    /** {@inheritDoc} */
    @Override public ClassLoader classLoader() {
        if (ldr == null) {
            ClassLoader ldr0 = deployClass().getClassLoader();

            // Safety.
            if (ldr0 == null)
                ldr0 = U.gridClassLoader();

            assert ldr0 != null : "Failed to detect classloader [objs=" + objs + ']';

            ldr = ldr0;
        }

        return ldr;
    }
}
/**
 * Isolated receiver which only loads entry initial value: it writes directly into
 * cache entries via {@code initialValue()} and never overwrites existing values.
 */
private static class IsolatedUpdater implements StreamReceiver<KeyCacheObject, CacheObject>,
    DataStreamerCacheUpdaters.InternalUpdater {
    /** */
    private static final long serialVersionUID = 0L;

    /** {@inheritDoc} */
    @Override public void receive(IgniteCache<KeyCacheObject, CacheObject> cache,
        Collection<Map.Entry<KeyCacheObject, CacheObject>> entries) {
        IgniteCacheProxy<KeyCacheObject, CacheObject> proxy = (IgniteCacheProxy<KeyCacheObject, CacheObject>)cache;

        GridCacheAdapter<KeyCacheObject, CacheObject> internalCache = proxy.context().cache();

        // For near caches write to the underlying DHT cache.
        if (internalCache.isNear())
            internalCache = internalCache.context().near().dht();

        GridCacheContext cctx = internalCache.context();

        AffinityTopologyVersion topVer = cctx.affinity().affinityTopologyVersion();

        GridCacheVersion ver = cctx.versions().isolatedStreamerVersion();

        long ttl = CU.TTL_ETERNAL;
        long expiryTime = CU.EXPIRE_TIME_ETERNAL;

        ExpiryPolicy plc = cctx.expiry();

        for (Entry<KeyCacheObject, CacheObject> e : entries) {
            try {
                e.getKey().finishUnmarshal(cctx.cacheObjectContext(), cctx.deploy().globalLoader());

                GridCacheEntryEx entry = internalCache.entryEx(e.getKey(), topVer);

                if (plc != null) {
                    ttl = CU.toTtl(plc.getExpiryForCreation());

                    // TTL_ZERO means the entry is already expired - skip it.
                    if (ttl == CU.TTL_ZERO)
                        continue;
                    else if (ttl == CU.TTL_NOT_CHANGED)
                        ttl = 0;

                    expiryTime = CU.toExpireTime(ttl);
                }

                entry.initialValue(e.getValue(),
                    ver,
                    ttl,
                    expiryTime,
                    false,
                    topVer,
                    GridDrType.DR_LOAD);

                cctx.evicts().touch(entry, topVer);

                CU.unwindEvicts(cctx);

                entry.onUnlock();
            }
            catch (GridDhtInvalidPartitionException | GridCacheEntryRemovedException ignored) {
                // No-op.
            }
            catch (IgniteCheckedException ex) {
                IgniteLogger log = cache.unwrap(Ignite.class).log();

                U.error(log, "Failed to set initial value for cache entry: " + e, ex);
            }
        }
    }
}
}
| |
/*******************************************************************************
* Copyright 2016 Jalian Systems Pvt. Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package net.sourceforge.marathon.runtime;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
import java.util.logging.Logger;
/**
* A WriterOutputStream converts java.io.Writer to java.io.OutputStream.
*
* @author Yoko Harada <yokolet@gmail.com>
*/
public class WriterOutputStream extends OutputStream {
public static final Logger LOGGER = Logger.getLogger(WriterOutputStream.class.getName());

/** Target writer; also serves as the lock guarding stream state. */
private final Writer writer;

/** False once {@link #close()} has been called. */
private boolean isOpen = true;

/** Decoder turning incoming bytes into characters; nulled out on close. */
private CharsetDecoder decoder;
/**
 * Creates WriterOutputStream from given java.io.Writer object with a
 * default encoding (the writer's own encoding when available, otherwise the
 * platform default charset).
 *
 * @param writer
 *            java.io.Writer object to be converted to.
 */
public WriterOutputStream(Writer writer) {
    this(writer, null);
}
/**
 * Creates WriterOutputStream from given java.io.Writer object with a
 * specified encoding.
 *
 * @param writer
 *            java.io.Writer object to be converted to.
 * @param encoding
 *            charset name used to decode incoming bytes; {@code null} falls back
 *            to the writer's encoding (if it is an OutputStreamWriter) or the
 *            platform default charset.
 * @throws IllegalArgumentException
 *             if {@code encoding} is non-null and not a supported charset.
 */
public WriterOutputStream(Writer writer, String encoding) {
    this.writer = writer;
    if (encoding == null && writer instanceof OutputStreamWriter) {
        // this encoding might be null when writer has been closed
        encoding = ((OutputStreamWriter) writer).getEncoding();
    }
    if (encoding == null) {
        encoding = Charset.defaultCharset().name();
    } else if (!Charset.isSupported(encoding)) {
        throw new IllegalArgumentException(encoding + " is not supported");
    }
    decoder = Charset.forName(encoding).newDecoder();
    // Replace (rather than fail on) malformed and unmappable byte sequences.
    decoder.onMalformedInput(CodingErrorAction.REPLACE);
    decoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
}
/**
 * Closes this output stream and the underlying writer, releasing any system
 * resources associated with them. A closed stream cannot perform output
 * operations and cannot be reopened; closing twice is an error.
 *
 * @exception IOException
 *                if an I/O error occurs or the stream is already closed.
 */
@Override
public void close() throws IOException {
    synchronized (writer) {
        if (!isOpen) {
            throw new IOException("This stream has been already closed.");
        }
        // Mark closed and drop the decoder before closing the writer.
        isOpen = false;
        decoder = null;
        writer.close();
    }
}
/**
* Flushes this output stream and forces any buffered output bytes to be
* written out. The general contract of <code>flush</code> is that calling
* it is an indication that, if any bytes previously written have been
* buffered by the implementation of the output stream, such bytes should
* immediately be written to their intended destination.
* <p>
* If the intended destination of this stream is an abstraction provided by
* the underlying operating system, for example a file, then flushing the
* stream guarantees only that bytes previously written to the stream are
* passed to the operating system for writing; it does not guarantee that
* they are actually written to a physical device such as a disk drive.
* <p>
*
* @exception IOException
* if an I/O error occurs.
*/
@Override
public void flush() throws IOException {
synchronized (writer) {
if (!isOpen) {
return;
}
writer.flush();
}
}
/**
* Writes the specified byte to this output stream. The general contract for
* <code>write</code> is that one byte is written to the output stream. The
* byte to be written is the eight low-order bits of the argument
* <code>b</code>. The 24 high-order bits of <code>b</code> are ignored.
*
* @param b
* the <code>byte</code>.
* @exception IOException
* if an I/O error occurs. In particular, an
* <code>IOException</code> may be thrown if the output
* stream has been closed.
*/
@Override
public void write(int b) throws IOException {
byte[] bb = new byte[] { (byte) b };
write(bb, 0, 1);
}
/**
* Writes <code>b.length</code> bytes from the specified byte array to this
* output stream. The general contract for <code>write(b)</code> is that it
* should have exactly the same effect as the call
* <code>write(b, 0, b.length)</code>.
*
* @param b
* the data.
* @exception IOException
* if an I/O error occurs.
* @see java.io.OutputStream#write(byte[], int, int)
*/
@Override
public void write(byte[] b) throws IOException {
write(b, 0, b.length);
}
/**
* Writes <code>len</code> bytes from the specified byte array starting at
* offset <code>off</code> to this output stream. The general contract for
* <code>write(b, off, len)</code> is that some of the bytes in the array
* <code>b</code> are written to the output stream in order; element
* <code>b[off]</code> is the first byte written and
* <code>b[off+len-1]</code> is the last byte written by this operation.
* <p>
* If <code>off</code> is negative, or <code>len</code> is negative, or
* <code>off+len</code> is greater than the length of the array
* <code>b</code>, then an <tt>IndexOutOfBoundsException</tt> is thrown.
*
* @param b
* the data.
* @param off
* the start offset in the data.
* @param len
* the number of bytes to write.
* @exception IOException
* if an I/O error occurs. In particular, an
* <code>IOException</code> is thrown if the output stream is
* closed.
*/
@Override
public void write(byte[] b, int off, int len) throws IOException {
synchronized (writer) {
if (!isOpen) {
return;
}
if (off < 0 || len <= 0 || off + len > b.length) {
throw new IndexOutOfBoundsException();
}
ByteBuffer bytes = ByteBuffer.wrap(b, off, len);
CharBuffer chars = CharBuffer.allocate(len);
byte2char(bytes, chars);
char[] cbuf = new char[chars.length()];
chars.get(cbuf, 0, chars.length());
writer.write(cbuf);
writer.flush();
}
}
private void byte2char(ByteBuffer bytes, CharBuffer chars) throws IOException {
decoder.reset();
chars.clear();
CoderResult result = decoder.decode(bytes, chars, true);
if (result.isError() || result.isOverflow()) {
throw new IOException(result.toString());
} else if (result.isUnderflow()) {
chars.flip();
}
}
}
| |
/*
* Copyright (C) 2015 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package keywhiz.cli;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import javax.inject.Inject;
import keywhiz.api.ClientDetailResponse;
import keywhiz.api.GroupDetailResponse;
import keywhiz.api.SecretDetailResponse;
import keywhiz.api.model.Client;
import keywhiz.api.model.Group;
import keywhiz.api.model.SanitizedSecret;
import keywhiz.client.KeywhizClient;
/**
 * Renders Keywhiz clients, groups, and secrets to stdout in a human-readable,
 * tab-indented format for the CLI. The detail views fetch additional data from
 * the server through {@link KeywhizClient}; any {@link IOException} from those
 * calls is rethrown unchecked.
 */
public class Printing {
    private final KeywhizClient keywhizClient;

    // Two tabs: detail rows are nested one level under the single-tab section headings.
    private static final String INDENT = Strings.repeat("\t", 2);

    @Inject
    public Printing(final KeywhizClient keywhizClient) {
        this.keywhizClient = keywhizClient;
    }

    /**
     * Prints a client's name, last-seen timestamp, group memberships, and
     * accessible secrets (groups and secrets sorted by name).
     */
    public void printClientWithDetails(Client client) {
        System.out.println(client.getName());
        ClientDetailResponse clientDetails;
        try {
            clientDetails = keywhizClient.clientDetailsForId(client.getId());
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
        if (clientDetails.lastSeen == null) {
            System.out.println("\tLast Seen: never");
        } else {
            // NOTE(review): this view uses a hard-coded MM/dd/yyyy pattern while the
            // other views use DateFormat.getDateTimeInstance() — confirm intentional.
            // lastSeen is epoch seconds, hence the * 1000 to get milliseconds.
            String lastSeen = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss").format(new Date (clientDetails.lastSeen.toEpochSecond()*1000));
            System.out.printf("\tLast Seen: %s%n", lastSeen);
        }
        System.out.println("\tGroups:");
        clientDetails.groups.stream()
            .sorted(Comparator.comparing(Group::getName))
            .forEach(g -> System.out.println(INDENT + g.getName()));
        System.out.println("\tSecrets:");
        clientDetails.secrets.stream()
            .sorted(Comparator.comparing(SanitizedSecret::name))
            .forEach(s -> System.out.println(INDENT + SanitizedSecret.displayName(s)));
    }

    /**
     * Prints a group's name plus its member clients, attached secrets, JSON
     * metadata, description, and created/updated audit information.
     */
    public void printGroupWithDetails(Group group) {
        System.out.println(group.getName());
        GroupDetailResponse groupDetails;
        try {
            groupDetails = keywhizClient.groupDetailsForId(group.getId());
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
        System.out.println("\tClients:");
        groupDetails.getClients().stream()
            .sorted(Comparator.comparing(Client::getName))
            .forEach(c -> System.out.println(INDENT + c.getName()));
        System.out.println("\tSecrets:");
        groupDetails.getSecrets().stream()
            .sorted(Comparator.comparing(SanitizedSecret::name))
            .forEach(s -> System.out.println(INDENT + SanitizedSecret.displayName(s)));
        System.out.println("\tMetadata:");
        if (!groupDetails.getMetadata().isEmpty()) {
            String metadata;
            try {
                // Metadata is rendered as a single JSON object on one line.
                metadata = new ObjectMapper().writeValueAsString(groupDetails.getMetadata());
            } catch (JsonProcessingException e) {
                throw Throwables.propagate(e);
            }
            System.out.println(INDENT + metadata);
        }
        if (!groupDetails.getDescription().isEmpty()) {
            System.out.println("\tDescription:");
            System.out.println(INDENT + groupDetails.getDescription());
        }
        if (!groupDetails.getCreatedBy().isEmpty()) {
            System.out.println("\tCreated by:");
            System.out.println(INDENT + groupDetails.getCreatedBy());
        }
        System.out.println("\tCreated at:");
        // Timestamps are epoch seconds; convert to milliseconds for java.util.Date.
        Date d = new Date(groupDetails.getCreationDate().toEpochSecond() * 1000);
        System.out.println(INDENT + DateFormat.getDateTimeInstance().format(d));
        if (!groupDetails.getUpdatedBy().isEmpty()) {
            System.out.println("\tUpdated by:");
            System.out.println(INDENT + groupDetails.getUpdatedBy());
        }
        System.out.println("\tUpdated at:");
        d = new Date(groupDetails.getUpdateDate().toEpochSecond() * 1000);
        System.out.println(INDENT + DateFormat.getDateTimeInstance().format(d));
    }

    /**
     * Prints a secret's display name plus its groups, clients, metadata,
     * expiry, description, and created/updated audit information.
     */
    public void printSanitizedSecretWithDetails(SanitizedSecret secret) {
        System.out.println(SanitizedSecret.displayName(secret));
        SecretDetailResponse secretDetails;
        try {
            secretDetails = keywhizClient.secretDetailsForId(secret.id());
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
        System.out.println("\tGroups:");
        secretDetails.groups.stream()
            .sorted(Comparator.comparing(Group::getName))
            .forEach(g -> System.out.println(INDENT + g.getName()));
        System.out.println("\tClients:");
        secretDetails.clients.stream()
            .sorted(Comparator.comparing(Client::getName))
            .forEach(c -> System.out.println(INDENT + c.getName()));
        System.out.println("\tMetadata:");
        if (!secret.metadata().isEmpty()) {
            String metadata;
            try {
                metadata = new ObjectMapper().writeValueAsString(secret.metadata());
            } catch (JsonProcessingException e) {
                throw Throwables.propagate(e);
            }
            System.out.println(INDENT + metadata);
        }
        // expiry() == 0 appears to mean "does not expire" (see printSecretVersions).
        if (secret.expiry() > 0) {
            System.out.println("\tExpiry:");
            Date d = new Date(secret.expiry() * 1000);
            System.out.println(INDENT + DateFormat.getDateTimeInstance().format(d));
        }
        if (!secret.description().isEmpty()) {
            System.out.println("\tDescription:");
            System.out.println(INDENT + secret.description());
        }
        if (!secret.createdBy().isEmpty()) {
            System.out.println("\tCreated by:");
            System.out.println(INDENT + secret.createdBy());
        }
        System.out.println("\tCreated at:");
        Date d = new Date(secret.createdAt().toEpochSecond() * 1000);
        System.out.println(INDENT + DateFormat.getDateTimeInstance().format(d));
        if (!secret.updatedBy().isEmpty()) {
            System.out.println("\tUpdated by:");
            System.out.println(INDENT + secret.updatedBy());
        }
        System.out.println("\tUpdated at:");
        d = new Date(secret.updatedAt().toEpochSecond() * 1000);
        System.out.println(INDENT + DateFormat.getDateTimeInstance().format(d));
    }

    /** Prints each client's name, one per line, sorted by name. */
    public void printAllClients(List<Client> clients) {
        clients.stream()
            .sorted(Comparator.comparing(Client::getName))
            .forEach(c -> System.out.println(c.getName()));
    }

    /** Prints each group's name, one per line, sorted by name. */
    public void printAllGroups(List<Group> groups) {
        groups.stream()
            .sorted(Comparator.comparing(Group::getName))
            .forEach(g -> System.out.println(g.getName()));
    }

    /** Prints each secret's display name, one per line, sorted by name. */
    public void printAllSanitizedSecrets(List<SanitizedSecret> secrets) {
        secrets.stream()
            .sorted(Comparator.comparing(SanitizedSecret::name))
            .forEach(s -> System.out.println(SanitizedSecret.displayName(s)));
    }

    /**
     * Prints every version of a secret with its creator/updater, timestamps,
     * expiry, and checksum, marking the version matching {@code currentVersion}.
     * Prints nothing when {@code versions} is empty.
     */
    public void printSecretVersions(List<SanitizedSecret> versions, Long currentVersion) {
        if (versions.isEmpty()) {
            return;
        }
        System.out.println(versions.get(0).name() + "\n");
        // NOTE(review): currentVersion is auto-unboxed here — NPE if a caller can
        // pass null; confirm callers always supply a value (negative = unknown).
        if (currentVersion < 0) {
            System.out.println("Current secret version unknown!");
        }
        for (SanitizedSecret secret : versions) {
            if (secret.version().isPresent()) {
                if (secret.version().get().equals(currentVersion)) {
                    System.out.println(String.format("*** Current secret version id: %d ***", secret.version().get()));
                } else {
                    System.out.println(String.format("Version id for rollback: %d", secret.version().get()));
                }
            } else {
                System.out.println("Version id for rollback: Unknown!");
            }
            if (secret.createdBy().isEmpty()) {
                System.out.println(INDENT + String.format("Created on %s (creator unknown)",
                    DateFormat.getDateTimeInstance()
                        .format(new Date(secret.createdAt().toEpochSecond() * 1000))));
            } else {
                System.out.println(INDENT + String.format("Created by %s on %s", secret.createdBy(),
                    DateFormat.getDateTimeInstance()
                        .format(new Date(secret.createdAt().toEpochSecond() * 1000))));
            }
            if (secret.updatedBy().isEmpty()) {
                System.out.println(INDENT + String.format("Updated on %s (updater unknown)",
                    DateFormat.getDateTimeInstance()
                        .format(new Date(secret.updatedAt().toEpochSecond() * 1000))));
            } else {
                System.out.println(INDENT + String.format("Updated by %s on %s", secret.updatedBy(),
                    DateFormat.getDateTimeInstance()
                        .format(new Date(secret.updatedAt().toEpochSecond() * 1000))));
            }
            if (secret.expiry() == 0) {
                System.out.println(INDENT + "Does not expire");
            } else {
                System.out.println(INDENT + String.format("Expires on %s", DateFormat.getDateTimeInstance()
                    .format(new Date(secret.expiry() * 1000))));
            }
            if (!secret.checksum().isEmpty()) {
                System.out.println(INDENT + String.format("Content HMAC: %s", secret.checksum()));
            }
            System.out.print("\n"); // Add space between the versions
        }
    }
}
| |
/*
* Copyright (c) 2017, 2018, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* * Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.storm.spout.dynamic.coordinator;
import com.salesforce.storm.spout.dynamic.Message;
import com.salesforce.storm.spout.dynamic.MessageBus;
import com.salesforce.storm.spout.dynamic.MessageId;
import com.salesforce.storm.spout.dynamic.DefaultVirtualSpoutIdentifier;
import com.salesforce.storm.spout.dynamic.VirtualSpoutIdentifier;
import com.salesforce.storm.spout.dynamic.VirtualSpoutMessageBus;
import com.salesforce.storm.spout.dynamic.config.SpoutConfig;
import com.salesforce.storm.spout.dynamic.DelegateSpout;
import com.salesforce.storm.spout.dynamic.mocks.MockDelegateSpout;
import com.salesforce.storm.spout.dynamic.buffer.FifoBuffer;
import org.apache.storm.tuple.Values;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Clock;
import java.util.HashMap;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Test that the {@link SpoutRunner} starts spouts when passed in and handles their lifecycle correctly.
*/
public class SpoutRunnerTest {
private static final Logger logger = LoggerFactory.getLogger(SpoutRunnerTest.class);
private static final int maxWaitTime = 5;
private ThreadPoolExecutor executorService;
/**
* Shutdown the thread executor service when the test is all over.
* @throws InterruptedException something went wrong.
*/
@AfterEach
public void shutDown() throws InterruptedException {
// Shut down our executor service if it exists
if (executorService == null) {
return;
}
executorService.shutdown();
try {
executorService.awaitTermination(maxWaitTime, TimeUnit.SECONDS);
} catch (InterruptedException e) {
e.printStackTrace();
}
if (!executorService.isTerminated()) {
executorService.shutdownNow();
}
}
/**
* Tests the constructor sets things appropriately.
*/
@Test
public void testConstructor() {
// Define inputs
final MessageBus messageBus = new MessageBus(new FifoBuffer());
final Map<VirtualSpoutIdentifier, Queue<MessageId>> ackQueue = new ConcurrentHashMap<>();
final Map<VirtualSpoutIdentifier, Queue<MessageId>> failQueue = new ConcurrentHashMap<>();
final Clock clock = Clock.systemUTC();
// Define some config params
final long consumerStateFlushInterval = 200L;
// Create our spout delegate
final DelegateSpout spout = mock(DelegateSpout.class);
// Create config
final Map<String, Object> topologyConfig = getDefaultConfig(consumerStateFlushInterval);
// Create instance.
SpoutRunner spoutRunner = new SpoutRunner(
spout,
messageBus,
clock,
topologyConfig
);
// Call getters and validate!
assertEquals(clock, spoutRunner.getClock(), "Clock instance is what we expect");
assertEquals(topologyConfig, spoutRunner.getTopologyConfig(), "TopologyConfig looks legit");
assertEquals(
consumerStateFlushInterval,
spoutRunner.getConsumerStateFlushIntervalMs(),
"getConsumerStateFlushIntervalMs() returns right value"
);
assertNotNull(spoutRunner.getStartTime(), "StartTime is null");
assertNotEquals(0, spoutRunner.getStartTime(), "StartTime is not zero");
assertEquals(spout, spoutRunner.getSpout(), "Spout delegate got set");
}
/**
* Tests that
* - our latch counts down when we start the thread.
* - open() is called on the spout on startup.
* - close() is called on the spout on shutdown.
* - spoutRunner.requestStop() shuts down the instance.
* - spoutDelegate.requestStop() shuts down the instance.
*/
@ParameterizedTest
@MethodSource("provideTrueAndFalse")
public void testOpenandCloseOnSpoutIsCalled(final boolean shutdownViaSpout) throws InterruptedException {
// Define inputs
final Clock clock = Clock.systemUTC();
final DefaultVirtualSpoutIdentifier virtualSpoutId = new DefaultVirtualSpoutIdentifier("MyVirtualSpoutId");
// Define some config params
final long consumerStateFlushInterval = TimeUnit.MILLISECONDS.convert(maxWaitTime, TimeUnit.SECONDS);
// Create our spout delegate
final MockDelegateSpout mockSpout = new MockDelegateSpout(virtualSpoutId);
// Setup mock MessageBus
final VirtualSpoutMessageBus messageBus = mock(VirtualSpoutMessageBus.class);
when(messageBus.getAckedMessage(eq(virtualSpoutId))).thenReturn(null);
when(messageBus.getFailedMessage(eq(virtualSpoutId))).thenReturn(null);
// Create config
final Map<String, Object> topologyConfig = getDefaultConfig(consumerStateFlushInterval);
// Create instance.
SpoutRunner spoutRunner = new SpoutRunner(
mockSpout,
messageBus,
clock,
topologyConfig
);
// Start in a separate thread.
final CompletableFuture future = startSpoutRunner(spoutRunner);
// Wait for open to be called since it runs async.
await()
.atMost(maxWaitTime, TimeUnit.SECONDS)
.until(() -> mockSpout.wasOpenCalled, equalTo(true));
// Verify open was called once, but not close
assertTrue(mockSpout.wasOpenCalled, "Open was called on our mock spout");
assertFalse(mockSpout.wasCloseCalled, "Close has not been called yet on our mock spout");
// Verify queues got setup
verify(messageBus, times(1)).registerVirtualSpout(eq(virtualSpoutId));
// But not torn down yet
verify(messageBus, never()).unregisterVirtualSpout(any(VirtualSpoutIdentifier.class));
// Shut down
if (shutdownViaSpout) {
logger.info("Requesting stop via Spout.isCompleted()");
mockSpout.completed = true;
} else {
logger.info("Requesting stop via SpoutRunner.requestStop()");
spoutRunner.requestStop();
}
// Wait for thread to stop.
await()
.atMost(maxWaitTime * 2, TimeUnit.SECONDS)
.until(future::isDone);
// Make sure it actually stopped
assertEquals(0, executorService.getActiveCount(), "Should have no running threads");
// verify close was called
assertTrue(mockSpout.wasCloseCalled, "Close was called on our mock spout");
// Verify entries removed from buffer, ackQueue, failQueue
verify(messageBus, times(1)).unregisterVirtualSpout(eq(virtualSpoutId));
}
/**
* Provides various tuple buffer implementation.
*/
public static Object[][] provideTrueAndFalse() {
return new Object[][]{
{ true },
{ false }
};
}
/**
* Tests that if a DelegateSpout has Messages, they get moved into
* the MessageBuffer.
*/
@Test
public void testMessageBufferGetsFilled() {
// Define inputs
final Clock clock = Clock.systemUTC();
final DefaultVirtualSpoutIdentifier virtualSpoutId = new DefaultVirtualSpoutIdentifier("MyVirtualSpoutId");
// Define some config params
final long consumerStateFlushInterval = TimeUnit.MILLISECONDS.convert(maxWaitTime, TimeUnit.SECONDS);
// Create our spout delegate
final MockDelegateSpout mockSpout = new MockDelegateSpout(virtualSpoutId);
// Create message bus.
final MessageBus messageBus = new MessageBus(FifoBuffer.createDefaultInstance());
// Create config
final Map<String, Object> topologyConfig = getDefaultConfig(consumerStateFlushInterval);
// Create instance.
SpoutRunner spoutRunner = new SpoutRunner(
mockSpout,
messageBus,
clock,
topologyConfig
);
// Start in a separate thread.
final CompletableFuture future = startSpoutRunner(spoutRunner);
// Wait for open to be called since it runs async.
await()
.atMost(maxWaitTime, TimeUnit.SECONDS)
.until(() -> mockSpout.wasOpenCalled, equalTo(true));
// sanity test
assertEquals(0, messageBus.messageSize(), "MessageBuffer should be empty");
// Now Add some messages to our mock spout
final Message message1 = new Message(new MessageId("namespace", 0, 0L, virtualSpoutId), new Values(1));
final Message message2 = new Message(new MessageId("namespace", 0, 1L, virtualSpoutId), new Values(1));
final Message message3 = new Message(new MessageId("namespace", 0, 2L, virtualSpoutId), new Values(1));
mockSpout.emitQueue.add(message1);
mockSpout.emitQueue.add(message2);
mockSpout.emitQueue.add(message3);
// Now wait for them to show up in our buffer
await()
.atMost(maxWaitTime, TimeUnit.SECONDS)
.until(() -> messageBus.messageSize() == 3, equalTo(true));
// Sanity test
assertEquals(3, messageBus.messageSize(), "MessageBuffer should have 3 entries");
logger.info("Requesting stop via SpoutRunner.requestStop()");
spoutRunner.requestStop();
// Wait for thread to stop.
await()
.atMost(maxWaitTime * 2, TimeUnit.SECONDS)
.until(future::isDone);
// Make sure it actually stopped
assertEquals(0, executorService.getActiveCount(), "Should have no running threads");
// verify close was called
assertTrue(mockSpout.wasCloseCalled, "Close was called on our mock spout");
}
/**
* Tests that fails get sent to the spout instance.
*/
@Test
public void testFailsGetSentToSpout() {
// Define inputs
final Clock clock = Clock.systemUTC();
final DefaultVirtualSpoutIdentifier virtualSpoutId = new DefaultVirtualSpoutIdentifier("MyVirtualSpoutId");
// Define some config params
final long consumerStateFlushInterval = TimeUnit.MILLISECONDS.convert(maxWaitTime, TimeUnit.SECONDS);
// Create our spout delegate
final MockDelegateSpout mockSpout = new MockDelegateSpout(virtualSpoutId);
// Create message bus.
final MessageBus messageBus = new MessageBus(FifoBuffer.createDefaultInstance());
// Add other virtual spout id in ack and fail queues
final VirtualSpoutIdentifier otherVirtualSpoutId = new DefaultVirtualSpoutIdentifier("OtherVirtualSpout");
messageBus.registerVirtualSpout(otherVirtualSpoutId);
// Create config
final Map<String, Object> topologyConfig = getDefaultConfig(consumerStateFlushInterval);
// Create instance.
SpoutRunner spoutRunner = new SpoutRunner(
mockSpout,
messageBus,
clock,
topologyConfig
);
// Start in a separate thread.
final CompletableFuture future = startSpoutRunner(spoutRunner);
// Wait for open to be called since it runs async.
await()
.atMost(maxWaitTime, TimeUnit.SECONDS)
.until(() -> mockSpout.wasOpenCalled, equalTo(true));
// sanity test
assertEquals(0, messageBus.messageSize(), "MessageBuffer should be empty");
assertEquals(0, messageBus.ackSize(), "Ack Queue should be empty");
assertEquals(0, messageBus.failSize(), "fail Queue should be empty");
// Create some MessageIds for our virtualSpoutId
final MessageId messageId1 = new MessageId("namespace", 0, 0L, virtualSpoutId);
final MessageId messageId2 = new MessageId("namespace", 0, 1L, virtualSpoutId);
final MessageId messageId3 = new MessageId("namespace", 0, 2L, virtualSpoutId);
// Create some MessageIds for a different virtualSpoutId
final MessageId messageId4 = new MessageId("namespace", 0, 0L, otherVirtualSpoutId);
final MessageId messageId5 = new MessageId("namespace", 0, 1L, otherVirtualSpoutId);
final MessageId messageId6 = new MessageId("namespace", 0, 2L, otherVirtualSpoutId);
// Add them to the appropriate queues
messageBus.fail(messageId1);
messageBus.fail(messageId2);
messageBus.fail(messageId3);
messageBus.fail(messageId4);
messageBus.fail(messageId5);
messageBus.fail(messageId6);
// Now wait for them to show up in our spout instance
await()
.atMost(maxWaitTime, TimeUnit.SECONDS)
.until(() -> mockSpout.failedTupleIds.size() == 3, equalTo(true));
// Sanity test
assertEquals(3, messageBus.failSize(), "failQueue should now contain 3 entries (for the other vspout id)");
assertEquals(3, mockSpout.failedTupleIds.size(), "mock spout should have gotten 3 fail() calls");
assertTrue(mockSpout.failedTupleIds.contains(messageId1), "Should have messageId");
assertTrue(mockSpout.failedTupleIds.contains(messageId2), "Should have messageId");
assertTrue(mockSpout.failedTupleIds.contains(messageId3), "Should have messageId");
assertFalse(mockSpout.failedTupleIds.contains(messageId4), "Should NOT have messageId");
assertFalse(mockSpout.failedTupleIds.contains(messageId5), "Should NOT have messageId");
assertFalse(mockSpout.failedTupleIds.contains(messageId6), "Should NOT have messageId");
// Calling getFailedMessage with our VirtualSpoutId should return empty optionals
for (int loopCount = 0; loopCount < 10; loopCount++) {
assertNull(messageBus.getFailedMessage(virtualSpoutId), "Should be empty/null");
}
// Other virtualspout id queue should still be populated
assertEquals(3, messageBus.failSize(), "fail queue for other virtual spout should remain full");
// No failed ids
assertTrue(mockSpout.ackedTupleIds.isEmpty(), "acked() never called");
logger.info("Requesting stop via SpoutRunner.requestStop()");
spoutRunner.requestStop();
// Wait for thread to stop.
await()
.atMost(maxWaitTime * 2, TimeUnit.SECONDS)
.until(future::isDone);
// Make sure it actually stopped
assertEquals(0, executorService.getActiveCount(), "Should have no running threads");
// verify close was called
assertTrue(mockSpout.wasCloseCalled, "Close was called on our mock spout");
}
/**
* Tests that acks get sent to the spout instance.
*/
@Test
public void testAcksGetSentToSpout() {
// Define inputs
final Clock clock = Clock.systemUTC();
final VirtualSpoutIdentifier virtualSpoutId = new DefaultVirtualSpoutIdentifier("MyVirtualSpoutId");
// Define some config params
final long consumerStateFlushInterval = TimeUnit.MILLISECONDS.convert(maxWaitTime, TimeUnit.SECONDS);
// Create our spout delegate
final MockDelegateSpout mockSpout = new MockDelegateSpout(virtualSpoutId);
// Create message bus.
final MessageBus messageBus = new MessageBus(FifoBuffer.createDefaultInstance());
// Add other virtual spout id in ack and fail queues
final DefaultVirtualSpoutIdentifier otherVirtualSpoutId = new DefaultVirtualSpoutIdentifier("OtherVirtualSpout");
messageBus.registerVirtualSpout(otherVirtualSpoutId);
// Create config
final Map<String, Object> topologyConfig = getDefaultConfig(consumerStateFlushInterval);
// Create instance.
SpoutRunner spoutRunner = new SpoutRunner(
mockSpout,
messageBus,
clock,
topologyConfig
);
// Start in a separate thread.
final CompletableFuture future = startSpoutRunner(spoutRunner);
// Wait for open to be called on the spout since it happens async.
await()
.atMost(maxWaitTime, TimeUnit.SECONDS)
.until(() -> mockSpout.wasOpenCalled, equalTo(true));
// sanity test
assertEquals(0, messageBus.messageSize(), "MessageBuffer should be empty");
assertEquals(0, messageBus.ackSize(), "Ack Queue should be empty");
assertEquals(0, messageBus.failSize(), "fail Queue should be empty");
// Create some MessageIds for our virtualSpoutId
final MessageId messageId1 = new MessageId("namespace", 0, 0L, virtualSpoutId);
final MessageId messageId2 = new MessageId("namespace", 0, 1L, virtualSpoutId);
final MessageId messageId3 = new MessageId("namespace", 0, 2L, virtualSpoutId);
// Create some MessageIds for a different virtualSpoutId
final MessageId messageId4 = new MessageId("namespace", 0, 0L, otherVirtualSpoutId);
final MessageId messageId5 = new MessageId("namespace", 0, 1L, otherVirtualSpoutId);
final MessageId messageId6 = new MessageId("namespace", 0, 2L, otherVirtualSpoutId);
// Add them to the appropriate queues
messageBus.ack(messageId1);
messageBus.ack(messageId2);
messageBus.ack(messageId3);
messageBus.ack(messageId4);
messageBus.ack(messageId5);
messageBus.ack(messageId6);
// Now wait for them to show up in our spout instance
await()
.atMost(maxWaitTime, TimeUnit.SECONDS)
.until(() -> mockSpout.ackedTupleIds.size() == 3, equalTo(true));
// Sanity test
assertEquals(3, messageBus.ackSize(), "ackQueue should now have 3 entries (for other vspoutId)");
assertEquals(3, mockSpout.ackedTupleIds.size(), "mock spout should have gotten 3 ack() calls");
assertTrue(mockSpout.ackedTupleIds.contains(messageId1), "Should have messageId");
assertTrue(mockSpout.ackedTupleIds.contains(messageId2), "Should have messageId");
assertTrue(mockSpout.ackedTupleIds.contains(messageId3), "Should have messageId");
assertFalse(mockSpout.ackedTupleIds.contains(messageId4), "Should NOT have messageId");
assertFalse(mockSpout.ackedTupleIds.contains(messageId5), "Should NOT have messageId");
assertFalse(mockSpout.ackedTupleIds.contains(messageId6), "Should NOT have messageId");
// Calling getAckedMessage with our VirtualSpoutId should return empty optionals
for (int loopCount = 0; loopCount < 10; loopCount++) {
assertNull(messageBus.getAckedMessage(virtualSpoutId), "Should be empty/null");
}
// Other virtualspout id queue should still be populated
assertEquals(3, messageBus.ackSize(), "ack queue for other virtual spout should remain full");
// No failed ids
assertTrue(mockSpout.failedTupleIds.isEmpty(), "Failed() never called");
logger.info("Requesting stop via SpoutRunner.requestStop()");
spoutRunner.requestStop();
// Wait for thread to stop.
await()
.atMost(maxWaitTime * 2, TimeUnit.SECONDS)
.until(future::isDone);
// Make sure it actually stopped
assertEquals(0, executorService.getActiveCount(), "Should have no running threads");
// verify close was called
assertTrue(mockSpout.wasCloseCalled, "Close was called on our mock spout");
}
/**
 * Tests that flushState() gets called on spout periodically.
 */
@Test
public void testFlushStateGetsCalled() {
    // Inputs: a real system clock and an identifier for the virtual spout under test.
    final Clock clock = Clock.systemUTC();
    final DefaultVirtualSpoutIdentifier virtualSpoutId = new DefaultVirtualSpoutIdentifier("MyVirtualSpoutId");

    // Consumer state flush interval: maxWaitTime seconds, expressed in milliseconds.
    final long flushIntervalMs = TimeUnit.MILLISECONDS.convert(maxWaitTime, TimeUnit.SECONDS);

    // Mock spout delegate plus a message bus that also has a second virtual spout registered.
    final MockDelegateSpout mockSpout = new MockDelegateSpout(virtualSpoutId);
    final MessageBus messageBus = new MessageBus(FifoBuffer.createDefaultInstance());
    final DefaultVirtualSpoutIdentifier otherVirtualSpoutId = new DefaultVirtualSpoutIdentifier("OtherVirtualSpout");
    messageBus.registerVirtualSpout(otherVirtualSpoutId);

    // Topology config carrying the flush interval.
    final Map<String, Object> topologyConfig = getDefaultConfig(flushIntervalMs);

    // Build the runner under test.
    final SpoutRunner spoutRunner = new SpoutRunner(
        mockSpout,
        messageBus,
        clock,
        topologyConfig
    );

    // Run the runner on a background thread.
    final CompletableFuture future = startSpoutRunner(spoutRunner);

    // open() happens asynchronously; wait for it before checking anything else.
    await()
        .atMost(maxWaitTime, TimeUnit.SECONDS)
        .until(() -> mockSpout.wasOpenCalled, equalTo(true));

    // The periodic flush should fire within the configured interval.
    await()
        .atMost(maxWaitTime, TimeUnit.SECONDS)
        .until(() -> mockSpout.flushStateCalled, equalTo(true));

    // Ask the runner to stop, then wait for its thread to finish.
    logger.info("Requesting stop via SpoutRunner.requestStop()");
    spoutRunner.requestStop();
    await()
        .atMost(maxWaitTime * 2, TimeUnit.SECONDS)
        .until(future::isDone);

    // The executor should be idle again and close() must have been invoked on the delegate.
    assertEquals(0, executorService.getActiveCount(), "Should have no running threads");
    assertTrue(mockSpout.wasCloseCalled, "Close was called on our mock spout");
}
/**
 * Builds a topology configuration with library defaults applied and the
 * supplied consumer state flush interval set.
 *
 * @param consumerStateFlushIntervalMs how often (ms) consumer state should be flushed.
 * @return populated topology configuration map.
 */
private Map<String, Object> getDefaultConfig(long consumerStateFlushIntervalMs) {
    final Map<String, Object> config = SpoutConfig.setDefaults(new HashMap<>());
    config.put(SpoutConfig.CONSUMER_STATE_FLUSH_INTERVAL_MS, consumerStateFlushIntervalMs);
    return config;
}
/**
 * Starts the given SpoutRunner on the shared single-threaded executor and
 * returns a future that completes when the runner's run() method exits.
 *
 * Lazily creates the executor on first use, and blocks until the runner's
 * thread is actually active so callers can rely on it being started.
 *
 * @param spoutRunner runner instance to execute asynchronously.
 * @return future tied to the runner's lifecycle.
 */
private CompletableFuture<Void> startSpoutRunner(SpoutRunner spoutRunner) {
    if (executorService == null) {
        executorService = (ThreadPoolExecutor) Executors.newFixedThreadPool(1);
    }
    // Sanity check: nothing else should be running on the shared executor.
    assertEquals(0, executorService.getActiveCount(), "Executor service should be empty");
    // Submit task to start. runAsync returns CompletableFuture<Void>, so keep
    // the parameterized type instead of a raw CompletableFuture.
    final CompletableFuture<Void> future = CompletableFuture.runAsync(spoutRunner, executorService);
    // Wait until the runner thread has actually started.
    await()
        .atMost(maxWaitTime, TimeUnit.SECONDS)
        .until(() -> executorService.getActiveCount() == 1, equalTo(true));
    // return the future
    return future;
}
}
| |
/*
* $Id$
*/
/*
Copyright (c) 2000-2008 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.devtools.plugindef;
import java.util.*;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.event.*;
import org.lockss.daemon.*;
import javax.swing.text.*;
import org.lockss.util.*;
import org.lockss.plugin.definable.*;
/**
 * Dialog for editing a LOCKSS printf-style template, offering two synchronized
 * views: a rich "Editor View" where parameters appear as embedded labels, and a
 * raw "Printf View" with separate format-string and parameter text areas.
 * For crawl-rule templates it additionally offers canned regexp fragments.
 */
public class PrintfEditor extends JDialog implements EDPEditor, ConfigParamListener {
  /** Template supplied by the caller; only written back when Save is pressed. */
  protected PrintfTemplate originalTemplate;
  /** Working copy that the two tab views edit. */
  protected PrintfTemplate editableTemplate;
  private EDPCellData m_data;
  /** Element name -> ConfigParamDescr, as supplied by the plugin (kept raw: external API). */
  private HashMap paramKeys;
  /** Match label -> regexp fragment inserted for it. */
  private HashMap<String, String> matchesKeys = new HashMap<String, String>();
  /** Regexp metacharacters that get backslash-escaped in string literals. */
  static final char[] RESERVED_CHARS = {'[', '\\', '^', '$', '.', '|', '?', '*', '+', '(', ')'};
  static final String RESERVED_STRING = new String(RESERVED_CHARS);
  /** Default character style for plain (non-parameter) text in the editor pane. */
  static final SimpleAttributeSet PLAIN_ATTR = new SimpleAttributeSet();
  static {
    StyleConstants.setForeground(PLAIN_ATTR, Color.black);
    StyleConstants.setBold(PLAIN_ATTR, false);
    StyleConstants.setFontFamily(PLAIN_ATTR, "Helvetica");
    StyleConstants.setFontSize(PLAIN_ATTR, 14);
  }
  /** Count of parameter components inserted; used to generate unique style names. */
  int numParameters = 0;
  JPanel printfPanel = new JPanel();
  ButtonGroup buttonGroup = new ButtonGroup();
  JPanel buttonPanel = new JPanel();
  JButton cancelButton = new JButton();
  JButton saveButton = new JButton();
  JLabel formatLabel = new JLabel();
  FlowLayout flowLayout1 = new FlowLayout();
  JTextArea formatTextArea = new JTextArea();
  JPanel parameterPanel = new JPanel();
  JLabel parameterLabel = new JLabel();
  JTextArea parameterTextArea = new JTextArea();
  JButton insertButton = new JButton();
  JComboBox paramComboBox = new JComboBox();
  GridBagLayout gridBagLayout1 = new GridBagLayout();
  ButtonGroup buttonGroup1 = new ButtonGroup();
  TitledBorder titledBorder2;
  JComboBox matchComboBox = new JComboBox();
  JButton insertMatchButton = new JButton();
  JPanel matchPanel = new JPanel();
  JPanel InsertPanel = new JPanel();
  GridLayout gridLayout1 = new GridLayout();
  GridBagLayout gridBagLayout2 = new GridBagLayout();
  GridBagLayout gridBagLayout3 = new GridBagLayout();
  JTabbedPane printfTabPane = new JTabbedPane();
  JTextPane editorPane = new JTextPane();
  JScrollPane editorPanel = new JScrollPane();
  /** Index of the currently visible tab: 0 = editor view, 1 = printf view. */
  int selectedPane = 0;
  private boolean m_isCrawlRuleEditor = false;
  private static final String STRING_LITERAL = "String Literal";
  protected static Logger logger = Logger.getLogger("PrintfEditor");

  /**
   * Creates a non-modal printf editor.
   *
   * @param frame owner frame.
   * @param title dialog title prefix; " Template Editor" is appended.
   */
  public PrintfEditor(Frame frame, String title) {
    super(frame, title, false);
    originalTemplate = new PrintfTemplate();
    editableTemplate = new PrintfTemplate();
    try {
      jbInit();
      pack();
      initMatches();
    }
    catch (Exception exc) {
      String logMessage = "Could not set up the printf editor";
      logger.critical(logMessage, exc);
      JOptionPane.showMessageDialog(frame,
                                    logMessage,
                                    "Printf Editor",
                                    JOptionPane.ERROR_MESSAGE);
    }
  }

  /** Populates the canned regexp fragments offered by the match combo box. */
  private void initMatches() {
    matchesKeys.put(STRING_LITERAL, "");
    matchesKeys.put("Any number", "[0-9]+");
    matchesKeys.put("Anything", ".*");
    matchesKeys.put("Start", "^");
    matchesKeys.put("End", "$");
    matchesKeys.put("Single path component", "[^/]+");
    for (Iterator it = matchesKeys.keySet().iterator(); it.hasNext(); ) {
      matchComboBox.addItem(it.next());
    }
  }

  /** Builds and lays out all Swing components (IDE-generated style retained). */
  private void jbInit() throws Exception {
    saveButton.setText("Save");
    saveButton.addActionListener(new PrintfTemplateEditor_saveButton_actionAdapter(this));
    cancelButton.setText("Cancel");
    cancelButton.addActionListener(new PrintfTemplateEditor_cancelButton_actionAdapter(this));
    this.setTitle(this.getTitle() + " Template Editor");
    printfPanel.setLayout(gridBagLayout1);
    formatLabel.setFont(new java.awt.Font("DialogInput", 0, 12));
    formatLabel.setText("Format String:");
    buttonPanel.setLayout(flowLayout1);
    printfPanel.setBorder(BorderFactory.createEtchedBorder());
    printfPanel.setMinimumSize(new Dimension(100, 160));
    printfPanel.setPreferredSize(new Dimension(380, 160));
    parameterPanel.setLayout(gridBagLayout2);
    parameterLabel.setText("Parameters:");
    parameterLabel.setFont(new java.awt.Font("DialogInput", 0, 12));
    parameterTextArea.setMinimumSize(new Dimension(100, 25));
    parameterTextArea.setPreferredSize(new Dimension(200, 25));
    parameterTextArea.setEditable(true);
    parameterTextArea.setText("");
    insertButton.setMaximumSize(new Dimension(136, 20));
    insertButton.setMinimumSize(new Dimension(136, 20));
    insertButton.setPreferredSize(new Dimension(136, 20));
    insertButton.setToolTipText(
        "insert the format in the format string and add parameter to list.");
    insertButton.setText("Insert Parameter");
    insertButton.addActionListener(new PrintfTemplateEditor_insertButton_actionAdapter(this));
    formatTextArea.setMinimumSize(new Dimension(100, 25));
    formatTextArea.setPreferredSize(new Dimension(200, 15));
    formatTextArea.setText("");
    parameterPanel.setBorder(null);
    parameterPanel.setMinimumSize(new Dimension(60, 40));
    parameterPanel.setPreferredSize(new Dimension(300, 40));
    insertMatchButton.addActionListener(new PrintfTemplateEditor_insertMatchButton_actionAdapter(this));
    insertMatchButton.setText("Insert Match");
    insertMatchButton.setToolTipText("insert the match in the format string and add parameter to list.");
    insertMatchButton.setPreferredSize(new Dimension(136, 20));
    insertMatchButton.setMinimumSize(new Dimension(136, 20));
    insertMatchButton.setMaximumSize(new Dimension(136, 20));
    matchPanel.setPreferredSize(new Dimension(300, 40));
    matchPanel.setBorder(null);
    matchPanel.setMinimumSize(new Dimension(60, 60));
    matchPanel.setLayout(gridBagLayout3);
    InsertPanel.setLayout(gridLayout1);
    gridLayout1.setColumns(1);
    gridLayout1.setRows(2);
    gridLayout1.setVgap(0);
    InsertPanel.setBorder(BorderFactory.createEtchedBorder());
    InsertPanel.setMinimumSize(new Dimension(100, 100));
    InsertPanel.setPreferredSize(new Dimension(380, 120));
    editorPane.setText("");
    editorPane.addKeyListener(new PrintfEditor_editorPane_keyAdapter(this));
    printfTabPane.addChangeListener(new PrintfEditor_printfTabPane_changeAdapter(this));
    parameterPanel.add(insertButton, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0
        , GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(8, 6, 13, 8), 0, 10));
    parameterPanel.add(paramComboBox, new GridBagConstraints(0, 0, 1, 1, 1.0, 0.0
        , GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(8, 8, 13, 0), 258, 11));
    paramComboBox.setRenderer(new MyCellRenderer());
    InsertPanel.add(matchPanel, null);
    InsertPanel.add(parameterPanel, null);
    buttonPanel.add(cancelButton, null);
    buttonPanel.add(saveButton, null);
    this.getContentPane().add(printfTabPane, BorderLayout.NORTH);
    this.getContentPane().add(InsertPanel, BorderLayout.CENTER);
    matchPanel.add(insertMatchButton, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0
        , GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(8, 6, 13, 8), 0, 10));
    matchPanel.add(matchComboBox, new GridBagConstraints(0, 0, 1, 1, 1.0, 0.0
        , GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(8, 8, 13, 0), 258, 11));
    printfPanel.add(parameterLabel, new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0
        , GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(7, 5, 0, 5), 309, 0));
    printfPanel.add(formatLabel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0
        , GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(4, 5, 0, 5), 288, 0));
    printfPanel.add(formatTextArea, new GridBagConstraints(0, 1, 1, 1, 1.0, 1.0
        , GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(6, 5, 0, 5), 300, 34));
    printfPanel.add(parameterTextArea, new GridBagConstraints(0, 3, 1, 1, 1.0, 1.0
        , GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(6, 5, 6, 5), 300, 34));
    printfTabPane.addTab("Editor View", null, editorPanel, "View in Editor");
    // Fixed tooltip typo: was "Vies as Printf".
    printfTabPane.addTab("Printf View", null, printfPanel, "View as Printf");
    editorPane.setCharacterAttributes(PLAIN_ATTR, true);
    editorPane.addStyle("PLAIN", editorPane.getLogicalStyle());
    editorPanel.getViewport().add(editorPane, null);
    this.getContentPane().add(buttonPanel, BorderLayout.SOUTH);
    // NOTE(review): adding a plain JButton to a ButtonGroup has no visual
    // effect (groups are for toggle buttons) -- confirm this is intentional.
    buttonGroup.add(cancelButton);
  }

  /**
   * ConfigParamListener callback (spelling dictated by that interface):
   * refreshes the parameter combo box when the plugin's parameters change.
   */
  public void notifiyParamsChanged() {
    updateParams(m_data);
  }

  /**
   * Installs the cell data to edit and configures the dialog accordingly.
   *
   * @param data EDPCellData carrying the plugin, key, and current template.
   */
  public void setCellData(EDPCellData data) {
    m_data = data;
    // Determine the editor mode FIRST: both the printf descriptor set and the
    // template rendering below depend on this flag. (Previously the flag was
    // assigned after they were computed, so they used the stale value.)
    m_isCrawlRuleEditor = data.getKey().equals(DefinableArchivalUnit.KEY_AU_CRAWL_RULES);
    paramKeys = data.getPlugin().getPrintfDescrs(!m_isCrawlRuleEditor);
    data.getPlugin().addParamListener(this);
    setTemplate((PrintfTemplate) data.getData());
    // initialize the combobox
    updateParams(data);
    // Canned regexp matches only make sense when editing crawl rules.
    matchPanel.setVisible(m_isCrawlRuleEditor);
  }

  /** Commits the edits from the active tab into the original template and hides the dialog. */
  void saveButton_actionPerformed(ActionEvent e) {
    updateEditableTemplate(selectedPane);
    originalTemplate.setFormat(editableTemplate.m_format);
    originalTemplate.setTokens(editableTemplate.m_tokens);
    m_data.updateTemplateData(originalTemplate);
    setVisible(false);
  }

  /** Discards pending edits by simply hiding the dialog. */
  void cancelButton_actionPerformed(ActionEvent e) {
    setVisible(false);
  }

  /** Keeps the two views in sync when the user switches tabs. */
  void printfTabPane_stateChanged(ChangeEvent e) {
    updateEditableTemplate(selectedPane);  // capture edits from the tab being left
    selectedPane = printfTabPane.getSelectedIndex();
    updatePane(selectedPane);              // render them into the newly shown tab
  }

  /**
   * Copies the current UI state of the given tab into the editable template.
   *
   * @param pane 0 for the editor view, 1 for the printf view.
   */
  void updateEditableTemplate(int pane) {
    switch (pane) {
      case 0: // use the editor to update the template
        updateTemplateFromEditor(editableTemplate);
        break;
      case 1: // use the printf text areas to update the template.
        updateTemplateFromPrintf();
        break;
    }
  }

  /**
   * Inserts the selected parameter (or a string literal) into the active view,
   * choosing a printf conversion appropriate for the parameter's type.
   */
  void insertButton_actionPerformed(ActionEvent e) {
    Object selected = paramComboBox.getSelectedItem();
    if (selected == null) {
      // Combo box can be empty (e.g. plugin with no parameters); nothing to insert.
      return;
    }
    ConfigParamDescr descr;
    String key;
    int type = 0;
    String format = "";
    if (selected instanceof ConfigParamDescr) {
      descr = (ConfigParamDescr) selected;
      key = descr.getKey();
      type = descr.getType();
      switch (type) {
        case ConfigParamDescr.TYPE_STRING:
        case ConfigParamDescr.TYPE_URL:
        case ConfigParamDescr.TYPE_BOOLEAN:
          format = "%s";
          break;
        case ConfigParamDescr.TYPE_INT:
        case ConfigParamDescr.TYPE_LONG:
        case ConfigParamDescr.TYPE_POS_INT:
          // Ask the user how the numeric field should be padded.
          NumericPaddingDialog dialog = new NumericPaddingDialog();
          Point pos = this.getLocationOnScreen();
          dialog.setLocation(pos.x, pos.y);
          dialog.pack();
          dialog.setVisible(true);
          StringBuilder fbuf = new StringBuilder("%");
          int width = dialog.getPaddingSize();
          boolean is_zero = dialog.useZero();
          if (width > 0) {
            // NOTE(review): this emits e.g. "%.05d"; C-style zero padding is
            // usually the "0" flag ("%05d"), not a precision -- confirm the
            // downstream printf implementation expects this form.
            fbuf.append(".");
            if (is_zero) {
              fbuf.append(0);
            }
            fbuf.append(width);
          }
          if (type == ConfigParamDescr.TYPE_LONG) {
            fbuf.append("ld");
          } else {
            fbuf.append("d");
          }
          format = fbuf.toString();
          break;
        case ConfigParamDescr.TYPE_YEAR:
          // Short-year parameters print as two digits; full years as-is.
          if (key.startsWith(DefinableArchivalUnit.PREFIX_AU_SHORT_YEAR)) {
            format = "%02d";
          }
          else {
            format = "%d";
          }
          break;
        case ConfigParamDescr.TYPE_RANGE:
        case ConfigParamDescr.TYPE_NUM_RANGE:
        case ConfigParamDescr.TYPE_SET:
          format = "%s";
          break;
      }
      if (selectedPane == 0) {
        insertParameter(descr, format, editorPane.getSelectionStart());
      }
      else if (selectedPane == 1) {
        // add the combobox data value to the edit box
        int pos = formatTextArea.getCaretPosition();
        formatTextArea.insert(format, pos);
        pos = parameterTextArea.getCaretPosition();
        parameterTextArea.insert(", " + key, pos);
      }
    }
    else {
      // "String Literal" entry: prompt for text and escape printf specials.
      key = selected.toString();
      format = escapePrintfChars((String) JOptionPane.showInputDialog(this,
          "Enter the string you wish to input",
          "String Literal Input",
          JOptionPane.OK_CANCEL_OPTION));
      if (StringUtil.isNullString(format)) {
        return;
      }
      if (selectedPane == 0) {
        insertText(format, PLAIN_ATTR, editorPane.getSelectionStart());
      }
      else if (selectedPane == 1) {
        // add the combobox data value to the edit box
        formatTextArea.insert(format, formatTextArea.getCaretPosition());
      }
    }
  }

  /**
   * Inserts the selected canned regexp fragment (or a prompted, escaped string
   * literal) into the active view.
   */
  void insertMatchButton_actionPerformed(ActionEvent e) {
    String key = (String) matchComboBox.getSelectedItem();
    String format = matchesKeys.get(key);
    if (key.equals(STRING_LITERAL)) {
      format = escapeReservedChars((String) JOptionPane.showInputDialog(this,
          "Enter the string you wish to match",
          "String Literal Input",
          JOptionPane.OK_CANCEL_OPTION));
      if (StringUtil.isNullString(format)) {
        return;
      }
    }
    if (selectedPane == 0) {
      insertText(format, PLAIN_ATTR, editorPane.getSelectionStart());
    }
    else {
      // add the combobox data value to the edit box
      int pos = formatTextArea.getCaretPosition();
      formatTextArea.insert(format, pos);
    }
  }

  /**
   * Makes embedded parameter components behave atomically in the editor pane:
   * arrow keys jump over them and delete/backspace removes them whole.
   */
  void editorPane_keyPressed(KeyEvent e) {
    StyledDocument doc = editorPane.getStyledDocument();
    int pos = editorPane.getCaretPosition();
    int code = e.getKeyCode();
    Element el;
    switch (code) {
      case KeyEvent.VK_BACK_SPACE:
      case KeyEvent.VK_DELETE:
      case KeyEvent.VK_LEFT:
      case KeyEvent.VK_KP_LEFT:
        if (pos == 0) return;
        // we want to get the element to the left of position.
        el = doc.getCharacterElement(pos - 1);
        break;
      case KeyEvent.VK_RIGHT:
      case KeyEvent.VK_KP_RIGHT:
        // we want to get the element to the right of position.
        el = doc.getCharacterElement(pos + 1);
        break;
      default:
        return; // bail we don't handle it.
    }
    AttributeSet attr = el.getAttributes();
    String el_name = (String) attr.getAttribute(StyleConstants.NameAttribute);
    int el_range = el.getEndOffset() - el.getStartOffset() - 1;
    if (el_name.startsWith("Parameter") &&
        StyleConstants.getComponent(attr) != null) {
      try {
        switch (code) {
          case KeyEvent.VK_BACK_SPACE:
          case KeyEvent.VK_DELETE:
            doc.remove(el.getStartOffset(), el_range);
            break;
          case KeyEvent.VK_LEFT:
          case KeyEvent.VK_KP_LEFT:
            editorPane.setCaretPosition(pos - el_range);
            break;
          case KeyEvent.VK_RIGHT:
          case KeyEvent.VK_KP_RIGHT:
            editorPane.setCaretPosition(pos + (el_range));
            break;
        }
      }
      catch (BadLocationException ex) {
        // Previously swallowed silently; log so editing glitches are diagnosable.
        logger.warning("Unable to adjust caret around parameter component", ex);
      }
    }
  }

  /**
   * Inserts a parameter into the editor pane as an embedded label component so
   * it reads as a single, indivisible token.
   *
   * @param descr  parameter being inserted.
   * @param format printf conversion chosen for it (becomes the styled text).
   * @param pos    document offset at which to insert.
   */
  private void insertParameter(ConfigParamDescr descr, String format, int pos) {
    try {
      StyledDocument doc = (StyledDocument) editorPane.getDocument();
      // The component must first be wrapped in a style
      Style style = doc.addStyle("Parameter-" + numParameters, null);
      JLabel label = new JLabel(descr.getDisplayName());
      label.setAlignmentY(0.8f); // make sure we line up
      label.setFont(new Font("Helvetica", Font.PLAIN, 14));
      label.setForeground(Color.BLUE);
      label.setName(descr.getKey());
      label.setToolTipText("key: " + descr.getKey() + " format: " + format);
      StyleConstants.setComponent(style, label);
      doc.insertString(pos, format, style);
      numParameters++;
    }
    catch (BadLocationException e) {
      // Previously swallowed silently; log so lost insertions are diagnosable.
      logger.warning("Unable to insert parameter into format editor", e);
    }
  }

  /** Inserts plain text with the given attributes at the given document offset. */
  private void insertText(String text, AttributeSet set, int pos) {
    try {
      editorPane.getDocument().insertString(pos, text, set);
    }
    catch (BadLocationException ex) {
      // Previously swallowed silently; log so lost insertions are diagnosable.
      logger.warning("Unable to insert text into format editor", ex);
    }
  }

  /** Appends plain text with the given attributes at the end of the document. */
  private void appendText(String text, AttributeSet set) {
    insertText(text, set, editorPane.getDocument().getLength());
  }

  /** Copies the printf view's text areas into the editable template. */
  private void updateTemplateFromPrintf() {
    String format = formatTextArea.getText();
    String parameters = parameterTextArea.getText();
    editableTemplate.setFormat(format);
    editableTemplate.setParameters(parameters);
  }

  /**
   * Rebuilds the template from the editor pane: the document text becomes the
   * format string, and each embedded parameter label contributes its key (in
   * document order) to the token list.
   */
  private void updateTemplateFromEditor(PrintfTemplate template) {
    ArrayList params = new ArrayList();
    String format = null;
    int text_length = editorPane.getDocument().getLength();
    try {
      format = editorPane.getDocument().getText(0, text_length);
    }
    catch (BadLocationException ex1) {
      // Previously swallowed silently; log and fall through with format == null.
      logger.warning("Unable to read format text from editor", ex1);
    }
    Element section_el = editorPane.getDocument().getDefaultRootElement();
    // Get number of paragraphs.
    int num_para = section_el.getElementCount();
    for (int p_count = 0; p_count < num_para; p_count++) {
      Element para_el = section_el.getElement(p_count);
      // Enumerate the content elements
      int num_cont = para_el.getElementCount();
      for (int c_count = 0; c_count < num_cont; c_count++) {
        Element content_el = para_el.getElement(c_count);
        AttributeSet attr = content_el.getAttributes();
        // Get the name of the style applied to this content element; may be null
        String sn = (String) attr.getAttribute(StyleConstants.NameAttribute);
        // Check if style name match
        if (sn != null && sn.startsWith("Parameter")) {
          // we extract the label.
          JLabel l = (JLabel) StyleConstants.getComponent(attr);
          if (l != null) {
            params.add(l.getName());
          }
        }
      }
    }
    template.setFormat(format);
    template.setTokens(params);
  }

  /**
   * Installs a template for editing: keeps the original for later save and
   * copies its contents into the working template, then refreshes the view.
   */
  protected void setTemplate(PrintfTemplate template) {
    originalTemplate = template;
    editableTemplate.setFormat(template.m_format);
    editableTemplate.setTokens(template.m_tokens);
    updatePane(selectedPane);
  }

  /**
   * Rebuilds the parameter combo box from the plugin's printf descriptors,
   * filtering out set/range parameters outside the crawl-rule editor.
   */
  private void updateParams(EDPCellData data) {
    paramComboBox.removeAllItems();
    paramKeys = data.getPlugin().getPrintfDescrs(!m_isCrawlRuleEditor);
    if (!m_isCrawlRuleEditor) {
      paramComboBox.addItem(STRING_LITERAL);
    }
    for (Iterator it = paramKeys.values().iterator(); it.hasNext(); ) {
      ConfigParamDescr descr = (ConfigParamDescr) it.next();
      int type = descr.getType();
      if (!m_isCrawlRuleEditor && (type == ConfigParamDescr.TYPE_SET
          || type == ConfigParamDescr.TYPE_RANGE)) {
        continue;
      }
      paramComboBox.addItem(descr);
    }
    paramComboBox.setEnabled(true);
    // Guard: setSelectedIndex(0) throws IllegalArgumentException on an empty model.
    if (paramComboBox.getItemCount() > 0) {
      paramComboBox.setSelectedIndex(0);
    }
    paramComboBox.setToolTipText(
        "Select a parameter to insert into the format string");
    insertButton.setEnabled(true);
  }

  /**
   * Renders the editable template into the given tab.
   *
   * @param sel 0 for the editor view, 1 for the printf view.
   */
  private void updatePane(int sel) {
    switch (sel) {
      case 0: // editor view
        updateEditorView();
        break;
      case 1: // printf view
        updatePrintfView();
        break;
    }
  }

  /** Shows the raw format string and token list in the printf view. */
  private void updatePrintfView() {
    formatTextArea.setText(editableTemplate.m_format);
    parameterTextArea.setText(editableTemplate.getTokenString());
  }

  /**
   * Re-renders the template into the editor pane, turning each printf element
   * into either plain text or an embedded parameter label. On a malformed
   * format, falls back to the printf view so the user can fix it by hand.
   */
  private void updateEditorView() {
    editorPane.setText("");
    numParameters = 0;
    try {
      java.util.List elements = editableTemplate.getPrintfElements();
      for (Iterator it = elements.iterator(); it.hasNext(); ) {
        PrintfUtil.PrintfElement el = (PrintfUtil.PrintfElement) it.next();
        if (el.getFormat().equals(PrintfUtil.PrintfElement.FORMAT_NONE)) {
          appendText(el.getElement(), PLAIN_ATTR);
        }
        else {
          insertParameter((ConfigParamDescr) paramKeys.get(el.getElement()),
                          el.getFormat(),
                          editorPane.getDocument().getLength());
        }
      }
    }
    catch (Exception ex) {
      JOptionPane.showMessageDialog(this, "Invalid Format: " + ex.getMessage(),
                                    "Invalid Printf Format",
                                    JOptionPane.ERROR_MESSAGE);
      selectedPane = 1;
      printfTabPane.setSelectedIndex(selectedPane);
      updatePane(selectedPane);
    }
  }

  /**
   * Return a copy of the string with all reserved regexp chars
   * escaped by backslash.
   * @param str the string to add escapes to
   * @return String return a string with escapes or "" if str is null
   */
  private String escapeReservedChars(String str) {
    if (str == null) return "";
    StringBuilder sb = new StringBuilder();
    for (int ci = 0; ci < str.length(); ci++) {
      char ch = str.charAt(ci);
      if (RESERVED_STRING.indexOf(ch) >= 0) {
        sb.append('\\');
      }
      sb.append(ch);
    }
    // Percent signs must also be doubled, since the result lands in a printf format.
    return escapePrintfChars(sb.toString());
  }

  /**
   * Return a copy of the string with every '%' doubled so it is a literal in a
   * printf format string, or "" if str is null.
   */
  private String escapePrintfChars(String str) {
    if (str == null) return "";
    StringBuilder sb = new StringBuilder();
    for (int ci = 0; ci < str.length(); ci++) {
      char ch = str.charAt(ci);
      if (ch == '%') {
        sb.append('%');
      }
      sb.append(ch);
    }
    return sb.toString();
  }
}
/** Forwards Save button events to {@link PrintfEditor#saveButton_actionPerformed}. */
class PrintfTemplateEditor_saveButton_actionAdapter implements ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_saveButton_actionAdapter(PrintfEditor editor) {
    this.adaptee = editor;
  }

  public void actionPerformed(ActionEvent event) {
    adaptee.saveButton_actionPerformed(event);
  }
}
/** Forwards Cancel button events to {@link PrintfEditor#cancelButton_actionPerformed}. */
class PrintfTemplateEditor_cancelButton_actionAdapter implements ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_cancelButton_actionAdapter(PrintfEditor editor) {
    this.adaptee = editor;
  }

  public void actionPerformed(ActionEvent event) {
    adaptee.cancelButton_actionPerformed(event);
  }
}
/** Forwards Insert Parameter button events to {@link PrintfEditor#insertButton_actionPerformed}. */
class PrintfTemplateEditor_insertButton_actionAdapter implements ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_insertButton_actionAdapter(PrintfEditor editor) {
    this.adaptee = editor;
  }

  public void actionPerformed(ActionEvent event) {
    adaptee.insertButton_actionPerformed(event);
  }
}
/** Forwards Insert Match button events to {@link PrintfEditor#insertMatchButton_actionPerformed}. */
class PrintfTemplateEditor_insertMatchButton_actionAdapter implements ActionListener {
  PrintfEditor adaptee;

  PrintfTemplateEditor_insertMatchButton_actionAdapter(PrintfEditor editor) {
    this.adaptee = editor;
  }

  public void actionPerformed(ActionEvent event) {
    adaptee.insertMatchButton_actionPerformed(event);
  }
}
/** Forwards tab-selection changes to {@link PrintfEditor#printfTabPane_stateChanged}. */
class PrintfEditor_printfTabPane_changeAdapter implements ChangeListener {
  PrintfEditor adaptee;

  PrintfEditor_printfTabPane_changeAdapter(PrintfEditor editor) {
    this.adaptee = editor;
  }

  public void stateChanged(ChangeEvent event) {
    adaptee.printfTabPane_stateChanged(event);
  }
}
/** Forwards key presses in the editor pane to {@link PrintfEditor#editorPane_keyPressed}. */
class PrintfEditor_editorPane_keyAdapter extends KeyAdapter {
  PrintfEditor adaptee;

  PrintfEditor_editorPane_keyAdapter(PrintfEditor editor) {
    this.adaptee = editor;
  }

  public void keyPressed(KeyEvent event) {
    adaptee.editorPane_keyPressed(event);
  }
}
/**
 * Combo-box cell renderer that shows a ConfigParamDescr by its display name
 * and any other value (e.g. the "String Literal" entry) via toString().
 */
class MyCellRenderer extends JLabel implements ListCellRenderer {
  public MyCellRenderer() {
    setOpaque(true);
  }

  public Component getListCellRendererComponent(JList list, Object value,
                                                int index, boolean isSelected,
                                                boolean cellHasFocus) {
    // Mirror the list's selection colors.
    Color background = isSelected ? list.getSelectionBackground() : list.getBackground();
    Color foreground = isSelected ? list.getSelectionForeground() : list.getForeground();
    setBackground(background);
    setForeground(foreground);

    String text;
    if (value instanceof ConfigParamDescr) {
      text = ((ConfigParamDescr) value).getDisplayName();
    }
    else {
      text = value.toString();
    }
    setText(text);
    return this;
  }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn.history;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.AbstractVcs;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.history.VcsRevisionNumber;
import com.intellij.openapi.vcs.versionBrowser.CommittedChangeList;
import com.intellij.openapi.vcs.versionBrowser.VcsRevisionNumberAware;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ConstantFunction;
import com.intellij.util.NotNullFunction;
import com.intellij.util.UriUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.*;
import org.jetbrains.idea.svn.api.Depth;
import org.jetbrains.idea.svn.commandLine.SvnBindException;
import org.jetbrains.idea.svn.info.Info;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.internal.util.SVNPathUtil;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc2.SvnTarget;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.File;
import java.io.IOException;
import java.util.*;
public class SvnChangeList implements CommittedChangeList, VcsRevisionNumberAware {
private static final Logger LOG = Logger.getInstance("#org.jetbrains.idea.svn.history");
private final SvnVcs myVcs;
private final SvnRepositoryLocation myLocation;
private String myRepositoryRoot;
private long myRevision;
private VcsRevisionNumber myRevisionNumber;
private String myAuthor;
private Date myDate;
private String myMessage;
private final Set<String> myChangedPaths = new HashSet<>();
private final Set<String> myAddedPaths = new HashSet<>();
private final Set<String> myDeletedPaths = new HashSet<>();
private final Set<String> myReplacedPaths = new HashSet<>();
private ChangesListCreationHelper myListsHolder;
private SVNURL myBranchUrl;
private boolean myCachedInfoLoaded;
// key: added path, value: copied-from
private final TreeMap<String, String> myCopiedAddedPaths = new TreeMap<>();
private RootUrlInfo myWcRoot;
private final CommonPathSearcher myCommonPathSearcher;
private final Set<String> myKnownAsDirectories;
public SvnChangeList(@NotNull final List<CommittedChangeList> lists, @NotNull final SvnRepositoryLocation location) {
final SvnChangeList sample = (SvnChangeList) lists.get(0);
myVcs = sample.myVcs;
myLocation = location;
setRevision(sample.myRevision);
myAuthor = sample.myAuthor;
myDate = sample.myDate;
myMessage = sample.myMessage;
myRepositoryRoot = sample.myRepositoryRoot;
myCommonPathSearcher = new CommonPathSearcher();
for (CommittedChangeList list : lists) {
final SvnChangeList svnList = (SvnChangeList) list;
myChangedPaths.addAll(svnList.myChangedPaths);
myAddedPaths.addAll(svnList.myAddedPaths);
myDeletedPaths.addAll(svnList.myDeletedPaths);
myReplacedPaths.addAll(svnList.myReplacedPaths);
}
myKnownAsDirectories = new HashSet<>(0);
}
  /**
   * Creates a change list from a single SVN log entry, classifying each
   * changed path as added, deleted, replaced, or modified.
   */
  public SvnChangeList(SvnVcs vcs, @NotNull final SvnRepositoryLocation location, final LogEntry logEntry, String repositoryRoot) {
    myVcs = vcs;
    myLocation = location;
    setRevision(logEntry.getRevision());
    // Author/message may be absent in svn log output; normalize to "".
    myAuthor = StringUtil.notNullize(logEntry.getAuthor());
    myDate = logEntry.getDate();
    myMessage = StringUtil.notNullize(logEntry.getMessage());
    // Normalize the root URL so later path handling is predictable.
    myRepositoryRoot = UriUtil.trimTrailingSlashes(repositoryRoot);
    myCommonPathSearcher = new CommonPathSearcher();
    myKnownAsDirectories = new HashSet<>(0);
    for(LogEntryPath entry : logEntry.getChangedPaths().values()) {
      final String path = entry.getPath();
      if (entry.isDirectory()) {
        myKnownAsDirectories.add(path);
      }
      // Feed every path into the common-prefix searcher.
      myCommonPathSearcher.next(path);
      // Entry types: 'A' = added (possibly with copy-from info),
      // 'D' = deleted, 'R' = replaced, anything else = modified.
      if (entry.getType() == 'A') {
        if (entry.getCopyPath() != null) {
          myCopiedAddedPaths.put(path, entry.getCopyPath());
        }
        myAddedPaths.add(path);
      }
      else if (entry.getType() == 'D') {
        myDeletedPaths.add(path);
      }
      else {
        if (entry.getType() == 'R') {
          myReplacedPaths.add(path);
        }
        myChangedPaths.add(path);
      }
    }
  }
  /**
   * Deserializes a change list from previously cached data.
   *
   * @param supportsCopyFromInfo whether the cached format includes copy-from info.
   * @param supportsReplaced     whether the cached format includes replaced paths.
   * @throws IOException on a malformed or truncated stream.
   */
  public SvnChangeList(SvnVcs vcs, @NotNull SvnRepositoryLocation location, @NotNull DataInput stream, final boolean supportsCopyFromInfo,
                       final boolean supportsReplaced) throws IOException {
    myVcs = vcs;
    myLocation = location;
    myKnownAsDirectories = new HashSet<>();
    // readFromStream populates the path sets used below, so it must run first.
    readFromStream(stream, supportsCopyFromInfo, supportsReplaced);
    myCommonPathSearcher = new CommonPathSearcher();
    myCommonPathSearcher.next(myAddedPaths);
    myCommonPathSearcher.next(myDeletedPaths);
    myCommonPathSearcher.next(myChangedPaths);
  }
public Change getByPath(final String path) {
if (myListsHolder == null) {
createLists();
}
return myListsHolder.getByPath(path);
}
  /** Returns the author recorded for this revision. */
  public String getCommitterName() {
    return myAuthor;
  }
  /** Returns the commit date, or null if none was recorded. */
  @Nullable
  public Date getCommitDate() {
    return myDate;
  }
  /** Returns the revision number wrapper set by {@link #setRevision(long)}. */
  @Nullable
  @Override
  public VcsRevisionNumber getRevisionNumber() {
    return myRevisionNumber;
  }
  // Keeps the raw revision and its VcsRevisionNumber wrapper in sync.
  private void setRevision(long revision) {
    myRevision = revision;
    myRevisionNumber = new SvnRevisionNumber(SVNRevision.create(revision));
  }
public Collection<Change> getChanges() {
if (myListsHolder == null) {
createLists();
}
return myListsHolder.getList();
}
  /**
   * Populates myListsHolder with Change objects derived from the raw
   * added/deleted/changed path sets, pairing copy-from sources with their
   * targets so copies/renames appear as single rename changes.
   * Order matters throughout: copyDeleted and copiedAddedChanges are mutated
   * by the first loop and consumed by the later ones.
   */
  private void createLists() {
    myListsHolder = new ChangesListCreationHelper();
    // key: copied-from
    final Map<String, ExternallyRenamedChange> copiedAddedChanges = new HashMap<>();
    correctBeforePaths();
    // Deletions not yet claimed as the source of a copy/rename.
    final List<String> copyDeleted = new ArrayList<>(myDeletedPaths);
    for(String path: myAddedPaths) {
      final Change addedChange;
      if (myCopiedAddedPaths.containsKey(path)) {
        final String copyTarget = myCopiedAddedPaths.get(path);
        if (copyDeleted.contains(copyTarget)) {
          // Copy source was deleted in the same revision => treat as a rename.
          addedChange = new ExternallyRenamedChange(myListsHolder.createRevisionLazily(copyTarget, true),
                                                    myListsHolder.createRevisionLazily(path, false), copyTarget);
          addedChange.getMoveRelativePath(myVcs.getProject());
          ((ExternallyRenamedChange) addedChange).setCopied(false);
          // Claim the deletion so it is not reported separately below.
          copyDeleted.remove(copyTarget);
        } else {
          // Plain copy: source still exists.
          addedChange = new ExternallyRenamedChange(null, myListsHolder.createRevisionLazily(path, false), copyTarget);
        }
        copiedAddedChanges.put(copyTarget, (ExternallyRenamedChange) addedChange);
      } else {
        addedChange = new Change(null, myListsHolder.createRevisionLazily(path, false));
      }
      myListsHolder.add(path, addedChange);
    }
    for(String path: copyDeleted) {
      final Change deletedChange;
      if (copiedAddedChanges.containsKey(path)) {
        // seems never occurs any more
        final ExternallyRenamedChange addedChange = copiedAddedChanges.get(path);
        final FilePath source = addedChange.getAfterRevision().getFile();
        deletedChange = new ExternallyRenamedChange(myListsHolder.createDeletedItemRevision(path, true), null, path);
        ((ExternallyRenamedChange) deletedChange).setCopied(false);
        //noinspection ConstantConditions
        //addedChange.setRenamedOrMovedTarget(deletedChange.getBeforeRevision().getFile());
        //noinspection ConstantConditions
        ((ExternallyRenamedChange) deletedChange).setRenamedOrMovedTarget(source);
      } else {
        deletedChange = new Change(myListsHolder.createDeletedItemRevision(path, true), null);
      }
      myListsHolder.add(path, deletedChange);
    }
    for(String path: myChangedPaths) {
      boolean moveAndChange = false;
      final boolean replaced = myReplacedPaths.contains(path);
      // this piece: for copied-from (or moved) and further modified
      for (String addedPath : myAddedPaths) {
        String copyFromPath = myCopiedAddedPaths.get(addedPath);
        if ((copyFromPath != null) && (SVNPathUtil.isAncestor(addedPath, path))) {
          if (addedPath.length() < path.length()) {
            // The modified path lives under the copied directory; map it back
            // to the corresponding path under the copy source.
            final String relative = SVNPathUtil.getRelativePath(addedPath, path);
            copyFromPath = SVNPathUtil.append(copyFromPath, relative);
          }
          final ExternallyRenamedChange renamedChange = new ExternallyRenamedChange(myListsHolder.createRevisionLazily(copyFromPath, true),
                                                                                    myListsHolder.createRevisionLazily(path, false), copyFromPath);
          moveAndChange = true;
          renamedChange.getMoveRelativePath(myVcs.getProject());
          renamedChange.setIsReplaced(replaced);
          final ExternallyRenamedChange addedChange = copiedAddedChanges.get(myCopiedAddedPaths.get(addedPath));
          renamedChange.setCopied(addedChange != null && addedChange.isCopied());
          myListsHolder.add(path, renamedChange);
          break;
        }
      }
      if (! moveAndChange) {
        // Ordinary modification (or in-place replacement) with no rename involved.
        final ExternallyRenamedChange renamedChange =
            new ExternallyRenamedChange(myListsHolder.createRevisionLazily(path, true), myListsHolder.createRevisionLazily(path, false),
                                        null);
        renamedChange.setIsReplaced(replaced);
        renamedChange.setCopied(false);
        myListsHolder.add(path, renamedChange);
      }
    }
  }
/**
 * Rewrites the "before" paths of deleted/changed/replaced entries through the copy/rename map
 * ({@code myCopiedAddedPaths}), so they refer to the pre-rename locations.
 * Invoked from createLists() before changes are constructed from the path sets.
 */
private void correctBeforePaths() {
  processDeletedForBeforePaths(myDeletedPaths);
  processModifiedForBeforePaths(myChangedPaths);
  processModifiedForBeforePaths(myReplacedPaths);
}
// For every modified path whose "before" location differs after applying the copy/rename map,
// records the mapping (path -> converted "before" path) into myCopiedAddedPaths.
// NOTE(review): myCopiedAddedPaths is mutated while also being consulted by RenameHelper for
// subsequent iterations of this loop - presumably intended; verify ordering effects.
private void processModifiedForBeforePaths(Set<String> paths) {
  final RenameHelper helper = new RenameHelper();
  for (String s : paths) {
    final String converted = helper.convertBeforePath(s, myCopiedAddedPaths);
    if (! s.equals(converted)) {
      myCopiedAddedPaths.put(s, converted);
    }
  }
}
/**
 * Replaces every deleted path in {@code paths} with its pre-rename ("before") equivalent,
 * resolved through the copy/rename map. The set is rewritten in place.
 */
private void processDeletedForBeforePaths(Set<String> paths) {
  final RenameHelper helper = new RenameHelper();
  final Set<String> converted = new HashSet<>();
  for (String path : paths) {
    converted.add(helper.convertBeforePath(path, myCopiedAddedPaths));
  }
  paths.clear();
  paths.addAll(converted);
}
/** Resolves a repository-relative path to a local file path, or {@code null} if it is not under a working copy. */
@Nullable
private FilePath getLocalPath(final String path, final NotNullFunction<File, Boolean> detector) {
  return SvnRepositoryLocation.getLocalPath(myRepositoryRoot + path, detector, myVcs);
}
/** Revision number of the requested side: the preceding revision for "before", this list's revision otherwise. */
private long getRevision(final boolean isBeforeRevision) {
  if (isBeforeRevision) {
    return myRevision - 1;
  }
  return myRevision;
}
/** Repository location this change list was loaded from. */
public SvnRepositoryLocation getLocation() {
  return myLocation;
}
/**
 * Accumulates the {@link Change}s of this change list and tracks in which changes non-local
 * files live. Builds the plain list eagerly and, on demand via {@link #getDetailedList()},
 * a "detailed" list where unknown directory statuses are resolved remotely and the children
 * of deleted/renamed directories are added as individual changes.
 */
private class ChangesListCreationHelper {
  // All changes in registration order.
  private final List<Change> myList;
  // Repository-relative path -> change, for lookup by path.
  private final Map<String, Change> myPathToChangeMapping;
  // Lazily built copy of myList with remote details resolved; see getDetailedList().
  private List<Change> myDetailedList;
  // Slots (index into myList, isBeforeRevision) whose directory status is unknown and must
  // be resolved remotely in doRemoteDetails().
  private final List<Pair<Integer, Boolean>> myWithoutDirStatus;

  private ChangesListCreationHelper() {
    myList = new ArrayList<>();
    myWithoutDirStatus = new ArrayList<>();
    myPathToChangeMapping = new HashMap<>();
  }

  /** Registers a change for a repository-relative path, attaching the property-diff layer first. */
  public void add(final String path, final Change change) {
    patchChange(change, path);
    myList.add(change);
    myPathToChangeMapping.put(path, change);
  }

  public Change getByPath(final String path) {
    return myPathToChangeMapping.get(path);
  }

  /** Maps a full repository URL of a deleted item to its local path, or null when not under a working copy. */
  private FilePath localDeletedPath(@NotNull String fullPath, final boolean isDir) {
    final SvnFileUrlMapping urlMapping = myVcs.getSvnFileUrlMapping();
    final File file = urlMapping.getLocalPath(fullPath);
    if (file != null) {
      return VcsUtil.getFilePath(file.getAbsolutePath(), isDir || file.isDirectory());
    }
    return null;
  }

  /** Creates the revision for a deleted item; unknown directory status is deferred to doRemoteDetails(). */
  public SvnRepositoryContentRevision createDeletedItemRevision(final String path, final boolean isBeforeRevision) {
    final boolean knownAsDirectory = myKnownAsDirectories.contains(path);
    final String fullPath = myRepositoryRoot + path;
    if (! knownAsDirectory) {
      // directory status unknown -> remember this slot for later remote resolution
      myWithoutDirStatus.add(Pair.create(myList.size(), isBeforeRevision));
    }
    return SvnRepositoryContentRevision.create(myVcs, myRepositoryRoot, path, localDeletedPath(fullPath, knownAsDirectory),
                                               getRevision(isBeforeRevision));
  }

  /** Creates a content revision, deferring the directory-status decision when it is not already known. */
  public SvnRepositoryContentRevision createRevisionLazily(final String path, final boolean isBeforeRevision) {
    final boolean knownAsDirectory = myKnownAsDirectories.contains(path);
    final FilePath localPath = getLocalPath(path, file -> {
      if (knownAsDirectory) return Boolean.TRUE;
      // list will be next
      myWithoutDirStatus.add(new Pair<>(myList.size(), isBeforeRevision));
      return Boolean.FALSE;
    });
    long revision = getRevision(isBeforeRevision);
    return localPath == null
           ? SvnRepositoryContentRevision.createForRemotePath(myVcs, myRepositoryRoot, path, knownAsDirectory, revision)
           : SvnRepositoryContentRevision.create(myVcs, myRepositoryRoot, path, localPath, revision);
  }

  public List<Change> getList() {
    return myList;
  }

  /**
   * Lazily builds the detailed list: resolves deferred directory statuses, expands children of
   * deleted/renamed directories and removes duplicates. Errors are logged and the partially
   * detailed list is returned.
   */
  public List<Change> getDetailedList() {
    if (myDetailedList == null) {
      myDetailedList = new ArrayList<>(myList);
      try {
        doRemoteDetails();
        uploadDeletedRenamedChildren();
        ContainerUtil.removeDuplicates(myDetailedList);
      }
      catch (SVNException | VcsException e) {
        LOG.info(e);
      }
    }
    return myDetailedList;
  }

  /** Resolves the directory status of every deferred slot via a remote info call and replaces the change. */
  private void doRemoteDetails() throws SVNException, SvnBindException {
    for (Pair<Integer, Boolean> idxData : myWithoutDirStatus) {
      final Change sourceChange = myDetailedList.get(idxData.first.intValue());
      final SvnRepositoryContentRevision revision = (SvnRepositoryContentRevision)
        (idxData.second.booleanValue() ? sourceChange.getBeforeRevision() : sourceChange.getAfterRevision());
      if (revision == null) {
        continue;
      }
      // TODO: Logic with detecting "isDirectory" status is not clear enough. Why we can't just collect this info from logEntry and
      // TODO: if loading from disk - use cached values? Not to invoke separate call here.
      SVNRevision beforeRevision = SVNRevision.create(getRevision(idxData.second.booleanValue()));
      Info info = myVcs.getInfo(SvnUtil.createUrl(revision.getFullPath()), beforeRevision, beforeRevision);
      boolean isDirectory = info != null && info.isDirectory();
      Change replacingChange = new Change(createRevision((SvnRepositoryContentRevision)sourceChange.getBeforeRevision(), isDirectory),
                                          createRevision((SvnRepositoryContentRevision)sourceChange.getAfterRevision(), isDirectory));
      replacingChange.setIsReplaced(sourceChange.isIsReplaced());
      myDetailedList.set(idxData.first.intValue(), replacingChange);
    }
    myWithoutDirStatus.clear();
  }

  /** Rebuilds a revision with the now-known directory flag; null-safe. */
  @Nullable
  private SvnRepositoryContentRevision createRevision(final SvnRepositoryContentRevision previousRevision, final boolean isDir) {
    return previousRevision == null ? null :
           SvnRepositoryContentRevision.create(myVcs, previousRevision.getFullPath(),
                                               VcsUtil.getFilePath(previousRevision.getFile().getPath(), isDir),
                                               previousRevision.getRevisionNumber().getRevision().getNumber());
  }

  /** Appends changes for children of deleted/renamed directories not already present in the list. */
  private void uploadDeletedRenamedChildren() throws VcsException {
    Set<Pair<Boolean, String>> duplicates = collectDuplicates();
    List<Change> preprocessed = ChangesPreprocess.preprocessChangesRemoveDeletedForDuplicateMoved(myDetailedList);
    myDetailedList.addAll(collectDetails(preprocessed, duplicates));
  }

  /** Collects child changes for deleted directories, renamed-folder contents and replaced/moved directories. */
  private List<Change> collectDetails(@NotNull List<Change> changes, @NotNull Set<Pair<Boolean, String>> duplicates)
    throws VcsException {
    List<Change> result = ContainerUtil.newArrayList();
    for (Change change : changes) {
      // directory statuses are already uploaded
      if ((change.getAfterRevision() == null) && (change.getBeforeRevision().getFile().isDirectory())) {
        result.addAll(getChildrenAsChanges(change.getBeforeRevision(), true, duplicates));
      } else if ((change.getBeforeRevision() == null) && (change.getAfterRevision().getFile().isDirectory())) {
        // look for renamed folders contents
        if (myCopiedAddedPaths.containsKey(getRelativePath(change.getAfterRevision()))) {
          result.addAll(getChildrenAsChanges(change.getAfterRevision(), false, duplicates));
        }
      } else if ((change.isIsReplaced() || change.isMoved() || change.isRenamed()) && change.getAfterRevision().getFile().isDirectory()) {
        result.addAll(getChildrenAsChanges(change.getBeforeRevision(), true, duplicates));
        result.addAll(getChildrenAsChanges(change.getAfterRevision(), false, duplicates));
      }
    }
    return result;
  }

  /** Gathers (isBefore, relative path) pairs already present, so children are not added twice. */
  private Set<Pair<Boolean, String>> collectDuplicates() {
    Set<Pair<Boolean, String>> result = ContainerUtil.newHashSet();
    for (Change change : myDetailedList) {
      addDuplicate(result, true, change.getBeforeRevision());
      addDuplicate(result, false, change.getAfterRevision());
    }
    return result;
  }

  private void addDuplicate(@NotNull Set<Pair<Boolean, String>> duplicates,
                            boolean isBefore,
                            @Nullable ContentRevision revision) {
    if (revision != null) {
      duplicates.add(Pair.create(isBefore, getRelativePath(revision)));
    }
  }

  @NotNull
  private String getRelativePath(@NotNull ContentRevision revision) {
    return ((SvnRepositoryContentRevision)revision).getRelativePath(myRepositoryRoot);
  }

  /**
   * Lists the directory behind {@code contentRevision} recursively on the server and returns one
   * deletion/addition change per child that is not already covered by {@code duplicates}.
   */
  @NotNull
  private Collection<Change> getChildrenAsChanges(@NotNull ContentRevision contentRevision,
                                                  final boolean isBefore,
                                                  @NotNull final Set<Pair<Boolean, String>> duplicates)
    throws VcsException {
    final List<Change> result = new ArrayList<>();
    final String path = getRelativePath(contentRevision);
    SVNURL fullPath = SvnUtil.createUrl(((SvnRepositoryContentRevision)contentRevision).getFullPath());
    SVNRevision revisionNumber = SVNRevision.create(getRevision(isBefore));
    SvnTarget target = SvnTarget.fromURL(fullPath, revisionNumber);
    myVcs.getFactory(target).createBrowseClient().list(target, revisionNumber, Depth.INFINITY, entry -> {
      final String childPath = path + '/' + entry.getRelativePath();
      if (!duplicates.contains(Pair.create(isBefore, childPath))) {
        final ContentRevision contentRevision1 = createRevision(childPath, isBefore, entry.isDirectory());
        // children of a "before" directory become deletions, children of an "after" directory additions
        result.add(new Change(isBefore ? contentRevision1 : null, isBefore ? null : contentRevision1));
      }
    });
    return result;
  }

  private SvnRepositoryContentRevision createRevision(final String path, final boolean isBeforeRevision, final boolean isDir) {
    return SvnRepositoryContentRevision.create(myVcs, myRepositoryRoot, path,
                                               getLocalPath(path, new ConstantFunction<>(isDir)), getRevision(isBeforeRevision));
  }
}
/** Converts an "after" path to its original ("before") location by unwinding recorded renames. */
private static class RenameHelper {
  public String convertBeforePath(final String path, final TreeMap<String, String> after2before) {
    String result = path;
    // walk the rename keys in reverse (descending) order and rebase the path under each
    // ancestor that was renamed
    for (Map.Entry<String, String> rename : after2before.descendingMap().entrySet()) {
      if (SVNPathUtil.isAncestor(rename.getKey(), result)) {
        final String relative = SVNPathUtil.getRelativePath(rename.getKey(), result);
        result = SVNPathUtil.append(rename.getValue(), relative);
      }
    }
    return result;
  }
}
/**
 * Attaches the property-diff layer to a change: builds "was"/"became" URLs for the path
 * (using the rename origin for the "was" side when available) and registers an additional
 * property-layer change. Malformed URLs are logged and the change is left without the layer.
 */
private void patchChange(Change change, final String path) {
  SVNURL becameUrl;
  SVNURL wasUrl;
  try {
    becameUrl = SVNURL.parseURIEncoded(SVNPathUtil.append(myRepositoryRoot, path));
    wasUrl = becameUrl;
    if (change instanceof ExternallyRenamedChange && change.getBeforeRevision() != null) {
      final String originUrl = ((ExternallyRenamedChange)change).getOriginUrl();
      if (originUrl != null) {
        // the "before" side lived at the rename origin, not at the current path
        wasUrl = SVNURL.parseURIEncoded(SVNPathUtil.append(myRepositoryRoot, originUrl));
      }
    }
  }
  catch (SVNException e) {
    // nothing to do
    LOG.info(e);
    return;
  }
  final FilePath filePath = ChangesUtil.getFilePath(change);
  final Change additional = new Change(createPropertyRevision(filePath, change.getBeforeRevision(), wasUrl),
                                       createPropertyRevision(filePath, change.getAfterRevision(), becameUrl));
  change.addAdditionalLayerElement(SvnChangeProvider.PROPERTY_LAYER, additional);
}
/** Builds a lazily-loaded property revision for one side of a change; null side yields null. */
@Nullable
private SvnLazyPropertyContentRevision createPropertyRevision(@NotNull FilePath filePath,
                                                              @Nullable ContentRevision revision,
                                                              @NotNull SVNURL url) {
  if (revision == null) {
    return null;
  }
  return new SvnLazyPropertyContentRevision(myVcs, filePath, revision.getRevisionNumber(), url);
}
/** Change list name; the commit message is used as the name. */
@NotNull
public String getName() {
  return myMessage;
}
/** Commit message of this revision. */
public String getComment() {
  return myMessage;
}
/** Revision number of this change list. */
public long getNumber() {
  return myRevision;
}
// Branch name is not tracked for SVN committed change lists.
@Override
public String getBranch() {
  return null;
}
/** Owning VCS instance. */
public AbstractVcs getVcs() {
  return myVcs;
}
/** Returns the detailed change view (children of moved/renamed trees expanded), building the lists on first use. */
public Collection<Change> getChangesWithMovedTrees() {
  if (myListsHolder == null) {
    createLists();
  }
  return myListsHolder.getDetailedList();
}
// The description of a committed SVN change list may be edited (see setDescription).
@Override
public boolean isModifiable() {
  return true;
}
// Updates the in-memory commit message only; no repository round-trip happens here.
@Override
public void setDescription(String newMessage) {
  myMessage = newMessage;
}
/**
 * Equality is based on revision number, author, date and message - the same fields used by
 * {@link #hashCode()}.
 * Improvement: the verbose pre-Java-7 null-check chains are replaced with
 * {@code java.util.Objects.equals} (fully qualified to avoid touching the import section);
 * behavior is identical. {@code @Override} added.
 */
@Override
public boolean equals(final Object o) {
  if (this == o) return true;
  if (o == null || getClass() != o.getClass()) return false;

  final SvnChangeList that = (SvnChangeList)o;

  return myRevision == that.myRevision &&
         java.util.Objects.equals(myAuthor, that.myAuthor) &&
         java.util.Objects.equals(myDate, that.myDate) &&
         java.util.Objects.equals(myMessage, that.myMessage);
}
/** Hash over the same fields as {@link #equals(Object)}: revision, author, date, message. */
@Override
public int hashCode() {
  // Long.hashCode(v) == (int)(v ^ (v >>> 32)); Objects.hashCode(x) == (x != null ? x.hashCode() : 0)
  int result = Long.hashCode(myRevision);
  result = 31 * result + java.util.Objects.hashCode(myAuthor);
  result = 31 * result + java.util.Objects.hashCode(myDate);
  result = 31 * result + java.util.Objects.hashCode(myMessage);
  return result;
}
// Shown in UI lists: the commit message serves as the presentation.
public String toString() {
  return myMessage;
}
/**
 * Serializes this change list into the committed-changes cache.
 * Field order must stay in sync with {@link #readFromStream}.
 * NOTE(review): {@code writeUTF(myAuthor)} throws NPE for a null author - presumably never
 * persisted as null; confirm against the loading path.
 */
public void writeToStream(@NotNull DataOutput stream) throws IOException {
  stream.writeUTF(myRepositoryRoot);
  stream.writeLong(myRevision);
  stream.writeUTF(myAuthor);
  stream.writeLong(myDate.getTime());
  // the message may exceed writeUTF's byte limit -> truncated variant
  writeUTFTruncated(stream, myMessage);
  writeFiles(stream, myChangedPaths);
  writeFiles(stream, myAddedPaths);
  writeFiles(stream, myDeletedPaths);
  writeMap(stream, myCopiedAddedPaths);
  writeFiles(stream, myReplacedPaths);
  stream.writeInt(myKnownAsDirectories.size());
  for (String directory : myKnownAsDirectories) {
    stream.writeUTF(directory);
  }
}
// to be able to update plugin only
/**
 * Writes {@code text} via {@link DataOutput#writeUTF}, truncating long strings so the encoded
 * value stays under writeUTF's 65535-byte limit.
 */
public static void writeUTFTruncated(final DataOutput stream, final String text) throws IOException {
  // we should not compare number of symbols to 65635 -> it is number of bytes what should be compared
  // ? 4 bytes per symbol - rough estimation (16383 * 4 <= 65535, so the truncated prefix always fits)
  if (text.length() > 16383) {
    stream.writeUTF(text.substring(0, 16383));
  }
  else {
    stream.writeUTF(text);
  }
}
/** Writes a path set as: count, then that many UTF strings (mirrored by readFiles()). */
private static void writeFiles(final DataOutput stream, final Set<String> paths) throws IOException {
  stream.writeInt(paths.size());
  for (final String path : paths) {
    stream.writeUTF(path);
  }
}
/**
 * Deserializes this change list from the committed-changes cache; the inverse of
 * {@link #writeToStream}. The {@code supports*} flags gate sections that older cache
 * formats did not contain.
 */
private void readFromStream(@NotNull DataInput stream, final boolean supportsCopyFromInfo, final boolean supportsReplaced)
  throws IOException {
  myRepositoryRoot = stream.readUTF();
  setRevision(stream.readLong());
  myAuthor = stream.readUTF();
  myDate = new Date(stream.readLong());
  myMessage = stream.readUTF();
  readFiles(stream, myChangedPaths);
  readFiles(stream, myAddedPaths);
  readFiles(stream, myDeletedPaths);
  if (supportsCopyFromInfo) {
    readMap(stream, myCopiedAddedPaths);
  }
  if (supportsReplaced) {
    readFiles(stream, myReplacedPaths);
  }
  final int size = stream.readInt();
  for (int i = 0; i < size; i++) {
    myKnownAsDirectories.add(stream.readUTF());
  }
}
/** Writes a string map as: count, then key/value UTF pairs (mirrored by readMap()). */
private static void writeMap(final DataOutput stream, final Map<String, String> map) throws IOException {
  stream.writeInt(map.size());
  for (final Map.Entry<String, String> entry : map.entrySet()) {
    stream.writeUTF(entry.getKey());
    stream.writeUTF(entry.getValue());
  }
}
/** Inverse of writeMap(): reads count, then that many key/value UTF pairs into {@code map}. */
private static void readMap(final DataInput stream, final Map<String, String> map) throws IOException {
  final int size = stream.readInt();
  for (int index = 0; index < size; index++) {
    final String key = stream.readUTF();
    final String value = stream.readUTF();
    map.put(key, value);
  }
}
/** Inverse of writeFiles(): reads count, then that many UTF strings into {@code paths}. */
private static void readFiles(final DataInput stream, final Set<String> paths) throws IOException {
  final int size = stream.readInt();
  for (int index = 0; index < size; index++) {
    paths.add(stream.readUTF());
  }
}
/** Branch URL for this change list; computed lazily by {@link #updateCachedInfo()}. */
public SVNURL getBranchUrl() {
  ensureCacheUpdated();
  return myBranchUrl;
}
/** VCS root of the working copy containing the affected paths, or null if none is mapped. */
@Nullable
public VirtualFile getVcsRoot() {
  ensureCacheUpdated();
  return myWcRoot == null ? null : myWcRoot.getRoot();
}
/** Working-copy root directory as a virtual file, or null if none is mapped. */
@Nullable
public VirtualFile getRoot() {
  ensureCacheUpdated();
  return myWcRoot == null ? null : myWcRoot.getVirtualFile();
}
/** Full root/URL info of the containing working copy; may be null when the mapping is unknown. */
public RootUrlInfo getWcRootInfo() {
  ensureCacheUpdated();
  return myWcRoot;
}
// Lazily computes the branch URL / working-copy root once; see updateCachedInfo() for the
// case where the computation is retried later.
private void ensureCacheUpdated() {
  if (!myCachedInfoLoaded) {
    updateCachedInfo();
  }
}
/** Incrementally computes the common path ancestor of all values fed into {@link #next}. */
private static class CommonPathSearcher {
  private String myCommon;

  public void next(Iterable<String> values) {
    for (String value : values) {
      next(value);
    }
  }

  public void next(final String value) {
    if (value == null) {
      return;
    }
    if (myCommon == null) {
      // first value seen becomes the initial candidate
      myCommon = value;
    }
    else if (!value.startsWith(myCommon)) {
      // NOTE(review): startsWith is a raw string-prefix shortcut, not a path-component check
      // ("/a/b" matches "/a/bc") - presumably acceptable for the URLs used here; confirm.
      myCommon = SVNPathUtil.getCommonPathAncestor(myCommon, value);
    }
  }

  public String getCommon() {
    return myCommon;
  }
}
// Computes and caches the working-copy root and branch URL from the common ancestor of all
// affected paths. When the URL mapping is not yet initialized, resets myCachedInfoLoaded so a
// later call retries.
private void updateCachedInfo() {
  myCachedInfoLoaded = true;
  final String commonPath = myCommonPathSearcher.getCommon();
  if (commonPath != null) {
    final SvnFileUrlMapping urlMapping = myVcs.getSvnFileUrlMapping();
    if (urlMapping.isEmpty()) {
      // mappings not loaded yet - try again on the next access
      myCachedInfoLoaded = false;
      return;
    }
    final String absoluteUrl = SVNPathUtil.append(myRepositoryRoot, commonPath);
    myWcRoot = urlMapping.getWcRootForUrl(absoluteUrl);
    if (myWcRoot != null) {
      myBranchUrl = SvnUtil.getBranchForUrl(myVcs, myWcRoot.getVirtualFile(), absoluteUrl);
    }
  }
}
/** Drops the cached branch URL / working-copy root so they are recomputed on next access. */
public void forceReloadCachedInfo() {
  myCachedInfoLoaded = false;
  myBranchUrl = null;
  myWcRoot = null;
}
/** Union of added, deleted and changed repository-relative paths. */
@NotNull
public Set<String> getAffectedPaths() {
  return ContainerUtil.newHashSet(ContainerUtil.concat(myAddedPaths, myDeletedPaths, myChangedPaths));
}
/** Absolute filesystem path of the working-copy root, or null when no working copy is mapped. */
@Nullable
public String getWcPath() {
  final RootUrlInfo rootInfo = getWcRootInfo();
  return rootInfo == null ? null : rootInfo.getIoFile().getAbsolutePath();
}
/** True when every affected path of this change list lies under the given repository URL. */
public boolean allPathsUnder(final String path) {
  final String commonRelative = myCommonPathSearcher.getCommon();
  return commonRelative != null && SVNPathUtil.isAncestor(path, SVNPathUtil.append(myRepositoryRoot, commonRelative));
}
}
| |
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.openapi.externalSystem.util;
import com.intellij.execution.rmi.RemoteUtil;
import com.intellij.ide.highlighter.ArchiveFileType;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.*;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.externalSystem.ExternalSystemAutoImportAware;
import com.intellij.openapi.externalSystem.ExternalSystemManager;
import com.intellij.openapi.externalSystem.ExternalSystemModulePropertyManager;
import com.intellij.openapi.externalSystem.ExternalSystemUiAware;
import com.intellij.openapi.externalSystem.model.*;
import com.intellij.openapi.externalSystem.model.project.ExternalSystemSourceType;
import com.intellij.openapi.externalSystem.model.project.LibraryData;
import com.intellij.openapi.externalSystem.model.project.ModuleData;
import com.intellij.openapi.externalSystem.model.project.ProjectData;
import com.intellij.openapi.externalSystem.model.settings.ExternalSystemExecutionSettings;
import com.intellij.openapi.externalSystem.model.task.TaskData;
import com.intellij.openapi.externalSystem.service.project.ProjectDataManager;
import com.intellij.openapi.externalSystem.settings.AbstractExternalSystemLocalSettings;
import com.intellij.openapi.externalSystem.settings.AbstractExternalSystemSettings;
import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings;
import com.intellij.openapi.externalSystem.settings.ExternalSystemSettingsListener;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ExternalProjectSystemRegistry;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.ProjectModelExternalSource;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.BooleanFunction;
import com.intellij.util.NullableFunction;
import com.intellij.util.PathsList;
import com.intellij.util.SmartList;
import com.intellij.util.concurrency.EdtExecutorService;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.*;
import java.io.File;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.*;
import java.util.function.Consumer;
import java.util.function.Predicate;
public final class ExternalSystemApiUtil {
public static final @NotNull String PATH_SEPARATOR = "/";

/**
 * Orders objects by the value of the {@link Order} annotation found on their class, a superclass
 * or any (transitively) implemented interface; un-annotated objects sort as
 * {@link ExternalSystemConstants#UNORDERED}.
 */
public static final @NotNull Comparator<Object> ORDER_AWARE_COMPARATOR = new Comparator<>() {

  @Override
  public int compare(@NotNull Object o1, @NotNull Object o2) {
    int order1 = getOrder(o1);
    int order2 = getOrder(o2);
    return Integer.compare(order1, order2);
  }

  // Breadth-first search of the class hierarchy (superclass + interfaces) for @Order.
  private int getOrder(@NotNull Object o) {
    Queue<Class<?>> toCheck = new ArrayDeque<>();
    toCheck.add(o.getClass());
    while (!toCheck.isEmpty()) {
      Class<?> clazz = toCheck.poll();
      Order annotation = clazz.getAnnotation(Order.class);
      if (annotation != null) {
        return annotation.value();
      }
      Class<?> c = clazz.getSuperclass();
      if (c != null) {
        toCheck.add(c);
      }
      Class<?>[] interfaces = clazz.getInterfaces();
      Collections.addAll(toCheck, interfaces);
    }
    return ExternalSystemConstants.UNORDERED;
  }
};

// Groups data nodes by their key; used by group()/recursiveGroup().
private static final @NotNull NullableFunction<DataNode<?>, Key<?>> GROUPER = node -> node.getKey();
// Static utility class - not instantiable.
private ExternalSystemApiUtil() {
}
/**
 * Extracts a display name from a path/URL: trailing "/", "!" and ".jar" markers are stripped
 * first, then the last path segment is returned. The whole stripped path is returned when it
 * contains no separator (or the separator is its final character).
 */
public static @NotNull String extractNameFromPath(@NotNull String path) {
  final String stripped = stripPath(path);
  final int separatorIndex = stripped.lastIndexOf(PATH_SEPARATOR);
  if (separatorIndex < 0 || separatorIndex >= stripped.length() - 1) {
    return stripped;
  }
  return stripped.substring(separatorIndex + 1);
}
/**
 * Strips at most one of each trailing marker, in this fixed order: {@code "/"}, {@code "!"},
 * {@code ".jar"} (so {@code "foo.jar!/"} collapses to {@code "foo"}).
 * <p>
 * Fix: the previous StringBuilder-based implementation crashed on {@code ""} (and on {@code "/"},
 * which becomes empty after the first strip): for an empty buffer
 * {@code buffer.lastIndexOf(ending) == -1} equals {@code buffer.length() - ending.length() == -1},
 * so {@code setLength(-1)} threw StringIndexOutOfBoundsException. {@code endsWith} is safe for
 * short and empty inputs and is equivalent for all other cases.
 */
private static @NotNull String stripPath(@NotNull String path) {
  String result = path;
  for (String ending : new String[]{"/", "!", ".jar"}) {
    if (result.endsWith(ending)) {
      result = result.substring(0, result.length() - ending.length());
    }
  }
  return result;
}
/**
 * Returns the library's declared name, falling back to a name derived from the first non-empty
 * root URL of any root type. The {@code assert false} documents that a library is expected to
 * yield a name from one of those sources; "unknown-lib" is the production fallback when
 * assertions are disabled.
 */
public static @NotNull String getLibraryName(@NotNull Library library) {
  final String result = library.getName();
  if (result != null) {
    return result;
  }
  for (OrderRootType type : OrderRootType.getAllTypes()) {
    for (String url : library.getUrls(type)) {
      String candidate = extractNameFromPath(url);
      if (!StringUtil.isEmpty(candidate)) {
        return candidate;
      }
    }
  }
  assert false;
  return "unknown-lib";
}
/** True when the IDE library corresponds to the external-system library data (matched by name). */
public static boolean isRelated(@NotNull Library library, @NotNull LibraryData libraryData) {
  return getLibraryName(library).equals(libraryData.getInternalName());
}
/** A library belongs to the given external system when its name carries the "&lt;systemId&gt;: " prefix. */
public static boolean isExternalSystemLibrary(@NotNull Library library, @NotNull ProjectSystemId externalSystemId) {
  final String libraryName = library.getName();
  return libraryName != null && StringUtil.startsWithIgnoreCase(libraryName, externalSystemId.getId() + ": ");
}
/** Sorts the list in place by {@link Order} annotation values (see {@link #ORDER_AWARE_COMPARATOR}). */
public static void orderAwareSort(@NotNull List<?> data) {
  data.sort(ORDER_AWARE_COMPARATOR);
}
/**
 * @param path target path
 * @return path that points to the same location as the given one and that uses only slashes
 */
public static @NotNull String toCanonicalPath(@NotNull String path) {
  String p = normalizePath(path);
  // normalizePath returns null only for a null argument, which @NotNull rules out
  assert p != null;
  return FileUtil.toCanonicalPath(p);
}
/**
 * Local filesystem path for a virtual file. For archive files the path of the containing
 * jar on disk is returned instead of the in-archive path.
 */
public static @NotNull String getLocalFileSystemPath(@NotNull VirtualFile file) {
  if (FileTypeRegistry.getInstance().isFileOfType(file, ArchiveFileType.INSTANCE)) {
    final VirtualFile jar = JarFileSystem.getInstance().getVirtualFileForJar(file);
    if (jar != null) {
      return jar.getPath();
    }
  }
  return toCanonicalPath(file.getPath());
}
/** Finds the registered manager for the given external system id, or null if none matches. */
public static @Nullable ExternalSystemManager<?, ?, ?, ?, ?> getManager(@NotNull ProjectSystemId externalSystemId) {
  return ExternalSystemManager.EP_NAME.findFirstSafe(manager -> externalSystemId.equals(manager.getSystemId()));
}
/** All registered external system managers. */
public static @NotNull List<ExternalSystemManager<?, ?, ?, ?, ?>> getAllManagers() {
  return ExternalSystemManager.EP_NAME.getExtensionList();
}
/**
 * Groups the given nodes and all of their descendants by node key (breadth-first traversal,
 * level by level).
 * Improvement: {@code ArrayDeque} replaces {@code LinkedList} as the work queue - the idiomatic
 * and cheaper {@link Queue} implementation; behavior is unchanged (elements are non-null
 * child collections).
 */
public static MultiMap<Key<?>, DataNode<?>> recursiveGroup(@NotNull Collection<? extends DataNode<?>> nodes) {
  MultiMap<Key<?>, DataNode<?>> result = new ContainerUtil.KeyOrderedMultiMap<>();
  Queue<Collection<? extends DataNode<?>>> queue = new ArrayDeque<>();
  queue.add(nodes);
  while (!queue.isEmpty()) {
    Collection<? extends DataNode<?>> currentNodes = queue.remove();
    result.putAllValues(group(currentNodes));
    for (DataNode<?> node : currentNodes) {
      queue.add(node.getChildren());
    }
  }
  return result;
}
/** Groups the given nodes (non-recursively) by their key. */
public static @NotNull MultiMap<Key<?>, DataNode<?>> group(@NotNull Collection<? extends DataNode<?>> nodes) {
  return ContainerUtil.groupBy(nodes, GROUPER);
}
/** Groups nodes by their nearest parent node carrying data of the given class. */
public static @NotNull <K, V> MultiMap<DataNode<K>, DataNode<V>> groupBy(@NotNull Collection<? extends DataNode<V>> nodes, final Class<K> moduleDataClass) {
  return ContainerUtil.groupBy(nodes, node -> node.getParent(moduleDataClass));
}
/** Groups nodes by their associated node for the given key. */
public static @NotNull <K, V> MultiMap<DataNode<K>, DataNode<V>> groupBy(@NotNull Collection<? extends DataNode<V>> nodes, final @NotNull Key<K> key) {
  return ContainerUtil.groupBy(nodes, node -> node.getDataNode(key));
}
/** Collects the direct children of {@code node} carrying the given key. */
@SuppressWarnings("unchecked")
public static @NotNull <T> Collection<DataNode<T>> getChildren(@NotNull DataNode<?> node, @NotNull Key<T> key) {
  // The result list is allocated lazily so the common "no match" case returns the shared
  // immutable empty list without creating garbage.
  Collection<DataNode<T>> result = null;
  for (DataNode<?> child : node.getChildren()) {
    if (!key.equals(child.getKey())) {
      continue;
    }
    if (result == null) {
      result = new ArrayList<>();
    }
    result.add((DataNode<T>)child);
  }
  return result == null ? Collections.emptyList() : result;
}
/** Returns the first direct child of {@code node} carrying the given key, or null. */
@SuppressWarnings("unchecked")
public static @Nullable <T> DataNode<T> find(@NotNull DataNode<?> node, @NotNull Key<T> key) {
  for (DataNode<?> candidate : node.getChildren()) {
    if (!key.equals(candidate.getKey())) {
      continue;
    }
    return (DataNode<T>)candidate;
  }
  return null;
}
/** Returns the first direct child carrying the given key that also satisfies the predicate, or null. */
@SuppressWarnings("unchecked")
public static @Nullable <T> DataNode<T> find(@NotNull DataNode<?> node, @NotNull Key<T> key, BooleanFunction<? super DataNode<T>> predicate) {
  for (DataNode<?> child : node.getChildren()) {
    if (key.equals(child.getKey()) && predicate.fun((DataNode<T>)child)) {
      return (DataNode<T>)child;
    }
  }
  return null;
}
/** Returns the nearest ancestor of {@code node} carrying the given key, or null. */
public static @Nullable <T> DataNode<T> findParent(@NotNull DataNode<?> node, @NotNull Key<T> key) {
  return findParent(node, key, null);
}
/**
 * Returns the nearest ancestor of {@code node} carrying the given key and (when supplied)
 * satisfying the predicate, or null when no ancestor qualifies.
 */
@SuppressWarnings("unchecked")
public static @Nullable <T> DataNode<T> findParent(@NotNull DataNode<?> node,
                                                   @NotNull Key<T> key,
                                                   @Nullable BooleanFunction<? super DataNode<T>> predicate) {
  // iterative walk up the ancestor chain (equivalent to the recursive formulation)
  for (DataNode<?> ancestor = node.getParent(); ancestor != null; ancestor = ancestor.getParent()) {
    if (key.equals(ancestor.getKey()) && (predicate == null || predicate.fun((DataNode<T>)ancestor))) {
      return (DataNode<T>)ancestor;
    }
  }
  return null;
}
/** Alias of {@link #getChildren}: all direct children of {@code parent} carrying the given key. */
public static @NotNull <T> Collection<DataNode<T>> findAll(@NotNull DataNode<?> parent, @NotNull Key<T> key) {
  return getChildren(parent, key);
}
/** Null-safe visit: applies the consumer over the node subtree when the node is non-null. */
public static void visit(@Nullable DataNode<?> originalNode, @NotNull Consumer<? super DataNode<?>> consumer) {
  if (originalNode != null) {
    originalNode.visit(consumer);
  }
}
/** All descendants of {@code node} carrying the given key; empty for a null node. */
public static @NotNull <T> Collection<DataNode<T>> findAllRecursively(@Nullable DataNode<?> node, @NotNull Key<T> key) {
  if (node == null) {
    return Collections.emptyList();
  }
  // the key match guarantees the element type, so the raw cast is safe
  //noinspection unchecked
  return (Collection)findAllRecursively(node.getChildren(), it -> it.getKey().equals(key));
}
/** All given nodes plus their descendants (no filtering). */
public static @NotNull Collection<DataNode<?>> findAllRecursively(@NotNull Collection<? extends DataNode<?>> nodes) {
  return findAllRecursively(nodes, null);
}
/** Descendants of {@code node} matching the predicate (null predicate matches all); empty for a null node. */
public static @NotNull Collection<DataNode<?>> findAllRecursively(@Nullable DataNode<?> node,
                                                                  @Nullable Predicate<? super DataNode<?>> predicate) {
  if (node == null) return Collections.emptyList();
  return findAllRecursively(node.getChildren(), predicate);
}
/**
 * Collects nodes matching the predicate level by level: all matches among {@code nodes} first,
 * then (recursively) the matches among each node's children. The two separate loops preserve
 * this breadth-before-depth result order - do not merge them.
 */
public static @NotNull Collection<DataNode<?>> findAllRecursively(@NotNull Collection<? extends DataNode<?>> nodes,
                                                                  @Nullable Predicate<? super DataNode<?>> predicate) {
  List<DataNode<?>> result = new ArrayList<>();
  for (DataNode<?> node : nodes) {
    if (predicate == null || predicate.test(node)) {
      result.add(node);
    }
  }
  for (DataNode<?> node : nodes) {
    result.addAll(findAllRecursively(node.getChildren(), predicate));
  }
  return result;
}
/** First node in the subtree rooted at {@code parentNode} (inclusive, breadth-first) matching the predicate. */
public static @Nullable DataNode<?> findFirstRecursively(@NotNull DataNode<?> parentNode,
                                                         @NotNull Predicate<? super DataNode<?>> predicate) {
  Queue<DataNode<?>> queue = new LinkedList<>();
  queue.add(parentNode);
  return findInQueue(queue, predicate);
}
/** First node among {@code nodes} or their descendants (breadth-first) matching the predicate. */
public static @Nullable DataNode<?> findFirstRecursively(@NotNull Collection<? extends DataNode<?>> nodes,
                                                         @NotNull Predicate<? super DataNode<?>> predicate) {
  return findInQueue(new LinkedList<>(nodes), predicate);
}
// Breadth-first search driver: dequeues nodes, returns the first match, enqueues children.
private static @Nullable DataNode<?> findInQueue(@NotNull Queue<DataNode<?>> queue,
                                                 @NotNull Predicate<? super DataNode<?>> predicate) {
  while (!queue.isEmpty()) {
    DataNode<?> node = queue.remove();
    if (predicate.test(node)) {
      return node;
    }
    queue.addAll(node.getChildren());
  }
  return null;
}
/** Runs the project-change task synchronously on the EDT inside a write action. */
public static void executeProjectChangeAction(final @NotNull DisposeAwareProjectChange task) {
  executeProjectChangeAction(true, task);
}
/** Runs the project-change task on the EDT inside a write action, optionally waiting for completion. */
public static void executeProjectChangeAction(boolean synchronous, final @NotNull DisposeAwareProjectChange task) {
  if (!ApplicationManager.getApplication().isDispatchThread()) {
    // off the EDT the caller must be in a write-safe context, otherwise the dispatch may deadlock
    TransactionGuard.getInstance().assertWriteSafeContext(ModalityState.defaultModalityState());
  }
  executeOnEdt(synchronous, () -> ApplicationManager.getApplication().runWriteAction(task));
}
/**
 * Runs the task on the EDT: immediately when already there, otherwise via
 * {@code invokeAndWait} (synchronous) or {@code invokeLater} (asynchronous).
 */
public static void executeOnEdt(boolean synchronous, @NotNull Runnable task) {
  final Application application = ApplicationManager.getApplication();
  if (application.isDispatchThread()) {
    task.run();
  }
  else if (synchronous) {
    application.invokeAndWait(task);
  }
  else {
    application.invokeLater(task);
  }
}
/**
 * Computes the value on the EDT and blocks until it is available.
 * NOTE(review): unlike {@link #executeOnEdt(boolean, Runnable)} there is no explicit
 * on-EDT shortcut here - relies on invokeAndWait handling the dispatch-thread case; confirm.
 */
public static <T> T executeOnEdt(final @NotNull Computable<T> task) {
  final Application app = ApplicationManager.getApplication();
  final Ref<T> result = Ref.create();
  app.invokeAndWait(() -> result.set(task.compute()));
  return result.get();
}
/** Computes the value on the EDT inside a write action, blocking until done. */
public static <T> T doWriteAction(final @NotNull Computable<T> task) {
  return executeOnEdt(() -> ApplicationManager.getApplication().runWriteAction(task));
}
/** Runs the task on the EDT inside a write action, blocking until done. */
public static void doWriteAction(final @NotNull Runnable task) {
  executeOnEdt(true, () -> ApplicationManager.getApplication().runWriteAction(task));
}
/**
 * Schedules the runnable for execution on the Event Dispatch Thread:
 * in unit-test mode via {@code invokeLaterIfNeeded}; in headless mode or when already on the
 * EDT it runs immediately on the calling thread; otherwise it is queued on the EDT executor.
 *
 * @param runnable Runnable
 */
public static void addToInvokeLater(final Runnable runnable) {
  final Application application = ApplicationManager.getApplication();
  final boolean unitTestMode = application.isUnitTestMode();
  if (unitTestMode) {
    UIUtil.invokeLaterIfNeeded(runnable);
  }
  else if (application.isHeadlessEnvironment() || application.isDispatchThread()) {
    runnable.run();
  }
  else {
    EdtExecutorService.getInstance().execute(runnable);
  }
}
/**
 * Adds the classpath root containing the given message-bundle resource to {@code classPath}.
 * The dotted bundle path is converted to a resource path ("a.b.c" -> "/a/b/c.properties")
 * before the lookup.
 *
 * @deprecated there is no need to call this method since we don't put message bundles to separate resources_en.jar files (IDEA-255246)
 */
@Deprecated(forRemoval = true)
public static void addBundle(@NotNull PathsList classPath, @NotNull String bundlePath, @NotNull Class<?> contextClass) {
  String pathToUse = bundlePath.replace('.', '/');
  if (!pathToUse.endsWith(".properties")) {
    pathToUse += ".properties";
  }
  if (!pathToUse.startsWith("/")) {
    pathToUse = '/' + pathToUse;
  }
  String root = PathManager.getResourceRoot(contextClass, pathToUse);
  if (root != null) {
    classPath.add(root);
  }
}
/** Replaces backslashes with the external-system path separator; null-safe. */
public static @Nullable String normalizePath(@Nullable String s) {
  return s == null ? null : s.replace('\\', ExternalSystemConstants.PATH_SEPARATOR);
}
/**
* Allows to answer if given ide project has 1-1 mapping with the given external project, i.e. the ide project has been
* imported from external system and no other external projects have been added.
* <p/>
* This might be necessary in a situation when project-level setting is changed (e.g. project name). We don't want to rename
* ide project if it doesn't completely corresponds to the given ide project then.
*
* @param ideProject target ide project
* @param projectData target external project
* @param modules the list of modules to check (during import this contains uncommitted modules from the modifiable model)
* @return {@code true} if given ide project has 1-1 mapping to the given external project;
* {@code false} otherwise
*/
  public static boolean isOneToOneMapping(@NotNull Project ideProject, @NotNull ProjectData projectData, Module[] modules) {
    String linkedExternalProjectPath = null;
    for (ExternalSystemManager<?, ?, ?, ?, ?> manager : ExternalSystemManager.EP_NAME.getIterable()) {
      ProjectSystemId externalSystemId = manager.getSystemId();
      AbstractExternalSystemSettings systemSettings = getSettings(ideProject, externalSystemId);
      Collection projectsSettings = systemSettings.getLinkedProjectsSettings();
      int linkedProjectsNumber = projectsSettings.size();
      if (linkedProjectsNumber > 1) {
        // More than one external project of the same external system type is linked to the given ide project.
        return false;
      }
      else if (linkedProjectsNumber == 1) {
        if (linkedExternalProjectPath == null) {
          // First linked external project found so far - remember its path for the check below.
          linkedExternalProjectPath = ((ExternalProjectSettings)projectsSettings.iterator().next()).getExternalProjectPath();
        }
        else {
          // More than one external project of different external system types is linked to the current ide project.
          return false;
        }
      }
    }
    if (linkedExternalProjectPath != null && !linkedExternalProjectPath.equals(projectData.getLinkedExternalProjectPath())) {
      // New external project is being linked.
      return false;
    }
    // Every module must belong to the target external system, otherwise the mapping is not 1-1.
    for (Module module : modules) {
      if (!isExternalSystemAwareModule(projectData.getOwner(), module)) {
        return false;
      }
    }
    return true;
  }
public static @NotNull @NlsSafe String getProjectRepresentationName(@NotNull String targetProjectPath, @Nullable String rootProjectPath) {
if (rootProjectPath == null) {
File rootProjectDir = new File(targetProjectPath);
if (rootProjectDir.isFile()) {
rootProjectDir = rootProjectDir.getParentFile();
}
return rootProjectDir.getName();
}
File rootProjectDir = new File(rootProjectPath);
if (rootProjectDir.isFile()) {
rootProjectDir = rootProjectDir.getParentFile();
}
File targetProjectDir = new File(targetProjectPath);
if (targetProjectDir.isFile()) {
targetProjectDir = targetProjectDir.getParentFile();
}
StringBuilder buffer = new StringBuilder();
for (File f = targetProjectDir; f != null && !FileUtil.filesEqual(f, rootProjectDir); f = f.getParentFile()) {
buffer.insert(0, f.getName()).insert(0, ":");
}
buffer.insert(0, rootProjectDir.getName());
return buffer.toString();
}
/**
* There is a possible case that external project linked to an ide project is a multi-project, i.e. contains more than one
* module.
* <p/>
* This method tries to find root project's config path assuming that given path points to a sub-project's config path.
*
* @param externalProjectPath external sub-project's config path
* @param externalSystemId target external system
* @param project target ide project
* @return root external project's path if given path is considered to point to a known sub-project's config;
* {@code null} if it's not possible to find a root project's config path on the basis of the
* given path
*/
public static @Nullable String getRootProjectPath(@NotNull String externalProjectPath,
@NotNull ProjectSystemId externalSystemId,
@NotNull Project project) {
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(externalSystemId);
if (manager == null) {
return null;
}
if (manager instanceof ExternalSystemAutoImportAware) {
return ((ExternalSystemAutoImportAware)manager).getAffectedExternalProjectPath(externalProjectPath, project);
}
return null;
}
/**
* {@link RemoteUtil#unwrap(Throwable) unwraps} given exception if possible and builds error message for it.
*
* @param e exception to process
* @return error message for the given exception
*/
public static @NotNull @Nls String buildErrorMessage(@NotNull Throwable e) {
Throwable unwrapped = RemoteUtil.unwrap(e);
String reason = unwrapped.getLocalizedMessage();
if (!StringUtil.isEmpty(reason)) {
return reason;
}
else if (unwrapped.getClass() == ExternalSystemException.class) {
String originalReason = ((ExternalSystemException)unwrapped).getOriginalReason();
return ExternalSystemBundle.message("external.system.api.error.message.prefix", originalReason);
}
else {
return stacktraceAsString(unwrapped);
}
}
public static @NotNull @NlsSafe String stacktraceAsString(@NotNull Throwable throwable) {
Throwable unwrapped = RemoteUtil.unwrap(throwable);
StringWriter writer = new StringWriter();
unwrapped.printStackTrace(new PrintWriter(writer));
return writer.toString();
}
public static @NotNull AbstractExternalSystemSettings getSettings(@NotNull Project project, @NotNull ProjectSystemId externalSystemId)
throws IllegalArgumentException {
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(externalSystemId);
if (manager == null) {
throw new IllegalArgumentException(String.format(
"Can't retrieve external system settings for id '%s'. Reason: no such external system is registered",
externalSystemId.getReadableName()
));
}
return manager.getSettingsProvider().fun(project);
}
@SuppressWarnings("unchecked")
public static <S extends AbstractExternalSystemLocalSettings> S getLocalSettings(@NotNull Project project,
@NotNull ProjectSystemId externalSystemId)
throws IllegalArgumentException {
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(externalSystemId);
if (manager == null) {
throw new IllegalArgumentException(String.format(
"Can't retrieve local external system settings for id '%s'. Reason: no such external system is registered",
externalSystemId.getReadableName()
));
}
return (S)manager.getLocalSettingsProvider().fun(project);
}
@SuppressWarnings("unchecked")
public static <S extends ExternalSystemExecutionSettings> S getExecutionSettings(@NotNull Project project,
@NotNull String linkedProjectPath,
@NotNull ProjectSystemId externalSystemId)
throws IllegalArgumentException {
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(externalSystemId);
if (manager == null) {
throw new IllegalArgumentException(String.format(
"Can't retrieve external system execution settings for id '%s'. Reason: no such external system is registered",
externalSystemId.getReadableName()
));
}
return (S)manager.getExecutionSettingsProvider().fun(Pair.create(project, linkedProjectPath));
}
/**
* Historically we prefer to work with third-party api not from ide process but from dedicated slave process (there is a risk
* that third-party api has bugs which might make the whole ide process corrupted, e.g. a memory leak at the api might crash
* the whole ide process).
* <p/>
* However, we do allow to explicitly configure the ide to work with third-party external system api from the ide process.
* <p/>
* This method allows to check whether the ide is configured to use 'out of process' or 'in process' mode for the system.
*
* @param externalSystemId target external system
* @return {@code true} if the ide is configured to work with external system api from the ide process;
* {@code false} otherwise
*/
public static boolean isInProcessMode(ProjectSystemId externalSystemId) {
return Registry.is(externalSystemId.getId() + ExternalSystemConstants.USE_IN_PROCESS_COMMUNICATION_REGISTRY_KEY_SUFFIX, false);
}
public static ProjectModelExternalSource toExternalSource(@NotNull ProjectSystemId systemId) {
return ExternalProjectSystemRegistry.getInstance().getSourceById(systemId.getId());
}
@Contract(value = "_, null -> false", pure = true)
public static boolean isExternalSystemAwareModule(@NotNull ProjectSystemId systemId, @Nullable Module module) {
return module != null &&
!module.isDisposed() &&
systemId.getId().equals(ExternalSystemModulePropertyManager.getInstance(module).getExternalSystemId());
}
@Contract(value = "_, null -> false", pure = true)
public static boolean isExternalSystemAwareModule(@NotNull String systemId, @Nullable Module module) {
return module != null &&
!module.isDisposed() &&
systemId.equals(ExternalSystemModulePropertyManager.getInstance(module).getExternalSystemId());
}
@Contract(pure = true)
public static @Nullable String getExternalProjectPath(@Nullable Module module) {
return module != null && !module.isDisposed() ? ExternalSystemModulePropertyManager.getInstance(module).getLinkedProjectPath() : null;
}
@Contract(pure = true)
public static @Nullable String getExternalRootProjectPath(@Nullable Module module) {
return module != null && !module.isDisposed() ? ExternalSystemModulePropertyManager.getInstance(module).getRootProjectPath() : null;
}
@Contract(pure = true)
public static @Nullable String getExternalProjectId(@Nullable Module module) {
return module != null && !module.isDisposed() ? ExternalSystemModulePropertyManager.getInstance(module).getLinkedProjectId() : null;
}
@Contract(pure = true)
public static @Nullable String getExternalProjectGroup(@Nullable Module module) {
return module != null && !module.isDisposed() ? ExternalSystemModulePropertyManager.getInstance(module).getExternalModuleGroup() : null;
}
private static final ExtensionPointName<ExternalSystemContentRootContributor> ExternalSystemContentRootContributorEP =
ExtensionPointName.create("com.intellij.externalSystemContentRootContributor");
@Contract(pure = true)
public static @Nullable Collection<ExternalSystemContentRootContributor.@NotNull ExternalContentRoot> getExternalProjectContentRoots(
@NotNull Module module,
@NotNull Collection<@NotNull ExternalSystemSourceType> sourceTypes
) {
if (module.isDisposed()) return null;
String systemId = ExternalSystemModulePropertyManager.getInstance(module).getExternalSystemId();
if (systemId == null) return null;
ExternalSystemContentRootContributor contributor =
ExternalSystemContentRootContributorEP.findFirstSafe((c) -> c.isApplicable(systemId));
if (contributor == null) return null;
return contributor.findContentRoots(module, sourceTypes);
}
@Contract(pure = true)
public static @Nullable Collection<ExternalSystemContentRootContributor.@NotNull ExternalContentRoot> getExternalProjectContentRoots(
@NotNull Module module,
@NotNull ExternalSystemSourceType sourceType
) {
return getExternalProjectContentRoots(module, List.of(sourceType));
}
@Contract(pure = true)
public static @Nullable String getExternalProjectVersion(@Nullable Module module) {
return module != null && !module.isDisposed()
? ExternalSystemModulePropertyManager.getInstance(module).getExternalModuleVersion()
: null;
}
@Contract(pure = true)
public static @Nullable String getExternalModuleType(@Nullable Module module) {
return module != null && !module.isDisposed() ? ExternalSystemModulePropertyManager.getInstance(module).getExternalModuleType() : null;
}
public static void subscribe(@NotNull Project project,
@NotNull ProjectSystemId systemId,
@NotNull ExternalSystemSettingsListener listener) {
//noinspection unchecked
getSettings(project, systemId).subscribe(listener, project);
}
public static void subscribe(@NotNull Project project,
@NotNull ProjectSystemId systemId,
@NotNull ExternalSystemSettingsListener listener,
@NotNull Disposable parentDisposable) {
//noinspection unchecked
getSettings(project, systemId).subscribe(listener, parentDisposable);
}
public static @NotNull Collection<TaskData> findProjectTasks(@NotNull Project project,
@NotNull ProjectSystemId systemId,
@NotNull String projectPath) {
AbstractExternalSystemSettings settings = getSettings(project, systemId);
ExternalProjectSettings linkedProjectSettings = settings.getLinkedProjectSettings(projectPath);
if (linkedProjectSettings == null) return Collections.emptyList();
ExternalProjectInfo projectInfo = ContainerUtil.find(
ProjectDataManager.getInstance().getExternalProjectsData(project, systemId),
info -> FileUtil.pathsEqual(linkedProjectSettings.getExternalProjectPath(), info.getExternalProjectPath())
);
if (projectInfo == null) return Collections.emptyList();
DataNode<ProjectData> projectStructure = projectInfo.getExternalProjectStructure();
if (projectStructure == null) return Collections.emptyList();
List<TaskData> tasks = new SmartList<>();
DataNode<ModuleData> moduleDataNode = ContainerUtil.find(
findAll(projectStructure, ProjectKeys.MODULE),
moduleNode -> FileUtil.pathsEqual(projectPath, moduleNode.getData().getLinkedExternalProjectPath())
);
if (moduleDataNode == null) return Collections.emptyList();
findAll(moduleDataNode, ProjectKeys.TASK).stream().map(DataNode::getData).forEach(tasks::add);
return tasks;
}
@ApiStatus.Experimental
public static @Nullable DataNode<ProjectData> findProjectData(@NotNull Project project,
@NotNull ProjectSystemId systemId,
@NotNull String projectPath) {
ExternalProjectInfo projectInfo = findProjectInfo(project, systemId, projectPath);
if (projectInfo == null) return null;
return projectInfo.getExternalProjectStructure();
}
@ApiStatus.Experimental
public static @Nullable ExternalProjectInfo findProjectInfo(@NotNull Project project,
@NotNull ProjectSystemId systemId,
@NotNull String projectPath) {
AbstractExternalSystemSettings settings = getSettings(project, systemId);
ExternalProjectSettings linkedProjectSettings = settings.getLinkedProjectSettings(projectPath);
if (linkedProjectSettings == null) return null;
String rootProjectPath = linkedProjectSettings.getExternalProjectPath();
return ProjectDataManager.getInstance().getExternalProjectData(project, systemId, rootProjectPath);
}
public static @NotNull FileChooserDescriptor getExternalProjectConfigDescriptor(@NotNull ProjectSystemId systemId) {
ExternalSystemManager<?, ?, ?, ?, ?> manager = getManager(systemId);
if (manager instanceof ExternalSystemUiAware) {
ExternalSystemUiAware uiAware = ((ExternalSystemUiAware)manager);
FileChooserDescriptor descriptor = uiAware.getExternalProjectConfigDescriptor();
if (descriptor != null) {
return descriptor;
}
}
return FileChooserDescriptorFactory.createSingleLocalFileDescriptor();
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import com.facebook.buck.core.cell.TestCellPathResolver;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.rulekey.RuleKey;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder;
import com.facebook.buck.core.sourcepath.FakeSourcePath;
import com.facebook.buck.core.sourcepath.PathSourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolver;
import com.facebook.buck.core.sourcepath.resolver.impl.DefaultSourcePathResolver;
import com.facebook.buck.core.toolchain.tool.impl.HashedFileTool;
import com.facebook.buck.cxx.toolchain.DebugPathSanitizer;
import com.facebook.buck.cxx.toolchain.MungingDebugPathSanitizer;
import com.facebook.buck.cxx.toolchain.linker.GnuLinker;
import com.facebook.buck.cxx.toolchain.linker.Linker;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.args.SanitizedArg;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.rules.keys.DefaultRuleKeyFactory;
import com.facebook.buck.rules.keys.TestDefaultRuleKeyFactory;
import com.facebook.buck.testutil.FakeFileHashCache;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import org.junit.Test;
/**
 * Tests that {@link CxxLink} rule keys change when rule inputs (linker, output, flags)
 * change, and that debug-path sanitization keeps rule keys stable across workspaces.
 */
public class CxxLinkTest {
  private static final Path DEFAULT_OUTPUT = Paths.get("test.exe");
  private static final ImmutableList<Arg> DEFAULT_ARGS =
      ImmutableList.of(
          StringArg.of("-rpath"),
          StringArg.of("/lib"),
          StringArg.of("libc.a"),
          SourcePathArg.of(FakeSourcePath.of("a.o")),
          SourcePathArg.of(FakeSourcePath.of("b.o")),
          SourcePathArg.of(FakeSourcePath.of("libc.a")),
          StringArg.of("-L"),
          StringArg.of("/System/Libraries/libz.dynlib"),
          StringArg.of("-llibz.dylib"));
  private final ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
  private final Linker DEFAULT_LINKER =
      new GnuLinker(new HashedFileTool(PathSourcePath.of(projectFilesystem, Paths.get("ld"))));
  /** Hash contents shared by both tests; "different" is the stand-in for a changed input. */
  private static ImmutableMap<String, String> defaultHashes() {
    return ImmutableMap.of(
        "ld", Strings.repeat("0", 40),
        "a.o", Strings.repeat("a", 40),
        "b.o", Strings.repeat("b", 40),
        "libc.a", Strings.repeat("c", 40),
        "different", Strings.repeat("d", 40));
  }
  /** Builds a {@link CxxLink} rule with fixed defaults for everything not under test. */
  private static CxxLink createCxxLink(
      BuildTarget target,
      ProjectFilesystem filesystem,
      SourcePathRuleFinder ruleFinder,
      Linker linker,
      Path output,
      ImmutableList<Arg> args) {
    return new CxxLink(
        target,
        filesystem,
        ruleFinder,
        TestCellPathResolver.get(filesystem),
        linker,
        output,
        ImmutableMap.of(),
        args,
        Optional.empty(),
        Optional.empty(),
        /* cacheable */ true,
        /* thinLto */ false,
        /* fatLto */ false);
  }
  @Test
  public void testThatInputChangesCauseRuleKeyChanges() {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestActionGraphBuilder());
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    FakeFileHashCache hashCache = FakeFileHashCache.createFromStrings(defaultHashes());
    // Baseline rule key built entirely from defaults.
    RuleKey defaultRuleKey =
        new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder)
            .build(
                createCxxLink(
                    target,
                    projectFilesystem,
                    ruleFinder,
                    DEFAULT_LINKER,
                    DEFAULT_OUTPUT,
                    DEFAULT_ARGS));
    // Changing the linker tool must change the rule key.
    RuleKey linkerChange =
        new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder)
            .build(
                createCxxLink(
                    target,
                    projectFilesystem,
                    ruleFinder,
                    new GnuLinker(
                        new HashedFileTool(
                            PathSourcePath.of(projectFilesystem, Paths.get("different")))),
                    DEFAULT_OUTPUT,
                    DEFAULT_ARGS));
    assertNotEquals(defaultRuleKey, linkerChange);
    // Changing the output path must change the rule key.
    RuleKey outputChange =
        new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder)
            .build(
                createCxxLink(
                    target,
                    projectFilesystem,
                    ruleFinder,
                    DEFAULT_LINKER,
                    Paths.get("different"),
                    DEFAULT_ARGS));
    assertNotEquals(defaultRuleKey, outputChange);
    // Changing the linker flags must change the rule key.
    RuleKey flagsChange =
        new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder)
            .build(
                createCxxLink(
                    target,
                    projectFilesystem,
                    ruleFinder,
                    DEFAULT_LINKER,
                    DEFAULT_OUTPUT,
                    ImmutableList.of(SourcePathArg.of(FakeSourcePath.of("different")))));
    assertNotEquals(defaultRuleKey, flagsChange);
  }
  @Test
  public void sanitizedPathsInFlagsDoNotAffectRuleKey() {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestActionGraphBuilder());
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    DefaultRuleKeyFactory ruleKeyFactory =
        new TestDefaultRuleKeyFactory(
            FakeFileHashCache.createFromStrings(defaultHashes()), pathResolver, ruleFinder);
    // Two sanitizers that map different real paths onto the same sanitized symbol "A".
    int pathSize = 10;
    DebugPathSanitizer sanitizer1 =
        new MungingDebugPathSanitizer(
            pathSize,
            File.separatorChar,
            Paths.get("PWD"),
            ImmutableBiMap.of(Paths.get("something"), "A"));
    DebugPathSanitizer sanitizer2 =
        new MungingDebugPathSanitizer(
            pathSize,
            File.separatorChar,
            Paths.get("PWD"),
            ImmutableBiMap.of(Paths.get("different"), "A"));
    // First rule: a flag whose path sanitizes to the shared symbol.
    ImmutableList<Arg> args1 =
        ImmutableList.of(
            SanitizedArg.create(sanitizer1.sanitize(Optional.empty()), "-Lsomething/foo"));
    RuleKey ruleKey1 =
        ruleKeyFactory.build(
            createCxxLink(
                target, projectFilesystem, ruleFinder, DEFAULT_LINKER, DEFAULT_OUTPUT, args1));
    // Second rule: a different real path that sanitizes to the same symbol.
    ImmutableList<Arg> args2 =
        ImmutableList.of(
            SanitizedArg.create(sanitizer2.sanitize(Optional.empty()), "-Ldifferent/foo"));
    RuleKey ruleKey2 =
        ruleKeyFactory.build(
            createCxxLink(
                target, projectFilesystem, ruleFinder, DEFAULT_LINKER, DEFAULT_OUTPUT, args2));
    assertEquals(ruleKey1, ruleKey2);
  }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.backup;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase;
import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
import org.apache.hadoop.hbase.backup.impl.BackupAdminImpl;
import org.apache.hadoop.hbase.backup.impl.BackupManager;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
import org.apache.hadoop.hbase.backup.impl.FullTableBackupClient;
import org.apache.hadoop.hbase.backup.impl.IncrementalBackupManager;
import org.apache.hadoop.hbase.backup.impl.IncrementalTableBackupClient;
import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
import org.apache.hadoop.hbase.backup.util.BackupUtils;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.mapreduce.HadoopSecurityEnabledUserProviderForTesting;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.access.SecureTestUtil;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.junit.AfterClass;
import org.junit.Before;
/**
* This class is only a base for other integration-level backup tests. Do not add tests here.
 * TestBackupSmallTests is where tests that don't require bringing machines up/down should go. All
 * other tests should have their own classes and extend this one.
*/
public class TestBackupBase {
  private static final Log LOG = LogFactory.getLog(TestBackupBase.class);
  // Primary mini cluster; every backup test runs against it.
  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  // Second mini cluster, created in setUp() only when useSecondCluster is true.
  protected static HBaseTestingUtility TEST_UTIL2;
  protected static Configuration conf1 = TEST_UTIL.getConfiguration();
  protected static Configuration conf2;
  // Source tables populated and backed up by subclass tests.
  protected static TableName table1 = TableName.valueOf("table1");
  protected static HTableDescriptor table1Desc;
  protected static TableName table2 = TableName.valueOf("table2");
  protected static TableName table3 = TableName.valueOf("table3");
  protected static TableName table4 = TableName.valueOf("table4");
  // Restore targets, each in its own namespace.
  protected static TableName table1_restore = TableName.valueOf("ns1:table1_restore");
  protected static TableName table2_restore = TableName.valueOf("ns2:table2_restore");
  protected static TableName table3_restore = TableName.valueOf("ns3:table3_restore");
  protected static TableName table4_restore = TableName.valueOf("ns4:table4_restore");
  // Number of rows inserted per table by the data-population helpers.
  protected static final int NB_ROWS_IN_BATCH = 99;
  protected static final byte[] qualName = Bytes.toBytes("q1");
  protected static final byte[] famName = Bytes.toBytes("f");
  // Rewritten in setUp() to absolute fs.defaultFS-based paths.
  protected static String BACKUP_ROOT_DIR = "/backupUT";
  protected static String BACKUP_REMOTE_ROOT_DIR = "/backupUT";
  // WAL provider name used for the mini cluster configuration.
  protected static String provider = "defaultProvider";
  // Flags subclasses may flip BEFORE setUp() runs to alter cluster configuration.
  protected static boolean secure = false;
  protected static boolean autoRestoreOnFailure = true;
  protected static boolean setupIsDone = false;
  protected static boolean useSecondCluster = false;
  /**
   * Incremental backup client used by failure-injection tests: the execute() flow is the
   * same as the production client's, but calls failStageIf(...) at well-known checkpoints
   * so a test can abort the backup at a chosen stage (presumably failStageIf throws when
   * the configured failure stage matches — defined in the superclass; verify there).
   */
  static class IncrementalTableBackupClientForTest extends IncrementalTableBackupClient
  {
    public IncrementalTableBackupClientForTest() {
    }
    public IncrementalTableBackupClientForTest(Connection conn,
        String backupId, BackupRequest request) throws IOException {
      super(conn, backupId, request);
    }
    @Override
    public void execute() throws IOException
    {
      // case INCREMENTAL_COPY:
      try {
        // case PREPARE_INCREMENTAL:
        failStageIf(Stage.stage_0);
        beginBackup(backupManager, backupInfo);
        failStageIf(Stage.stage_1);
        backupInfo.setPhase(BackupPhase.PREPARE_INCREMENTAL);
        LOG.debug("For incremental backup, current table set is "
            + backupManager.getIncrementalBackupTableSet());
        newTimestamps = ((IncrementalBackupManager) backupManager).getIncrBackupLogFileMap();
        // copy out the table and region info files for each table
        BackupUtils.copyTableRegionInfo(conn, backupInfo, conf);
        // convert WAL to HFiles and copy them to .tmp under BACKUP_ROOT
        convertWALsToHFiles(backupInfo);
        incrementalCopyHFiles(backupInfo);
        failStageIf(Stage.stage_2);
        // Save list of WAL files copied
        backupManager.recordWALFiles(backupInfo.getIncrBackupFileList());
        // case INCR_BACKUP_COMPLETE:
        // set overall backup status: complete. Here we make sure to complete the backup.
        // After this checkpoint, even if entering cancel process, will let the backup finished
        // Set the previousTimestampMap which is before this current log roll to the manifest.
        HashMap<TableName, HashMap<String, Long>> previousTimestampMap =
            backupManager.readLogTimestampMap();
        backupInfo.setIncrTimestampMap(previousTimestampMap);
        // The table list in backupInfo is good for both full backup and incremental backup.
        // For incremental backup, it contains the incremental backup table set.
        backupManager.writeRegionServerLogTimestamp(backupInfo.getTables(), newTimestamps);
        failStageIf(Stage.stage_3);
        HashMap<TableName, HashMap<String, Long>> newTableSetTimestampMap =
            backupManager.readLogTimestampMap();
        // Persist the minimum of the per-RS log timestamps as the new backup start code.
        Long newStartCode =
            BackupUtils.getMinValue(BackupUtils.getRSLogTimestampMins(newTableSetTimestampMap));
        backupManager.writeBackupStartCode(newStartCode);
        handleBulkLoad(backupInfo.getTableNames());
        failStageIf(Stage.stage_4);
        // backup complete
        completeBackup(conn, backupInfo, backupManager, BackupType.INCREMENTAL, conf);
      } catch (Exception e) {
        // Mark the backup failed (cleanup + status update), then surface the failure to the caller.
        failBackup(conn, backupInfo, backupManager, e, "Unexpected Exception : ",
          BackupType.INCREMENTAL, conf);
        throw new IOException(e);
      }
    }
  }
  /**
   * Full backup client used by failure-injection tests: mirrors the production full-backup
   * flow but calls failStageIf(...) at checkpoints so a test can abort the backup at a
   * chosen stage (failStageIf is defined in the superclass — see there for semantics).
   */
  static class FullTableBackupClientForTest extends FullTableBackupClient
  {
    public FullTableBackupClientForTest() {
    }
    public FullTableBackupClientForTest(Connection conn, String backupId, BackupRequest request)
        throws IOException {
      super(conn, backupId, request);
    }
    @Override
    public void execute() throws IOException
    {
      // Get the stage ID to fail on
      try (Admin admin = conn.getAdmin();) {
        // Begin BACKUP
        beginBackup(backupManager, backupInfo);
        failStageIf(Stage.stage_0);
        String savedStartCode = null;
        boolean firstBackup = false;
        // do snapshot for full table backup
        savedStartCode = backupManager.readBackupStartCode();
        firstBackup = savedStartCode == null || Long.parseLong(savedStartCode) == 0L;
        if (firstBackup) {
          // This is our first backup. Let's put some marker to system table so that
          // we can hold the logs while we do the backup.
          backupManager.writeBackupStartCode(0L);
        }
        failStageIf(Stage.stage_1);
        // We roll log here before we do the snapshot. It is possible there is duplicate data
        // in the log that is already in the snapshot. But if we do it after the snapshot, we
        // could have data loss.
        // A better approach is to do the roll log on each RS in the same global procedure as
        // the snapshot.
        LOG.info("Execute roll log procedure for full backup ...");
        Map<String, String> props = new HashMap<String, String>();
        props.put("backupRoot", backupInfo.getBackupRootDir());
        admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
          LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);
        failStageIf(Stage.stage_2);
        newTimestamps = backupManager.readRegionServerLastLogRollResult();
        if (firstBackup) {
          // Updates registered log files
          // We record ALL old WAL files as registered, because
          // this is a first full backup in the system and these
          // files are not needed for next incremental backup
          List<String> logFiles = BackupUtils.getWALFilesOlderThan(conf, newTimestamps);
          backupManager.recordWALFiles(logFiles);
        }
        // SNAPSHOT_TABLES:
        backupInfo.setPhase(BackupPhase.SNAPSHOT);
        // Snapshot each table with a timestamped, namespace-qualified snapshot name.
        for (TableName tableName : tableList) {
          String snapshotName =
              "snapshot_" + Long.toString(EnvironmentEdgeManager.currentTime()) + "_"
                  + tableName.getNamespaceAsString() + "_" + tableName.getQualifierAsString();
          snapshotTable(admin, tableName, snapshotName);
          backupInfo.setSnapshotName(tableName, snapshotName);
        }
        failStageIf(Stage.stage_3);
        // SNAPSHOT_COPY:
        // do snapshot copy
        LOG.debug("snapshot copy for " + backupId);
        snapshotCopy(backupInfo);
        // Updates incremental backup table set
        backupManager.addIncrementalBackupTableSet(backupInfo.getTables());
        // BACKUP_COMPLETE:
        // set overall backup status: complete. Here we make sure to complete the backup.
        // After this checkpoint, even if entering cancel process, will let the backup finished
        backupInfo.setState(BackupState.COMPLETE);
        // The table list in backupInfo is good for both full backup and incremental backup.
        // For incremental backup, it contains the incremental backup table set.
        backupManager.writeRegionServerLogTimestamp(backupInfo.getTables(), newTimestamps);
        HashMap<TableName, HashMap<String, Long>> newTableSetTimestampMap =
            backupManager.readLogTimestampMap();
        // Persist the minimum of the per-RS log timestamps as the new backup start code.
        Long newStartCode =
            BackupUtils.getMinValue(BackupUtils
                .getRSLogTimestampMins(newTableSetTimestampMap));
        backupManager.writeBackupStartCode(newStartCode);
        failStageIf(Stage.stage_4);
        // backup complete
        completeBackup(conn, backupInfo, backupManager, BackupType.FULL, conf);
      } catch (Exception e) {
        // Optionally run failure cleanup (tests may disable it), then rethrow as IOException.
        if(autoRestoreOnFailure) {
          failBackup(conn, backupInfo, backupManager, e, "Unexpected BackupException : ",
            BackupType.FULL, conf);
        }
        throw new IOException(e);
      }
    }
  }
  /**
   * Brings up the test environment once per JVM (guarded by {@code setupIsDone}):
   * optional security, backup coprocessor/config, the mini HBase cluster, an
   * optional second cluster sharing ZooKeeper, the MR cluster, and test tables.
   *
   * @throws java.lang.Exception if cluster startup or table creation fails
   */
  @Before
  public void setUp() throws Exception {
    if (setupIsDone) {
      return;
    }
    if (secure) {
      // set the always on security provider
      UserProvider.setUserProviderForTesting(TEST_UTIL.getConfiguration(),
          HadoopSecurityEnabledUserProviderForTesting.class);
      // setup configuration
      SecureTestUtil.enableSecurity(TEST_UTIL.getConfiguration());
    }
    // Append the backup observer to any already-configured region coprocessors
    String coproc = conf1.get(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY);
    conf1.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, (coproc == null ? "" : coproc + ",") +
        BackupObserver.class.getName());
    conf1.setBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY, true);
    BackupManager.decorateMasterConfiguration(conf1);
    BackupManager.decorateRegionServerConfiguration(conf1);
    conf1.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
    // Set MultiWAL (with 2 default WAL files per RS)
    conf1.set(WALFactory.WAL_PROVIDER, provider);
    TEST_UTIL.startMiniCluster();
    if (useSecondCluster) {
      // The second cluster shares the first cluster's ZK, under a different znode parent
      conf2 = HBaseConfiguration.create(conf1);
      conf2.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/2");
      TEST_UTIL2 = new HBaseTestingUtility(conf2);
      TEST_UTIL2.setZkCluster(TEST_UTIL.getZkCluster());
      TEST_UTIL2.startMiniCluster();
    }
    conf1 = TEST_UTIL.getConfiguration();
    TEST_UTIL.startMiniMapReduceCluster();
    BACKUP_ROOT_DIR = TEST_UTIL.getConfiguration().get("fs.defaultFS") + "/backupUT";
    LOG.info("ROOTDIR " + BACKUP_ROOT_DIR);
    if (useSecondCluster) {
      BACKUP_REMOTE_ROOT_DIR = TEST_UTIL2.getConfiguration().get("fs.defaultFS") + "/backupUT";
      LOG.info("REMOTE ROOTDIR " + BACKUP_REMOTE_ROOT_DIR);
    }
    createTables();
    // Mirror the master's effective configuration so test clients see the same settings
    populateFromMasterConfig(TEST_UTIL.getHBaseCluster().getMaster().getConfiguration(), conf1);
    setupIsDone = true;
  }
private static void populateFromMasterConfig(Configuration masterConf, Configuration conf) {
Iterator<Entry<String, String>> it = masterConf.iterator();
while (it.hasNext()) {
Entry<String, String> e = it.next();
conf.set(e.getKey(), e.getValue());
}
}
/**
* @throws java.lang.Exception
*/
@AfterClass
public static void tearDown() throws Exception {
try{
SnapshotTestingUtils.deleteAllSnapshots(TEST_UTIL.getHBaseAdmin());
} catch (Exception e) {
}
SnapshotTestingUtils.deleteArchiveDirectory(TEST_UTIL);
if (useSecondCluster) {
TEST_UTIL2.shutdownMiniCluster();
}
TEST_UTIL.shutdownMiniCluster();
TEST_UTIL.shutdownMiniMapReduceCluster();
}
HTable insertIntoTable(Connection conn, TableName table, byte[] family, int id, int numRows)
throws IOException {
HTable t = (HTable) conn.getTable(table);
Put p1;
for (int i = 0; i < numRows; i++) {
p1 = new Put(Bytes.toBytes("row-" + table + "-" + id + "-" + i));
p1.addColumn(family, qualName, Bytes.toBytes("val" + i));
t.put(p1);
}
return t;
}
protected BackupRequest createBackupRequest(BackupType type,
List<TableName> tables, String path) {
BackupRequest.Builder builder = new BackupRequest.Builder();
BackupRequest request = builder.withBackupType(type)
.withTableList(tables)
.withTargetRootDir(path).build();
return request;
}
protected String backupTables(BackupType type, List<TableName> tables, String path)
throws IOException {
Connection conn = null;
BackupAdmin badmin = null;
String backupId;
try {
conn = ConnectionFactory.createConnection(conf1);
badmin = new BackupAdminImpl(conn);
BackupRequest request = createBackupRequest(type, tables, path);
backupId = badmin.backupTables(request);
} finally {
if (badmin != null) {
badmin.close();
}
if (conn != null) {
conn.close();
}
}
return backupId;
}
protected String fullTableBackup(List<TableName> tables) throws IOException {
return backupTables(BackupType.FULL, tables, BACKUP_ROOT_DIR);
}
protected String incrementalTableBackup(List<TableName> tables) throws IOException {
return backupTables(BackupType.INCREMENTAL, tables, BACKUP_ROOT_DIR);
}
protected static void loadTable(Table table) throws Exception {
Put p; // 100 + 1 row to t1_syncup
for (int i = 0; i < NB_ROWS_IN_BATCH; i++) {
p = new Put(Bytes.toBytes("row" + i));
p.setDurability(Durability.SKIP_WAL);
p.addColumn(famName, qualName, Bytes.toBytes("val" + i));
table.put(p);
}
}
protected static void createTables() throws Exception {
long tid = System.currentTimeMillis();
table1 = TableName.valueOf("ns1:test-" + tid);
HBaseAdmin ha = TEST_UTIL.getHBaseAdmin();
// Create namespaces
NamespaceDescriptor desc1 = NamespaceDescriptor.create("ns1").build();
NamespaceDescriptor desc2 = NamespaceDescriptor.create("ns2").build();
NamespaceDescriptor desc3 = NamespaceDescriptor.create("ns3").build();
NamespaceDescriptor desc4 = NamespaceDescriptor.create("ns4").build();
ha.createNamespace(desc1);
ha.createNamespace(desc2);
ha.createNamespace(desc3);
ha.createNamespace(desc4);
HTableDescriptor desc = new HTableDescriptor(table1);
HColumnDescriptor fam = new HColumnDescriptor(famName);
desc.addFamily(fam);
ha.createTable(desc);
table1Desc = desc;
Connection conn = ConnectionFactory.createConnection(conf1);
Table table = conn.getTable(table1);
loadTable(table);
table.close();
table2 = TableName.valueOf("ns2:test-" + tid + 1);
desc = new HTableDescriptor(table2);
desc.addFamily(fam);
ha.createTable(desc);
table = conn.getTable(table2);
loadTable(table);
table.close();
table3 = TableName.valueOf("ns3:test-" + tid + 2);
table = TEST_UTIL.createTable(table3, famName);
table.close();
table4 = TableName.valueOf("ns4:test-" + tid + 3);
table = TEST_UTIL.createTable(table4, famName);
table.close();
ha.close();
conn.close();
}
protected boolean checkSucceeded(String backupId) throws IOException {
BackupInfo status = getBackupInfo(backupId);
if (status == null) return false;
return status.getState() == BackupState.COMPLETE;
}
protected boolean checkFailed(String backupId) throws IOException {
BackupInfo status = getBackupInfo(backupId);
if (status == null) return false;
return status.getState() == BackupState.FAILED;
}
private BackupInfo getBackupInfo(String backupId) throws IOException {
try (BackupSystemTable table = new BackupSystemTable(TEST_UTIL.getConnection())) {
BackupInfo status = table.readBackupInfo(backupId);
return status;
}
}
protected BackupAdmin getBackupAdmin() throws IOException {
return new BackupAdminImpl(TEST_UTIL.getConnection());
}
/**
* Helper method
*/
protected List<TableName> toList(String... args) {
List<TableName> ret = new ArrayList<>();
for (int i = 0; i < args.length; i++) {
ret.add(TableName.valueOf(args[i]));
}
return ret;
}
protected void dumpBackupDir() throws IOException {
// Dump Backup Dir
FileSystem fs = FileSystem.get(conf1);
RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path(BACKUP_ROOT_DIR), true);
while (it.hasNext()) {
LOG.debug(it.next().getPath());
}
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.lookup.Lookup;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.diagnostic.LogEventException;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.lang.FileASTNode;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.RangeMarkerEx;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.DebugUtil;
import java.util.List;
/**
 * Internal consistency assertions used by the code-completion machinery.
 * Each check either returns silently or fails with enough attached context
 * (file text, AST, document text) to diagnose the problem from a log event.
 *
 * @author peter
 */
class CompletionAssertions {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.CompletionAssertions");

  /**
   * Asserts that {@code document} has been fully committed to {@code psiFile}:
   * the document must not be uncommitted and the PSI length must match the
   * document length. On failure throws a {@link LogEventException} carrying the
   * file text, the AST and the document text as attachments.
   */
  static void assertCommitSuccessful(Editor editor, PsiFile psiFile) {
    Document document = editor.getDocument();
    int docLength = document.getTextLength();
    int psiLength = psiFile.getTextLength();
    PsiDocumentManager manager = PsiDocumentManager.getInstance(psiFile.getProject());
    boolean committed = !manager.isUncommited(document);
    if (docLength == psiLength && committed) {
      return;
    }
    // Build a detailed diagnostic describing the document/PSI mismatch
    String message = "unsuccessful commit:";
    message += "\nmatching=" + (psiFile == manager.getPsiFile(document));
    message += "\ninjectedEditor=" + (editor instanceof EditorWindow);
    message += "\ninjectedFile=" + InjectedLanguageManager.getInstance(psiFile.getProject()).isInjectedFragment(psiFile);
    message += "\ncommitted=" + committed;
    message += "\nfile=" + psiFile.getName();
    message += "\nfile class=" + psiFile.getClass();
    message += "\nfile.valid=" + psiFile.isValid();
    message += "\nlanguage=" + psiFile.getLanguage();
    message += "\ndoc.length=" + docLength;
    message += "\npsiFile.length=" + psiLength;
    String fileText = psiFile.getText();
    if (fileText != null) {
      message += "\npsiFile.text.length=" + fileText.length();
    }
    FileASTNode node = psiFile.getNode();
    if (node != null) {
      message += "\nnode.length=" + node.getTextLength();
      String nodeText = node.getText();
      if (nodeText != null) {
        message += "\nnode.text.length=" + nodeText.length();
      }
    }
    message += "\n" + DebugUtil.currentStackTrace();
    throw new LogEventException("Commit unsuccessful", message,
                                new Attachment(psiFile.getViewProvider().getVirtualFile().getPath() + "_file.txt", fileText),
                                createAstAttachment(psiFile, psiFile),
                                new Attachment("docText.txt", document.getText()));
  }

  /** Asserts that an injected editor has not been invalidated. */
  static void checkEditorValid(Editor editor) {
    if (editor instanceof EditorWindow && !((EditorWindow)editor).isValid()) {
      throw new AssertionError();
    }
  }

  /** Builds an attachment holding the (PSI-to-string) syntactic tree of {@code fileCopy}. */
  private static Attachment createAstAttachment(PsiFile fileCopy, final PsiFile originalFile) {
    return new Attachment(originalFile.getViewProvider().getVirtualFile().getPath() + " syntactic tree.txt", DebugUtil.psiToString(fileCopy, false, true));
  }

  /** Builds an attachment holding the full text of {@code fileCopy}. */
  private static Attachment createFileTextAttachment(PsiFile fileCopy, final PsiFile originalFile) {
    return new Attachment(originalFile.getViewProvider().getVirtualFile().getPath(), fileCopy.getText());
  }

  /** Asserts that the completion start offset lies inside the context file. */
  static void assertFinalOffsets(PsiFile originalFile, CompletionContext context, PsiFile injected) {
    if (context.getStartOffset() >= context.file.getTextLength()) {
      String msg = "start outside the file; file=" + context.file + " " + context.file.getTextLength();
      msg += "; injected=" + (injected != null);
      msg += "; original " + originalFile + " " + originalFile.getTextLength();
      throw new AssertionError(msg);
    }
    assert context.getStartOffset() >= 0 : "start < 0";
  }

  /** Asserts that the host start offset falls within the host range of the injected fragment. */
  static void assertInjectedOffsets(int hostStartOffset,
                                    InjectedLanguageManager injectedLanguageManager,
                                    PsiFile injected,
                                    DocumentWindow documentWindow) {
    assert documentWindow != null : "no DocumentWindow for an injected fragment";
    TextRange host = injectedLanguageManager.injectedToHost(injected, injected.getTextRange());
    assert hostStartOffset >= host.getStartOffset() : "startOffset before injected";
    assert hostStartOffset <= host.getEndOffset() : "startOffset after injected";
  }

  /** Asserts that the host file copy is valid and the start offset is inside it. */
  static void assertHostInfo(PsiFile hostCopy, OffsetMap hostMap) {
    assert hostCopy.isValid() : "file became invalid: " + hostCopy.getClass();
    if (hostMap.getOffset(CompletionInitializationContext.START_OFFSET) >= hostCopy.getTextLength()) {
      throw new AssertionError("startOffset outside the host file: " + hostMap.getOffset(CompletionInitializationContext.START_OFFSET) + "; " + hostCopy);
    }
  }

  /**
   * Asserts that the PSI element at the insertion offset exists and that its
   * text/range are consistent with the file copy's text; on failure throws a
   * {@link LogEventException} with file-text and AST attachments.
   */
  static void assertCompletionPositionPsiConsistent(CompletionContext newContext,
                                                    int offset,
                                                    PsiFile fileCopy,
                                                    PsiFile originalFile, PsiElement insertedElement) {
    if (insertedElement == null) {
      throw new LogEventException("No element at insertion offset",
                                  "offset=" +
                                  newContext.getStartOffset() +
                                  "\n" +
                                  DebugUtil.currentStackTrace(),
                                  createFileTextAttachment(fileCopy, originalFile),
                                  createAstAttachment(fileCopy, originalFile));
    }
    if (fileCopy.findElementAt(offset) != insertedElement) {
      throw new AssertionError("wrong offset");
    }
    final TextRange range = insertedElement.getTextRange();
    String fileCopyText = fileCopy.getText();
    if ((range.getEndOffset() > fileCopyText.length()) || !range.substring(fileCopyText).equals(insertedElement.getText())) {
      throw new LogEventException("Inconsistent completion tree", "range=" + range + "\n" + DebugUtil.currentStackTrace(),
                                  createFileTextAttachment(fileCopy, originalFile), createAstAttachment(fileCopy, originalFile),
                                  new Attachment("Element at caret.txt", insertedElement.getText()));
    }
  }

  /**
   * An {@link InsertionContext} that tracks its tail offset with a greedy range
   * marker and records a stack trace plus the offending document event if a
   * document change ever invalidates that marker.
   */
  static class WatchingInsertionContext extends InsertionContext {
    private RangeMarkerEx tailWatcher;
    // Stack trace captured when the tail marker was invalidated, or null
    String invalidateTrace;
    // The document event that invalidated the tail marker, if any
    DocumentEvent killer;
    private RangeMarkerSpy spy;

    public WatchingInsertionContext(CompletionProgressIndicator indicator, char completionChar, List<LookupElement> items, Editor editor) {
      super(indicator.getOffsetMap(), completionChar, items.toArray(new LookupElement[items.size()]),
            indicator.getParameters().getOriginalFile(), editor,
            completionChar != Lookup.AUTO_INSERT_SELECT_CHAR && completionChar != Lookup.REPLACE_SELECT_CHAR &&
            completionChar != Lookup.NORMAL_SELECT_CHAR);
    }

    @Override
    public void setTailOffset(int offset) {
      super.setTailOffset(offset);
      watchTail(offset);
    }

    /** (Re)creates the greedy tail marker at {@code offset} and installs the spy listener. */
    private void watchTail(int offset) {
      stopWatching();
      tailWatcher = (RangeMarkerEx)getDocument().createRangeMarker(offset, offset);
      if (!tailWatcher.isValid()) {
        throw new AssertionError(getDocument() + "; offset=" + offset);
      }
      tailWatcher.setGreedyToRight(true);
      spy = new RangeMarkerSpy(tailWatcher) {
        @Override
        protected void invalidated(DocumentEvent e) {
          if (ApplicationManager.getApplication().isUnitTestMode()) {
            LOG.error("Tail offset invalidated, say thanks to the "+ e);
          }
          if (invalidateTrace == null) {
            invalidateTrace = DebugUtil.currentStackTrace();
            killer = e;
          }
        }
      };
      getDocument().addDocumentListener(spy);
    }

    /** Removes the spy listener and disposes the tail marker, if any. */
    void stopWatching() {
      if (tailWatcher != null) {
        getDocument().removeDocumentListener(spy);
        tailWatcher.dispose();
      }
    }

    @Override
    public int getTailOffset() {
      int offset = super.getTailOffset();
      // FIX: tailWatcher is only initialized by setTailOffset(); guard against an
      // NPE when the tail offset is queried before it was ever set.
      if (tailWatcher != null && tailWatcher.getStartOffset() != tailWatcher.getEndOffset() && offset > 0) {
        watchTail(offset);
      }
      return offset;
    }
  }
}
| |
/**
* Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.sesame.marketdata;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.core.value.MarketDataRequirementNames;
import com.opengamma.id.ExternalIdBundle;
/**
 * Key for requesting market data that has no associated metadata, only an ID.
 * <p>
 * This shouldn't be used unless absolutely necessary. Market data requested in this way can't be filtered
 * when running scenarios except by ID.
 *
 * @param <T> the type of the market data
 */
@BeanDefinition(builderScope = "private")
public final class RawId<T> implements MarketDataId, ImmutableBean {
  /** The ID of the market data. */
  @PropertyDefinition(validate = "notNull")
  private final ExternalIdBundle _id;
  /** The expected type of the market data. */
  @PropertyDefinition(validate = "notNull")
  private final Class<T> _marketDataType;
  // TODO this isn't right - there can be IDs for multiple market data providers in the bundle, need multiple field names
  /** The field name of the market data in the market data record. */
  @PropertyDefinition(validate = "notNull")
  private final FieldName _fieldName;
  /**
   * Creates a key for requesting the market value of an ID.
   * <p>
   * The field name {@link MarketDataRequirementNames#MARKET_VALUE} is used to look up the data in the record.
   *
   * @param id ID of the market data
   * @return a key for looking up the data
   */
  public static RawId<Double> of(ExternalIdBundle id) {
    return new RawId<>(id, Double.class, MarketDataUtils.MARKET_VALUE);
  }
  /**
   * Creates a key for requesting a numerical market data value for an ID.
   *
   * @param id ID of the market data
   * @param fieldName the field name of the required market data in the record
   * @return a key for looking up the data
   */
  public static RawId<Double> of(ExternalIdBundle id, FieldName fieldName) {
    return new RawId<>(id, Double.class, fieldName);
  }
  /**
   * Creates a key for requesting the market value of an ID.
   * <p>
   * The field name {@link MarketDataRequirementNames#MARKET_VALUE} is used to look up the data in the record.
   *
   * @param id ID of the market data
   * @param marketDataType the type of the market data
   * @return a key for looking up the data
   * @param <U> the type of the market data
   */
  public static <U> RawId<U> of(ExternalIdBundle id, Class<U> marketDataType) {
    return new RawId<>(id, marketDataType, MarketDataUtils.MARKET_VALUE);
  }
  /**
   * Creates a key for requesting a numerical market data value for an ID.
   *
   * @param id ID of the market data
   * @param fieldName the field name of the required market data in the record
   * @param marketDataType the type of the market data
   * @return a key for looking up the data
   * @param <U> the type of the market data
   */
  public static <U> RawId<U> of(ExternalIdBundle id, Class<U> marketDataType, FieldName fieldName) {
    return new RawId<>(id, marketDataType, fieldName);
  }
  // NOTE(review): everything between the AUTOGENERATED markers is emitted by the
  // Joda-Beans code generator -- regenerate with the bean generator rather than
  // editing it by hand.
  //------------------------- AUTOGENERATED START -------------------------
  ///CLOVER:OFF
  /**
   * The meta-bean for {@code RawId}.
   * @return the meta-bean, not null
   */
  @SuppressWarnings("rawtypes")
  public static RawId.Meta meta() {
    return RawId.Meta.INSTANCE;
  }
  /**
   * The meta-bean for {@code RawId}.
   * @param <R> the bean's generic type
   * @param cls the bean's generic type
   * @return the meta-bean, not null
   */
  @SuppressWarnings("unchecked")
  public static <R> RawId.Meta<R> metaRawId(Class<R> cls) {
    return RawId.Meta.INSTANCE;
  }
  static {
    JodaBeanUtils.registerMetaBean(RawId.Meta.INSTANCE);
  }
  private RawId(
      ExternalIdBundle id,
      Class<T> marketDataType,
      FieldName fieldName) {
    JodaBeanUtils.notNull(id, "id");
    JodaBeanUtils.notNull(marketDataType, "marketDataType");
    JodaBeanUtils.notNull(fieldName, "fieldName");
    this._id = id;
    this._marketDataType = marketDataType;
    this._fieldName = fieldName;
  }
  @SuppressWarnings("unchecked")
  @Override
  public RawId.Meta<T> metaBean() {
    return RawId.Meta.INSTANCE;
  }
  @Override
  public <R> Property<R> property(String propertyName) {
    return metaBean().<R>metaProperty(propertyName).createProperty(this);
  }
  @Override
  public Set<String> propertyNames() {
    return metaBean().metaPropertyMap().keySet();
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the ID of the market data.
   * @return the value of the property, not null
   */
  public ExternalIdBundle getId() {
    return _id;
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the expected type of the market data.
   * @return the value of the property, not null
   */
  public Class<T> getMarketDataType() {
    return _marketDataType;
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the field name of the market data in the market data record.
   * @return the value of the property, not null
   */
  public FieldName getFieldName() {
    return _fieldName;
  }
  //-----------------------------------------------------------------------
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      RawId<?> other = (RawId<?>) obj;
      return JodaBeanUtils.equal(getId(), other.getId()) &&
          JodaBeanUtils.equal(getMarketDataType(), other.getMarketDataType()) &&
          JodaBeanUtils.equal(getFieldName(), other.getFieldName());
    }
    return false;
  }
  @Override
  public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(getId());
    hash = hash * 31 + JodaBeanUtils.hashCode(getMarketDataType());
    hash = hash * 31 + JodaBeanUtils.hashCode(getFieldName());
    return hash;
  }
  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(128);
    buf.append("RawId{");
    buf.append("id").append('=').append(getId()).append(',').append(' ');
    buf.append("marketDataType").append('=').append(getMarketDataType()).append(',').append(' ');
    buf.append("fieldName").append('=').append(JodaBeanUtils.toString(getFieldName()));
    buf.append('}');
    return buf.toString();
  }
  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code RawId}.
   * @param <T> the type
   */
  public static final class Meta<T> extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    @SuppressWarnings("rawtypes")
    static final Meta INSTANCE = new Meta();
    /**
     * The meta-property for the {@code id} property.
     */
    private final MetaProperty<ExternalIdBundle> _id = DirectMetaProperty.ofImmutable(
        this, "id", RawId.class, ExternalIdBundle.class);
    /**
     * The meta-property for the {@code marketDataType} property.
     */
    @SuppressWarnings({"unchecked", "rawtypes" })
    private final MetaProperty<Class<T>> _marketDataType = DirectMetaProperty.ofImmutable(
        this, "marketDataType", RawId.class, (Class) Class.class);
    /**
     * The meta-property for the {@code fieldName} property.
     */
    private final MetaProperty<FieldName> _fieldName = DirectMetaProperty.ofImmutable(
        this, "fieldName", RawId.class, FieldName.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
        this, null,
        "id",
        "marketDataType",
        "fieldName");
    /**
     * Restricted constructor.
     */
    private Meta() {
    }
    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      switch (propertyName.hashCode()) {
        case 3355:  // id
          return _id;
        case 843057760:  // marketDataType
          return _marketDataType;
        case 1265009317:  // fieldName
          return _fieldName;
      }
      return super.metaPropertyGet(propertyName);
    }
    @Override
    public BeanBuilder<? extends RawId<T>> builder() {
      return new RawId.Builder<T>();
    }
    @SuppressWarnings({"unchecked", "rawtypes" })
    @Override
    public Class<? extends RawId<T>> beanType() {
      return (Class) RawId.class;
    }
    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return _metaPropertyMap$;
    }
    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code id} property.
     * @return the meta-property, not null
     */
    public MetaProperty<ExternalIdBundle> id() {
      return _id;
    }
    /**
     * The meta-property for the {@code marketDataType} property.
     * @return the meta-property, not null
     */
    public MetaProperty<Class<T>> marketDataType() {
      return _marketDataType;
    }
    /**
     * The meta-property for the {@code fieldName} property.
     * @return the meta-property, not null
     */
    public MetaProperty<FieldName> fieldName() {
      return _fieldName;
    }
    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case 3355:  // id
          return ((RawId<?>) bean).getId();
        case 843057760:  // marketDataType
          return ((RawId<?>) bean).getMarketDataType();
        case 1265009317:  // fieldName
          return ((RawId<?>) bean).getFieldName();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }
    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      metaProperty(propertyName);
      if (quiet) {
        return;
      }
      throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
    }
  }
  //-----------------------------------------------------------------------
  /**
   * The bean-builder for {@code RawId}.
   * @param <T> the type
   */
  private static final class Builder<T> extends DirectFieldsBeanBuilder<RawId<T>> {
    private ExternalIdBundle _id;
    private Class<T> _marketDataType;
    private FieldName _fieldName;
    /**
     * Restricted constructor.
     */
    private Builder() {
    }
    //-----------------------------------------------------------------------
    @Override
    public Object get(String propertyName) {
      switch (propertyName.hashCode()) {
        case 3355:  // id
          return _id;
        case 843057760:  // marketDataType
          return _marketDataType;
        case 1265009317:  // fieldName
          return _fieldName;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
    }
    @SuppressWarnings("unchecked")
    @Override
    public Builder<T> set(String propertyName, Object newValue) {
      switch (propertyName.hashCode()) {
        case 3355:  // id
          this._id = (ExternalIdBundle) newValue;
          break;
        case 843057760:  // marketDataType
          this._marketDataType = (Class<T>) newValue;
          break;
        case 1265009317:  // fieldName
          this._fieldName = (FieldName) newValue;
          break;
        default:
          throw new NoSuchElementException("Unknown property: " + propertyName);
      }
      return this;
    }
    @Override
    public Builder<T> set(MetaProperty<?> property, Object value) {
      super.set(property, value);
      return this;
    }
    @Override
    public Builder<T> setString(String propertyName, String value) {
      setString(meta().metaProperty(propertyName), value);
      return this;
    }
    @Override
    public Builder<T> setString(MetaProperty<?> property, String value) {
      super.setString(property, value);
      return this;
    }
    @Override
    public Builder<T> setAll(Map<String, ? extends Object> propertyValueMap) {
      super.setAll(propertyValueMap);
      return this;
    }
    @Override
    public RawId<T> build() {
      return new RawId<T>(
          _id,
          _marketDataType,
          _fieldName);
    }
    //-----------------------------------------------------------------------
    @Override
    public String toString() {
      StringBuilder buf = new StringBuilder(128);
      buf.append("RawId.Builder{");
      buf.append("id").append('=').append(JodaBeanUtils.toString(_id)).append(',').append(' ');
      buf.append("marketDataType").append('=').append(JodaBeanUtils.toString(_marketDataType)).append(',').append(' ');
      buf.append("fieldName").append('=').append(JodaBeanUtils.toString(_fieldName));
      buf.append('}');
      return buf.toString();
    }
  }
  ///CLOVER:ON
  //-------------------------- AUTOGENERATED END --------------------------
}
| |
package ecc._163.game;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Scanner;
import java.util.Timer;
import java.util.TimerTask;
import ecc._163.net.Communicator;
import ecc._163.net.MessageWriter;
/**
 * represents a room in which 163 may be played
 */
public class Room {
  /**
   * the value 163 -- the target number contestants must reach
   */
  final public static int ONE_SIXTY_THREE = 163;
  /**
   * the number of cards given to users to make 163
   */
  final public static int NUM_CARDS = 6;
  /**
   * the properties of this room, such as the room's name and ID
   */
  private RoomProperties m_properties;
  /**
   * the rules for this room
   */
  private RoomRules m_rules;
  /**
   * the number of rounds of 163 left to be played in this room
   */
  protected int m_roundsLeft = 0;
  /**
   * cards being used in the current round of 163
   */
  // NOTE(review): the literal 6 duplicates NUM_CARDS; keeping them in sync by hand is fragile
  final private int[] m_cards = new int[ 6 ];
  /**
   * the countdown timer for the currently running round of this room
   */
  private CountdownTimer m_timer;
  /**
   * the list of contestants participating in 163 in this room
   */
  final private ArrayList< Contestant > m_contestants = new ArrayList< Contestant >();
/**
* creates a room in which rounds of 163 may be played
*
* @param properties the properties of this room, such as name and id
* @param rules the rules of 163 pertaining to this room
*/
public Room( RoomProperties properties , RoomRules rules ) {
this.m_properties = properties;
this.m_rules = rules;
}
/**
* @return the properties of this room
*/
public RoomProperties getProperties() {
return this.m_properties;
}
/**
* adds a contestant to this room
*
* @param clientConnection a connection to the client
* @param contestantDisplayName the display name of the contestant
*/
public void addContestant( Communicator clientConnection , String contestantDisplayName ) {
this.m_contestants.add( new Contestant( clientConnection , contestantDisplayName ) );
this.m_properties.setNumPlayersJoined( this.m_contestants.size() );
}
/**
* removes a contestant from this room
*
* @param c the contestant to remove
*/
public void removeContestant( Contestant c ) {
this.m_contestants.remove( c );
this.m_properties.setNumPlayersJoined( this.m_contestants.size() );
}
/**
* starts the match of 163 for this room
*/
public void startMatch() {
//TODO notify users match is starting
for ( Contestant c : this.m_contestants ) {
MessageWriter.writeMatchStarting( c.getCommunicator() );
}
this.m_roundsLeft = this.m_rules.getNumRounds();
new BreakTimer( this.m_rules.getBreakDuration() ).start();
}
/**
* starts a new round of 163
*/
protected void startRound() {
System.out.println( "Round has started" );
//generate a problem
int randomSeed = ( int )( Math.random() * 100000 );
try {
Process runGenerator = Runtime.getRuntime().exec( "./a.out " + ONE_SIXTY_THREE + " " + NUM_CARDS + " " + randomSeed );
BufferedReader cardReader = new BufferedReader( new InputStreamReader( runGenerator.getInputStream() ) );
//read in the cards generated by the generator
for ( int i = 0 ; i < this.m_cards.length ; i++ ) {
this.m_cards [ i ] = Integer.parseInt( cardReader.readLine() );
}
//close inputs
cardReader.close();
} catch ( IOException e ) {
//TODO notify users of internal error
for ( Contestant c : this.m_contestants ) {
MessageWriter.writeInternalError( c.getCommunicator() );
}
return;
}
//TODO send the problem to all the players
for ( Contestant c : this.m_contestants ) {
MessageWriter.writeNewProblem( c.getCommunicator() , this.m_cards );
}
//start the timer
this.m_timer = new CountdownTimer( this.m_rules.getRoundDuration() );
this.m_timer.start();
//DEBUG
for ( int i = 0 ; i < this.m_cards.length ; i++ ) {
System.out.print( this.m_cards[ i ] + " ");
}
System.out.println();
//update number of rounds left
this.m_roundsLeft--;
}
/**
* ends a round of 163
*/
protected void endRound() {
//TODO notify users that round is over, no more solutions accepted
for ( Contestant c : this.m_contestants ) {
MessageWriter.writeRoundOver( c.getCommunicator() );
}
System.out.println( "Round is over" );
this.m_timer.cancel();
//pause briefly between rounds
if ( this.m_roundsLeft > 0 ) {
new BreakTimer( this.m_rules.getBreakDuration() ).start();
} else {
endMatch();
}
}
/**
* ends a match of 163
*/
protected void endMatch() {
System.out.println( "Match is over" );
//TODO notify users that match is over
for ( Contestant c : this.m_contestants ) {
MessageWriter.writeMatchOver( c.getCommunicator() );
}
//TODO update user ratings
}
/**
 * Checks a submitted solution to 163 by running the external checker and
 * broadcasts the verdict to every contestant in the room.
 *
 * @param submitter the contestant who submitted the solution
 * @param solution  the candidate solution expression, as typed by the user
 */
public void checkSolution( Contestant submitter , String solution ) {
    try {
        // build the space-separated list of cards that must be used
        StringBuilder cardList = new StringBuilder();
        for ( int i = 0 ; i < this.m_cards.length ; i++ ) {
            if ( i > 0 ) {
                cardList.append( ' ' );
            }
            cardList.append( this.m_cards[ i ] );
        }
        // SECURITY NOTE(review): the raw user-supplied solution is placed on a
        // command line. Runtime.exec(String) only tokenizes on whitespace (no
        // shell), but the checker must still treat its arguments as untrusted.
        Process runChecker = Runtime.getRuntime().exec( "java -jar checker.jar " + ONE_SIXTY_THREE + " " + NUM_CARDS + " " + cardList + " " + solution );
        runChecker.waitFor();
        // exit code 0 means the checker accepted the solution
        if ( runChecker.exitValue() == 0 ) {
            // notify all users of the valid solution, naming the submitter
            for ( Contestant c : this.m_contestants ) {
                MessageWriter.writeAnswerAccepted( c.getCommunicator() , submitter.getDisplayName() );
            }
            System.out.println( "ok" );
            // update contestant score and end the round
            submitter.incrementScore();
            endRound();
        } else {
            // notify all users of the failed solution; report the SUBMITTER's
            // name (previously each recipient was told their own name)
            for ( Contestant c : this.m_contestants ) {
                MessageWriter.writeAnswerRejected( c.getCommunicator() , submitter.getDisplayName() , solution );
            }
            System.out.println( "failed" );
        }
    } catch ( IOException e ) {
        // checker could not be launched - report an internal error
        for ( Contestant c : this.m_contestants ) {
            MessageWriter.writeInternalError( c.getCommunicator() );
        }
    } catch ( InterruptedException e ) {
        // restore the interrupt flag so callers can observe the interruption
        Thread.currentThread().interrupt();
        for ( Contestant c : this.m_contestants ) {
            MessageWriter.writeInternalError( c.getCommunicator() );
        }
    }
}
/**
 * Timer that ticks once per second and fires {@link #handleTimeout()} when
 * its countdown reaches 0.
 */
private class CountdownTimer extends Timer {
    /**
     * seconds left before the timer times out
     */
    protected int m_seconds;

    /**
     * creates a timer that ticks off a specified number of seconds
     * and performs an action upon timing out
     *
     * @param seconds number of seconds for the timer to count down
     */
    public CountdownTimer( int seconds ) {
        super();
        this.m_seconds = seconds;
    }

    /**
     * starts this timer; the first tick happens immediately and then
     * once per second
     */
    public void start() {
        TimerTask task = new TimerTask() {
            @Override
            public void run() {
                // if there are no seconds left, then this timer has timed out
                if ( CountdownTimer.this.m_seconds <= 0 ) {
                    CountdownTimer.this.handleTimeout();
                    CountdownTimer.this.cancel();
                    // FIX: stop here so the counter is not decremented past the
                    // timeout (getSecondsLeft() previously could go negative)
                    return;
                }
                CountdownTimer.this.m_seconds--;
            }
        };
        super.schedule( task , 0 , 1000 );
    }

    /**
     * @return the number of seconds left before this timer times out
     */
    //TODO Use this in determining rating
    @SuppressWarnings("unused")
    public int getSecondsLeft() {
        return this.m_seconds;
    }

    /**
     * performs a task once the timer ticks to 0; by default ends the
     * current round
     */
    public void handleTimeout() {
        endRound();
    }
}
/**
 * Countdown timer for the pause between successive rounds of 163; when the
 * break expires it starts the next round instead of ending one.
 */
private class BreakTimer extends CountdownTimer {
/**
 * creates a round-break timer that ticks down to 0 by the second
 * between rounds of 163
 *
 * @param seconds number of seconds of break before the next round starts
 */
public BreakTimer(int seconds) {
super(seconds);
}
/**
 * starts the next round once the break is over
 */
@Override
public void handleTimeout() {
startRound();
}
}
/**
 * Per-player state for a room: the client connection, the player's display
 * name, and the number of rounds won so far.
 */
private static class Contestant {
    /** the connection between the server and this player's client */
    private final Communicator m_connection;
    /** display name shown to the other players */
    private final String m_displayName;
    /** rounds of 163 won */
    private int m_score = 0;

    /**
     * creates the Contestant object for a player
     *
     * @param clientConnection the connection to the player's client
     * @param displayName      the display name of the player
     */
    public Contestant(Communicator clientConnection, String displayName) {
        this.m_connection = clientConnection;
        this.m_displayName = displayName;
    }

    /** @return the connection between the server and this contestant's client */
    public Communicator getCommunicator() {
        return this.m_connection;
    }

    /** @return the display name of this contestant */
    public String getDisplayName() {
        return this.m_displayName;
    }

    /** @return the number of rounds of 163 this contestant has won */
    //TODO use this in determining rating
    @SuppressWarnings("unused")
    public int getScore() {
        return this.m_score;
    }

    /** adds one to this contestant's score */
    public void incrementScore() {
        this.m_score++;
    }
}
//DEBUG harness: feeds candidate solutions from stdin to a throw-away room.
public static void main( String[] args ) {
    Room r = new Room( new RoomProperties( 1 , "Mickey's Room" ) , new RoomRules( 1 , 10 , 3 , 4 , false ) );
    r.startMatch();
    Scanner s = new Scanner( System.in );
    // FIX: guard with hasNextLine() so end-of-input terminates the loop
    // instead of throwing NoSuchElementException
    while ( s.hasNextLine() ) {
        r.checkSolution( new Contestant( new Communicator() , "test" ) , s.nextLine() );
    }
}//*/
}
| |
/*
* Copyright (c) 2015 WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.policy.mgt.core.mgt.impl;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.device.mgt.common.Feature;
import org.wso2.carbon.policy.mgt.common.FeatureManagementException;
import org.wso2.carbon.policy.mgt.common.Profile;
import org.wso2.carbon.policy.mgt.common.ProfileFeature;
import org.wso2.carbon.policy.mgt.core.dao.FeatureDAO;
import org.wso2.carbon.policy.mgt.core.dao.FeatureManagerDAOException;
import org.wso2.carbon.policy.mgt.core.dao.PolicyManagementDAOFactory;
import org.wso2.carbon.policy.mgt.core.dao.PolicyManagerDAOException;
import org.wso2.carbon.policy.mgt.core.mgt.FeatureManager;
import java.util.List;
/**
 * {@link FeatureManager} implementation backed by a {@link FeatureDAO}.
 * Every write operation runs in its own transaction; on any DAO failure the
 * transaction is rolled back and the error is wrapped in a
 * {@link FeatureManagementException}.
 */
public class FeatureManagerImpl implements FeatureManager {

    private static final Log log = LogFactory.getLog(FeatureManagerImpl.class);

    private final FeatureDAO featureDAO;

    public FeatureManagerImpl() {
        featureDAO = PolicyManagementDAOFactory.getFeatureDAO();
    }

    /**
     * Rolls back the current transaction, logging (but never propagating) a
     * failure of the rollback itself so the original error is preserved.
     */
    private void rollbackQuietly() {
        try {
            PolicyManagementDAOFactory.rollbackTransaction();
        } catch (PolicyManagerDAOException e) {
            log.warn("Unable to roll back the transaction", e);
        }
    }

    @Override
    public boolean deleteFeature(Feature feature) throws FeatureManagementException {
        boolean bool;
        try {
            PolicyManagementDAOFactory.beginTransaction();
            bool = featureDAO.deleteFeature(feature.getId());
            PolicyManagementDAOFactory.commitTransaction();
        } catch (FeatureManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while deleting the feature (" + feature.getName() + ")";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        } catch (PolicyManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while deleting the feature (" + feature.getName() + ") from database";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        }
        return bool;
    }

    @Override
    public ProfileFeature addProfileFeature(ProfileFeature feature, int profileId) throws FeatureManagementException {
        try {
            PolicyManagementDAOFactory.beginTransaction();
            feature = featureDAO.addProfileFeature(feature, profileId);
            PolicyManagementDAOFactory.commitTransaction();
        } catch (PolicyManagerDAOException e) {
            // FIX: this catch previously omitted the rollback that every other
            // write path performs
            rollbackQuietly();
            String msg = "Error occurred while adding profile feature (" +
                    feature.getFeatureCode() + " - " + profileId + ")";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        } catch (FeatureManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while adding profile feature (" +
                    feature.getFeatureCode() + " - " + profileId + ") to database.";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        }
        return feature;
    }

    @Override
    public ProfileFeature updateProfileFeature(ProfileFeature feature, int profileId) throws
            FeatureManagementException {
        try {
            PolicyManagementDAOFactory.beginTransaction();
            feature = featureDAO.updateProfileFeature(feature, profileId);
            PolicyManagementDAOFactory.commitTransaction();
        } catch (PolicyManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while updating feature (" +
                    feature.getFeatureCode() + " - " + profileId + ")";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        } catch (FeatureManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while updating feature (" +
                    feature.getFeatureCode() + " - " + profileId + ") in database.";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        }
        return feature;
    }

    @Override
    public List<ProfileFeature> addProfileFeatures(List<ProfileFeature> features, int profileId) throws
            FeatureManagementException {
        try {
            PolicyManagementDAOFactory.beginTransaction();
            features = featureDAO.addProfileFeatures(features, profileId);
            PolicyManagementDAOFactory.commitTransaction();
        } catch (FeatureManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while adding the features to profile id (" + profileId + ")";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        } catch (PolicyManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while adding the features to profile id (" + profileId + ") to the database";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        }
        return features;
    }

    @Override
    public List<ProfileFeature> updateProfileFeatures(List<ProfileFeature> features, int profileId) throws
            FeatureManagementException {
        try {
            PolicyManagementDAOFactory.beginTransaction();
            features = featureDAO.updateProfileFeatures(features, profileId);
            PolicyManagementDAOFactory.commitTransaction();
        } catch (FeatureManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while updating the features to profile id (" + profileId + ")";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        } catch (PolicyManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while updating the features to profile id (" + profileId + ") to the database";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        }
        return features;
    }

    /**
     * Read-only lookup; no transaction is required.
     */
    @Override
    public List<Feature> getAllFeatures(String deviceType) throws FeatureManagementException {
        try {
            return featureDAO.getAllFeatures(deviceType);
        } catch (FeatureManagerDAOException e) {
            String msg = "Error occurred while getting the features.";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        }
    }

    /**
     * Read-only lookup; no transaction is required.
     */
    @Override
    public List<ProfileFeature> getFeaturesForProfile(int profileId) throws FeatureManagementException {
        try {
            return featureDAO.getFeaturesForProfile(profileId);
        } catch (FeatureManagerDAOException e) {
            String msg = "Error occurred while getting the features.";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        }
    }

    @Override
    public boolean deleteFeature(int featureId) throws FeatureManagementException {
        boolean bool;
        try {
            PolicyManagementDAOFactory.beginTransaction();
            bool = featureDAO.deleteFeature(featureId);
            PolicyManagementDAOFactory.commitTransaction();
        } catch (FeatureManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while deleting the feature - id (" + featureId + ")";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        } catch (PolicyManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while deleting the feature - id (" + featureId + ") from database.";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        }
        return bool;
    }

    @Override
    public boolean deleteFeaturesOfProfile(Profile profile) throws FeatureManagementException {
        boolean bool;
        try {
            PolicyManagementDAOFactory.beginTransaction();
            bool = featureDAO.deleteFeaturesOfProfile(profile);
            PolicyManagementDAOFactory.commitTransaction();
        } catch (FeatureManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while deleting the feature of - profile (" + profile.getProfileName() + ")";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        } catch (PolicyManagerDAOException e) {
            rollbackQuietly();
            String msg = "Error occurred while deleting the feature of - profile (" +
                    profile.getProfileName() + ") from database";
            log.error(msg, e);
            throw new FeatureManagementException(msg, e);
        }
        return bool;
    }
}
| |
//*********************************************************
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//*********************************************************
package com.microsoft.kafkaavailability.threads;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.SlidingWindowReservoir;
import com.google.gson.Gson;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import com.microsoft.kafkaavailability.*;
import com.microsoft.kafkaavailability.discovery.CommonUtils;
import com.microsoft.kafkaavailability.metrics.AvailabilityGauge;
import com.microsoft.kafkaavailability.metrics.MetricNameEncoded;
import com.microsoft.kafkaavailability.metrics.MetricNameEncodedFactory;
import com.microsoft.kafkaavailability.reporters.ScheduledReporterCollector;
import com.microsoft.kafkaavailability.properties.AppProperties;
import com.microsoft.kafkaavailability.properties.MetaDataManagerProperties;
import com.microsoft.kafkaavailability.properties.ProducerProperties;
import kafka.javaapi.TopicMetadata;
import org.apache.curator.framework.CuratorFramework;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Phaser;
import static com.microsoft.kafkaavailability.discovery.Constants.DEFAULT_ELAPSED_TIME;
/**
 * Periodically measures Kafka availability and latency by producing canary
 * messages to the whitelisted topics through the GTM and VIP endpoints, and
 * publishes the results as metrics. Runs until the phaser is terminated.
 */
public class AvailabilityThread implements Callable<Long> {
    final static Logger m_logger = LoggerFactory.getLogger(AvailabilityThread.class);
    // Gson instances are thread-safe and stateless; reuse one instead of
    // allocating a new one per metric-name serialization.
    private static final Gson GSON = new Gson();
    private final ScheduledReporterCollector reporterCollector;
    private final CuratorFramework m_curatorFramework;
    private final AppProperties appProperties;
    private final MetricNameEncodedFactory metricNameFactory;
    private Phaser m_phaser;
    private long m_threadSleepTime;

    @Inject
    public AvailabilityThread(CuratorFramework curatorFramework, ScheduledReporterCollector reporterCollector,
                              AppProperties appProperties, MetricNameEncodedFactory metricNameFactory,
                              @Assisted Phaser phaser, @Assisted long threadSleepTime) {
        this.m_curatorFramework = curatorFramework;
        this.reporterCollector = reporterCollector;
        this.appProperties = appProperties;
        this.metricNameFactory = metricNameFactory;
        this.m_phaser = phaser;
        //this.m_phaser.register(); //Registers/Add a new unArrived party to this phaser.
        //CommonUtils.dumpPhaserState("After register", phaser);
        this.m_threadSleepTime = threadSleepTime;
    }

    /**
     * Runs availability checks in a loop until the phaser terminates,
     * sleeping out the remainder of each reporting interval.
     *
     * @return elapsed milliseconds of the last cycle
     */
    @Override
    public Long call() throws Exception {
        final int sleepDuration = 1000;
        long elapsedTime = 0L;
        do {
            long lStartTime = System.currentTimeMillis();
            MetricRegistry metrics;
            m_logger.info(Thread.currentThread().getName() +
                    " - Availability party has arrived and is working in "
                    + "Phase-" + m_phaser.getPhase());
            try {
                metrics = reporterCollector.getRegistry();
                runAvailability(metrics);
            } catch (Exception e) {
                m_logger.error(e.getMessage(), e);
            } finally {
                try {
                    CommonUtils.sleep(1000);
                } catch (Exception e) {
                    m_logger.error(e.getMessage(), e);
                }
            }
            elapsedTime = CommonUtils.stopWatch(lStartTime);
            m_logger.info("Availability Elapsed: " + elapsedTime + " milliseconds.");
            // wait out the rest of the interval, waking periodically so phaser
            // termination is noticed promptly
            while (elapsedTime < m_threadSleepTime && !m_phaser.isTerminated()) {
                try {
                    // FIX: Thread.sleep is static; call it directly rather than
                    // through the current-thread instance
                    Thread.sleep(sleepDuration);
                    elapsedTime += sleepDuration;
                } catch (InterruptedException ie) {
                    m_logger.error(ie.getMessage(), ie);
                    // FIX: restore the interrupt flag and stop waiting so the
                    // outer loop can observe the interruption/termination
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        } while (!m_phaser.isTerminated());
        m_logger.info("AvailabilityThread (run()) has been COMPLETED.");
        return Long.valueOf(elapsedTime);
    }

    /**
     * Performs one availability pass: loads producer/metadata properties,
     * selects the canary (whitelisted) topics, and posts canary messages via
     * both the GTM and VIP endpoint lists.
     */
    private void runAvailability(MetricRegistry metrics) throws IOException, MetaDataManagerException {
        m_logger.info("Starting AvailabilityLatency");
        IPropertiesManager producerPropertiesManager = new PropertiesManager<ProducerProperties>("producerProperties.json", ProducerProperties.class);
        IPropertiesManager metaDataPropertiesManager = new PropertiesManager<MetaDataManagerProperties>("metadatamanagerProperties.json", MetaDataManagerProperties.class);
        IMetaDataManager metaDataManager = new MetaDataManager(m_curatorFramework, metaDataPropertiesManager);
        MetaDataManagerProperties metaDataProperties = (MetaDataManagerProperties) metaDataPropertiesManager.getProperties();
        IProducer producer = new Producer(producerPropertiesManager, metaDataManager);
        //This is full list of topics
        List<TopicMetadata> totalTopicMetadata = metaDataManager.getAllTopicPartition();
        // keep only the canary test topics, comparing names case-insensitively
        List<kafka.javaapi.TopicMetadata> whiteListTopicMetadata = new ArrayList<TopicMetadata>();
        for (kafka.javaapi.TopicMetadata topic : totalTopicMetadata) {
            for (String whiteListTopic : metaDataProperties.canaryTestTopics) {
                if (topic.topic().equalsIgnoreCase(whiteListTopic)) {
                    whiteListTopicMetadata.add(topic);
                }
            }
        }
        List<String> gtmList = new ArrayList<String>();
        if (!appProperties.kafkaGTMIP.isEmpty()) {
            gtmList.addAll(appProperties.kafkaGTMIP);
        }
        List<String> vipList = new ArrayList<String>();
        if (!appProperties.kafkaIP.isEmpty()) {
            vipList.addAll(appProperties.kafkaIP);
        }
        postData("KafkaGTMIP", metrics, producer, whiteListTopicMetadata, gtmList,
                appProperties.reportKafkaGTMAvailability, appProperties.sendGTMAvailabilityLatency,
                appProperties.useCertificateToConnectToKafkaGTM, appProperties.keyStoreFilePath,
                appProperties.keyStoreFilePassword);
        postData("KafkaIP", metrics, producer, whiteListTopicMetadata, vipList,
                appProperties.reportKafkaIPAvailability, appProperties.sendKafkaIPAvailabilityLatency,
                appProperties.useCertificateToConnectToKafkaIP, appProperties.keyStoreFilePath,
                appProperties.keyStoreFilePassword);
        ((MetaDataManager) metaDataManager).close();
        m_logger.info("Finished AvailabilityLatency");
    }

    /**
     * Sends canary messages to every whitelisted topic through each endpoint
     * in {@code gtmList}, recording per-endpoint latency histograms and an
     * availability gauge. An endpoint that fails {@code failureThreshold}
     * times for a topic is reported as fully failed.
     */
    private void postData(String name, MetricRegistry metrics, IProducer producer,
                          List<kafka.javaapi.TopicMetadata> whiteListTopicMetadata, List<String> gtmList,
                          boolean reportAvailability, boolean reportLatency, boolean useCertificateToConnect,
                          String keyStoreFilePath, String keyStoreFilePassword) {
        int numMessages = 100;
        long startTime, endTime;
        int failureThreshold = 10;
        // latency window sized to hold every message for every topic
        int windowSize = numMessages * ((whiteListTopicMetadata.size() > 0) ? (whiteListTopicMetadata.size()) : 1);
        m_logger.info("Starting " + name + " prop check." + reportAvailability);
        for (String gtm : gtmList) {
            int gtmIPStatusTryCount = 0;
            int gtmIPStatusFailCount = 0;
            String authority;
            try {
                URL url = new URL(gtm);
                authority = url.getAuthority();
            } catch (MalformedURLException e) {
                // not a URL - use the raw endpoint string as the metric tag
                authority = gtm;
            }
            final SlidingWindowReservoir gtmAvailabilityLatencyWindow = new SlidingWindowReservoir(windowSize);
            Histogram histogramGTMAvailabilityLatency = new Histogram(gtmAvailabilityLatencyWindow);
            MetricNameEncoded gtmAvailabilityLatency = metricNameFactory.createWithVIP(name + ".Availability.Latency", authority);
            if (!metrics.getNames().contains(GSON.toJson(gtmAvailabilityLatency))) {
                if (reportLatency && !gtmList.isEmpty()) {
                    metrics.register(GSON.toJson(gtmAvailabilityLatency), histogramGTMAvailabilityLatency);
                }
            }
            for (kafka.javaapi.TopicMetadata item : whiteListTopicMetadata) {
                m_logger.info("Posting to Topic: {} using : {};", item.topic(), gtm);
                int tryCount = 0, failCount = 0;
                for (int i = 0; i < numMessages; i++) {
                    if (reportAvailability) {
                        startTime = System.currentTimeMillis();
                        try {
                            tryCount++;
                            producer.sendCanaryToKafkaIP(gtm, item.topic(), useCertificateToConnect, keyStoreFilePath, keyStoreFilePassword);
                            endTime = System.currentTimeMillis();
                        } catch (Exception e) {
                            failCount++;
                            m_logger.error(name + " -- Error Writing to Topic: {} using : {}; Exception: {}", item.topic(), gtm, e);
                            // penalize a failed send with a fixed extra latency
                            endTime = System.currentTimeMillis() + DEFAULT_ELAPSED_TIME;
                        }
                        histogramGTMAvailabilityLatency.update(endTime - startTime);
                    }
                    // FIX: compare against the declared threshold instead of a
                    // duplicated literal (the log message already claims so)
                    if (failCount >= failureThreshold) {
                        m_logger.error(name + ": {} has failed more than {} times. Giving up!!!.", gtm, failureThreshold);
                        // report this endpoint/topic as fully failed (0/100)
                        tryCount = failCount = 100;
                        break;
                    }
                }
                gtmIPStatusTryCount = gtmIPStatusTryCount + tryCount;
                gtmIPStatusFailCount = gtmIPStatusFailCount + failCount;
            }
            if (reportAvailability && !gtmList.isEmpty()) {
                m_logger.info("About to report " + name + "Availability-- TryCount:" + gtmIPStatusTryCount + " FailCount:" + gtmIPStatusFailCount);
                MetricNameEncoded kafkaGTMIPAvailability = metricNameFactory.createWithVIP(name + ".Availability", authority);
                if (!metrics.getNames().contains(GSON.toJson(kafkaGTMIPAvailability))) {
                    metrics.register(GSON.toJson(kafkaGTMIPAvailability), new AvailabilityGauge(gtmIPStatusTryCount, gtmIPStatusTryCount - gtmIPStatusFailCount));
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.reflection;
import org.codehaus.groovy.GroovyBugError;
import org.codehaus.groovy.runtime.MetaClassHelper;
import org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation;
import org.codehaus.groovy.runtime.wrappers.Wrapper;
import java.lang.reflect.Array;
public class ParameterTypes {
protected volatile Class[] nativeParamTypes;
protected volatile CachedClass[] parameterTypes;
protected boolean isVargsMethod;
// No-arg form used by subclasses that supply their parameter types lazily
// through getPT().
public ParameterTypes() {
}
// Captures the native (reflection) parameter types; the CachedClass view is
// built lazily on first access.
public ParameterTypes(Class[] pt) {
nativeParamTypes = pt;
}
public ParameterTypes(String[] pt) {
nativeParamTypes = new Class[pt.length];
for (int i = 0; i != pt.length; ++i) {
try {
nativeParamTypes[i] = Class.forName(pt[i]);
} catch (ClassNotFoundException e) {
NoClassDefFoundError err = new NoClassDefFoundError();
err.initCause(e);
throw err;
}
}
}
// Parameter types are already cached; record them (and the vargs flag)
// eagerly.
public ParameterTypes(CachedClass[] parameterTypes) {
setParametersTypes(parameterTypes);
}
// Stores the cached parameter types and derives the vargs flag: the method is
// treated as vargs-capable when its last parameter is an array type.
protected final void setParametersTypes(CachedClass[] pt) {
this.parameterTypes = pt;
isVargsMethod = pt.length > 0 && pt[pt.length - 1].isArray;
}
/**
 * Returns the cached parameter types, building them on first use.
 * Initialization is delegated to a synchronized helper; the field is
 * volatile, so concurrent readers see a fully built array.
 */
public CachedClass[] getParameterTypes() {
if (parameterTypes == null) {
getParametersTypes0();
}
return parameterTypes;
}
// Lazily builds the CachedClass[] view of the parameter types.
// synchronized + re-check ensures only one thread performs initialization.
private synchronized void getParametersTypes0() {
if (parameterTypes != null)
return;
// fall back to getPT() (subclass hook) when native types were never supplied
Class[] npt = nativeParamTypes == null ? getPT() : nativeParamTypes;
if (npt.length == 0) {
nativeParamTypes = MetaClassHelper.EMPTY_TYPE_ARRAY;
setParametersTypes(CachedClass.EMPTY_ARRAY);
} else {
CachedClass[] pt = new CachedClass[npt.length];
for (int i = 0; i != npt.length; ++i)
pt[i] = ReflectionCache.getCachedClass(npt[i]);
// publish the native types before the cached view (both fields volatile)
nativeParamTypes = npt;
setParametersTypes(pt);
}
}
/**
 * Returns the native Class[] parameter types, deriving them from the
 * cached view (or from getPT()) on first use.
 */
public Class[] getNativeParameterTypes() {
if (nativeParamTypes == null) {
getNativeParameterTypes0();
}
return nativeParamTypes;
}
// Lazily derives the native Class[] from the CachedClass[] view when present,
// otherwise from the getPT() subclass hook. synchronized + re-check ensures
// the initialization runs at most once.
private synchronized void getNativeParameterTypes0() {
if (nativeParamTypes != null)
return;
Class[] npt;
if (parameterTypes != null) {
npt = new Class[parameterTypes.length];
for (int i = 0; i != parameterTypes.length; ++i) {
npt[i] = parameterTypes[i].getTheClass();
}
} else
npt = getPT();
nativeParamTypes = npt;
}
// Subclass hook that produces the native parameter types (e.g. from a
// reflected Method/Constructor); the base class has no source for them.
protected Class[] getPT() {
throw new UnsupportedOperationException(getClass().getName());
}
// True when the last declared parameter is an array type (vargs-capable).
public boolean isVargsMethod() {
return isVargsMethod;
}
/**
 * Decides whether the given call arguments need vargs treatment (i.e. the
 * trailing arguments must be wrapped into the vargs array) for this
 * parameter list. Assumes parameterTypes is already initialized (see the
 * note below).
 */
public boolean isVargsMethod(Object[] arguments) {
// Uncomment if at some point this method can be called before parameterTypes initialized
// getParameterTypes();
if (!isVargsMethod)
return false;
final int lenMinus1 = parameterTypes.length - 1;
// -1 because the varg part is optional
if (lenMinus1 == arguments.length) return true;
if (lenMinus1 > arguments.length) return false;
if (arguments.length > parameterTypes.length) return true;
// only case left is arguments.length == parameterTypes.length;
// vargs wrapping is needed unless the last argument already is exactly
// the vargs array type (or null, which is passed through as-is)
Object last = arguments[arguments.length - 1];
if (last == null) return true;
Class clazz = last.getClass();
return !clazz.equals(parameterTypes[lenMinus1].getTheClass());
}
/**
 * Normalizes the argument array for this parameter list (filling or
 * wrapping the vargs part as needed) and then coerces every non-null
 * argument to its declared parameter type, in place.
 *
 * @param argumentArray the raw call arguments; may be null
 * @return the corrected argument array with each element coerced
 */
public final Object[] coerceArgumentsToClasses(Object[] argumentArray) {
    // correctArguments() goes through getParameterTypes(), so parameterTypes
    // is initialized by the time it is read below
    argumentArray = correctArguments(argumentArray);
    final CachedClass[] declaredTypes = parameterTypes;
    for (int i = 0; i < argumentArray.length; i++) {
        final Object argument = argumentArray[i];
        if (argument != null) {
            argumentArray[i] = declaredTypes[i].coerceArgument(argument);
        }
    }
    return argumentArray;
}
/**
 * Normalizes a raw argument array to this parameter list: null becomes an
 * empty array, a zero-argument call to a one-parameter method becomes
 * {null} (or an empty vargs array), and vargs-style calls get their tail
 * wrapped into the vargs array.
 */
public Object[] correctArguments(Object[] argumentArray) {
// correct argumentArray's length
if (argumentArray == null) {
return MetaClassHelper.EMPTY_ARRAY;
}
final CachedClass[] pt = getParameterTypes();
if (pt.length == 1 && argumentArray.length == 0) {
if (isVargsMethod)
return new Object[]{Array.newInstance(pt[0].getTheClass().getComponentType(), 0)};
else
return MetaClassHelper.ARRAY_WITH_NULL;
}
if (isVargsMethod && isVargsMethod(argumentArray)) {
return fitToVargs(argumentArray, pt);
}
return argumentArray;
}
/**
 * this method is called when the number of arguments to a method is greater than 1
 * and if the method is a vargs method. This method will then transform the given
 * arguments to make the method callable
 *
 * @param argumentArrayOrig the arguments used to call the method
 * @param paramTypes the types of the parameters the method takes
 */
private static Object[] fitToVargs(Object[] argumentArrayOrig, CachedClass[] paramTypes) {
Class vargsClassOrig = paramTypes[paramTypes.length - 1].getTheClass().getComponentType();
// box the vargs component type so primitive vargs can hold wrapped values
Class vargsClass = ReflectionCache.autoboxType(vargsClassOrig);
// work on a copy so the caller's array is not mutated by unwrapping
Object[] argumentArray = argumentArrayOrig.clone();
MetaClassHelper.unwrap(argumentArray);
if (argumentArray.length == paramTypes.length - 1) {
// the vargs argument is missing, so fill it with an empty array
Object[] newArgs = new Object[paramTypes.length];
System.arraycopy(argumentArray, 0, newArgs, 0, argumentArray.length);
Object vargs = Array.newInstance(vargsClass, 0);
newArgs[newArgs.length - 1] = vargs;
return newArgs;
} else if (argumentArray.length == paramTypes.length) {
// the number of arguments is correct, but if the last argument
// is no array we have to wrap it in a array. If the last argument
// is null, then we don't have to do anything
Object lastArgument = argumentArray[argumentArray.length - 1];
if (lastArgument != null && !lastArgument.getClass().isArray()) {
// no array so wrap it
Object wrapped = makeCommonArray(argumentArray, paramTypes.length - 1, vargsClass);
Object[] newArgs = new Object[paramTypes.length];
System.arraycopy(argumentArray, 0, newArgs, 0, paramTypes.length - 1);
newArgs[newArgs.length - 1] = wrapped;
return newArgs;
} else {
// we may have to box the argument!
return argumentArray;
}
} else if (argumentArray.length > paramTypes.length) {
// the number of arguments is too big, wrap all exceeding elements
// in an array, but keep the old elements that are no vargs
Object[] newArgs = new Object[paramTypes.length];
// copy arguments that are not a varg
System.arraycopy(argumentArray, 0, newArgs, 0, paramTypes.length - 1);
// create a new array for the vargs and copy them
Object vargs = makeCommonArray(argumentArray, paramTypes.length - 1, vargsClass);
newArgs[newArgs.length - 1] = vargs;
return newArgs;
} else {
// fewer than paramTypes.length - 1 arguments can never fit
throw new GroovyBugError("trying to call a vargs method without enough arguments");
}
}
/**
 * Copies arguments[offset..] into a freshly allocated array with the given
 * component type, casting each element to that type.
 */
private static Object makeCommonArray(Object[] arguments, int offset, Class baseClass) {
    final int count = arguments.length - offset;
    Object[] result = (Object[]) Array.newInstance(baseClass, count);
    for (int i = 0; i < count; i++) {
        result[i] = DefaultTypeTransformation.castToType(arguments[offset + i], baseClass);
    }
    return result;
}
/**
 * Returns true when a call with the given static argument types could be
 * dispatched to this parameter list, covering the vargs case and the
 * single-parameter / zero-argument "implicit null" case.
 */
public boolean isValidMethod(Class[] arguments) {
    if (arguments == null) {
        return true;
    }
    final int size = arguments.length;
    final CachedClass[] pt = getParameterTypes();
    final int paramMinus1 = pt.length - 1;
    if (isVargsMethod && size >= paramMinus1) {
        return isValidVarargsMethod(arguments, size, pt, paramMinus1);
    }
    if (pt.length == size) {
        return isValidExactMethod(arguments, pt);
    }
    // a single non-primitive parameter can absorb an implicit null argument
    return pt.length == 1 && size == 0 && !pt[0].isPrimitive;
}
private static boolean isValidExactMethod(Class[] arguments, CachedClass[] pt) {
// lets check the parameter types match
int size = pt.length;
for (int i = 0; i < size; i++) {
if (!pt[i].isAssignableFrom(arguments[i])) {
return false;
}
}
return true;
}
public boolean isValidExactMethod(Object[] args) {
// lets check the parameter types match
getParametersTypes0();
int size = args.length;
if (size != parameterTypes.length)
return false;
for (int i = 0; i < size; i++) {
if (args[i] != null && !parameterTypes[i].isAssignableFrom(args[i].getClass())) {
return false;
}
}
return true;
}
public boolean isValidExactMethod(Class[] args) {
// lets check the parameter types match
getParametersTypes0();
int size = args.length;
if (size != parameterTypes.length)
return false;
for (int i = 0; i < size; i++) {
if (args[i] != null && !parameterTypes[i].isAssignableFrom(args[i])) {
return false;
}
}
return true;
}
private static boolean testComponentAssignable(Class toTestAgainst, Class toTest) {
Class component = toTest.getComponentType();
if (component == null) return false;
return MetaClassHelper.isAssignableFrom(toTestAgainst, component);
}
    /**
     * Decides whether the given argument classes can satisfy a varargs method.
     * Matching proceeds in three stages: the fixed (non-vararg) parameters must
     * match positionally; then a direct match is tried where the final argument
     * is the vararg array itself (or assignable to its component type); finally
     * every trailing argument is checked against the vararg component type.
     *
     * @param arguments   candidate argument classes
     * @param size        number of arguments (== arguments.length)
     * @param pt          this method's cached parameter types
     * @param paramMinus1 index of the vararg parameter (pt.length - 1)
     */
    private static boolean isValidVarargsMethod(Class[] arguments, int size, CachedClass[] pt, int paramMinus1) {
        // stage 1: all fixed parameters must match positionally
        for (int i = 0; i < paramMinus1; i++) {
            if (pt[i].isAssignableFrom(arguments[i])) continue;
            return false;
        }
        // stage 2: exact arity — last argument may be the vararg array itself,
        // or an array whose component fits the vararg component type
        CachedClass varg = pt[paramMinus1];
        Class clazz = varg.getTheClass().getComponentType();
        if (size == pt.length &&
                (varg.isAssignableFrom(arguments[paramMinus1]) ||
                        testComponentAssignable(clazz, arguments[paramMinus1]))) {
            return true;
        }
        // stage 3: spread form — every trailing argument must fit the
        // vararg component type
        for (int i = paramMinus1; i < size; i++) {
            if (MetaClassHelper.isAssignableFrom(clazz, arguments[i])) continue;
            return false;
        }
        return true;
    }
    /**
     * Determines whether this method could be invoked with the given runtime
     * argument values. Mirrors {@link #isValidMethod(Class[])} but derives each
     * argument's class via {@code getArgClass} (unwrapping {@code Wrapper}s).
     * The vararg branch is keyed off the last parameter being an array rather
     * than the {@code isVargsMethod} flag used by the Class[] overload.
     * A {@code null} argument array is always accepted.
     */
    public boolean isValidMethod(Object[] arguments) {
        if (arguments == null) return true;
        final int size = arguments.length;
        CachedClass[] paramTypes = getParameterTypes();
        final int paramMinus1 = paramTypes.length - 1;
        if (size >= paramMinus1 && paramTypes.length > 0 &&
                paramTypes[(paramMinus1)].isArray) {
            // fixed (non-vararg) parameters must match positionally
            for (int i = 0; i < paramMinus1; i++) {
                if (paramTypes[i].isAssignableFrom(getArgClass(arguments[i]))) continue;
                return false;
            }
            // exact arity: last argument may be the vararg array itself, or an
            // array assignable to the vararg component type
            CachedClass varg = paramTypes[paramMinus1];
            Class clazz = varg.getTheClass().getComponentType();
            if (size == paramTypes.length &&
                    (varg.isAssignableFrom(getArgClass(arguments[paramMinus1])) ||
                            testComponentAssignable(clazz, getArgClass(arguments[paramMinus1])))) {
                return true;
            }
            // spread form: every trailing argument must fit the component type
            for (int i = paramMinus1; i < size; i++) {
                if (MetaClassHelper.isAssignableFrom(clazz, getArgClass(arguments[i]))) continue;
                return false;
            }
            return true;
        } else if (paramTypes.length == size) {
            // non-vararg path: plain positional type check
            for (int i = 0; i < size; i++) {
                if (paramTypes[i].isAssignableFrom(getArgClass(arguments[i]))) continue;
                return false;
            }
            return true;
        } else if (paramTypes.length == 1 && size == 0 && !paramTypes[0].isPrimitive) {
            // a lone non-primitive parameter accepts an implicit null
            return true;
        }
        return false;
    }
private static Class getArgClass(Object arg) {
Class cls;
if (arg == null) {
cls = null;
} else {
if (arg instanceof Wrapper) {
cls = ((Wrapper) arg).getType();
} else
cls = arg.getClass();
}
return cls;
}
}
| |
/*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.award.web.struts.action;
import org.apache.commons.lang.StringUtils;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.kuali.kra.award.AwardForm;
import org.kuali.kra.award.AwardNumberService;
import org.kuali.kra.award.awardhierarchy.AwardHierarchy;
import org.kuali.kra.award.awardhierarchy.AwardHierarchyTempObject;
import org.kuali.kra.award.awardhierarchy.sync.AwardSyncChange;
import org.kuali.kra.award.awardhierarchy.sync.AwardSyncPendingChangeBean;
import org.kuali.kra.award.awardhierarchy.sync.AwardSyncType;
import org.kuali.kra.award.document.AwardDocument;
import org.kuali.kra.award.home.Award;
import org.kuali.kra.award.home.ValidRates;
import org.kuali.kra.award.home.fundingproposal.AwardFundingProposal;
import org.kuali.kra.award.notification.AwardNotificationContext;
import org.kuali.kra.award.printing.AwardPrintParameters;
import org.kuali.kra.award.printing.AwardPrintType;
import org.kuali.kra.award.printing.service.AwardPrintingService;
import org.kuali.kra.external.award.AccountCreationClient;
import org.kuali.kra.external.award.AwardAccountValidationService;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.infrastructure.KeyConstants;
import org.kuali.kra.infrastructure.KraServiceLocator;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposal;
import org.kuali.kra.institutionalproposal.service.InstitutionalProposalService;
import org.kuali.kra.proposaldevelopment.bo.AttachmentDataSource;
import org.kuali.kra.service.VersionHistoryService;
import org.kuali.kra.timeandmoney.AwardHierarchyNode;
import org.kuali.kra.web.struts.action.AuditActionHelper;
import org.kuali.rice.core.api.util.RiceConstants;
import org.kuali.rice.kew.api.KewApiConstants;
import org.kuali.rice.kew.api.WorkflowDocument;
import org.kuali.rice.kew.api.exception.WorkflowException;
import org.kuali.rice.kns.question.ConfirmationQuestion;
import org.kuali.rice.kns.web.struts.action.AuditModeAction;
import org.kuali.rice.kns.web.struts.form.KualiDocumentFormBase;
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.KRADConstants;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
* This class represents the Struts Action for Award Actions page(AwardActions.jsp)
*/
public class AwardActionsAction extends AwardAction implements AuditModeAction {
    // Sentinel dropdown value meaning "no award selected".
    private static final String ZERO = "0";
    // Radio-button codes for the "create new child" panel — presumably mirror
    // the values rendered by the Award Hierarchy JSP; verify against the page.
    private static final String NEW_CHILD_SELECTED_AWARD_OPTION = "c";
    private static final String NEW_CHILD_COPY_FROM_PARENT_OPTION = "b";
    // Error-message resource keys.
    private static final String ERROR_CANCEL_PENDING_PROPOSALS = "error.cancel.fundingproposal.pendingVersion";
    private static final String ACCOUNT_ALREADY_CREATED = "error.award.createAccount.account.already.created";
    private static final String NO_PERMISSION_TO_CREATE_ACCOUNT = "error.award.createAccount.noPermission";
    // Radio-button codes shared with other actions (hence public).
    public static final String NEW_CHILD_NEW_OPTION = "a";
    public static final String AWARD_COPY_NEW_OPTION = "a";
    public static final String AWARD_COPY_CHILD_OF_OPTION = "d";
    /**
     * Workflow entry point for opening an award document. When the document is
     * opened from document search, the document is loaded directly into the form,
     * the doc type name is captured from the workflow document, and the selected
     * award number is exposed as a request attribute; all other commands are
     * delegated to the superclass handler. The award hierarchy is (re)populated
     * in either case before forwarding.
     */
    @Override
    public ActionForward docHandler(ActionMapping mapping, ActionForm form
            , HttpServletRequest request, HttpServletResponse response) throws Exception {
        AwardForm awardForm = (AwardForm) form;
        String command = request.getParameter(KewApiConstants.COMMAND_PARAMETER);
        ActionForward forward = mapping.findForward(Constants.MAPPING_AWARD_BASIC);
        if(StringUtils.isNotEmpty(command) && KewApiConstants.DOCSEARCH_COMMAND.equals(command)) {
            // doc-search path: load the document ourselves instead of deferring to super
            loadDocumentInForm(request, awardForm);
            WorkflowDocument workflowDoc = awardForm.getAwardDocument().getDocumentHeader().getWorkflowDocument();
            if(workflowDoc != null) {
                awardForm.setDocTypeName(workflowDoc.getDocumentTypeName());
            }
            request.setAttribute("selectedAwardNumber", awardForm.getAwardDocument().getAward().getAwardNumber());
        } else {
            forward = super.docHandler(mapping, awardForm, request, response);
        }
        populateAwardHierarchy(form);
        return forward;
    }
@Override
public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
throws Exception {
ActionForward forward = super.execute(mapping, form, request, response);
AwardForm awardForm = (AwardForm)form;
String command = request.getParameter("command");
String awardDocumentNumber = request.getParameter("awardDocumentNumber");
String awardNumber = request.getParameter("awardNumber");
if (StringUtils.isNotBlank(command) && "redirectAwardHierarchyFullViewForPopup".equals(command)) {
forward = redirectAwardHierarchyFullViewForPopup(mapping, form, request, response, awardDocumentNumber, awardNumber);
}
return forward;
}
/**
*
* This method is for the 'open window' button. It will be forwarded AwardHierarchyFullView.jsp
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
// public ActionForward openWindow(ActionMapping mapping, ActionForm form, HttpServletRequest request,
// HttpServletResponse response) throws Exception {
// String documentNumber = request.getParameter("awardDocumentNumber");
// String awardNumber = request.getParameter("awardNumber");
// Award award = getActiveAwardVersion(awardNumber);
// AwardForm awardForm = (AwardForm)form;
// awardForm.setCurrentAwardNumber(awardNumber);
// awardForm.setCurrentSeqNumber(award.getSequenceNumber().toString());
// DocumentService documentService = KraServiceLocator.getService(DocumentService.class);
// AwardDocument awardDocument = (AwardDocument)documentService.getByDocumentHeaderId(documentNumber);
// awardDocument.setAward(award);
// awardForm.setDocument(awardDocument);
// super.populateAwardHierarchy(awardForm);
// return mapping.findForward("basic");
// }
private ActionForward redirectAwardHierarchyFullViewForPopup(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response, String awardDocumentNumber, String awardNumber) throws Exception {
//super.populateAwardHierarchy(form);
AwardForm awardForm = (AwardForm)form;
response.sendRedirect("awardHierarchyFullView.do?methodToCall=openWindow&awardDocumentNumber=" + awardDocumentNumber + "&awardNumber=" + awardNumber + "&docTypeName=" + awardForm.getDocTypeName());
return null;
}
@Override
protected void validateLookupInquiryFullParameter(HttpServletRequest request, ActionForm form, String fullParameter) {
if(fullParameter.startsWith("methodToCall.performLookup.(!!org.kuali.kra.award.home.Award!!).(((awardNumber:awardHierarchyTempObject")) {
return;
} else {
super.validateLookupInquiryFullParameter(request,form,fullParameter);
}
}
/** {@inheritDoc} */
public ActionForward activate(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
throws Exception {
AwardForm awardForm = (AwardForm) form;
awardForm.setUnitRulesMessages(getUnitRulesMessages(awardForm.getAwardDocument()));
return new AuditActionHelper().setAuditMode(mapping, (AwardForm) form, true);
}
/** {@inheritDoc} */
public ActionForward deactivate(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
throws Exception {
((AwardForm) form).clearUnitRulesMessages();
return new AuditActionHelper().setAuditMode(mapping, (AwardForm) form, false);
}
    /**
     * Handles the "Copy Award" action on the Award Hierarchy UI. Depending on
     * the radio option and the "copy descendants" checkbox in the panel, the
     * target award is copied as a new hierarchy or as a child of another award,
     * with or without its descendants. Missing selections produce validation
     * errors and return to the basic award page.
     *
     * @return the forward to the newly created award, or back to the basic page
     *         on a validation error
     */
    public ActionForward copyAward(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
        AwardForm awardForm = (AwardForm)form;
        String awardNumber = getAwardNumber(request);
        /**
         * Based on IU Customization: UITSRA-1239
         * ### Vivantech Fix : #151 / [#90223952] revise award numbering */
        // Derive the temp-object index from the numeric suffix after the dash in
        // the award number (leading zeros stripped); defaults to 0 when absent.
        int index = 0;
        Matcher awardNumMatcher = Pattern.compile("\\d+-0*([1-9]\\d*)").matcher(awardNumber);
        if(awardNumMatcher.find()) {
            index = Integer.parseInt(awardNumMatcher.group(1));
        }
        /** End IU Customization */
        ActionForward forward = null;
        AwardHierarchy newRootNode = null;
        if (!StringUtils.isEmpty(awardForm.getAwardHierarchyTempObjects().get(index).getCopyAwardRadio())) {
            String radio = awardForm.getAwardHierarchyTempObjects().get(index).getCopyAwardRadio();
            Boolean copyDescendants = awardForm.getAwardHierarchyTempObjects().get(index).getCopyDescendants();
            AwardHierarchy targetNode = findTargetNode(request, awardForm);
            if (StringUtils.equalsIgnoreCase(radio, AWARD_COPY_NEW_OPTION)) {
                // copy as a brand-new hierarchy, with or without descendants
                if (copyDescendants!=null && copyDescendants) {
                    newRootNode = awardForm.getAwardHierarchyBean().copyAwardAndAllDescendantsAsNewHierarchy(targetNode.getAwardNumber());
                    forward = prepareToForwardToNewFinalChildAward(mapping, awardForm, request, response, targetNode, newRootNode);
                } else {
                    newRootNode = awardForm.getAwardHierarchyBean().copyAwardAsNewHierarchy(targetNode.getAwardNumber());
                    forward = prepareToForwardToNewChildAward(mapping, awardForm, targetNode, newRootNode);
                }
            } else if(StringUtils.equalsIgnoreCase(radio, AWARD_COPY_CHILD_OF_OPTION)) {
                // copy as child of another award chosen in the panel
                String awardNumberOfNodeToBeParent = awardForm.getAwardHierarchyTempObjects().get(index).getCopyAwardPanelTargetAward();
                if (!StringUtils.isEmpty(awardNumberOfNodeToBeParent) && !StringUtils.equalsIgnoreCase(awardNumberOfNodeToBeParent, ZERO)) {
                    if (copyDescendants!=null && copyDescendants){
                        if(!StringUtils.isEmpty(awardNumberOfNodeToBeParent)) {
                            newRootNode = awardForm.getAwardHierarchyBean().copyAwardAndDescendantsAsChildOfAnotherAward(targetNode.getAwardNumber(), awardNumberOfNodeToBeParent);
                            forward = prepareToForwardToNewFinalChildAward(mapping, awardForm, request, response, targetNode, newRootNode);
                        }
                    } else {
                        newRootNode = awardForm.getAwardHierarchyBean().copyAwardAsChildOfAnotherAward(targetNode.getAwardNumber(), awardNumberOfNodeToBeParent);
                        forward = prepareToForwardToNewChildAward(mapping, awardForm, targetNode, newRootNode);
                    }
                }else{
                    // no target parent selected: flag a validation error
                    GlobalVariables.getMessageMap().putError("awardHierarchyTempObject[" + index + "].copyAwardPanelTargetAward", KeyConstants.ERROR_COPY_AWARD_CHILDOF_AWARD_NOT_SELECTED, awardNumber);
                    awardForm.getFundingProposalBean().setAllAwardsForAwardNumber(null);
                    forward = mapping.findForward(Constants.MAPPING_AWARD_BASIC);
                }
            }
        }else{
            // no copy option chosen at all: flag a validation error
            GlobalVariables.getMessageMap().putError("awardHierarchyTempObject[" + index + "].copyAwardPanelTargetAward", KeyConstants.ERROR_COPY_AWARD_NO_OPTION_SELECTED, awardNumber);
            forward = mapping.findForward(Constants.MAPPING_AWARD_BASIC);
        }
        return forward;
    }
    /**
     * Handles the "Create New Child" action on the Award Hierarchy jQuery UI.
     * The selected radio option decides whether the child is created empty,
     * copied from the parent, or copied from another award in the hierarchy.
     * A missing or invalid selection produces a validation error and returns
     * to the basic award page.
     *
     * @return the forward to the newly created child award, or back to the
     *         basic page on a validation error
     */
    public ActionForward create(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
        AwardForm awardForm = (AwardForm)form;
        String awardNumber = getAwardNumber(request);
        /** Based on IU Customization: UITSRA-1239
         * ### Vivantech Fix : #151 / [#90223952] revise award numbering **/
        // Derive the temp-object index from the numeric suffix after the dash in
        // the award number (leading zeros stripped); defaults to 0 when absent.
        int index = 0;
        Matcher awardNumMatcher = Pattern.compile("\\d+-0*([1-9]\\d*)").matcher(awardNumber);
        if(awardNumMatcher.find()) {
            index = Integer.parseInt(awardNumMatcher.group(1));
        }
        /** End IU Customization */
        ActionForward forward = null;
        if(awardForm.getAwardHierarchyTempObjects().get(index).getCreateNewChildRadio()!=null){
            AwardHierarchy targetNode = findTargetNode(request, awardForm);
            String radio = awardForm.getAwardHierarchyTempObjects().get(index).getCreateNewChildRadio();
            if(StringUtils.equalsIgnoreCase(radio, NEW_CHILD_NEW_OPTION)){
                // brand-new, empty child award
                AwardHierarchy newChildNode = awardForm.getAwardHierarchyBean().createNewChildAward(targetNode.getAwardNumber());
                forward = prepareToForwardToNewChildAward(mapping, awardForm, targetNode, newChildNode);
            }else if(StringUtils.equalsIgnoreCase(radio, NEW_CHILD_COPY_FROM_PARENT_OPTION)){
                // child seeded from the parent award's data
                AwardHierarchy newChildNode = awardForm.getAwardHierarchyBean().createNewAwardBasedOnParent(targetNode.getAwardNumber());
                forward = prepareToForwardToNewChildAward(mapping, awardForm, targetNode, newChildNode);
            }else if(StringUtils.equalsIgnoreCase(radio, NEW_CHILD_SELECTED_AWARD_OPTION)){
                // child copied from another award selected in the panel
                String awardNumberOfNodeToCopyFrom = awardForm.getAwardHierarchyTempObjects().get(index).getNewChildPanelTargetAward();
                if (StringUtils.isEmpty(awardNumberOfNodeToCopyFrom) || StringUtils.equalsIgnoreCase(awardNumberOfNodeToCopyFrom, ZERO)) {
                    GlobalVariables.getMessageMap().putError("awardHierarchyTempObject[" + index + "].newChildPanelTargetAward", KeyConstants.ERROR_CREATE_NEW_CHILD_OTHER_AWARD_NOT_SELECTED, awardNumber);
                    forward = mapping.findForward(Constants.MAPPING_AWARD_BASIC);
                }else{
                    AwardHierarchy newChildNode = awardForm.getAwardHierarchyBean().createNewChildAwardBasedOnAnotherAwardInHierarchy(
                            awardNumberOfNodeToCopyFrom, targetNode.getAwardNumber());
                    forward = prepareToForwardToNewChildAward(mapping, awardForm, targetNode, newChildNode);
                }
            }
        }else{
            // no option chosen at all: flag a validation error
            GlobalVariables.getMessageMap().putError("awardHierarchyTempObject[" + index + "].newChildPanelTargetAward", KeyConstants.ERROR_CREATE_NEW_CHILD_NO_OPTION_SELECTED, awardNumber);
            forward = mapping.findForward(Constants.MAPPING_AWARD_BASIC);
        }
        return forward;
    }
/**
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
public ActionForward createANewChildAward(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
AwardHierarchy newChildNode = awardForm.getAwardHierarchyBean().createNewChildAward(targetNode.getAwardNumber());
return prepareToForwardToNewChildAward(mapping, awardForm, targetNode, newChildNode);
}
/**
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
public ActionForward createANewChildAwardBasedOnParent(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
AwardHierarchy newChildNode = awardForm.getAwardHierarchyBean().createNewAwardBasedOnParent(targetNode.getAwardNumber());
return prepareToForwardToNewChildAward(mapping, awardForm, targetNode, newChildNode);
}
public ActionForward createANewChildAwardBasedOnAnotherAwardInHierarchy(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
String awardNumberOfNodeToCopyFrom = getHierarchyTargetAwardNumber(request);
if(StringUtils.isEmpty(awardNumberOfNodeToCopyFrom)) {
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
AwardHierarchy newChildNode = awardForm.getAwardHierarchyBean().createNewChildAwardBasedOnAnotherAwardInHierarchy(awardNumberOfNodeToCopyFrom,
targetNode.getAwardNumber());
return prepareToForwardToNewChildAward(mapping, awardForm, targetNode, newChildNode);
}
public ActionForward copyAwardAsANewHierarchy(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
AwardHierarchy newRootNode = awardForm.getAwardHierarchyBean().copyAwardAsNewHierarchy(targetNode.getAwardNumber());
return prepareToForwardToNewChildAward(mapping, awardForm, targetNode, newRootNode);
}
public ActionForward copyAwardAsANewHierarchyWithDescendants(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
AwardHierarchy newRootNode = awardForm.getAwardHierarchyBean().copyAwardAndAllDescendantsAsNewHierarchy(targetNode.getAwardNumber());
return prepareToForwardToNewChildAward(mapping, awardForm, targetNode, newRootNode);
}
public ActionForward copyAwardAsChildOfAnotherAward(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
String awardNumberOfNodeToBeParent = getHierarchyTargetAwardNumber(request);
awardForm.getAwardHierarchyBean().copyAwardAsChildOfAnotherAward(targetNode.getAwardNumber(), awardNumberOfNodeToBeParent);
populateAwardHierarchy(awardForm);
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
public ActionForward copyAwardAndDescendantsAsChildOfAnotherAward(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
String awardNumberOfNodeToBeParent = getHierarchyTargetAwardNumber(request);
if(!StringUtils.isEmpty(awardNumberOfNodeToBeParent)) {
awardForm.getAwardHierarchyBean().copyAwardAndDescendantsAsChildOfAnotherAward(targetNode.getAwardNumber(), awardNumberOfNodeToBeParent);
}
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
public ActionForward copyAwardAsAChildInCurrentHierarchy(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
String awardNumberOfNodeToBeParent = getHierarchyTargetAwardNumber(request);
awardForm.getAwardHierarchyBean().copyAwardAsChildOfAnAwardInCurrentHierarchy(targetNode.getAwardNumber(), awardNumberOfNodeToBeParent);
populateAwardHierarchy(awardForm);
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
public ActionForward copyAwardAsAChildInCurrentHierarchyWithDescendants(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
String awardNumberOfNodeToBeParent = getHierarchyTargetAwardNumber(request);
awardForm.getAwardHierarchyBean().copyAwardAndDescendantsAsChildOfAnAwardInCurrentHierarchy(targetNode.getAwardNumber(), awardNumberOfNodeToBeParent);
populateAwardHierarchy(awardForm);
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
public ActionForward copyAwardAsAChildOfAwardInAnotherHierarchy(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
String awardNumberOfNodeToBeParent = getHierarchyTargetAwardNumber(request);
awardForm.getAwardHierarchyBean().copyAwardAsChildOfAnAwardInAnotherHierarchy(targetNode.getAwardNumber(), awardNumberOfNodeToBeParent);
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
public ActionForward copyAwardAsAChildOfAwardInAnotherHierarchyWithDescendants(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
AwardHierarchy targetNode = findTargetNode(request, awardForm);
String awardNumberOfNodeToBeParent = getHierarchyTargetAwardNumber(request);
if(!StringUtils.isEmpty(awardNumberOfNodeToBeParent)) {
awardForm.getAwardHierarchyBean().copyAwardAndDescendantsAsChildOfAnAwardInAnotherHierarchy(targetNode.getAwardNumber(), awardNumberOfNodeToBeParent);
}
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
public ActionForward selectAllAwardPrintNoticeItems(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm)form;
awardForm.getAwardPrintNotice().selectAllItems();
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
public ActionForward deselectAllAwardPrintNoticeItems(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm)form;
awardForm.getAwardPrintNotice().deselectAllItems();
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
public ActionForward printNotice(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response)
throws Exception {
AwardForm awardForm = (AwardForm) form;
Map<String, Object> reportParameters = new HashMap<String, Object>();
reportParameters.put(AwardPrintParameters.ADDRESS_LIST
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getSponsorContacts());
reportParameters.put(AwardPrintParameters.FOREIGN_TRAVEL
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getForeignTravel());
reportParameters.put(AwardPrintParameters.REPORTING
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getReports());
reportParameters.put(AwardPrintParameters.CLOSEOUT
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getCloseout());
reportParameters.put(AwardPrintParameters.FUNDING_SUMMARY
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getFundingSummary());
reportParameters.put(AwardPrintParameters.SPECIAL_REVIEW
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getSpecialReview());
reportParameters.put(AwardPrintParameters.COMMENTS
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getComments());
reportParameters.put(AwardPrintParameters.HIERARCHY_INFO
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getHierarchy());
reportParameters.put(AwardPrintParameters.SUBCONTRACT
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getSubAward());
reportParameters.put(AwardPrintParameters.COST_SHARING
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getCostShare());
reportParameters.put(AwardPrintParameters.KEYWORDS
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getKeywords());
reportParameters.put(AwardPrintParameters.TECHNICAL_REPORTING
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getTechnicalReports());
reportParameters.put(AwardPrintParameters.EQUIPMENT
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getEquipment());
reportParameters.put(AwardPrintParameters.OTHER_DATA
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getOtherData());
reportParameters.put(AwardPrintParameters.TERMS
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getTerms());
reportParameters.put(AwardPrintParameters.FA_COST
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getFaRates());
reportParameters.put(AwardPrintParameters.PAYMENT
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getPayment());
reportParameters.put(AwardPrintParameters.FLOW_THRU
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getFlowThru());
reportParameters.put(AwardPrintParameters.PROPOSAL_DUE
.getAwardPrintParameter(), false);
//awardForm.getAwardPrintNotice().getProposalsDue());
reportParameters.put(AwardPrintParameters.SIGNATURE_REQUIRED
.getAwardPrintParameter(), awardForm.getAwardPrintNotice()
.getRequireSignature());
AwardPrintingService awardPrintService = KraServiceLocator
.getService(AwardPrintingService.class);
AttachmentDataSource dataStream = awardPrintService.printAwardReport(
awardForm.getAwardDocument().getAward(),AwardPrintType.AWARD_NOTICE_REPORT,reportParameters);
streamToResponse(dataStream, response);
//return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
return null;
}
public ActionForward printChangeReport(ActionMapping mapping,
ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
Map<String, Object> reportParameters = new HashMap<String, Object>();
reportParameters.put(AwardPrintParameters.SIGNATURE_REQUIRED
.getAwardPrintParameter(), awardForm
.getAwardPrintChangeReport().getRequireSignature());
reportParameters.put(AwardPrintParameters.SEQUENCE_NUMBER
.getAwardPrintParameter(), awardForm
.getAwardPrintChangeReport().getAwardVersion());
reportParameters.put(AwardPrintParameters.TRANSACTION_ID_INDEX
.getAwardPrintParameter(), awardForm
.getAwardPrintChangeReport().getAmountInfoIndex());
AwardPrintingService awardPrintService = KraServiceLocator
.getService(AwardPrintingService.class);
AttachmentDataSource dataStream = awardPrintService.printAwardReport(
awardForm.getAwardDocument().getAward(), AwardPrintType.AWARD_DELTA_REPORT,
reportParameters);
streamToResponse(dataStream, response);
//return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
return null;
}
public ActionForward printHierarchy(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm)form;
Map<String, Object> reportParameters = new HashMap<String, Object>();
AwardPrintingService awardPrintService = KraServiceLocator
.getService(AwardPrintingService.class);
AttachmentDataSource dataStream = awardPrintService.printAwardReport(
awardForm.getAwardDocument().getAward(),
AwardPrintType.AWARD_BUDGET_HIERARCHY,reportParameters);
streamToResponse(dataStream, response);
return null;
}
public ActionForward printHierarchyModification(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm)form;
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
public ActionForward printBudget(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
// Map<String, Object> reportParameters = new HashMap<String, Object>();
// AwardPrintingService awardPrintService = KraServiceLocator
// .getService(AwardPrintingService.class);
// AttachmentDataSource dataStream = awardPrintService.printAwardReport(
// awardForm.getAwardDocument(), AwardPrintType.AWARD_TEMPLATE
// .getAwardPrintType(), reportParameters);
// streamToResponse(dataStream, response);
// return null;
//TODO: Add printing service call here
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
public ActionForward printTimeMoneyHistory(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
Map<String, Object> reportParameters = new HashMap<String, Object>();
AwardPrintingService awardPrintService = KraServiceLocator
.getService(AwardPrintingService.class);
AttachmentDataSource dataStream = awardPrintService.printAwardReport(
awardForm.getAwardDocument().getAward(),
AwardPrintType.MONEY_AND_END_DATES_HISTORY,reportParameters);
streamToResponse(dataStream, response);
return null;
}
public ActionForward printTransactionDetail(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
AwardForm awardForm = (AwardForm) form;
Map<String, Object> reportParameters = new HashMap<String, Object>();
if (awardForm.getAwardTimeAndMoneyTransactionReport().getAmountInfoIndex() == null) {
GlobalVariables.getMessageMap().putError("awardTimeAndMoneyTransactionReport.amountInfoIndex",
"error.award.print.transactionId.required");
return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
reportParameters.put(AwardPrintParameters.SEQUENCE_NUMBER
.getAwardPrintParameter(), awardForm
.getAwardTimeAndMoneyTransactionReport().getAwardVersion());
reportParameters.put(AwardPrintParameters.TRANSACTION_ID_INDEX
.getAwardPrintParameter(), awardForm.getAwardTimeAndMoneyTransactionReport().getAmountInfoIndex());
AwardPrintingService awardPrintService = KraServiceLocator
.getService(AwardPrintingService.class);
AttachmentDataSource dataStream = awardPrintService.printAwardReport(
awardForm.getAwardDocument().getAward(),
AwardPrintType.AWARD_BUDGET_HISTORY_TRANSACTION, reportParameters);
streamToResponse(dataStream, response);
return null;
}
/** @return the {@code AwardNumberService} obtained from the service locator */
public AwardNumberService getAwardNumberService(){
    final AwardNumberService service = KraServiceLocator.getService(AwardNumberService.class);
    return service;
}
/**
 * Extracts the award number embedded in the dispatched method name, i.e. the
 * text between ".awardNumber" and the next "." in the METHOD_TO_CALL request
 * attribute.
 *
 * @param request the current HTTP request
 * @return the parsed award number, or "" when no method name is present
 *         (may be null if the delimiters are absent from the method name)
 */
protected String getAwardNumber(HttpServletRequest request) {
    final String methodToCall = (String) request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
    if (StringUtils.isBlank(methodToCall)) {
        return "";
    }
    return StringUtils.substringBetween(methodToCall, ".awardNumber", ".");
}
/**
 * Determines which AwardHierarchyTempObject row triggered the current request
 * by scanning request parameter names for one of the form
 * {@code awardHierarchyTempObject[<n>]...}.
 *
 * @param request the current HTTP request
 * @return the index {@code n} embedded in the first matching parameter name,
 *         or -1 when no such parameter is present
 * @throws Exception declared for signature compatibility with callers
 */
private int getActiveHierarchyObjectIndex(HttpServletRequest request) throws Exception {
    final String prefix = "awardHierarchyTempObject[";
    Enumeration<String> lookupParameters = request.getParameterNames();
    while (lookupParameters.hasMoreElements()) {
        String parameterName = lookupParameters.nextElement();
        if (parameterName.startsWith(prefix)) {
            // Parse the complete index between the brackets. The previous
            // implementation read a single character after the '[', which
            // silently produced the wrong index for rows 10 and above.
            int start = prefix.length();
            int end = parameterName.indexOf(']', start);
            String digits = (end >= 0)
                    ? parameterName.substring(start, end)
                    : parameterName.substring(start, start + 1);
            return Integer.parseInt(digits);
        }
    }
    return -1;
}
/**
 * Refresh hook invoked after a lookup/return. Rebuilds the hierarchy
 * select-box strings for each AwardHierarchyTempObject row on the form and
 * clears the award-number selections of the row that triggered the refresh.
 *
 * NOTE(review): the "%3A" appended after each award number looks like a
 * URL-encoded ':' used as a delimiter by the page — confirm against the JSP.
 */
@Override
public ActionForward refresh(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception{
    AwardForm awardForm = (AwardForm)form;
    AwardDocument awardDocument = awardForm.getAwardDocument();
    // Row index (1-based when matched against loopIndex below) of the temp
    // object being refreshed; -1 if none was found in the request.
    int activeHierarchyObjectIndex = getActiveHierarchyObjectIndex(request);
    int loopIndex = 0;
    Award currentAward = awardDocument.getAward();
    for(AwardHierarchyTempObject temp: awardForm.getAwardHierarchyTempObjects()){
        List<String> order = new ArrayList<String>();
        // Clear previously selected award numbers on the row that initiated the refresh.
        if(loopIndex == activeHierarchyObjectIndex-1) {
            temp.setAwardNumber2(null);
            temp.setAwardNumber(null);
        }
        // Select box 1: every award number in the hierarchy rooted at awardNumber1,
        // in the traversal order returned by the hierarchy bean.
        if(StringUtils.isNotBlank(temp.getAwardNumber1())){
            Map<String,AwardHierarchy> awardHierarchyItems = awardForm.getAwardHierarchyBean().getAwardHierarchy(temp.getAwardNumber1(), order);
            StringBuilder sb = new StringBuilder();
            for(String str:order){
                sb.append(awardHierarchyItems.get(str).getAwardNumber());
                sb.append(KRADConstants.BLANK_SPACE).append("%3A");
            }
            temp.setSelectBox1(sb.toString());
            request.setAttribute("selectedAwardNumber", temp.getAwardNumber());
        }
        // Select box 2: only hierarchy nodes whose award document is in final status.
        if(StringUtils.isNotBlank(temp.getAwardNumber2())){
            order = new ArrayList<String>();
            Map<String,AwardHierarchyNode> awardHierarchyNodes = new HashMap<String, AwardHierarchyNode>();
            Map<String,AwardHierarchy> awardHierarchyItems = getAwardHierarchyService().getAwardHierarchy(temp.getAwardNumber2(), order);
            getAwardHierarchyService().populateAwardHierarchyNodes(awardHierarchyItems, awardHierarchyNodes, currentAward.getAwardNumber(), currentAward.getSequenceNumber().toString());
            StringBuilder sb = new StringBuilder();
            for(String str:order){
                AwardHierarchyNode tempAwardNode = awardHierarchyNodes.get(str);
                if(tempAwardNode.isAwardDocumentFinalStatus()) {
                    sb.append(tempAwardNode.getAwardNumber());
                    sb.append(KRADConstants.BLANK_SPACE).append("%3A");
                }
            }
            temp.setSelectBox2(sb.toString());
            request.setAttribute("selectedAwardNumber", temp.getAwardNumber());
        }
        loopIndex++;
    }
    return super.refresh(mapping, form, request, response);
}
/**
 * Locates, in the form's hierarchy tree, the node whose award number was
 * encoded in the dispatched method name of the request.
 */
private AwardHierarchy findTargetNode(HttpServletRequest request, AwardForm awardForm) {
    final String awardNumber = getAwardNumber(request);
    return awardForm.getAwardHierarchyBean().getRootNode().findNodeInHierarchy(awardNumber);
}
/**
 * Sets up the form to display a newly created child award and forwards to the
 * award home page; when no new node was created, redisplays the current page.
 *
 * @param targetNode    the hierarchy node the child was created under
 * @param newNodeToView the newly created node, or null when creation failed
 */
private ActionForward prepareToForwardToNewChildAward(ActionMapping mapping, AwardForm awardForm, AwardHierarchy targetNode,
                                                      AwardHierarchy newNodeToView) throws WorkflowException {
    if (newNodeToView == null) {
        return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
    }
    awardForm.setCommand(KewApiConstants.INITIATE_COMMAND);
    createDocument(awardForm);
    final Award newChildAward = newNodeToView.getAward();
    // Any non-root node belongs to a multi-award hierarchy, which locks certain totals.
    if (!newNodeToView.isRootNode()) {
        setMultipleNodeHierarchyOnAwardFormTrue(newChildAward);
    }
    awardForm.getAwardDocument().setAward(newChildAward);
    awardForm.getAwardHierarchyBean().recordTargetNodeState(targetNode);
    awardForm.getFundingProposalBean().setAllAwardsForAwardNumber(null);
    return mapping.findForward(Constants.MAPPING_AWARD_HOME_PAGE);
}
/**
 * Sets up the form for a newly created child award and immediately saves and
 * submits its document ("finalizing" it), then forwards to the award home
 * page. When no new node was created, redisplays the current page.
 *
 * @param targetNode    the hierarchy node the child was created under
 * @param newNodeToView the newly created node, or null when creation failed
 */
private ActionForward prepareToForwardToNewFinalChildAward(ActionMapping mapping, AwardForm awardForm, HttpServletRequest request, HttpServletResponse response, AwardHierarchy targetNode,
        AwardHierarchy newNodeToView) throws Exception {
    ActionForward forward;
    if(newNodeToView != null) {
        awardForm.setCommand(KewApiConstants.INITIATE_COMMAND);
        createDocument(awardForm);
        Award newChildAward = newNodeToView.getAward();
        // Any non-root node belongs to a multi-award hierarchy, which locks certain totals.
        if(!newNodeToView.isRootNode()) {
            setMultipleNodeHierarchyOnAwardFormTrue(newChildAward);
        }
        awardForm.getAwardDocument().setAward(newChildAward);
        awardForm.getAwardDocument().getDocumentHeader().setDocumentDescription("Copied Hierarchy");
        awardForm.getAwardHierarchyBean().recordTargetNodeState(targetNode);
        awardForm.getFundingProposalBean().setAllAwardsForAwardNumber(null);
        // Save first, then route; the order matters for document state.
        super.save(mapping, (ActionForm) awardForm, request, response);
        super.submitAward(mapping, (ActionForm) awardForm, request, response);
        forward = mapping.findForward(Constants.MAPPING_AWARD_HOME_PAGE);
    } else {
        forward = mapping.findForward(Constants.MAPPING_AWARD_BASIC);
    }
    return forward;
}
/**
 * Since a child award will always be part of a multiple award hierarchy, we need to set the boolean to true so that the anticipated
 * and obligated totals on Details & Dates tab will be uneditable on initial creation. After the initial save of document
 * this is handled in the docHandler and home methods of AwardAction.
 * @param award the child award to flag as belonging to a multi-node hierarchy
 */
private void setMultipleNodeHierarchyOnAwardFormTrue(Award award) {
    award.setAwardInMultipleNodeHierarchy(true);
}
/**
 * @return the award number selected as the hierarchy target, read from the
 *         "awardNumberInputTemp" request parameter (may be null)
 */
private String getHierarchyTargetAwardNumber(HttpServletRequest request) {
    final String targetAwardNumber = request.getParameter("awardNumberInputTemp");
    return targetAwardNumber;
}
/**
 * Creates a financial account for the award via the financial account creation
 * web service. Only runs when the user has the "createAwardAccount" editing
 * mode permission, the award passes account validation, and no account
 * document has been created yet. When no ICR rate code is set and more than
 * one valid rate matches the award's current F&amp;A rate, the user is first
 * forwarded to a prompt to pick one.
 *
 * @param mapping  the Struts action mapping
 * @param form     the AwardForm for the current document
 * @param request  the current HTTP request
 * @param response the current HTTP response
 * @return the award actions page forward, or the ICR-rate prompt forward when
 *         user input is required
 * @throws Exception propagated from validation or the account creation client
 */
public ActionForward createAccount(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
    ActionForward forward = new ActionForward();
    AwardForm awardForm = (AwardForm) form;
    AwardDocument awardDocument = awardForm.getAwardDocument();
    Award award = awardDocument.getAward();
    // if the user has permissions to create a financial account
    if (awardForm.getEditingMode().get("createAwardAccount").equals("true")) {
        AwardAccountValidationService accountValidationService = getAwardAccountValidationService();
        boolean rulePassed = accountValidationService.validateAwardAccountDetails(award);
        if (rulePassed) {
            AccountCreationClient client = getAccountCreationClient();
            /*
             * If account hasn't already been created, create it or
             * display an error
             */
            if (award.getFinancialAccountDocumentNumber() == null) {
                // Determine the ICR Rate Code to send - may require user interaction
                if (StringUtils.isBlank(award.getIcrRateCode())) {
                    List<ValidRates> validRates = awardForm.getAccountCreationHelper().getMatchingValidRates(award.getCurrentFandaRate());
                    if (validRates.size() > 1) {
                        // Ambiguous: let the user choose among the candidates.
                        awardForm.getAccountCreationHelper().setValidRateCandidates(validRates);
                        return mapping.findForward(Constants.MAPPING_ICR_RATE_CODE_PROMPT);
                    } else if (validRates.size() == 1) {
                        award.setIcrRateCode(validRates.get(0).getIcrRateCode());
                    } else {
                        // No matching rate: fall back to the "none" sentinel code.
                        award.setIcrRateCode(Award.ICR_RATE_CODE_NONE);
                    }
                }
                client.createAwardAccount(award);
            } else {
                GlobalVariables.getMessageMap().putError(ACCOUNT_ALREADY_CREATED, KeyConstants.ACCOUNT_ALREADY_CREATED);
            }
        }
    }
    else {
        GlobalVariables.getMessageMap().putError(NO_PERMISSION_TO_CREATE_ACCOUNT, KeyConstants.NO_PERMISSION_TO_CREATE_ACCOUNT);
    }
    forward = mapping.findForward(Constants.MAPPING_AWARD_ACTIONS_PAGE);
    return forward;
}
/** @return the account-creation web service client from the service locator */
protected AccountCreationClient getAccountCreationClient() {
    final AccountCreationClient client = KraServiceLocator.getService("accountCreationClient");
    return client;
}
/** @return the award account validation service from the service locator */
protected AwardAccountValidationService getAwardAccountValidationService() {
    final AwardAccountValidationService validationService = KraServiceLocator.getService("awardAccountValidationService");
    return validationService;
}
/**
 * Cancels the award document after asking the user for confirmation.
 * Cancellation is blocked when any linked institutional proposal has a pending
 * version still funded by this award version, since processing that version
 * would undo the de-linking this cancel performs. For the first award version
 * (sequence number 1) the hierarchy node is deactivated as well.
 */
@Override
public ActionForward cancel(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
throws Exception {
    AwardForm awardForm = (AwardForm) form;
    Award award = awardForm.getAwardDocument().getAward();
    /*
     * We need to ensure the user didn't create a pending version of the linked proposal,
     * which, when processed, will overwrite any de-linking caused by the canceling of this Award version.
     */
    Set<String> linkedPendingProposals = getLinkedPendingProposals(award);
    if (!linkedPendingProposals.isEmpty()) {
        String proposalNumbers = StringUtils.join(linkedPendingProposals, ", ");
        GlobalVariables.getMessageMap().putError("noKey",
                ERROR_CANCEL_PENDING_PROPOSALS,
                proposalNumbers);
        return mapping.findForward(RiceConstants.MAPPING_BASIC);
    }
    Object question = request.getParameter(KRADConstants.QUESTION_INST_ATTRIBUTE_NAME);
    // this should probably be moved into a private instance variable
    // logic for cancel question
    if (question == null) {
        // ask question if not already asked
        return this.performQuestionWithoutInput(mapping, form, request, response, KRADConstants.DOCUMENT_CANCEL_QUESTION, getKualiConfigurationService().getPropertyValueAsString("document.question.cancel.text"), KRADConstants.CONFIRMATION_QUESTION, KRADConstants.MAPPING_CANCEL, "");
    }
    else {
        Object buttonClicked = request.getParameter(KRADConstants.QUESTION_CLICKED_BUTTON);
        if ((KRADConstants.DOCUMENT_CANCEL_QUESTION.equals(question)) && ConfirmationQuestion.NO.equals(buttonClicked)) {
            // if no button clicked just reload the doc
            return mapping.findForward(RiceConstants.MAPPING_BASIC);
        }
        // else go to cancel logic below
    }
    KualiDocumentFormBase kualiDocumentFormBase = (KualiDocumentFormBase) form;
    doProcessingAfterPost( kualiDocumentFormBase, request );
    // First version of the award: deactivate its hierarchy node so the
    // cancelled hierarchy no longer appears active.
    if (award.getSequenceNumber() == 1) {
        AwardHierarchy hierarchy = getAwardHierarchyService().loadAwardHierarchy(award.getAwardNumber());
        hierarchy.setActive(false);
        getBusinessObjectService().save(hierarchy);
    }
    getDocumentService().cancelDocument(kualiDocumentFormBase.getDocument(), kualiDocumentFormBase.getAnnotation());
    //add all award amount info objects to previous award version and save.
    // AwardForm awardForm = (AwardForm) form;
    // AwardDocument awardDocument = (AwardDocument) awardForm.getDocument();
    // Award award = awardDocument.getAward();
    // Award activeAward = getActiveAwardVersion(award.getAwardNumber());
    // activeAward.setAwardAmountInfos(award.getAwardAmountInfos());
    // //reinitialize the collection so the cancelled doc can be viewed.
    // award.initializeAwardAmountInfoObjects();
    //
    // getBusinessObjectService().save(award);
    // getBusinessObjectService().save(activeAward);
    return returnToSender(request, mapping, kualiDocumentFormBase);
}
/**
 * Finds the proposal numbers of pending institutional proposal versions that
 * are still funded by this award version.
 *
 * @param award the award version being cancelled
 * @return the (possibly empty) set of linked pending proposal numbers
 */
private Set<String> getLinkedPendingProposals(Award award) {
    final Set<String> proposalNumbers = new HashSet<String>();
    for (AwardFundingProposal fundingProposal : award.getFundingProposals()) {
        final String proposalNumber = fundingProposal.getProposal().getProposalNumber();
        final InstitutionalProposal pendingVersion =
                getInstitutionalProposalService().getPendingInstitutionalProposalVersion(proposalNumber);
        final boolean stillLinked = pendingVersion != null
                && pendingVersion.isFundedByAward(award.getAwardNumber(), award.getSequenceNumber());
        if (stillLinked) {
            proposalNumbers.add(proposalNumber);
        }
    }
    return proposalNumbers;
}
/** @return the InstitutionalProposalService from the service locator */
protected InstitutionalProposalService getInstitutionalProposalService() {
    final InstitutionalProposalService service = KraServiceLocator.getService(InstitutionalProposalService.class);
    return service;
}
/** @return the VersionHistoryService from the service locator */
protected VersionHistoryService getVersionHistoryService() {
    final VersionHistoryService service = KraServiceLocator.getService(VersionHistoryService.class);
    return service;
}
/**
 * Called when the sync sponsor button is pressed; queues a hierarchy sync
 * change for this award's "sponsorCode" attribute.
 */
public ActionForward syncSponsor(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
        throws Exception {
    final Award award = ((AwardForm) form).getAwardDocument().getAward();
    final AwardSyncPendingChangeBean pendingChange =
            new AwardSyncPendingChangeBean(AwardSyncType.ADD_SYNC, award, "sponsorCode", "sponsorCode");
    getAwardSyncCreationService().addAwardSyncChange(award, pendingChange);
    return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
/**
 * Called when the sync award status button is pressed; queues a hierarchy sync
 * change for this award's "statusCode" attribute.
 */
public ActionForward syncStatusCode(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
        throws Exception {
    final Award award = ((AwardForm) form).getAwardDocument().getAward();
    final AwardSyncPendingChangeBean pendingChange =
            new AwardSyncPendingChangeBean(AwardSyncType.ADD_SYNC, award, "statusCode", "statusCode");
    getAwardSyncCreationService().addAwardSyncChange(award, pendingChange);
    return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
/**
 * Called to delete award sync changes: every change flagged for deletion is
 * removed both from persistent storage and from the award's in-memory list.
 */
public ActionForward deleteChanges(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
        throws Exception {
    final Award award = ((AwardForm) form).getAwardDocument().getAward();
    // Use the list iterator so entries can be removed safely while traversing.
    for (ListIterator<AwardSyncChange> iter = award.getSyncChanges().listIterator(); iter.hasNext();) {
        final AwardSyncChange change = iter.next();
        if (change.isDelete()) {
            getBusinessObjectService().delete(change);
            iter.remove();
        }
    }
    return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
/**
 * Turns on hierarchy sync mode for the form.
 */
public ActionForward activateSyncMode(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
        throws Exception {
    ((AwardForm) form).setSyncMode(true);
    return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
/**
 * Turns off hierarchy sync mode for the form.
 */
public ActionForward deactivateSyncMode(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
        throws Exception {
    ((AwardForm) form).setSyncMode(false);
    return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
/**
 * Clears every sync scope selection (descendants, fabricated, cost sharing)
 * on all of the award's pending sync changes.
 */
public ActionForward clearSyncSelections(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
        throws Exception {
    final Award award = ((AwardForm) form).getAwardDocument().getAward();
    for (AwardSyncChange change : award.getSyncChanges()) {
        change.setSyncDescendants(null);
        change.setSyncFabricated(false);
        change.setSyncCostSharing(false);
    }
    return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
/**
 * Routes the document back to the hierarchy-sync route node so that sync
 * validation is re-run, after marking the parent award status accordingly.
 */
public ActionForward rerunValidation(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response)
        throws Exception {
    final AwardForm awardForm = (AwardForm) form;
    // Mark the parent status before routing so the UI reflects the in-flight validation.
    awardForm.getAwardSyncBean().getParentAwardStatus().setStatus("Validation In Progress");
    getBusinessObjectService().save(awardForm.getAwardSyncBean().getParentAwardStatus());
    awardForm.getAwardDocument().getDocumentHeader().getWorkflowDocument()
            .returnToPreviousNode("Re-run Hierarchy Sync Validation", Constants.AWARD_SYNC_HAS_SYNC_NODE_NAME);
    return mapping.findForward(Constants.MAPPING_AWARD_BASIC);
}
/**
 * Opens the ad-hoc notification editor pre-populated with default values for
 * the current award.
 */
public ActionForward sendNotification(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
    final AwardForm awardForm = (AwardForm) form;
    final Award award = awardForm.getAwardDocument().getAward();
    final AwardNotificationContext context =
            new AwardNotificationContext(award, null, "Ad-Hoc Notification", Constants.MAPPING_AWARD_ACTIONS_PAGE);
    awardForm.getNotificationHelper().initializeDefaultValues(context);
    return mapping.findForward("notificationEditor");
}
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2014 - 2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.keywordsearch;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import org.netbeans.api.progress.aggregate.AggregateProgressFactory;
import org.netbeans.api.progress.aggregate.AggregateProgressHandle;
import org.netbeans.api.progress.aggregate.ProgressContributor;
import org.openide.util.Cancellable;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.StopWatch;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
/**
* Singleton keyword search manager: Launches search threads for each job and
* performs commits, both on timed intervals.
*/
final class IngestSearchRunner {
private static final Logger logger = Logger.getLogger(IngestSearchRunner.class.getName());
// Singleton instance; created lazily under the class lock in getInstance().
private static IngestSearchRunner instance = null;
private IngestServices services = IngestServices.getInstance();
private Ingester ingester = null;
// Delay before the next periodic search, in milliseconds; doubled by
// PeriodicSearchTask when a search pass takes too long.
private long currentUpdateIntervalMs;
// True while a PeriodicSearchTask is scheduled or running.
private volatile boolean periodicSearchTaskRunning = false;
// Handle to the currently scheduled PeriodicSearchTask; used to cancel it.
private Future<?> jobProcessingTaskFuture;
private final ScheduledThreadPoolExecutor jobProcessingExecutor;
private static final int NUM_SEARCH_SCHEDULING_THREADS = 1;
private static final String SEARCH_SCHEDULER_THREAD_NAME = "periodic-search-scheduler-%d";
// maps a jobID to the search
private Map<Long, SearchJobInfo> jobs = new ConcurrentHashMap<>();
// Package-private constructor. NOTE(review): this type is accessed as a
// singleton via getInstance(); confirm whether this should be private.
IngestSearchRunner() {
    // Convert the user-configured update frequency (minutes) to milliseconds.
    currentUpdateIntervalMs = ((long) KeywordSearchSettings.getUpdateFrequency().getTime()) * 60 * 1000;
    ingester = Ingester.getDefault();
    jobProcessingExecutor = new ScheduledThreadPoolExecutor(NUM_SEARCH_SCHEDULING_THREADS, new ThreadFactoryBuilder().setNameFormat(SEARCH_SCHEDULER_THREAD_NAME).build());
}
/**
 * Gets the singleton search manager, creating it on first use.
 *
 * @return the shared IngestSearchRunner instance
 */
public static synchronized IngestSearchRunner getInstance() {
    if (null == instance) {
        instance = new IngestSearchRunner();
    }
    return instance;
}
/**
 * Registers a keyword search job for an ingest job and starts the periodic
 * search task when it is not already running. Multiple module instances from
 * the same ingest job share one SearchJobInfo via a reference count.
 *
 * @param jobContext       the ingest job context (supplies the job id)
 * @param keywordListNames names of the keyword lists to search for this job
 */
public synchronized void startJob(IngestJobContext jobContext, List<String> keywordListNames) {
    long jobId = jobContext.getJobId();
    if (jobs.containsKey(jobId) == false) {
        logger.log(Level.INFO, "Adding job {0}", jobId); //NON-NLS
        SearchJobInfo jobData = new SearchJobInfo(jobContext, keywordListNames);
        jobs.put(jobId, jobData);
    }
    // keep track of how many threads / module instances from this job have asked for this
    jobs.get(jobId).incrementModuleReferenceCount();
    // start the timer, if needed
    if ((jobs.size() > 0) && (periodicSearchTaskRunning == false)) {
        // reset the default periodic search frequency to the user setting
        logger.log(Level.INFO, "Resetting periodic search time out to default value"); //NON-NLS
        currentUpdateIntervalMs = ((long) KeywordSearchSettings.getUpdateFrequency().getTime()) * 60 * 1000;
        jobProcessingTaskFuture = jobProcessingExecutor.schedule(new PeriodicSearchTask(), currentUpdateIntervalMs, MILLISECONDS);
        periodicSearchTaskRunning = true;
    }
}
/**
 * Perform normal finishing of searching for this job, including one last
 * commit and search. Blocks until the final search is complete. Only the last
 * module/thread of the job (by reference count) actually runs the final
 * search; earlier callers return immediately.
 *
 * @param jobId the ingest job id whose search is ending
 */
public synchronized void endJob(long jobId) {
    SearchJobInfo job;
    boolean readyForFinalSearch = false;
    job = jobs.get(jobId);
    if (job == null) {
        return;
    }
    // Only do final search if this is the last module/thread in this job to call endJob()
    if (job.decrementModuleReferenceCount() == 0) {
        jobs.remove(jobId);
        readyForFinalSearch = true;
    }
    if (readyForFinalSearch) {
        logger.log(Level.INFO, "Commiting search index before final search for search job {0}", job.getJobId()); //NON-NLS
        commit();
        doFinalSearch(job); //this will block until it's done
        // new jobs could have been added while we were doing final search
        if (jobs.isEmpty()) {
            // no more jobs left. stop the PeriodicSearchTask.
            // A new one will be created for future jobs.
            logger.log(Level.INFO, "No more search jobs. Stopping periodic search task"); //NON-NLS
            periodicSearchTaskRunning = false;
            jobProcessingTaskFuture.cancel(true);
        }
    }
}
/**
 * Immediate stop and removal of job from SearchRunner. Cancels the
 * associated search worker if it's still running, and stops the periodic
 * search task when no jobs remain.
 *
 * @param jobId the ingest job id to stop
 */
public synchronized void stopJob(long jobId) {
    logger.log(Level.INFO, "Stopping search job {0}", jobId); //NON-NLS
    // Commit so results indexed so far are preserved even though we are aborting.
    commit();
    SearchJobInfo job;
    job = jobs.get(jobId);
    if (job == null) {
        return;
    }
    //stop currentSearcher
    IngestSearchRunner.Searcher currentSearcher = job.getCurrentSearcher();
    if ((currentSearcher != null) && (!currentSearcher.isDone())) {
        logger.log(Level.INFO, "Cancelling search job {0}", jobId); //NON-NLS
        currentSearcher.cancel(true);
    }
    jobs.remove(jobId);
    if (jobs.isEmpty()) {
        // no more jobs left. stop the PeriodicSearchTask.
        // A new one will be created for future jobs.
        logger.log(Level.INFO, "No more search jobs. Stopping periodic search task"); //NON-NLS
        periodicSearchTaskRunning = false;
        jobProcessingTaskFuture.cancel(true);
    }
}
/**
 * Adds the given keyword lists to every registered search job. Used when the
 * user wants to search for a list while ingest is already under way.
 *
 * @param keywordListNames names of the keyword lists to add
 */
public synchronized void addKeywordListsToAllJobs(List<String> keywordListNames) {
    for (String keywordListName : keywordListNames) {
        logger.log(Level.INFO, "Adding keyword list {0} to all jobs", keywordListName); //NON-NLS
        for (SearchJobInfo jobInfo : jobs.values()) {
            jobInfo.addKeywordListName(keywordListName);
        }
    }
}
/**
 * Commits the search index and notifies listeners that the number of
 * text-indexed files may have changed.
 */
private void commit() {
    ingester.commit();
    // Signal a potential change in number of text_ingested files
    try {
        KeywordSearch.fireNumIndexedFilesChange(null, KeywordSearch.getServer().queryNumIndexedFiles());
    } catch (NoOpenCoreException | KeywordSearchModuleException ex) {
        logger.log(Level.SEVERE, "Error executing Solr query to check number of indexed files", ex); //NON-NLS
    }
}
/**
 * A final search waits for any still-running workers, and then executes a
 * new one and waits until that is done. Does nothing when the job has no
 * keyword lists.
 *
 * @param job the search job to finish
 */
private void doFinalSearch(SearchJobInfo job) {
    // Run one last search as there are probably some new files committed
    logger.log(Level.INFO, "Starting final search for search job {0}", job.getJobId()); //NON-NLS
    if (!job.getKeywordListNames().isEmpty()) {
        try {
            // In case this job still has a worker running, wait for it to finish
            logger.log(Level.INFO, "Checking for previous search for search job {0} before executing final search", job.getJobId()); //NON-NLS
            job.waitForCurrentWorker();
            IngestSearchRunner.Searcher finalSearcher = new IngestSearchRunner.Searcher(job, true);
            job.setCurrentSearcher(finalSearcher); //save the ref
            logger.log(Level.INFO, "Kicking off final search for search job {0}", job.getJobId()); //NON-NLS
            finalSearcher.execute(); //start thread
            // block until the search is complete
            logger.log(Level.INFO, "Waiting for final search for search job {0}", job.getJobId()); //NON-NLS
            finalSearcher.get();
            logger.log(Level.INFO, "Final search for search job {0} completed", job.getJobId()); //NON-NLS
        } catch (InterruptedException | CancellationException ex) {
            logger.log(Level.INFO, "Final search for search job {0} interrupted or cancelled", job.getJobId()); //NON-NLS
        } catch (ExecutionException ex) {
            logger.log(Level.SEVERE, String.format("Final search for search job %d failed", job.getJobId()), ex); //NON-NLS
        }
    }
}
/**
 * Task to perform periodic searches for each job (does a single index commit
 * first). Runs one search job at a time, blocking on each searcher; when done
 * it reschedules itself, adapting the interval to how long the searches took.
 */
private final class PeriodicSearchTask implements Runnable {
    private final Logger logger = Logger.getLogger(IngestSearchRunner.PeriodicSearchTask.class.getName());
    @Override
    public void run() {
        // If no jobs then cancel the task. If more job(s) come along, a new task will start up.
        if (jobs.isEmpty() || jobProcessingTaskFuture.isCancelled()) {
            logger.log(Level.INFO, "Exiting periodic search task"); //NON-NLS
            periodicSearchTaskRunning = false;
            return;
        }
        commit();
        logger.log(Level.INFO, "Starting periodic searches");
        final StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        // NOTE: contents of "jobs" ConcurrentHashMap can be modified in stopJob() and endJob() while we are inside this loop
        for (Iterator<Entry<Long, SearchJobInfo>> iterator = jobs.entrySet().iterator(); iterator.hasNext();) {
            SearchJobInfo job = iterator.next().getValue();
            // Re-check for cancellation between jobs so a stop takes effect promptly.
            if (jobProcessingTaskFuture.isCancelled()) {
                logger.log(Level.INFO, "Search has been cancelled. Exiting periodic search task."); //NON-NLS
                periodicSearchTaskRunning = false;
                return;
            }
            // If no lists or the worker is already running then skip it
            if (!job.getKeywordListNames().isEmpty() && !job.isWorkerRunning()) {
                // Spawn a search thread for each job
                logger.log(Level.INFO, "Executing periodic search for search job {0}", job.getJobId());
                Searcher searcher = new Searcher(job); // SwingWorker
                job.setCurrentSearcher(searcher); //save the ref
                searcher.execute(); //start thread
                job.setWorkerRunning(true);
                try {
                    // wait for the searcher to finish
                    searcher.get();
                } catch (InterruptedException | ExecutionException ex) {
                    logger.log(Level.SEVERE, "Error performing keyword search: {0}", ex.getMessage()); //NON-NLS
                    services.postMessage(IngestMessage.createErrorMessage(KeywordSearchModuleFactory.getModuleName(),
                            NbBundle.getMessage(this.getClass(),
                                    "SearchRunner.Searcher.done.err.msg"), ex.getMessage()));
                }// catch and ignore if we were cancelled
                catch (java.util.concurrent.CancellationException ex) {
                }
            }
        }
        stopWatch.stop();
        logger.log(Level.INFO, "All periodic searches cumulatively took {0} secs", stopWatch.getElapsedTimeSecs()); //NON-NLS
        // calculate "hold off" time
        recalculateUpdateIntervalTime(stopWatch.getElapsedTimeSecs()); // ELDEBUG
        // schedule next PeriodicSearchTask
        jobProcessingTaskFuture = jobProcessingExecutor.schedule(new PeriodicSearchTask(), currentUpdateIntervalMs, MILLISECONDS);
        // exit this thread
        return;
    }
    // Doubles the periodic search interval whenever the last pass consumed
    // more than a quarter of the current interval, to keep search overhead
    // bounded. (Parameter name retains the original's "Serch" typo.)
    private void recalculateUpdateIntervalTime(long lastSerchTimeSec) {
        // If periodic search takes more than 1/4 of the current periodic search interval, then double the search interval
        if (lastSerchTimeSec * 1000 < currentUpdateIntervalMs / 4) {
            return;
        }
        // double the search interval
        currentUpdateIntervalMs = currentUpdateIntervalMs * 2;
        logger.log(Level.WARNING, "Last periodic search took {0} sec. Increasing search interval to {1} sec", new Object[]{lastSerchTimeSec, currentUpdateIntervalMs/1000});
        return;
    }
}
/**
 * Data structure to keep track of keyword lists, current results, and
 * search running status for each jobid. Thread-safety: mutable collections
 * are guarded by this object's monitor; workerRunning is volatile and also
 * coordinated through finalSearchLock for the final-search handshake.
 */
private class SearchJobInfo {
    private final IngestJobContext jobContext;
    private final long jobId;
    private final long dataSourceId;
    // mutable state:
    private volatile boolean workerRunning;
    private List<String> keywordListNames; //guarded by SearchJobInfo.this
    // Map of keyword to the object ids that contain a hit
    private Map<Keyword, Set<Long>> currentResults; //guarded by SearchJobInfo.this
    private IngestSearchRunner.Searcher currentSearcher;
    // Number of module instances/threads from this ingest job using this info.
    private AtomicLong moduleReferenceCount = new AtomicLong(0);
    private final Object finalSearchLock = new Object(); //used for a condition wait
    private SearchJobInfo(IngestJobContext jobContext, List<String> keywordListNames) {
        this.jobContext = jobContext;
        this.jobId = jobContext.getJobId();
        this.dataSourceId = jobContext.getDataSource().getId();
        // Defensive copy so later changes by the caller don't affect this job.
        this.keywordListNames = new ArrayList<>(keywordListNames);
        currentResults = new HashMap<>();
        workerRunning = false;
        currentSearcher = null;
    }
    private IngestJobContext getJobContext() {
        return jobContext;
    }
    private long getJobId() {
        return jobId;
    }
    private long getDataSourceId() {
        return dataSourceId;
    }
    // Returns a snapshot copy so callers can iterate without holding the lock.
    private synchronized List<String> getKeywordListNames() {
        return new ArrayList<>(keywordListNames);
    }
    // Adds a list name if not already present (idempotent).
    private synchronized void addKeywordListName(String keywordListName) {
        if (!keywordListNames.contains(keywordListName)) {
            keywordListNames.add(keywordListName);
        }
    }
    // Object ids already reported as hits for this keyword, or null if none yet.
    private synchronized Set<Long> currentKeywordResults(Keyword k) {
        return currentResults.get(k);
    }
    private synchronized void addKeywordResults(Keyword k, Set<Long> resultsIDs) {
        currentResults.put(k, resultsIDs);
    }
    private boolean isWorkerRunning() {
        return workerRunning;
    }
    private void setWorkerRunning(boolean flag) {
        workerRunning = flag;
    }
    private synchronized IngestSearchRunner.Searcher getCurrentSearcher() {
        return currentSearcher;
    }
    private synchronized void setCurrentSearcher(IngestSearchRunner.Searcher searchRunner) {
        currentSearcher = searchRunner;
    }
    private void incrementModuleReferenceCount() {
        moduleReferenceCount.incrementAndGet();
    }
    private long decrementModuleReferenceCount() {
        return moduleReferenceCount.decrementAndGet();
    }
    /**
     * In case this job still has a worker running, wait for it to finish
     *
     * @throws InterruptedException
     */
    private void waitForCurrentWorker() throws InterruptedException {
        synchronized (finalSearchLock) {
            while (workerRunning) {
                logger.log(Level.INFO, "Waiting for previous worker to finish"); //NON-NLS
                finalSearchLock.wait(); //wait() releases the lock
                logger.log(Level.INFO, "Notified previous worker finished"); //NON-NLS
            }
        }
    }
    /**
     * Unset workerRunning and wake up thread(s) waiting on finalSearchLock
     */
    private void searchNotify() {
        synchronized (finalSearchLock) {
            logger.log(Level.INFO, "Notifying after finishing search"); //NON-NLS
            workerRunning = false;
            finalSearchLock.notify();
        }
    }
}
    /**
     * Searcher responsible for searching the current index and writing results
     * to blackboard and the inbox. Also, posts results to listeners as Ingest
     * data events. Searches entire index, and keeps track of only new results
     * to report and save. Runs as a background thread.
     */
    private final class Searcher extends SwingWorker<Object, Void> {

        /**
         * Searcher has private copies/snapshots of the lists and keywords
         */
        private SearchJobInfo job;
        private List<Keyword> keywords; //keywords to search
        private List<String> keywordListNames; // lists currently being searched
        private List<KeywordList> keywordLists;
        private Map<Keyword, KeywordList> keywordToList; //keyword to list name mapping
        private AggregateProgressHandle progressGroup;
        private final Logger logger = Logger.getLogger(IngestSearchRunner.Searcher.class.getName());
        private boolean finalRun = false;

        /**
         * Constructs a periodic searcher for the given job. Only the keyword
         * list NAMES are snapshotted here; the keywords themselves are
         * (re)loaded from the XML lists each time the searcher runs (see
         * updateKeywords()).
         */
        Searcher(SearchJobInfo job) {
            this.job = job;
            keywordListNames = job.getKeywordListNames();
            keywords = new ArrayList<>();
            keywordToList = new HashMap<>();
            keywordLists = new ArrayList<>();
            //keywords are populated as searcher runs
        }

        /**
         * Constructs a searcher, optionally flagged as the final search for
         * the job (affects only the progress display text and logging).
         */
        Searcher(SearchJobInfo job, boolean finalRun) {
            this(job);
            this.finalRun = finalRun;
        }

        /**
         * Runs the search on the SwingWorker background thread: refreshes the
         * keyword snapshot, queries the index once per keyword, filters out
         * previously-seen hits and posts only the new ones. Cancellation is
         * checked before each keyword is processed.
         */
        @Override
        @Messages("SearchRunner.query.exception.msg=Error performing query:")
        protected Object doInBackground() throws Exception {
            final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName")
                    + (finalRun ? (" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : "");
            final String pgDisplayName = displayName + (" (" + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") + ")");
            // The Cancellable hooks the progress UI's cancel button to this
            // SwingWorker's cancel(true).
            progressGroup = AggregateProgressFactory.createSystemHandle(pgDisplayName, null, new Cancellable() {
                @Override
                public boolean cancel() {
                    logger.log(Level.INFO, "Cancelling the searcher by user."); //NON-NLS
                    if (progressGroup != null) {
                        progressGroup.setDisplayName(displayName + " " + NbBundle.getMessage(this.getClass(), "SearchRunner.doInBackGround.cancelMsg"));
                    }
                    return IngestSearchRunner.Searcher.this.cancel(true);
                }
            }, null);

            // Refresh keywords/keywordToList from the current XML lists.
            updateKeywords();
            // One progress contributor per keyword, in iteration order.
            ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()];
            int i = 0;
            for (Keyword keywordQuery : keywords) {
                subProgresses[i] = AggregateProgressFactory.createProgressContributor(keywordQuery.getSearchTerm());
                progressGroup.addContributor(subProgresses[i]);
                i++;
            }
            progressGroup.start();
            final StopWatch stopWatch = new StopWatch();
            stopWatch.start();
            try {
                progressGroup.setDisplayName(displayName);
                int keywordsSearched = 0;
                for (Keyword keyword : keywords) {
                    // Bail out promptly on user cancel or ingest-job cancel.
                    if (this.isCancelled() || this.job.getJobContext().fileIngestIsCancelled()) {
                        logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keyword.getSearchTerm()); //NON-NLS
                        return null;
                    }
                    final KeywordList keywordList = keywordToList.get(keyword);
                    //new subProgress will be active after the initial query
                    //when we know number of hits to start() with
                    if (keywordsSearched > 0) {
                        subProgresses[keywordsSearched - 1].finish();
                    }
                    KeywordSearchQuery keywordSearchQuery = KeywordSearchUtil.getQueryForKeyword(keyword, keywordList);
                    // Filtering
                    //limit search to currently ingested data sources
                    //set up a filter with 1 or more image ids OR'ed
                    final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, job.getDataSourceId());
                    keywordSearchQuery.addFilter(dataSourceFilter);
                    QueryResults queryResults;
                    // Do the actual search
                    try {
                        queryResults = keywordSearchQuery.performQuery();
                    } catch (KeywordSearchModuleException | NoOpenCoreException ex) {
                        logger.log(Level.SEVERE, "Error performing query: " + keyword.getSearchTerm(), ex); //NON-NLS
                        // NOTE(review): ex.getCause() may be null for some failure
                        // paths — confirm before dereferencing here.
                        MessageNotifyUtil.Notify.error(Bundle.SearchRunner_query_exception_msg() + keyword.getSearchTerm(), ex.getCause().getMessage());
                        //no reason to continue with next query if recovery failed
                        //or wait for recovery to kick in and run again later
                        //likely case has closed and threads are being interrupted
                        return null;
                    } catch (CancellationException e) {
                        logger.log(Level.INFO, "Cancel detected, bailing during keyword query: {0}", keyword.getSearchTerm()); //NON-NLS
                        return null;
                    }
                    // Reduce the results of the query to only those hits we
                    // have not already seen.
                    QueryResults newResults = filterResults(queryResults);
                    if (!newResults.getKeywords().isEmpty()) {
                        // Write results to BB
                        //scale progress bar more more granular, per result sub-progress, within per keyword
                        int totalUnits = newResults.getKeywords().size();
                        subProgresses[keywordsSearched].start(totalUnits);
                        int unitProgress = 0;
                        // Truncate long search terms for the progress display only.
                        String queryDisplayStr = keyword.getSearchTerm();
                        if (queryDisplayStr.length() > 50) {
                            queryDisplayStr = queryDisplayStr.substring(0, 49) + "...";
                        }
                        subProgresses[keywordsSearched].progress(keywordList.getName() + ": " + queryDisplayStr, unitProgress);
                        // Create blackboard artifacts
                        newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true);
                    } //if has results
                    //reset the status text before it goes away
                    subProgresses[keywordsSearched].progress("");
                    ++keywordsSearched;
                } //for each keyword
            } //end try block
            catch (Exception ex) {
                // Catch-all so one keyword's failure doesn't leave the progress
                // bar dangling or skip the searchNotify() below.
                logger.log(Level.WARNING, "searcher exception occurred", ex); //NON-NLS
            } finally {
                try {
                    finalizeSearcher();
                    stopWatch.stop();
                    logger.log(Level.INFO, "Searcher took {0} secs to run (final = {1})", new Object[]{stopWatch.getElapsedTimeSecs(), this.finalRun}); //NON-NLS
                } finally {
                    // In case a thread is waiting on this worker to be done
                    job.searchNotify();
                }
            }
            return null;
        }

        /**
         * Sync-up the updated keywords from the currently used lists in the XML.
         * Clears and repopulates keywords, keywordLists and keywordToList.
         */
        private void updateKeywords() {
            XmlKeywordSearchList loader = XmlKeywordSearchList.getCurrent();

            keywords.clear();
            keywordToList.clear();
            keywordLists.clear();

            for (String name : keywordListNames) {
                // NOTE(review): getList(name) could plausibly return null if the
                // list was deleted since the job was created — confirm upstream.
                KeywordList list = loader.getList(name);
                keywordLists.add(list);
                for (Keyword k : list.getKeywords()) {
                    keywords.add(k);
                    keywordToList.put(k, list);
                }
            }
        }

        /**
         * Performs the cleanup that needs to be done right AFTER
         * doInBackground() returns without relying on done() method that is not
         * guaranteed to run.
         */
        private void finalizeSearcher() {
            // Progress UI must be finished on the EDT.
            SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                    progressGroup.finish();
                }
            });
        }

        /**
         * This method filters out all of the hits found in earlier periodic
         * searches and returns only the results found by the most recent
         * search.
         *
         * This method will only return hits for objects for which we haven't
         * previously seen a hit for the keyword.
         *
         * @param queryResult The results returned by a keyword search.
         *
         * @return A unique set of hits found by the most recent search for
         *         objects that have not previously had a hit. The hits will be
         *         for the lowest numbered chunk associated with the object.
         *
         */
        private QueryResults filterResults(QueryResults queryResult) {

            // Create a new (empty) QueryResults object to hold the most recently
            // found hits.
            QueryResults newResults = new QueryResults(queryResult.getQuery());

            // For each keyword represented in the results.
            for (Keyword keyword : queryResult.getKeywords()) {
                // These are all of the hits across all objects for the most recent search.
                // This may well include duplicates of hits we've seen in earlier periodic searches.
                List<KeywordHit> queryTermResults = queryResult.getResults(keyword);

                // Sort the hits for this keyword so that we are always
                // guaranteed to return the hit for the lowest chunk.
                Collections.sort(queryTermResults);

                // This will be used to build up the hits we haven't seen before
                // for this keyword.
                List<KeywordHit> newUniqueHits = new ArrayList<>();

                // Get the set of object ids seen in the past by this searcher
                // for the given keyword.
                Set<Long> curTermResults = job.currentKeywordResults(keyword);
                if (curTermResults == null) {
                    // We create a new empty set if we haven't seen results for
                    // this keyword before.
                    curTermResults = new HashSet<>();
                }

                // For each hit for this keyword.
                for (KeywordHit hit : queryTermResults) {
                    if (curTermResults.contains(hit.getSolrObjectId())) {
                        // Skip the hit if we've already seen a hit for
                        // this keyword in the object.
                        continue;
                    }

                    // We haven't seen the hit before so add it to list of new
                    // unique hits.
                    newUniqueHits.add(hit);

                    // Add the object id to the results we've seen for this
                    // keyword.
                    curTermResults.add(hit.getSolrObjectId());
                }

                // Update the job with the list of objects for which we have
                // seen hits for the current keyword.
                job.addKeywordResults(keyword, curTermResults);

                // Add the new hits for the current keyword into the results
                // to be returned.
                newResults.addResult(keyword, newUniqueHits);
            }

            return newResults;
        }
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson;
import hudson.model.Descriptor;
import hudson.model.Describable;
import hudson.model.Hudson;
import hudson.model.ViewDescriptor;
import hudson.model.Descriptor.FormException;
import hudson.util.AdaptedIterator;
import hudson.util.Memoizer;
import hudson.util.Iterators.FlattenIterator;
import hudson.slaves.NodeDescriptor;
import hudson.tasks.Publisher;
import hudson.tasks.Publisher.DescriptorExtensionListImpl;
import java.util.List;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.logging.Logger;
import java.util.concurrent.CopyOnWriteArrayList;
import java.lang.reflect.Type;
import java.lang.reflect.ParameterizedType;
import org.jvnet.tiger_types.Types;
import org.kohsuke.stapler.Stapler;
import net.sf.json.JSONObject;
/**
 * {@link ExtensionList} for holding a set of {@link Descriptor}s, which is a group of descriptors for
 * the same extension point.
 *
 * Use {@link Hudson#getDescriptorList(Class)} to obtain instances.
 *
 * @param <D>
 *      Represents the descriptor type. This is {@code Descriptor<T>} normally but often there are subtypes
 *      of descriptors, like {@link ViewDescriptor}, {@link NodeDescriptor}, etc, and this parameter points
 *      to those for better type safety of users.
 *
 *      The actual value of 'D' is not necessary for the operation of this code, so it's purely for convenience
 *      of the users of this class.
 *
 * @since 1.286
 */
public class DescriptorExtensionList<T extends Describable<T>, D extends Descriptor<T>> extends ExtensionList<D> {
    /**
     * Creates a new instance.
     */
    @SuppressWarnings({"unchecked", "rawtypes"})
    public static <T extends Describable<T>,D extends Descriptor<T>>
    DescriptorExtensionList<T,D> createDescriptorList(Hudson hudson, Class<T> describableType) {
        // Publisher is special-cased with its own subclass; see
        // Publisher.DescriptorExtensionListImpl for the differing behavior.
        if (describableType == (Class) Publisher.class) {
            return (DescriptorExtensionList) new DescriptorExtensionListImpl(hudson);
        }
        return new DescriptorExtensionList<T,D>(hudson,describableType);
    }

    /**
     * Type of the {@link Describable} that this extension list retains.
     */
    private final Class<T> describableType;

    protected DescriptorExtensionList(Hudson hudson, Class<T> describableType) {
        // The raw casts bridge this typed list onto the shared legacy storage
        // keyed by describableType (see getLegacyDescriptors below).
        super(hudson, (Class)Descriptor.class, (CopyOnWriteArrayList)getLegacyDescriptors(describableType));
        this.describableType = describableType;
    }

    /**
     * Finds the descriptor that has the matching fully-qualified class name.
     *
     * @param fqcn
     *      Fully qualified name of the descriptor, not the describable.
     */
    public D find(String fqcn) {
        return Descriptor.find(this,fqcn);
    }

    /**
     * Finds the descriptor that describes the given type.
     * That is, if this method returns d, {@code d.clazz==type}.
     * Returns null if no such descriptor is registered.
     */
    public D find(Class<? extends T> type) {
        for (D d : this)
            if (d.clazz==type)
                return d;
        return null;
    }

    /**
     * Creates a new instance of a {@link Describable}
     * from the structured form submission data posted
     * by a radio button group.
     *
     * The submitted "value" is interpreted as an index into this list,
     * so it must match the ordering the form was rendered from.
     */
    public T newInstanceFromRadioList(JSONObject config) throws FormException {
        if(config.isNullObject())
            return null;    // none was selected
        int idx = config.getInt("value");
        return get(idx).newInstance(Stapler.getCurrentRequest(),config);
    }

    /**
     * Convenience overload that reads the radio-group JSON object named
     * {@code name} out of {@code parent} first.
     */
    public T newInstanceFromRadioList(JSONObject parent, String name) throws FormException {
        return newInstanceFromRadioList(parent.getJSONObject(name));
    }

    /**
     * Finds a descriptor by their {@link Descriptor#getId()}.
     *
     * Note: despite the method name, the match is on the descriptor's ID,
     * not on any display name.
     *
     * If none is found, null is returned.
     */
    public Descriptor<T> findByName(String id) {
        for (Descriptor<T> d : this)
            if(d.getId().equals(id))
                return d;
        return null;
    }

    /**
     * {@link #load()} in the descriptor is not a real load activity, so locking against "this" is enough.
     */
    @Override
    protected Object getLoadLock() {
        return this;
    }

    /**
     * Loading the descriptors in this case means filtering the descriptor from the master {@link ExtensionList}.
     */
    @Override
    protected List<ExtensionComponent<D>> load() {
        List<ExtensionComponent<D>> r = new ArrayList<ExtensionComponent<D>>();
        for( ExtensionComponent<Descriptor> c : hudson.getExtensionList(Descriptor.class).getComponents() ) {
            Descriptor d = c.getInstance();
            // Recover the descriptor's Descriptor<X> supertype to learn X.
            Type subTyping = Types.getBaseClass(d.getClass(), Descriptor.class);
            if (!(subTyping instanceof ParameterizedType)) {
                LOGGER.severe(d.getClass()+" doesn't extend Descriptor with a type parameter");
                continue;   // skip this one
            }
            // Keep only descriptors whose type parameter is exactly our describableType.
            if(Types.erasure(Types.getTypeArgument(subTyping,0))==(Class)describableType)
                r.add((ExtensionComponent)c);
        }
        return r;
    }

    /**
     * Stores manually registered Descriptor instances. Keyed by the {@link Describable} type.
     */
    private static final Memoizer<Class,CopyOnWriteArrayList<ExtensionComponent<Descriptor>>> legacyDescriptors = new Memoizer<Class,CopyOnWriteArrayList<ExtensionComponent<Descriptor>>>() {
        public CopyOnWriteArrayList compute(Class key) {
            return new CopyOnWriteArrayList();
        }
    };

    /**
     * Returns (creating on first use) the shared legacy-descriptor storage
     * for the given describable type.
     */
    private static <T extends Describable<T>> CopyOnWriteArrayList<ExtensionComponent<Descriptor<T>>> getLegacyDescriptors(Class<T> type) {
        return (CopyOnWriteArrayList)legacyDescriptors.get(type);
    }

    /**
     * List up all the legacy instances currently in use.
     * The returned Iterable is a live, lazily-flattened view over all
     * per-type legacy lists.
     */
    public static Iterable<Descriptor> listLegacyInstances() {
        return new Iterable<Descriptor>() {
            public Iterator<Descriptor> iterator() {
                return new AdaptedIterator<ExtensionComponent<Descriptor>,Descriptor>(
                    new FlattenIterator<ExtensionComponent<Descriptor>,CopyOnWriteArrayList<ExtensionComponent<Descriptor>>>(legacyDescriptors.values()) {
                        protected Iterator<ExtensionComponent<Descriptor>> expand(CopyOnWriteArrayList<ExtensionComponent<Descriptor>> v) {
                            return v.iterator();
                        }
                    }) {

                    protected Descriptor adapt(ExtensionComponent<Descriptor> item) {
                        return item.getInstance();
                    }
                };
            }
        };
    }

    /**
     * Exposed just for the test harness. Clear legacy instances.
     */
    public static void clearLegacyInstances() {
        legacyDescriptors.clear();
    }

    private static final Logger LOGGER = Logger.getLogger(DescriptorExtensionList.class.getName());
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.impl.domain.entity.descriptor;
import static org.optaplanner.core.impl.domain.common.accessor.MemberAccessorFactory.MemberAccessorType.FIELD_OR_GETTER_METHOD;
import static org.optaplanner.core.impl.domain.common.accessor.MemberAccessorFactory.MemberAccessorType.FIELD_OR_GETTER_METHOD_WITH_SETTER;
import static org.optaplanner.core.impl.domain.common.accessor.MemberAccessorFactory.MemberAccessorType.FIELD_OR_READ_METHOD;
import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Member;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import org.optaplanner.core.api.domain.entity.PinningFilter;
import org.optaplanner.core.api.domain.entity.PlanningEntity;
import org.optaplanner.core.api.domain.entity.PlanningPin;
import org.optaplanner.core.api.domain.solution.PlanningSolution;
import org.optaplanner.core.api.domain.valuerange.ValueRangeProvider;
import org.optaplanner.core.api.domain.variable.AnchorShadowVariable;
import org.optaplanner.core.api.domain.variable.CustomShadowVariable;
import org.optaplanner.core.api.domain.variable.InverseRelationShadowVariable;
import org.optaplanner.core.api.domain.variable.PlanningVariable;
import org.optaplanner.core.api.score.director.ScoreDirector;
import org.optaplanner.core.config.heuristic.selector.common.decorator.SelectionSorterOrder;
import org.optaplanner.core.config.util.ConfigUtils;
import org.optaplanner.core.impl.domain.common.ReflectionHelper;
import org.optaplanner.core.impl.domain.common.accessor.MemberAccessor;
import org.optaplanner.core.impl.domain.common.accessor.MemberAccessorFactory;
import org.optaplanner.core.impl.domain.policy.DescriptorPolicy;
import org.optaplanner.core.impl.domain.solution.descriptor.SolutionDescriptor;
import org.optaplanner.core.impl.domain.variable.anchor.AnchorShadowVariableDescriptor;
import org.optaplanner.core.impl.domain.variable.custom.CustomShadowVariableDescriptor;
import org.optaplanner.core.impl.domain.variable.descriptor.GenuineVariableDescriptor;
import org.optaplanner.core.impl.domain.variable.descriptor.ShadowVariableDescriptor;
import org.optaplanner.core.impl.domain.variable.descriptor.VariableDescriptor;
import org.optaplanner.core.impl.domain.variable.inverserelation.InverseRelationShadowVariableDescriptor;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.ComparatorSelectionSorter;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.CompositeSelectionFilter;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionFilter;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionSorter;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionSorterWeightFactory;
import org.optaplanner.core.impl.heuristic.selector.common.decorator.WeightFactorySelectionSorter;
import org.optaplanner.core.impl.heuristic.selector.entity.decorator.PinEntityFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @param <Solution_> the solution type, the class with the {@link PlanningSolution} annotation
*/
public class EntityDescriptor<Solution_> {
private static final Class[] VARIABLE_ANNOTATION_CLASSES = {
PlanningVariable.class,
InverseRelationShadowVariable.class, AnchorShadowVariable.class,
CustomShadowVariable.class };
protected final transient Logger logger = LoggerFactory.getLogger(getClass());
private final SolutionDescriptor<Solution_> solutionDescriptor;
private final Class<?> entityClass;
private final Predicate<Object> isInitializedPredicate;
// Only declared movable filter, excludes inherited and descending movable filters
private SelectionFilter declaredMovableEntitySelectionFilter;
private SelectionSorter decreasingDifficultySorter;
// Only declared variable descriptors, excludes inherited variable descriptors
private Map<String, GenuineVariableDescriptor<Solution_>> declaredGenuineVariableDescriptorMap;
private Map<String, ShadowVariableDescriptor<Solution_>> declaredShadowVariableDescriptorMap;
private List<SelectionFilter> declaredPinEntityFilterList;
private List<EntityDescriptor<Solution_>> inheritedEntityDescriptorList;
// Caches the inherited, declared and descending movable filters (including @PlanningPin filters) as a composite filter
private SelectionFilter effectiveMovableEntitySelectionFilter;
// Caches the inherited and declared variable descriptors
private Map<String, GenuineVariableDescriptor<Solution_>> effectiveGenuineVariableDescriptorMap;
private Map<String, ShadowVariableDescriptor<Solution_>> effectiveShadowVariableDescriptorMap;
private Map<String, VariableDescriptor<Solution_>> effectiveVariableDescriptorMap;
// ************************************************************************
// Constructors and simple getters/setters
// ************************************************************************
public EntityDescriptor(SolutionDescriptor<Solution_> solutionDescriptor, Class<?> entityClass) {
this.solutionDescriptor = solutionDescriptor;
this.entityClass = entityClass;
isInitializedPredicate = this::isInitialized;
if (entityClass.getPackage() == null) {
logger.warn("The entityClass ({}) should be in a proper java package.", entityClass);
}
}
/**
* Using entityDescriptor::isInitialized directly breaks node sharing
* because it creates multiple instances of this {@link Predicate}.
*
* @return never null, always the same {@link Predicate} instance to {@link #isInitialized(Object)}
*/
public Predicate<Object> getIsInitializedPredicate() {
return isInitializedPredicate;
}
// ************************************************************************
// Lifecycle methods
// ************************************************************************
public void processAnnotations(DescriptorPolicy descriptorPolicy) {
processEntityAnnotations(descriptorPolicy);
declaredGenuineVariableDescriptorMap = new LinkedHashMap<>();
declaredShadowVariableDescriptorMap = new LinkedHashMap<>();
declaredPinEntityFilterList = new ArrayList<>(2);
// Only iterate declared fields and methods, not inherited members, to avoid registering the same one twice
List<Member> memberList = ConfigUtils.getDeclaredMembers(entityClass);
for (Member member : memberList) {
processValueRangeProviderAnnotation(descriptorPolicy, member);
processPlanningVariableAnnotation(descriptorPolicy, member);
processPlanningPinAnnotation(descriptorPolicy, member);
}
if (declaredGenuineVariableDescriptorMap.isEmpty() && declaredShadowVariableDescriptorMap.isEmpty()) {
throw new IllegalStateException("The entityClass (" + entityClass
+ ") should have at least 1 getter method or 1 field with a "
+ PlanningVariable.class.getSimpleName() + " annotation or a shadow variable annotation.");
}
processVariableAnnotations(descriptorPolicy);
}
private void processEntityAnnotations(DescriptorPolicy descriptorPolicy) {
PlanningEntity entityAnnotation = entityClass.getAnnotation(PlanningEntity.class);
if (entityAnnotation == null) {
throw new IllegalStateException("The entityClass (" + entityClass
+ ") has been specified as a planning entity in the configuration," +
" but does not have a " + PlanningEntity.class.getSimpleName() + " annotation.");
}
processMovable(descriptorPolicy, entityAnnotation);
processDifficulty(descriptorPolicy, entityAnnotation);
}
    /**
     * Digests the annotation's pinningFilter attribute into the declared
     * movable-entity selection filter (left null when no filter is configured).
     */
    private void processMovable(DescriptorPolicy descriptorPolicy, PlanningEntity entityAnnotation) {
        Class<? extends PinningFilter> pinningFilterClass = entityAnnotation.pinningFilter();
        // NullPinningFilter is the annotation's "not set" marker value.
        boolean hasPinningFilter = pinningFilterClass != PlanningEntity.NullPinningFilter.class;
        if (hasPinningFilter) {
            // Anonymous class (not a lambda) on purpose: the field initializer
            // below passes "this" (the filter instance) as the owner to
            // ConfigUtils.newInstance().
            declaredMovableEntitySelectionFilter = new SelectionFilter() {
                private final PinningFilter pinningFilter =
                        ConfigUtils.newInstance(this, "pinningFilterClass", pinningFilterClass);

                @Override
                public boolean accept(ScoreDirector scoreDirector, Object selection) {
                    // The selection filter accepts MOVABLE entities, so the
                    // pinning filter's verdict is inverted.
                    return !pinningFilter.accept(scoreDirector.getWorkingSolution(), selection);
                }
            };
        }
    }
private void processDifficulty(DescriptorPolicy descriptorPolicy, PlanningEntity entityAnnotation) {
Class<? extends Comparator> difficultyComparatorClass = entityAnnotation.difficultyComparatorClass();
if (difficultyComparatorClass == PlanningEntity.NullDifficultyComparator.class) {
difficultyComparatorClass = null;
}
Class<? extends SelectionSorterWeightFactory> difficultyWeightFactoryClass = entityAnnotation
.difficultyWeightFactoryClass();
if (difficultyWeightFactoryClass == PlanningEntity.NullDifficultyWeightFactory.class) {
difficultyWeightFactoryClass = null;
}
if (difficultyComparatorClass != null && difficultyWeightFactoryClass != null) {
throw new IllegalStateException("The entityClass (" + entityClass
+ ") cannot have a difficultyComparatorClass (" + difficultyComparatorClass.getName()
+ ") and a difficultyWeightFactoryClass (" + difficultyWeightFactoryClass.getName()
+ ") at the same time.");
}
if (difficultyComparatorClass != null) {
Comparator<Object> difficultyComparator = ConfigUtils.newInstance(this,
"difficultyComparatorClass", difficultyComparatorClass);
decreasingDifficultySorter = new ComparatorSelectionSorter<Solution_, Object>(
difficultyComparator, SelectionSorterOrder.DESCENDING);
}
if (difficultyWeightFactoryClass != null) {
SelectionSorterWeightFactory<Solution_, Object> difficultyWeightFactory = ConfigUtils.newInstance(this,
"difficultyWeightFactoryClass", difficultyWeightFactoryClass);
decreasingDifficultySorter = new WeightFactorySelectionSorter<>(
difficultyWeightFactory, SelectionSorterOrder.DESCENDING);
}
}
private void processValueRangeProviderAnnotation(DescriptorPolicy descriptorPolicy, Member member) {
if (((AnnotatedElement) member).isAnnotationPresent(ValueRangeProvider.class)) {
MemberAccessor memberAccessor = MemberAccessorFactory.buildMemberAccessor(
member, FIELD_OR_READ_METHOD, ValueRangeProvider.class);
descriptorPolicy.addFromEntityValueRangeProvider(
memberAccessor);
}
}
private void processPlanningVariableAnnotation(DescriptorPolicy descriptorPolicy, Member member) {
Class<? extends Annotation> variableAnnotationClass = ConfigUtils.extractAnnotationClass(
member, VARIABLE_ANNOTATION_CLASSES);
if (variableAnnotationClass != null) {
MemberAccessorFactory.MemberAccessorType memberAccessorType;
if (variableAnnotationClass.equals(CustomShadowVariable.class)) {
memberAccessorType = FIELD_OR_GETTER_METHOD;
} else {
memberAccessorType = FIELD_OR_GETTER_METHOD_WITH_SETTER;
}
MemberAccessor memberAccessor = MemberAccessorFactory.buildMemberAccessor(
member, memberAccessorType, variableAnnotationClass);
registerVariableAccessor(descriptorPolicy, variableAnnotationClass, memberAccessor);
}
}
private void registerVariableAccessor(DescriptorPolicy descriptorPolicy,
Class<? extends Annotation> variableAnnotationClass, MemberAccessor memberAccessor) {
String memberName = memberAccessor.getName();
if (declaredGenuineVariableDescriptorMap.containsKey(memberName)
|| declaredShadowVariableDescriptorMap.containsKey(memberName)) {
VariableDescriptor<Solution_> duplicate = declaredGenuineVariableDescriptorMap.get(memberName);
if (duplicate == null) {
duplicate = declaredShadowVariableDescriptorMap.get(memberName);
}
throw new IllegalStateException("The entityClass (" + entityClass
+ ") has a " + variableAnnotationClass.getSimpleName()
+ " annotated member (" + memberAccessor
+ ") that is duplicated by another member for variableDescriptor (" + duplicate + ").\n"
+ "Maybe the annotation is defined on both the field and its getter.");
}
if (variableAnnotationClass.equals(PlanningVariable.class)) {
GenuineVariableDescriptor<Solution_> variableDescriptor = new GenuineVariableDescriptor<>(this,
memberAccessor);
declaredGenuineVariableDescriptorMap.put(memberName, variableDescriptor);
} else if (variableAnnotationClass.equals(InverseRelationShadowVariable.class)) {
ShadowVariableDescriptor<Solution_> variableDescriptor = new InverseRelationShadowVariableDescriptor<>(
this, memberAccessor);
declaredShadowVariableDescriptorMap.put(memberName, variableDescriptor);
} else if (variableAnnotationClass.equals(AnchorShadowVariable.class)) {
ShadowVariableDescriptor<Solution_> variableDescriptor = new AnchorShadowVariableDescriptor<>(
this, memberAccessor);
declaredShadowVariableDescriptorMap.put(memberName, variableDescriptor);
} else if (variableAnnotationClass.equals(CustomShadowVariable.class)) {
ShadowVariableDescriptor<Solution_> variableDescriptor = new CustomShadowVariableDescriptor<>(
this, memberAccessor);
declaredShadowVariableDescriptorMap.put(memberName, variableDescriptor);
} else {
throw new IllegalStateException("The variableAnnotationClass ("
+ variableAnnotationClass + ") is not implemented.");
}
}
private void processPlanningPinAnnotation(DescriptorPolicy descriptorPolicy, Member member) {
if (((AnnotatedElement) member).isAnnotationPresent(PlanningPin.class)) {
MemberAccessor memberAccessor = MemberAccessorFactory.buildMemberAccessor(
member, FIELD_OR_READ_METHOD, PlanningPin.class);
Class<?> type = memberAccessor.getType();
if (!Boolean.TYPE.isAssignableFrom(type) && !Boolean.class.isAssignableFrom(type)) {
throw new IllegalStateException("The entityClass (" + entityClass
+ ") has a " + PlanningPin.class.getSimpleName()
+ " annotated member (" + memberAccessor
+ ") that is not a boolean or Boolean.");
}
declaredPinEntityFilterList.add(new PinEntityFilter(memberAccessor));
}
}
private void processVariableAnnotations(DescriptorPolicy descriptorPolicy) {
for (GenuineVariableDescriptor<Solution_> variableDescriptor : declaredGenuineVariableDescriptorMap.values()) {
variableDescriptor.processAnnotations(descriptorPolicy);
}
for (ShadowVariableDescriptor<Solution_> variableDescriptor : declaredShadowVariableDescriptorMap.values()) {
variableDescriptor.processAnnotations(descriptorPolicy);
}
}
public void linkEntityDescriptors(DescriptorPolicy descriptorPolicy) {
investigateParentsToLinkInherited(entityClass);
createEffectiveVariableDescriptorMaps();
createEffectiveMovableEntitySelectionFilter();
// linkVariableDescriptors() is in a separate loop
}
private void investigateParentsToLinkInherited(Class<?> investigateClass) {
inheritedEntityDescriptorList = new ArrayList<>(4);
if (investigateClass == null || investigateClass.isArray()) {
return;
}
linkInherited(investigateClass.getSuperclass());
for (Class<?> superInterface : investigateClass.getInterfaces()) {
linkInherited(superInterface);
}
}
private void linkInherited(Class<?> potentialEntityClass) {
EntityDescriptor<Solution_> entityDescriptor = solutionDescriptor.getEntityDescriptorStrict(
potentialEntityClass);
if (entityDescriptor != null) {
inheritedEntityDescriptorList.add(entityDescriptor);
} else {
investigateParentsToLinkInherited(potentialEntityClass);
}
}
private void createEffectiveVariableDescriptorMaps() {
effectiveGenuineVariableDescriptorMap = new LinkedHashMap<>(declaredGenuineVariableDescriptorMap.size());
effectiveShadowVariableDescriptorMap = new LinkedHashMap<>(declaredShadowVariableDescriptorMap.size());
for (EntityDescriptor<Solution_> inheritedEntityDescriptor : inheritedEntityDescriptorList) {
effectiveGenuineVariableDescriptorMap.putAll(inheritedEntityDescriptor.getGenuineVariableDescriptorMap());
effectiveShadowVariableDescriptorMap.putAll(inheritedEntityDescriptor.getShadowVariableDescriptorMap());
}
effectiveGenuineVariableDescriptorMap.putAll(declaredGenuineVariableDescriptorMap);
effectiveShadowVariableDescriptorMap.putAll(declaredShadowVariableDescriptorMap);
effectiveVariableDescriptorMap = new LinkedHashMap<>(
effectiveGenuineVariableDescriptorMap.size() + effectiveShadowVariableDescriptorMap.size());
effectiveVariableDescriptorMap.putAll(effectiveGenuineVariableDescriptorMap);
effectiveVariableDescriptorMap.putAll(effectiveShadowVariableDescriptorMap);
}
/**
 * Combines the inherited movable filters, the declared movable filter and the declared
 * pin filters into a single effective filter (null when there is none at all).
 */
private void createEffectiveMovableEntitySelectionFilter() {
    if (declaredMovableEntitySelectionFilter != null && !hasAnyDeclaredGenuineVariableDescriptor()) {
        throw new IllegalStateException("The entityClass (" + entityClass
                + ") has a movableEntitySelectionFilterClass (" + declaredMovableEntitySelectionFilter.getClass()
                + "), but it has no declared genuine variables, only shadow variables.");
    }
    List<SelectionFilter> filterList = new ArrayList<>();
    // TODO Also add in child entity selectors
    for (EntityDescriptor<Solution_> inheritedEntityDescriptor : inheritedEntityDescriptorList) {
        // Includes movable and pinned
        if (inheritedEntityDescriptor.hasEffectiveMovableEntitySelectionFilter()) {
            filterList.add(inheritedEntityDescriptor.getEffectiveMovableEntitySelectionFilter());
        }
    }
    if (declaredMovableEntitySelectionFilter != null) {
        filterList.add(declaredMovableEntitySelectionFilter);
    }
    filterList.addAll(declaredPinEntityFilterList);
    switch (filterList.size()) {
        case 0:
            effectiveMovableEntitySelectionFilter = null;
            break;
        case 1:
            // Avoid the composite wrapper for the common single-filter case.
            effectiveMovableEntitySelectionFilter = filterList.get(0);
            break;
        default:
            effectiveMovableEntitySelectionFilter = new CompositeSelectionFilter(filterList);
    }
}
/**
 * Propagates descriptor linking to every declared (non-inherited) variable descriptor.
 * Genuine variables are linked before shadow variables.
 *
 * @param descriptorPolicy never null
 */
public void linkVariableDescriptors(DescriptorPolicy descriptorPolicy) {
    for (GenuineVariableDescriptor<Solution_> genuineDescriptor : declaredGenuineVariableDescriptorMap.values()) {
        genuineDescriptor.linkVariableDescriptors(descriptorPolicy);
    }
    for (ShadowVariableDescriptor<Solution_> shadowDescriptor : declaredShadowVariableDescriptorMap.values()) {
        shadowDescriptor.linkVariableDescriptors(descriptorPolicy);
    }
}
// ************************************************************************
// Worker methods
// ************************************************************************
/** @return never null, the solution descriptor that owns this entity descriptor */
public SolutionDescriptor<Solution_> getSolutionDescriptor() {
return solutionDescriptor;
}
/** @return never null, the annotated planning entity class */
public Class<?> getEntityClass() {
return entityClass;
}
/** @return true if the given object is an instance of this entity class (or a subclass of it) */
public boolean matchesEntity(Object entity) {
return entityClass.isAssignableFrom(entity.getClass());
}
/** @return true if a movable/pin filter applies to this entity (declared or inherited) */
public boolean hasEffectiveMovableEntitySelectionFilter() {
return effectiveMovableEntitySelectionFilter != null;
}
/** @return sometimes null, the combined movable/pin filter built during descriptor creation */
public SelectionFilter getEffectiveMovableEntitySelectionFilter() {
return effectiveMovableEntitySelectionFilter;
}
/** @return sometimes null, the sorter from the difficulty comparator/weight factory configuration */
public SelectionSorter getDecreasingDifficultySorter() {
return decreasingDifficultySorter;
}
/** @return true if this class itself (ignoring inheritance) declares at least one genuine variable */
public boolean hasAnyDeclaredGenuineVariableDescriptor() {
return !declaredGenuineVariableDescriptorMap.isEmpty();
}
/** @return never null, the names of all effective (declared + inherited) genuine variables */
public Collection<String> getGenuineVariableNameSet() {
return effectiveGenuineVariableDescriptorMap.keySet();
}
/** @return never null, effective genuine variable descriptors keyed by variable name */
public Map<String, GenuineVariableDescriptor<Solution_>> getGenuineVariableDescriptorMap() {
return effectiveGenuineVariableDescriptorMap;
}
/** @return never null, all effective genuine variable descriptors */
public Collection<GenuineVariableDescriptor<Solution_>> getGenuineVariableDescriptors() {
return effectiveGenuineVariableDescriptorMap.values();
}
/** @return never null, a fresh list copy of the effective genuine variable descriptors */
public List<GenuineVariableDescriptor<Solution_>> getGenuineVariableDescriptorList() {
// TODO We might want to cache that list
return new ArrayList<>(effectiveGenuineVariableDescriptorMap.values());
}
/** @return true if an effective genuine variable with that name exists */
public boolean hasGenuineVariableDescriptor(String variableName) {
return effectiveGenuineVariableDescriptorMap.containsKey(variableName);
}
/** @return sometimes null, the effective genuine variable descriptor with that name */
public GenuineVariableDescriptor<Solution_> getGenuineVariableDescriptor(String variableName) {
return effectiveGenuineVariableDescriptorMap.get(variableName);
}
/** @return never null, effective shadow variable descriptors keyed by variable name */
public Map<String, ShadowVariableDescriptor<Solution_>> getShadowVariableDescriptorMap() {
return effectiveShadowVariableDescriptorMap;
}
/** @return never null, all effective shadow variable descriptors */
public Collection<ShadowVariableDescriptor<Solution_>> getShadowVariableDescriptors() {
return effectiveShadowVariableDescriptorMap.values();
}
/** @return true if an effective shadow variable with that name exists */
public boolean hasShadowVariableDescriptor(String variableName) {
return effectiveShadowVariableDescriptorMap.containsKey(variableName);
}
/** @return sometimes null, the effective shadow variable descriptor with that name */
public ShadowVariableDescriptor<Solution_> getShadowVariableDescriptor(String variableName) {
return effectiveShadowVariableDescriptorMap.get(variableName);
}
/** @return never null, all effective variable descriptors (genuine and shadow) keyed by name */
public Map<String, VariableDescriptor<Solution_>> getVariableDescriptorMap() {
return effectiveVariableDescriptorMap;
}
/** @return never null, all effective variable descriptors (genuine and shadow) */
public Collection<VariableDescriptor<Solution_>> getVariableDescriptors() {
return effectiveVariableDescriptorMap.values();
}
/** @return true if an effective variable (genuine or shadow) with that name exists */
public boolean hasVariableDescriptor(String variableName) {
return effectiveVariableDescriptorMap.containsKey(variableName);
}
/** @return sometimes null, the effective variable descriptor (genuine or shadow) with that name */
public VariableDescriptor<Solution_> getVariableDescriptor(String variableName) {
return effectiveVariableDescriptorMap.get(variableName);
}
/** @return never null, only the genuine variable descriptors declared directly on this class */
public Collection<GenuineVariableDescriptor<Solution_>> getDeclaredGenuineVariableDescriptors() {
return declaredGenuineVariableDescriptorMap.values();
}
/** @return never null, only the shadow variable descriptors declared directly on this class */
public Collection<ShadowVariableDescriptor<Solution_>> getDeclaredShadowVariableDescriptors() {
return declaredShadowVariableDescriptorMap.values();
}
/** @return never null, a fresh collection of all declared descriptors, genuine first then shadow */
public Collection<VariableDescriptor<Solution_>> getDeclaredVariableDescriptors() {
Collection<VariableDescriptor<Solution_>> variableDescriptors = new ArrayList<>(
declaredGenuineVariableDescriptorMap.size() + declaredShadowVariableDescriptorMap.size());
variableDescriptors.addAll(declaredGenuineVariableDescriptorMap.values());
variableDescriptors.addAll(declaredShadowVariableDescriptorMap.values());
return variableDescriptors;
}
/**
 * Builds a detailed diagnostic message explaining why the given variable name is invalid
 * for this entity class, with spelling and JavaBeans-capitalization hints.
 *
 * @param variableName never null
 * @return never null, a human readable message for an exception
 */
public String buildInvalidVariableNameExceptionMessage(String variableName) {
    if (!ReflectionHelper.hasGetterMethod(entityClass, variableName)
            && !ReflectionHelper.hasField(entityClass, variableName)) {
        // Bug fix: "does not exists" -> "does not exist" in the user-facing message.
        String exceptionMessage = "The variableName (" + variableName
                + ") for entityClass (" + entityClass
                + ") does not exist as a getter or field on that class.\n"
                + "Check the spelling of the variableName (" + variableName + ").";
        if (variableName.length() >= 2
                && !Character.isUpperCase(variableName.charAt(0))
                && Character.isUpperCase(variableName.charAt(1))) {
            String correctedVariableName = variableName.substring(0, 1).toUpperCase() + variableName.substring(1);
            // Bug fix: the two sentences were concatenated without a separator; also
            // "a upper case" -> "an upper case". JavaBeans maps getURL() to property "URL".
            exceptionMessage += "\nMaybe it needs to be correctedVariableName (" + correctedVariableName
                    + ") instead, if it's a getter, because the JavaBeans spec states that "
                    + "the first letter should be an upper case if the second is upper case.";
        }
        return exceptionMessage;
    }
    return "The variableName (" + variableName
            + ") for entityClass (" + entityClass
            + ") exists as a getter or field on that class,"
            + " but isn't in the planning variables (" + effectiveVariableDescriptorMap.keySet() + ").\n"
            + (Character.isUpperCase(variableName.charAt(0))
                    ? "Maybe the variableName (" + variableName + ") should start with a lowercase.\n"
                    : "")
            + "Maybe your planning entity's getter or field lacks a " + PlanningVariable.class.getSimpleName()
            + " annotation or a shadow variable annotation.";
}
/** @return true if this entity has at least one effective (declared or inherited) genuine variable */
public boolean hasAnyGenuineVariables() {
return !effectiveGenuineVariableDescriptorMap.isEmpty();
}
/**
 * @return true if at least one effective genuine variable is chained
 */
public boolean hasAnyChainedGenuineVariables() {
    for (GenuineVariableDescriptor<Solution_> variableDescriptor : effectiveGenuineVariableDescriptorMap.values()) {
        // Bug fix: the condition was negated (!isChained()), which made the method return
        // true when any variable was NOT chained - the opposite of its name and contract.
        if (variableDescriptor.isChained()) {
            return true;
        }
    }
    return false;
}
// ************************************************************************
// Extraction methods
// ************************************************************************
/** @return never null, all entities of this descriptor's class extracted from the solution */
public List<Object> extractEntities(Solution_ solution) {
return solutionDescriptor.getEntityListByEntityClass(solution, entityClass);
}
/** @return {@code >= 0}, the number of effective genuine variables */
public long getGenuineVariableCount() {
return effectiveGenuineVariableDescriptorMap.size();
}
/** @return {@code >= 0}, the largest value range size over all genuine variables of this entity */
public long getMaximumValueCount(Solution_ solution, Object entity) {
long maximumValueCount = 0L;
for (GenuineVariableDescriptor<Solution_> variableDescriptor : effectiveGenuineVariableDescriptorMap.values()) {
maximumValueCount = Math.max(maximumValueCount, variableDescriptor.getValueCount(solution, entity));
}
return maximumValueCount;
}
// Product of all value range sizes for this entity. NOTE(review): this can overflow a long
// for very large models - presumably acceptable as a rough scale metric; confirm with callers.
public long getProblemScale(Solution_ solution, Object entity) {
long problemScale = 1L;
for (GenuineVariableDescriptor<Solution_> variableDescriptor : effectiveGenuineVariableDescriptorMap.values()) {
problemScale *= variableDescriptor.getValueCount(solution, entity);
}
return problemScale;
}
/**
 * Counts how many of this entity's effective genuine variables still lack a value.
 *
 * @param entity never null
 * @return {@code >= 0}, the number of uninitialized genuine variables
 */
public int countUninitializedVariables(Object entity) {
    int uninitializedCount = 0;
    for (GenuineVariableDescriptor<Solution_> descriptor : effectiveGenuineVariableDescriptorMap.values()) {
        if (descriptor.isInitialized(entity)) {
            continue;
        }
        uninitializedCount++;
    }
    return uninitializedCount;
}
/**
 * @param entity never null
 * @return true if every effective genuine variable of the entity has a value
 */
public boolean isInitialized(Object entity) {
    for (GenuineVariableDescriptor<Solution_> descriptor : effectiveGenuineVariableDescriptorMap.values()) {
        boolean hasValue = descriptor.isInitialized(entity);
        if (!hasValue) {
            return false;
        }
    }
    return true;
}
/**
 * Counts how many of this entity's effective genuine variables may be reinitialized.
 *
 * @param scoreDirector never null
 * @param entity never null
 * @return {@code >= 0}, the number of reinitializable genuine variables
 */
public int countReinitializableVariables(ScoreDirector<Solution_> scoreDirector, Object entity) {
    int reinitializableCount = 0;
    for (GenuineVariableDescriptor<Solution_> descriptor : effectiveGenuineVariableDescriptorMap.values()) {
        if (descriptor.isReinitializable(scoreDirector, entity)) {
            reinitializableCount++;
        }
    }
    return reinitializableCount;
}
// An entity with no effective filter is always movable; otherwise the filter decides.
public boolean isMovable(ScoreDirector<Solution_> scoreDirector, Object entity) {
return effectiveMovableEntitySelectionFilter == null
|| effectiveMovableEntitySelectionFilter.accept(scoreDirector, entity);
}
/**
 * @param scoreDirector never null
 * @param entity never null
 * @return true if the entity is initialized or pinned
 */
public boolean isEntityInitializedOrPinned(ScoreDirector<Solution_> scoreDirector, Object entity) {
return isInitialized(entity) || !isMovable(scoreDirector, entity);
}
/** @return never null, e.g. {@code EntityDescriptor(com.example.Lecture)} */
@Override
public String toString() {
return getClass().getSimpleName() + "(" + entityClass.getName() + ")";
}
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
 * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.device.mgt.mobile.windows.api.operations.util;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONException;
import org.json.JSONObject;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.device.mgt.common.DeviceIdentifier;
import org.wso2.carbon.device.mgt.common.DeviceManagementException;
import org.wso2.carbon.device.mgt.common.device.details.DeviceInfo;
import org.wso2.carbon.device.mgt.common.device.details.DeviceLocation;
import org.wso2.carbon.device.mgt.common.notification.mgt.Notification;
import org.wso2.carbon.device.mgt.common.notification.mgt.NotificationManagementException;
import org.wso2.carbon.device.mgt.common.notification.mgt.NotificationManagementService;
import org.wso2.carbon.device.mgt.common.operation.mgt.Operation;
import org.wso2.carbon.device.mgt.common.operation.mgt.OperationManagementException;
import org.wso2.carbon.device.mgt.core.device.details.mgt.DeviceDetailsMgtException;
import org.wso2.carbon.device.mgt.mobile.windows.api.common.PluginConstants;
import org.wso2.carbon.device.mgt.mobile.windows.api.common.util.WindowsAPIUtils;
import org.wso2.carbon.device.mgt.mobile.windows.api.services.syncml.beans.Profile;
import org.wso2.carbon.device.mgt.mobile.windows.api.operations.*;
import org.wso2.carbon.policy.mgt.common.PolicyManagementException;
import org.wso2.carbon.device.mgt.common.policy.mgt.ProfileFeature;
import org.wso2.carbon.device.mgt.common.policy.mgt.monitor.ComplianceFeature;
import org.wso2.carbon.device.mgt.common.policy.mgt.monitor.PolicyComplianceException;
import java.util.ArrayList;
import java.util.List;
import static org.wso2.carbon.device.mgt.mobile.windows.api.common.util.WindowsAPIUtils.convertToDeviceIdentifierObject;
/**
* This class is used to handle pending operations of the device.
*/
public class OperationHandler {
private static Log log = LogFactory.getLog(
org.wso2.carbon.device.mgt.mobile.windows.api.operations.util.OperationHandler.class);
/**
 * Updates the statuses of pending operations using the device's reported status payload.
 *
 * @param status           client side status for the specific operation.
 * @param syncmlDocument   syncml payload for operation status, parsed by the syncml engine.
 * @param deviceIdentifier identifier of the device that reported the status.
 * @throws OperationManagementException when pending operations cannot be fetched or updated.
 */
public void updateDeviceOperations(StatusTag status, SyncmlDocument syncmlDocument,
                                   DeviceIdentifier deviceIdentifier) throws OperationManagementException {
    List<? extends Operation> pendingDataOperations;
    try {
        pendingDataOperations = WindowsAPIUtils.getPendingOperations(deviceIdentifier);
        if (Constants.SyncMLResponseCodes.ACCEPTED.equals(status.getData()) ||
                (Constants.SyncMLResponseCodes.ACCEPTED_FOR_PROCESSING.equals(status.getData()))) {
            for (Operation operation : pendingDataOperations) {
                if (operation.getId() == status.getCommandReference()) {
                    operation.setStatus(Operation.Status.COMPLETED);
                }
            }
            if (syncmlDocument.getHeader().getSource().getLocURI() != null) {
                updateStatus(syncmlDocument.getHeader().getSource().getLocURI(), pendingDataOperations);
            }
        } else if (Constants.SyncMLResponseCodes.PIN_NOTFOUND.equals(status.getData())) {
            for (Operation operation : pendingDataOperations) {
                if (operation.getId() == status.getCommandReference() &&
                        (PluginConstants.OperationCodes.DEVICE_LOCK.equals(operation.getCode()))) {
                    operation.setStatus(Operation.Status.ERROR);
                    if (syncmlDocument.getHeader().getSource().getLocURI() != null) {
                        updateStatus(syncmlDocument.getHeader().getSource().getLocURI(), pendingDataOperations);
                    }
                    // The device lost its PIN so the lock failed: raise a reset notification.
                    NotificationManagementService nmService = WindowsAPIUtils.getNotificationManagementService();
                    Notification lockResetNotification = new Notification();
                    lockResetNotification.setOperationId(status.getCommandReference());
                    lockResetNotification.setStatus(String.valueOf(Notification.Status.NEW));
                    lockResetNotification.setDescription(
                            Constants.SyncMLResponseCodes.LOCK_RESET_NOTIFICATION);
                    nmService.addNotification(deviceIdentifier, lockResetNotification);
                }
            }
        }
    } catch (DeviceManagementException e) {
        // Bug fix: chain the cause instead of discarding it.
        throw new OperationManagementException("Error occurred in getting pending operations.", e);
    } catch (NotificationManagementException e) {
        throw new OperationManagementException("Error occurred while adding notification", e);
    }
}
/**
 * Persists the (already modified) statuses of the given operations for a device.
 *
 * @param deviceId   identifier of the device whose operations are updated.
 * @param operations operations whose statuses should be persisted.
 * @throws OperationManagementException when an operation update fails.
 */
public static void updateStatus(String deviceId, List<? extends Operation> operations)
        throws OperationManagementException {
    for (Operation pendingOperation : operations) {
        WindowsAPIUtils.updateOperation(deviceId, pendingOperation);
        if (log.isDebugEnabled()) {
            log.debug("Updating operation '" + pendingOperation.toString() + "'");
        }
    }
}
/**
 * Updates the status of a pending device lock operation.
 *
 * @param status           status of the operation reported by the device.
 * @param syncmlDocument   parsed syncml payload.
 * @param deviceIdentifier identifier of the device.
 * @throws OperationManagementException when pending operations cannot be fetched or updated.
 */
public void updateLockOperation(StatusTag status, SyncmlDocument syncmlDocument, DeviceIdentifier deviceIdentifier)
        throws OperationManagementException {
    List<? extends Operation> pendingDataOperations;
    try {
        pendingDataOperations = WindowsAPIUtils.getPendingOperations(deviceIdentifier);
        if (Constants.SyncMLResponseCodes.ACCEPTED.equals(status.getData())) {
            for (Operation operation : pendingDataOperations) {
                if ((OperationCode.Command.DEVICE_LOCK.getCode().equals(operation.getCode()))
                        && operation.getId() == status.getCommandReference()) {
                    operation.setStatus(Operation.Status.COMPLETED);
                    updateStatus(syncmlDocument.getHeader().getSource().getLocURI(), pendingDataOperations);
                }
            }
        }
        if (Constants.SyncMLResponseCodes.PIN_NOTFOUND.equals(status.getData())) {
            for (Operation operation : pendingDataOperations) {
                if ((OperationCode.Command.DEVICE_LOCK.getCode().equals(operation.getCode()) &&
                        operation.getId() == status.getCommandReference())) {
                    operation.setStatus(Operation.Status.ERROR);
                    updateStatus(syncmlDocument.getHeader().getSource().getLocURI(), pendingDataOperations);
                    // The device lost its PIN, so ask the admin to reset the lock.
                    NotificationManagementService nmService = WindowsAPIUtils.getNotificationManagementService();
                    Notification lockResetNotification = new Notification();
                    lockResetNotification.setOperationId(status.getCommandReference());
                    lockResetNotification.setStatus(String.valueOf(Notification.Status.NEW));
                    lockResetNotification.setDescription(Constants.SyncMLResponseCodes.LOCK_RESET_NOTIFICATION);
                    nmService.addNotification(deviceIdentifier, lockResetNotification);
                }
            }
        }
    } catch (DeviceManagementException e) {
        // Bug fix: chain the cause instead of discarding it.
        throw new OperationManagementException("Error occurred in getting pending operations.", e);
    } catch (NotificationManagementException e) {
        // Bug fix: chain the cause instead of discarding it.
        throw new OperationManagementException("Error occurred in adding notifications.", e);
    }
}
/**
 * Updates the status of a pending device ring operation.
 *
 * @param status           ring status reported by the device.
 * @param syncmlDocument   parsed syncml payload from the syncml engine.
 * @param deviceIdentifier identifier of the device to be updated.
 * @throws OperationManagementException when pending operations cannot be fetched or updated.
 */
public void ring(StatusTag status, SyncmlDocument syncmlDocument, DeviceIdentifier deviceIdentifier)
        throws OperationManagementException {
    List<? extends Operation> pendingDataOperations;
    try {
        if ((Constants.SyncMLResponseCodes.ACCEPTED.equals(status.getData()))) {
            pendingDataOperations = WindowsAPIUtils.getPendingOperations(deviceIdentifier);
            for (Operation operation : pendingDataOperations) {
                if ((OperationCode.Command.DEVICE_RING.getCode().equals(operation.getCode())) &&
                        (operation.getId() == status.getCommandReference())) {
                    operation.setStatus(Operation.Status.COMPLETED);
                    updateStatus(syncmlDocument.getHeader().getSource().getLocURI(),
                            pendingDataOperations);
                }
            }
        }
    } catch (DeviceManagementException e) {
        // Bug fix: chain the cause instead of discarding it.
        throw new OperationManagementException("Error occurred in getting pending operation.", e);
    }
}
/**
 * Updates the status of a pending data wipe operation.
 *
 * @param status           status of the data wipe reported by the device.
 * @param syncmlDocument   parsed syncml payload from the syncml engine.
 * @param deviceIdentifier identifier of the device that was wiped.
 * @throws OperationManagementException when pending operations cannot be fetched or updated.
 */
public void dataWipe(StatusTag status, SyncmlDocument syncmlDocument, DeviceIdentifier deviceIdentifier)
        throws OperationManagementException {
    List<? extends Operation> pendingDataOperations;
    if ((Constants.SyncMLResponseCodes.ACCEPTED.equals(status.getData()))) {
        try {
            pendingDataOperations = WindowsAPIUtils.getPendingOperations(deviceIdentifier);
        } catch (DeviceManagementException e) {
            // Bug fix: chain the cause instead of discarding it.
            throw new OperationManagementException("Error occurred in getting pending operation.", e);
        }
        for (Operation operation : pendingDataOperations) {
            if ((OperationCode.Command.WIPE_DATA.getCode().equals(operation.getCode())) &&
                    (operation.getId() == status.getCommandReference())) {
                operation.setStatus(Operation.Status.COMPLETED);
                updateStatus(syncmlDocument.getHeader().getSource().getLocURI(),
                        pendingDataOperations);
            }
        }
    }
}
/**
 * Marks pending DEVICE_INFO operations as completed and persists the statuses.
 *
 * @param deviceIdentifier identifier of the device whose info operation completed.
 * @throws OperationManagementException when pending operations cannot be fetched or updated.
 */
public void updateDeviceInfoStatus(DeviceIdentifier deviceIdentifier) throws OperationManagementException {
    List<? extends Operation> pendingDeviceInfoOperations;
    try {
        pendingDeviceInfoOperations = WindowsAPIUtils.getPendingOperations(deviceIdentifier);
    } catch (DeviceManagementException e) {
        // Bug fix: chain the cause instead of discarding it.
        throw new OperationManagementException("Error occurred while getting pending operations.", e);
    }
    boolean anyCompleted = false;
    for (Operation operation : pendingDeviceInfoOperations) {
        if (PluginConstants.OperationCodes.DEVICE_INFO.equals(operation.getCode())) {
            operation.setStatus(Operation.Status.COMPLETED);
            anyCompleted = true;
        }
    }
    // Persist once after marking, instead of re-persisting the whole list for every match;
    // the final persisted state is identical to the original per-match behavior.
    if (anyCompleted) {
        updateStatus(deviceIdentifier.getId(), pendingDeviceInfoOperations);
    }
}
/**
 * Marks pending DEVICE_LOCATION operations as completed (or error when no latitude data
 * was reported) and persists the updated statuses.
 *
 * @param syncmlDocument parsed syncml payload containing the device's location results.
 * @throws OperationManagementException when pending operations cannot be fetched or updated.
 */
public void updateDeviceLocationStatus(SyncmlDocument syncmlDocument) throws OperationManagementException {
    List<? extends Operation> pendingDataOperations;
    DeviceIdentifier deviceIdentifier = convertToDeviceIdentifierObject(
            syncmlDocument.getHeader().getSource().getLocURI());
    try {
        pendingDataOperations = WindowsAPIUtils.getPendingOperations(deviceIdentifier);
    } catch (DeviceManagementException e) {
        // Bug fix: chain the cause instead of discarding it.
        throw new OperationManagementException("Error occurred in getting pending operation.", e);
    }
    for (Operation operation : pendingDataOperations) {
        if (PluginConstants.OperationCodes.DEVICE_LOCATION.equals(operation.getCode())) {
            if (syncmlDocument.getBody().getResults() != null) {
                List<ItemTag> items = syncmlDocument.getBody().getResults().getItem();
                for (ItemTag itemTag : items) {
                    if (OperationCode.Command.LATITUDE.getCode().equals(itemTag.getSource().getLocURI())) {
                        // At this moment we can't get the accepted value 200 from the device,
                        // so the presence of latitude data is used as the success signal.
                        if (itemTag.getData() != null) {
                            operation.setStatus(Operation.Status.COMPLETED);
                        } else {
                            operation.setStatus(Operation.Status.ERROR);
                        }
                    }
                }
            }
            updateStatus(syncmlDocument.getHeader().getSource().getLocURI(),
                    pendingDataOperations);
        }
    }
}
/**
 * Processes the incoming syncml payload (device info, location, URI operation statuses and
 * compliance features) and then returns the device's remaining pending operations.
 *
 * @param syncmlDocument SyncmlDocument object which the syncml engine created from the syncml payload
 * @return the list of pending operations for the device.
 * @throws OperationManagementException when pending operations cannot be fetched or updated.
 * @throws WindowsOperationException when device details or compliance processing fails.
 */
public List<? extends Operation> getPendingOperations(SyncmlDocument syncmlDocument)
throws OperationManagementException, WindowsOperationException {
SyncmlHeader syncmlHeader = syncmlDocument.getHeader();
SyncmlBody syncmlBody = syncmlDocument.getBody();
List<? extends Operation> pendingOperations;
DeviceIdentifier deviceIdentifier = convertToDeviceIdentifierObject(syncmlHeader.getSource().getLocURI());
if (syncmlBody.getResults() != null) {
List<ItemTag> items = syncmlBody.getResults().getItem();
for (ItemTag itemTag : items) {
// A latitude result means the payload carries location data; TOTAL_RAM means device info.
if (OperationCode.Command.LATITUDE.getCode().equals(itemTag.getSource().getLocURI())) {
updateLocation(syncmlDocument);
}
if (OperationCode.Command.TOTAL_RAM.getCode().equals(itemTag.getSource().getLocURI())) {
updateDeviceInfo(syncmlDocument);
}
}
}
// Order matters: statuses must be reconciled before fetching the remaining pending operations.
UpdateUriOperations(syncmlDocument);
generateComplianceFeatureStatus(syncmlDocument);
pendingOperations = WindowsAPIUtils.getDeviceManagementService().getPendingOperations(deviceIdentifier);
return pendingOperations;
}
/**
 * Builds a {@link ComplianceFeature} pairing a policy's expected feature with the
 * compliance state the device actually reported.
 *
 * @param activeFeature feature to be applied on the device.
 * @param deviceFeature actual feature applied on the device.
 * @return the populated compliance feature.
 */
public ComplianceFeature setComplianceFeatures(ProfileFeature activeFeature, Profile deviceFeature) {
    ComplianceFeature compliance = new ComplianceFeature();
    compliance.setFeatureCode(activeFeature.getFeatureCode());
    compliance.setFeature(activeFeature);
    compliance.setCompliance(deviceFeature.isCompliance());
    return compliance;
}
/**
 * Updates the completed/error status of operations whose operation-code URI appears as a
 * target reference in the syncml payload, plus POLICY_BUNDLE / MONITOR / POLICY_REVOKE
 * sequence results. NOTE: the non-standard UpperCamelCase name is kept for caller compatibility.
 *
 * @param syncmlDocument SyncmlDocument object generated from the syncml engine.
 * @throws OperationManagementException when pending operations cannot be fetched or updated.
 * @throws WindowsOperationException    propagated from the per-command handlers.
 */
public void UpdateUriOperations(SyncmlDocument syncmlDocument) throws OperationManagementException,
        WindowsOperationException {
    List<? extends Operation> pendingDataOperations;
    DeviceIdentifier deviceIdentifier = convertToDeviceIdentifierObject(
            syncmlDocument.getHeader().getSource().getLocURI());
    List<StatusTag> statuses = syncmlDocument.getBody().getStatus();
    try {
        pendingDataOperations = WindowsAPIUtils.getPendingOperations(deviceIdentifier);
    } catch (DeviceManagementException e) {
        // Bug fix: chain the cause instead of discarding it.
        throw new OperationManagementException("Error occurred in getting pending operation.", e);
    }
    for (StatusTag status : statuses) {
        if ((Constants.EXECUTE.equals(status.getCommand()))) {
            if (status.getTargetReference() == null) {
                updateDeviceOperations(status, syncmlDocument, deviceIdentifier);
            } else {
                if ((OperationCode.Command.DEVICE_LOCK.getCode().equals(status.getTargetReference()))) {
                    updateLockOperation(status, syncmlDocument, deviceIdentifier);
                }
                if ((OperationCode.Command.DEVICE_RING.getCode().equals(status.getTargetReference()))) {
                    ring(status, syncmlDocument, deviceIdentifier);
                }
                if ((OperationCode.Command.WIPE_DATA.getCode().equals(status.getTargetReference()))) {
                    dataWipe(status, syncmlDocument, deviceIdentifier);
                }
            }
        }
        if ((Constants.SEQUENCE.equals(status.getCommand()))) {
            if ((Constants.SyncMLResponseCodes.ACCEPTED.equals(status.getData()))) {
                for (Operation operation : pendingDataOperations) {
                    if ((PluginConstants.OperationCodes.POLICY_BUNDLE.equals(operation.getCode())) &&
                            operation.getId() == status.getCommandReference()) {
                        operation.setStatus(Operation.Status.COMPLETED);
                    }
                    if ((PluginConstants.OperationCodes.MONITOR.equals(operation.getCode())) &&
                            operation.getId() == status.getCommandReference()) {
                        operation.setStatus(Operation.Status.COMPLETED);
                    }
                    if (PluginConstants.OperationCodes.POLICY_REVOKE.equals(operation.getCode())) {
                        operation.setStatus(Operation.Status.COMPLETED);
                    }
                }
                updateStatus(syncmlDocument.getHeader().getSource().getLocURI(),
                        pendingDataOperations);
            } else {
                for (Operation operation : pendingDataOperations) {
                    if ((PluginConstants.OperationCodes.POLICY_BUNDLE.equals(operation.getCode())) &&
                            operation.getId() == status.getCommandReference()) {
                        operation.setStatus(Operation.Status.ERROR);
                    }
                    if ((PluginConstants.OperationCodes.MONITOR.equals(operation.getCode())) &&
                            operation.getId() == status.getCommandReference()) {
                        operation.setStatus(Operation.Status.ERROR);
                    }
                    // NOTE(review): POLICY_REVOKE is marked COMPLETED even on a failure
                    // response; kept as-is to preserve behavior - confirm this is intentional.
                    if (PluginConstants.OperationCodes.POLICY_REVOKE.equals(operation.getCode())) {
                        operation.setStatus(Operation.Status.COMPLETED);
                    }
                }
                updateStatus(syncmlDocument.getHeader().getSource().getLocURI(),
                        pendingDataOperations);
            }
        }
    }
}
/**
 * Generates status of the features that have been activated on the device.
 *
 * @param syncmlDocument syncmlDocument object parsed from the syncml engine.
 * @return device statuses for the activated features
 * @throws WindowsOperationException when the device-PIN notification cannot be added.
 */
public List<Profile> generateDeviceOperationStatusObject(SyncmlDocument syncmlDocument) throws
WindowsOperationException {
DeviceIdentifier deviceIdentifier = convertToDeviceIdentifierObject(
syncmlDocument.getHeader().getSource().getLocURI());
String lockUri = null;
ResultsTag result = syncmlDocument.getBody().getResults();
List<Profile> profiles = new ArrayList<>();
if (result != null) {
List<ItemTag> results = result.getItem();
// Resolve the URI that carries the auto-generated device PIN.
for (OperationCode.Info info : OperationCode.Info.values()) {
if (PluginConstants.OperationCodes.PIN_CODE.equals(info
.name())) {
lockUri = info.getCode();
}
}
for (ItemTag item : results) {
for (OperationCode.Info info : OperationCode.Info.values()) {
// Camera status: data "1" means the camera is enabled on the device.
if (item.getSource().getLocURI().equals(info.getCode()) &&
PluginConstants.OperationCodes.CAMERA_STATUS.equals(info.name())) {
Profile cameraProfile = new Profile();
cameraProfile.setFeatureCode(PluginConstants.OperationCodes.CAMERA);
cameraProfile.setData(item.getData());
if ((PluginConstants.SyncML.SYNCML_DATA_ONE.equals(item.getData()))) {
cameraProfile.setEnable(true);
} else {
cameraProfile.setEnable(false);
}
profiles.add(cameraProfile);
}
// Storage encryption status: data "1" means encryption is enabled.
if (item.getSource().getLocURI().equals(info.getCode()) &&
PluginConstants.OperationCodes.ENCRYPT_STORAGE_STATUS.equals(info.name())) {
Profile encryptStorage = new Profile();
encryptStorage.setFeatureCode(PluginConstants.OperationCodes.ENCRYPT_STORAGE);
encryptStorage.setData(item.getData());
if ((PluginConstants.SyncML.SYNCML_DATA_ONE.equals(item.getData()))) {
encryptStorage.setEnable(true);
} else {
encryptStorage.setEnable(false);
}
profiles.add(encryptStorage);
}
// Passcode policy status: data "0" maps to enabled here - note the inverted
// convention compared to the two checks above.
if (item.getSource().getLocURI().equals(info.getCode()) &&
PluginConstants.OperationCodes.DEVICE_PASSWORD_STATUS.equals(info.name())) {
Profile encryptStorage = new Profile();
encryptStorage.setFeatureCode(PluginConstants.OperationCodes.PASSCODE_POLICY);
encryptStorage.setData(item.getData());
if ((PluginConstants.SyncML.SYNCML_DATA_ZERO.equals(item.getData()))) {
encryptStorage.setEnable(true);
} else {
encryptStorage.setEnable(false);
}
profiles.add(encryptStorage);
}
// A non-empty item on the lock URI carries the auto-generated device PIN:
// surface it to the admin as a notification.
if (!item.getData().isEmpty() && item.getSource().getLocURI().equals(lockUri)) {
String pinValue = item.getData();
NotificationManagementService nmService = WindowsAPIUtils.getNotificationManagementService();
Notification notification = new Notification();
notification.setDescription("Auto generated DevicePin : " + pinValue);
notification.setOperationId(result.getCommandReference());
notification.setStatus(String.valueOf(Notification.Status.NEW));
try {
nmService.addNotification(deviceIdentifier, notification);
} catch (NotificationManagementException e) {
throw new WindowsOperationException("Failure Occurred while getting notification" +
" service.", e);
}
break;
}
}
}
}
return profiles;
}
/**
 * Generates compliance statuses for the features the device reported and pushes them to
 * the policy manager.
 *
 * @param syncmlDocument syncml payload parsed by the syncml engine.
 * @throws WindowsOperationException when the policy or payload cannot be processed.
 */
public void generateComplianceFeatureStatus(SyncmlDocument syncmlDocument) throws WindowsOperationException {
    List<Profile> profiles = generateDeviceOperationStatusObject(syncmlDocument);
    DeviceIdentifier deviceIdentifier = convertToDeviceIdentifierObject(
            syncmlDocument.getHeader().getSource().getLocURI());
    if (profiles.size() == Constants.EMPTY) {
        return;
    }
    try {
        if (WindowsAPIUtils.getPolicyManagerService().getAppliedPolicyToDevice(deviceIdentifier).getProfile().
                getProfileFeaturesList() != null) {
            List<ProfileFeature> profileFeatures = WindowsAPIUtils.getPolicyManagerService().
                    getAppliedPolicyToDevice(deviceIdentifier).getProfile().getProfileFeaturesList();
            List<ComplianceFeature> complianceFeatures = new ArrayList<>();
            for (ProfileFeature activeFeature : profileFeatures) {
                JSONObject policyContent = new JSONObject(activeFeature.getContent().toString());
                for (Profile deviceFeature : profiles) {
                    if (!deviceFeature.getFeatureCode().equals(activeFeature.getFeatureCode())) {
                        continue;
                    }
                    String featureCode = deviceFeature.getFeatureCode();
                    if (PluginConstants.OperationCodes.CAMERA.equals(featureCode)) {
                        evaluateCompliance(policyContent, PluginConstants.PolicyConfigProperties.POLICY_ENABLE,
                                activeFeature, deviceFeature, complianceFeatures);
                    } else if (PluginConstants.OperationCodes.ENCRYPT_STORAGE.equals(featureCode)) {
                        evaluateCompliance(policyContent, PluginConstants.PolicyConfigProperties.ENCRYPTED_ENABLE,
                                activeFeature, deviceFeature, complianceFeatures);
                    } else if (PluginConstants.OperationCodes.PASSCODE_POLICY.equals(featureCode)) {
                        evaluateCompliance(policyContent, PluginConstants.PolicyConfigProperties.ENABLE_PASSWORD,
                                activeFeature, deviceFeature, complianceFeatures);
                    }
                }
            }
            WindowsAPIUtils.getPolicyManagerService().checkPolicyCompliance(deviceIdentifier,
                    complianceFeatures);
        }
    } catch (JSONException e) {
        throw new WindowsOperationException("Error occurred while parsing json object.", e);
    } catch (PolicyComplianceException e) {
        throw new WindowsOperationException("Error occurred while setting up policy compliance.", e);
    } catch (PolicyManagementException e) {
        throw new WindowsOperationException("Error occurred while getting effective policy.", e);
    }
}

/**
 * Compares one boolean policy property against the state the device reported and records
 * the result.
 * Bug fix: the original reused a single {@code isCompliance} flag across all features, so any
 * feature evaluated after a compliant one was also reported compliant even when it was not.
 *
 * @param policyContent      JSON content of the active policy feature.
 * @param policyProperty     name of the boolean property to compare.
 * @param activeFeature      feature expected by the policy.
 * @param deviceFeature      feature state reported by the device (compliance flag is set here).
 * @param complianceFeatures accumulator the resulting compliance feature is appended to.
 * @throws JSONException when the property is missing or not a boolean.
 */
private void evaluateCompliance(JSONObject policyContent, String policyProperty, ProfileFeature activeFeature,
                                Profile deviceFeature, List<ComplianceFeature> complianceFeatures)
        throws JSONException {
    boolean compliant = policyContent.getBoolean(policyProperty) == deviceFeature.isEnable();
    deviceFeature.setCompliance(compliant);
    complianceFeatures.add(setComplianceFeatures(activeFeature, deviceFeature));
}
/**
 * Reads the device-information values reported in the RESULTS body of a SyncML
 * message and persists them through the Windows device management services.
 *
 * <p>Each RESULTS item is matched against a known {@code OperationCode.Info} LocURI
 * and copied into a {@link DeviceInfo}; the DEV_ID item is used to build the
 * {@link DeviceIdentifier} for the update calls.
 *
 * @param syncmlDocument parsed SyncML message containing a RESULTS body
 * @throws WindowsOperationException if persisting the info or updating the
 *         operation status fails (original cause is chained)
 */
public void updateDeviceInfo(SyncmlDocument syncmlDocument) throws WindowsOperationException {
    String deviceID = null;
    List<ItemTag> deviceInformations = syncmlDocument.getBody().getResults().getItem();
    DeviceInfo deviceInfo = new DeviceInfo();
    for (ItemTag item : deviceInformations) {
        String source = item.getSource().getLocURI();
        if (OperationCode.Info.SOFTWARE_VERSION.getCode().equals(source)) {
            deviceInfo.setOsVersion(item.getData());
        }
        if (OperationCode.Info.IMSI.getCode().equals(source)) {
            deviceInfo.setIMSI(item.getData());
        }
        if (OperationCode.Info.IMEI.getCode().equals(source)) {
            deviceInfo.setIMEI(item.getData());
        }
        if (OperationCode.Info.DEVICE_MODEL.getCode().equals(source)) {
            deviceInfo.setDeviceModel(item.getData());
        }
        if (OperationCode.Info.VENDOR.getCode().equals(source)) {
            deviceInfo.setVendor(item.getData());
        }
        if (OperationCode.Info.TOTAL_RAM.getCode().equals(source)) {
            // NOTE(review): Double.parseDouble throws NumberFormatException on malformed
            // device data; original behaviour (propagation) is preserved.
            deviceInfo.setAvailableRAMMemory(Double.parseDouble(item.getData()));
        }
        if (OperationCode.Info.TOTAL_STORAGE.getCode().equals(source)) {
            deviceInfo.setInternalAvailableMemory(Double.parseDouble(item.getData()));
        }
        if (OperationCode.Info.DEV_ID.getCode().equals(source)) {
            deviceID = item.getData();
        }
    }
    // deviceID stays null when no DEV_ID item is present; convertToDeviceIdentifierObject
    // receives it unchanged, matching the original behaviour.
    DeviceIdentifier deviceIdentifier = convertToDeviceIdentifierObject(deviceID);
    try {
        WindowsAPIUtils.updateDeviceInfo(deviceIdentifier, deviceInfo);
        updateDeviceInfoStatus(deviceIdentifier);
    } catch (org.wso2.carbon.device.mgt.core.device.details.mgt.DeviceDetailsMgtException e) {
        // Bug fix: chain the cause (was silently dropped), matching the exception style
        // used elsewhere in this class.
        throw new WindowsOperationException("Error occurred while adding Device info.", e);
    } catch (OperationManagementException e) {
        throw new WindowsOperationException("Error occurred while updating Device info operation status.", e);
    }
}
/**
 * Parses longitude/latitude values from a SyncML RESULTS body and updates the stored
 * device location, then marks the corresponding location operation as completed.
 *
 * @param syncmlDocument parsed SyncML message; its header source LocURI identifies the device
 * @throws WindowsOperationException if persisting the location or updating the
 *         operation status fails (original cause is chained)
 */
private void updateLocation(SyncmlDocument syncmlDocument) throws WindowsOperationException {
    List<ItemTag> deviceInformations = syncmlDocument.getBody().getResults().getItem();
    DeviceIdentifier deviceIdentifier = convertToDeviceIdentifierObject(
            syncmlDocument.getHeader().getSource().getLocURI());
    DeviceLocation deviceLocation = new DeviceLocation();
    deviceLocation.setDeviceIdentifier(deviceIdentifier);
    for (ItemTag item : deviceInformations) {
        String source = item.getSource().getLocURI();
        if (OperationCode.Info.LONGITUDE.getCode().equals(source)) {
            // NOTE(review): parseDouble throws NumberFormatException on malformed data;
            // original propagation behaviour preserved.
            deviceLocation.setLongitude(Double.parseDouble(item.getData()));
        }
        if (OperationCode.Info.LATITUDE.getCode().equals(source)) {
            deviceLocation.setLatitude(Double.parseDouble(item.getData()));
        }
    }
    try {
        WindowsAPIUtils.updateDeviceLocation(deviceLocation);
        updateDeviceLocationStatus(syncmlDocument);
    } catch (DeviceDetailsMgtException e) {
        // Bug fix: chain the cause instead of discarding it.
        throw new WindowsOperationException("Error occurred while updating Device Location.", e);
    } catch (OperationManagementException e) {
        throw new WindowsOperationException("Error occurred while updating Device Location operation status.", e);
    }
}
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cdlflex.ui.fruit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.mockito.Mockito.spy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.wicket.model.Model;
import org.cdlflex.fruit.Filter;
import org.cdlflex.fruit.OrderBy;
import org.cdlflex.fruit.Repository;
import org.cdlflex.ui.fruit.model.FilterModel;
import org.cdlflex.ui.fruit.model.IFilterProvider;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for FilterableRepositoryDataProvider: verifies that the provider resolves its
 * Filter from either a plain Wicket Model or an IFilterProvider-backed FilterModel, and
 * that iterator()/size() delegate to the filtered or unfiltered repository queries.
 */
public class FilterableRepositoryDataProviderTest {
    // Object under test; its getRepository() is overridden to return the stub below.
    FilterableRepositoryDataProvider<TestModel> provider;
    // Marker filter: StubRepository recognises exactly this instance (identity check)
    // as the query matching model1 and model3.
    Filter mockFilter;
    Repository<TestModel> mockRepository;
    TestModel model1 = new TestModel(1L);
    TestModel model2 = new TestModel(2L);
    TestModel model3 = new TestModel(3L);

    @Before
    public void setUp() throws Exception {
        mockFilter = new Filter(); // simulates a query that returns model1 and model3
        // spy() keeps the stub behaviour while allowing interaction verification.
        mockRepository = spy(new StubRepository());
        provider = new FilterableRepositoryDataProvider<TestModel>() {
            private static final long serialVersionUID = 1L;

            @Override
            public Repository<TestModel> getRepository() {
                return mockRepository;
            }
        };
    }

    @Test
    public void getFilter_withNoFilterSet_returnsNull() throws Exception {
        assertNull(provider.getFilter());
    }

    @Test
    public void getFilter_withFilterSet_returnsCorrectFilter() throws Exception {
        provider.setFilterModel(Model.of(mockFilter));
        assertSame(mockFilter, provider.getFilter());
    }

    @Test
    public void getFilter_withFilterModelFromProvider_returnsCorrectFilter() throws Exception {
        // The filter may also be supplied lazily through an IFilterProvider.
        provider.setFilterModel(new FilterModel(new IFilterProvider() {
            private static final long serialVersionUID = 1L;

            @Override
            public Filter getFilter() {
                return mockFilter;
            }
        }));
        assertSame(mockFilter, provider.getFilter());
    }

    @Test
    public void iterator_withoutFilter_returnsCorrectModels() throws Exception {
        Iterator<? extends TestModel> iterator = provider.iterator(0, 3);
        assertSame(model1, iterator.next());
        assertSame(model2, iterator.next());
        assertSame(model3, iterator.next());
        assertFalse(iterator.hasNext());
    }

    @Test
    public void iterator_withFilter_returnsCorrectModels() throws Exception {
        provider.setFilterModel(Model.of(mockFilter));
        Iterator<? extends TestModel> iterator = provider.iterator(0, 2);
        assertSame(model1, iterator.next());
        assertSame(model3, iterator.next());
        assertFalse(iterator.hasNext());
    }

    @Test
    public void size_withoutFilter_returnsUnfilteredRepositorySize() throws Exception {
        assertEquals(3L, provider.size());
    }

    @Test
    public void size_withFilter_returnsFilteredRepositorySize() throws Exception {
        provider.setFilterModel(Model.of(mockFilter));
        assertEquals(2L, provider.size());
    }

    /**
     * In-memory Repository stub holding model1..model3. Filtered queries compare the
     * passed filter by identity against mockFilter and then yield model1 and model3.
     */
    private class StubRepository implements Repository<TestModel> {
        @Override
        public TestModel create() {
            return null;
        }

        @Override
        public long count() {
            return 3L;
        }

        @Override
        public long count(Filter filter) {
            // Identity comparison on purpose: only the test's mockFilter is supported.
            if (filter == mockFilter) {
                return 2;
            } else {
                throw new UnsupportedOperationException();
            }
        }

        @Override
        public void save(TestModel entity) {
        }

        @Override
        public void save(Collection<TestModel> entity) {
        }

        @Override
        public void remove(TestModel entity) {
        }

        @Override
        public void remove(Collection<TestModel> entities) {
        }

        @Override
        public TestModel get(Object key) {
            // NOTE(review): only Integer keys are recognised although the models are built
            // with Long ids — confirm intended key type if this lookup is ever exercised.
            if (!(key instanceof Integer)) {
                return null;
            }
            Integer id = (Integer) key;
            if (id == 1) {
                return model1;
            } else if (id == 2) {
                return model2;
            } else if (id == 3) {
                return model3;
            } else {
                return null;
            }
        }

        @Override
        public List<TestModel> getAll() {
            return new ArrayList<>(Arrays.asList(model1, model2, model3));
        }

        @Override
        public List<TestModel> getAll(OrderBy order) {
            // Ordering is ignored by the stub.
            return getAll();
        }

        @Override
        public List<TestModel> getPage(int limit, int offset) {
            // Paging is ignored by the stub; callers get the full list.
            return getAll();
        }

        @Override
        public List<TestModel> getPage(OrderBy order, int limit, int offset) {
            return getPage(limit, offset);
        }

        @Override
        public List<TestModel> find(Filter filter) {
            return (filter == mockFilter) ? new ArrayList<>(Arrays.asList(model1, model3))
                    : new ArrayList<TestModel>();
        }

        @Override
        public List<TestModel> findPage(Filter filter, OrderBy order, int limit, int offset) {
            // assumes offset + limit <= 2 for the filtered result — holds for the tests above.
            if (filter == mockFilter) {
                return new ArrayList<>(Arrays.asList(model1, model3)).subList(offset, offset + limit);
            } else {
                return new ArrayList<>();
            }
        }
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.filestoresult;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.List;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.steps.filestoresult.FilesToResultMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
/**
 * Step dialog for the "Files to result" transformation step. Lets the user choose the
 * input field that carries file names and the {@link ResultFile} type under which those
 * files are registered in the transformation result.
 */
public class FilesToResultDialog extends BaseStepDialog implements StepDialogInterface {
  private static Class<?> PKG = FilesToResultMeta.class; // for i18n purposes, needed by Translator2!!

  // "Filename field" label + combo, populated asynchronously from the previous step's fields.
  private Label wlFilenameField;
  private CCombo wFilenameField;
  private FormData fdlFilenameField, fdFilenameField;

  // File-type selection list and its layout data.
  private Label wlTypes;
  private List wTypes;
  private FormData fdlTypes, fdTypes;

  // Step metadata being edited by this dialog.
  private FilesToResultMeta input;

  public FilesToResultDialog( Shell parent, Object in, TransMeta tr, String sname ) {
    super( parent, (BaseStepMeta) in, tr, sname );
    input = (FilesToResultMeta) in;
  }

  /**
   * Builds the dialog widgets, opens the shell and runs the event loop until close.
   *
   * @return the (possibly renamed) step name on OK, or null when cancelled
   */
  public String open() {
    Shell parent = getParent();
    Display display = parent.getDisplay();
    shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.MIN | SWT.MAX | SWT.RESIZE );
    props.setLook( shell );
    setShellImage( shell, input );
    // Any edit marks the step metadata as changed.
    ModifyListener lsMod = new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        input.setChanged();
      }
    };
    changed = input.hasChanged();
    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;
    shell.setLayout( formLayout );
    shell.setText( BaseMessages.getString( PKG, "FilesToResultDialog.Shell.Title" ) );
    int middle = props.getMiddlePct();
    int margin = Const.MARGIN;
    // Stepname line
    wlStepname = new Label( shell, SWT.RIGHT );
    wlStepname.setText( BaseMessages.getString( PKG, "FilesToResultDialog.Stepname.Label" ) );
    props.setLook( wlStepname );
    fdlStepname = new FormData();
    fdlStepname.left = new FormAttachment( 0, 0 );
    fdlStepname.right = new FormAttachment( middle, -margin );
    fdlStepname.top = new FormAttachment( 0, margin );
    wlStepname.setLayoutData( fdlStepname );
    wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wStepname.setText( stepname );
    props.setLook( wStepname );
    wStepname.addModifyListener( lsMod );
    fdStepname = new FormData();
    fdStepname.left = new FormAttachment( middle, 0 );
    fdStepname.top = new FormAttachment( 0, margin );
    fdStepname.right = new FormAttachment( 100, 0 );
    wStepname.setLayoutData( fdStepname );
    // The rest...
    // FilenameField line
    wlFilenameField = new Label( shell, SWT.RIGHT );
    wlFilenameField.setText( BaseMessages.getString( PKG, "FilesToResultDialog.FilenameField.Label" ) );
    props.setLook( wlFilenameField );
    fdlFilenameField = new FormData();
    fdlFilenameField.left = new FormAttachment( 0, 0 );
    fdlFilenameField.top = new FormAttachment( wStepname, margin );
    fdlFilenameField.right = new FormAttachment( middle, -margin );
    wlFilenameField.setLayoutData( fdlFilenameField );
    wFilenameField = new CCombo( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wFilenameField.setToolTipText( BaseMessages.getString( PKG, "FilesToResultDialog.FilenameField.Tooltip" ) );
    props.setLook( wFilenameField );
    wFilenameField.addModifyListener( lsMod );
    fdFilenameField = new FormData();
    fdFilenameField.left = new FormAttachment( middle, 0 );
    fdFilenameField.top = new FormAttachment( wStepname, margin );
    fdFilenameField.right = new FormAttachment( 100, 0 );
    wFilenameField.setLayoutData( fdFilenameField );
    /*
     * Get the field names from the previous steps, in the background though
     */
    Runnable runnable = new Runnable() {
      public void run() {
        try {
          RowMetaInterface inputfields = transMeta.getPrevStepFields( stepname );
          if ( inputfields != null ) {
            for ( int i = 0; i < inputfields.size(); i++ ) {
              wFilenameField.add( inputfields.getValueMeta( i ).getName() );
            }
          }
        } catch ( Exception ke ) {
          new ErrorDialog( shell,
            BaseMessages.getString( PKG, "FilesToResultDialog.FailedToGetFields.DialogTitle" ),
            BaseMessages.getString( PKG, "FilesToResultDialog.FailedToGetFields.DialogMessage" ), ke );
        }
      }
    };
    // asyncExec keeps the dialog responsive while previous-step fields are resolved.
    display.asyncExec( runnable );
    // Some buttons
    wOK = new Button( shell, SWT.PUSH );
    wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
    wCancel = new Button( shell, SWT.PUSH );
    wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );
    setButtonPositions( new Button[] { wOK, wCancel }, margin, null );
    // Include Files?
    wlTypes = new Label( shell, SWT.RIGHT );
    wlTypes.setText( BaseMessages.getString( PKG, "FilesToResultDialog.TypeOfFile.Label" ) );
    props.setLook( wlTypes );
    fdlTypes = new FormData();
    fdlTypes.left = new FormAttachment( 0, 0 );
    fdlTypes.top = new FormAttachment( wFilenameField, margin );
    fdlTypes.right = new FormAttachment( middle, -margin );
    wlTypes.setLayoutData( fdlTypes );
    wTypes = new List( shell, SWT.SINGLE | SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL );
    wTypes.setToolTipText( BaseMessages.getString( PKG, "FilesToResultDialog.TypeOfFile.Tooltip" ) );
    props.setLook( wTypes );
    fdTypes = new FormData();
    fdTypes.left = new FormAttachment( middle, 0 );
    fdTypes.top = new FormAttachment( wFilenameField, margin );
    fdTypes.bottom = new FormAttachment( wOK, -margin * 3 );
    fdTypes.right = new FormAttachment( 100, 0 );
    wTypes.setLayoutData( fdTypes );
    // Offer every known result-file type; list index == ResultFile type code.
    for ( int i = 0; i < ResultFile.getAllTypeDesc().length; i++ ) {
      wTypes.add( ResultFile.getAllTypeDesc()[i] );
    }
    // Add listeners
    lsCancel = new Listener() {
      public void handleEvent( Event e ) {
        cancel();
      }
    };
    lsOK = new Listener() {
      public void handleEvent( Event e ) {
        ok();
      }
    };
    wCancel.addListener( SWT.Selection, lsCancel );
    wOK.addListener( SWT.Selection, lsOK );
    lsDef = new SelectionAdapter() {
      public void widgetDefaultSelected( SelectionEvent e ) {
        ok();
      }
    };
    wStepname.addSelectionListener( lsDef );
    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener( new ShellAdapter() {
      public void shellClosed( ShellEvent e ) {
        cancel();
      }
    } );
    // Set the shell size, based upon previous time...
    setSize();
    getData();
    // getData() fires modify listeners; restore the original changed flag afterwards.
    input.setChanged( changed );
    shell.open();
    while ( !shell.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
    return stepname;
  }

  /**
   * Copy information from the meta-data input to the dialog fields.
   */
  public void getData() {
    wTypes.select( input.getFileType() );
    if ( input.getFilenameField() != null ) {
      wFilenameField.setText( input.getFilenameField() );
    }
    wStepname.selectAll();
    wStepname.setFocus();
  }

  // Discard edits: restore the changed flag and close without returning a step name.
  private void cancel() {
    stepname = null;
    input.setChanged( changed );
    dispose();
  }

  // Persist the dialog fields into the step metadata and close. A blank step name aborts.
  private void ok() {
    if ( Const.isEmpty( wStepname.getText() ) ) {
      return;
    }
    stepname = wStepname.getText(); // return value
    input.setFilenameField( wFilenameField.getText() );
    if ( wTypes.getSelectionIndex() >= 0 ) {
      input.setFileType( wTypes.getSelectionIndex() );
    } else {
      // No selection: fall back to the general file type.
      input.setFileType( ResultFile.FILE_TYPE_GENERAL );
    }
    dispose();
  }
}
| |
package com.inepex.ineom.shared.assistedobject;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.inepex.ineom.shared.IFConsts;
import com.inepex.ineom.shared.IneList;
import com.inepex.ineom.shared.LazyHashMap;
import com.inepex.ineom.shared.Relation;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author Istvan Szoboszlai Class for representing an object with key-value
* pairs. Data is stored in 6 different maps, in each by type.
*/
@SuppressWarnings("serial")
/**
 * Represents an object as typed key-value pairs. Values are stored per Java type in
 * separate maps (boolean, double, list, long, relation, string) plus a map of
 * per-property JSON blobs.
 *
 * @author Istvan Szoboszlai
 */
@SuppressWarnings("serial")
public class KeyValueObject extends AssistedObject {
    protected String descriptorName = null;
    protected Map<String, Boolean> booleanValues = new LazyHashMap<>();
    protected Map<String, Double> doubleValues = new LazyHashMap<>();
    protected Map<String, IneList> listValues = new LazyHashMap<>();
    protected Map<String, Long> longValues = new LazyHashMap<>();
    protected Map<String, Relation> relationValues = new LazyHashMap<>();
    protected Map<String, String> stringValues = new LazyHashMap<>();
    protected Map<String, String> propJsons = new LazyHashMap<>();

    /**
     * Default constructor needed for the type to be serializable, although the
     * other constructor that specifies descriptorName should be used
     */
    public KeyValueObject() {}

    /**
     * This constructor should be used to initiate a new KVO
     *
     * @param descriptorName name of the descriptor this object conforms to
     */
    public KeyValueObject(String descriptorName) {
        this.descriptorName = descriptorName;
    }

    /**
     * idKVO constructor: creates an object that carries only a descriptor name and an id.
     */
    public KeyValueObject(String descriptorName, Long id) {
        this.descriptorName = descriptorName;
        setId(id);
    }

    /** Deep-ish copy: value maps are copied via copyValuesTo (lists/relations cloned there). */
    @Override
    public KeyValueObject clone() {
        KeyValueObject kvo = new KeyValueObject(descriptorName);
        this.copyValuesTo(kvo);
        return kvo;
    }

    @Override
    public void setDescriptorName(String descriptorName) {
        this.descriptorName = descriptorName;
    }

    @Override
    public String getDescriptorName() {
        return descriptorName;
    }

    /** Returns the stored id, or IFConsts.NEW_ITEM_ID when no id has been set yet. */
    @Override
    public Long getId() {
        Long id = longValues.get(IFConsts.KEY_ID);
        return id == null ? IFConsts.NEW_ITEM_ID : id;
    }

    @Override
    public void setId(Long id) {
        longValues.put(IFConsts.KEY_ID, id);
    }

    /**
     * TODO unused yet
     *
     * @return true only when the deleted flag is present and set to true
     */
    protected boolean isDeleted() {
        // TODO unused yet
        Boolean deleted = booleanValues.get(IFConsts.KEY_ISDELETED);
        // Bug fix: was "deleted != null || deleted", which returned true for a stored
        // FALSE and threw NullPointerException (unboxing) when the flag was absent.
        return deleted != null && deleted;
    }

    /**
     * TODO unused yet
     * (name kept as-is — "setDeleleted" typo is part of the existing interface)
     */
    void setDeleleted(boolean b) {
        // TODO unused yet
        booleanValues.put(IFConsts.KEY_ISDELETED, b);
    }

    /** True when the object has not been persisted yet (id equals NEW_ITEM_ID). */
    @JsonIgnore
    @Override
    public boolean isNew() {
        return getId().equals(IFConsts.NEW_ITEM_ID);
    }

    @Override
    public String toString() {
        return String.valueOf(booleanValues) + "\n" +
            doubleValues + "\n" +
            listValues + "\n" +
            longValues + "\n" +
            relationValues + "\n" +
            stringValues + "\n" +
            propJsons + "\n";
    }

    // --- typed setters (checked access goes through AssistedObject) ---

    @Override
    protected void set(String key, Boolean value) {
        booleanValues.put(key, value);
    }

    @Override
    protected void set(String key, Double value) {
        doubleValues.put(key, value);
    }

    @Override
    protected void set(String key, IneList value) {
        listValues.put(key, value);
    }

    @Override
    protected void set(String key, Long value) {
        longValues.put(key, value);
    }

    @Override
    protected void set(String key, Relation value) {
        relationValues.put(key, value);
    }

    @Override
    protected void set(String key, String value) {
        stringValues.put(key, value);
    }

    /** Removes the key from every typed value map (propJsons is intentionally untouched
     *  — TODO confirm whether property JSON should survive an unset). */
    @Override
    protected void unsetField(String key) {
        booleanValues.remove(key);
        doubleValues.remove(key);
        listValues.remove(key);
        longValues.remove(key);
        relationValues.remove(key);
        stringValues.remove(key);
    }

    // --- typed getters ---

    @Override
    protected Boolean getBoolean(String key) {
        return booleanValues.get(key);
    }

    @Override
    protected Double getDouble(String key) {
        return doubleValues.get(key);
    }

    @Override
    protected IneList getList(String key) {
        return listValues.get(key);
    }

    @Override
    protected Long getLong(String key) {
        return longValues.get(key);
    }

    @Override
    protected Relation getRelation(String key) {
        return relationValues.get(key);
    }

    @Override
    protected String getString(String key) {
        return stringValues.get(key);
    }

    // --- unchecked accessors (public, bypass descriptor validation) ---

    @Override
    public void setUnchecked(String key, Long value) {
        longValues.put(key, value);
    }

    @Override
    public void setUnchecked(String key, String value) {
        stringValues.put(key, value);
    }

    @Override
    public void setUnchecked(String key, IneList value) {
        listValues.put(key, value);
    }

    @Override
    public void setUnchecked(String key, Double value) {
        doubleValues.put(key, value);
    }

    @Override
    public void setUnchecked(String key, Boolean value) {
        booleanValues.put(key, value);
    }

    @Override
    public Long getLongUnchecked(String key) {
        return longValues.get(key);
    }

    @Override
    public Boolean getBooleanUnchecked(String key) {
        return booleanValues.get(key);
    }

    @Override
    public String getStringUnchecked(String key) {
        return stringValues.get(key);
    }

    @Override
    public Relation getRelationUnchecked(String key) {
        return relationValues.get(key);
    }

    @Override
    public Double getDoubleUnchecked(String key) {
        return doubleValues.get(key);
    }

    // --- containment checks per typed map ---

    @Override
    protected boolean containsString(String key) {
        return stringValues.containsKey(key);
    }

    @Override
    protected boolean containsBoolean(String key) {
        return booleanValues.containsKey(key);
    }

    @Override
    protected boolean containsDouble(String key) {
        return doubleValues.containsKey(key);
    }

    @Override
    protected boolean containsList(String key) {
        return listValues.containsKey(key);
    }

    @Override
    protected boolean containsLong(String key) {
        return longValues.containsKey(key);
    }

    @Override
    protected boolean containsRelation(String key) {
        return relationValues.containsKey(key);
    }

    /** All keys across every map, including propJsons; duplicates possible across maps. */
    @JsonIgnore
    @Override
    public List<String> getKeys() {
        List<String> allKeys = new ArrayList<>();
        allKeys.addAll(booleanValues.keySet());
        allKeys.addAll(doubleValues.keySet());
        allKeys.addAll(listValues.keySet());
        allKeys.addAll(longValues.keySet());
        allKeys.addAll(relationValues.keySet());
        allKeys.addAll(stringValues.keySet());
        allKeys.addAll(propJsons.keySet());
        return allKeys;
    }

    /**
     * Copies every value into {@code target}. Lists and relations are defensively copied;
     * an existing target relation with the same id is merged in place instead of replaced.
     */
    @Override
    protected void copyValuesTo(AssistedObject target) {
        for (String key : booleanValues.keySet()) {
            Boolean bool = this.booleanValues.get(key);
            target.set(key, bool);
        }
        for (String key : doubleValues.keySet()) {
            Double dbl = this.doubleValues.get(key);
            target.set(key, dbl);
        }
        for (String key : listValues.keySet()) {
            IneList ineList = this.listValues.get(key);
            target.set(key, ineList == null ? null : new IneList(ineList));
        }
        for (String key : longValues.keySet()) {
            Long lng = this.longValues.get(key);
            target.set(key, lng);
        }
        for (String key : relationValues.keySet()) {
            Relation relation = this.relationValues.get(key);
            Relation targetRelation = target.getRelation(key);
            if (targetRelation == null || relation == null || targetRelation.getKvo() == null
                    || targetRelation.getId() == null
                    || !targetRelation.getId().equals(relation.getId())) {
                target.set(key, relation == null ? null : new Relation(relation));
            } else {
                // Same relation id: merge nested KVO values instead of replacing the relation.
                if (relation.getKvo() == null)
                    targetRelation.setKvo(null);
                else {
                    relation.getKvo().copyValuesTo(targetRelation.getKvo());
                }
            }
        }
        for (String key : stringValues.keySet()) {
            String str = this.stringValues.get(key);
            target.set(key, str);
        }
        for (String key : propJsons.keySet()) {
            target.setPropsJson(key, propJsons.get(key));
        }
    }

    /**
     * NOT GENERATED FOR TESTING
     *
     * Compares descriptor name and the six typed value maps; propJsons is deliberately
     * excluded. NOTE: throws IllegalArgumentException (rather than returning false) for
     * non-KeyValueObject arguments — kept for backward compatibility with existing callers.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (!(obj instanceof KeyValueObject))
            throw new IllegalArgumentException();
        KeyValueObject other = (KeyValueObject) obj;
        if (descriptorName == null) {
            if (other.descriptorName != null)
                return false;
        } else if (!descriptorName.equals(other.descriptorName))
            return false;
        if (!booleanValues.equals(other.booleanValues))
            return false;
        if (!doubleValues.equals(other.doubleValues))
            return false;
        if (!listValues.equals(other.listValues))
            return false;
        if (!longValues.equals(other.longValues))
            return false;
        if (!relationValues.equals(other.relationValues))
            return false;
        if (!stringValues.equals(other.stringValues))
            return false;
        return true;
    }

    /**
     * Added: equals() was overridden without hashCode(), breaking the Object contract
     * (equal objects could land in different hash buckets). Hashes exactly the fields
     * equals() compares — propJsons is excluded to stay consistent.
     */
    @Override
    public int hashCode() {
        int result = descriptorName == null ? 0 : descriptorName.hashCode();
        result = 31 * result + booleanValues.hashCode();
        result = 31 * result + doubleValues.hashCode();
        result = 31 * result + listValues.hashCode();
        result = 31 * result + longValues.hashCode();
        result = 31 * result + relationValues.hashCode();
        result = 31 * result + stringValues.hashCode();
        return result;
    }

    // --- key-set views per typed map ---

    @JsonIgnore
    @Override
    public Set<String> getLongKeys() {
        return longValues.keySet();
    }

    @JsonIgnore
    @Override
    public Set<String> getBooleanKeys() {
        return booleanValues.keySet();
    }

    @JsonIgnore
    @Override
    public Set<String> getDoubleKeys() {
        return doubleValues.keySet();
    }

    @JsonIgnore
    @Override
    public Set<String> getStringKeys() {
        return stringValues.keySet();
    }

    @JsonIgnore
    @Override
    public Set<String> getListKeys() {
        return listValues.keySet();
    }

    @JsonIgnore
    @Override
    public Set<String> getRelationKeys() {
        return relationValues.keySet();
    }

    // --- raw map accessors (expose internal maps; mutations affect this object) ---

    public Map<String, Boolean> getBooleanValues() {
        return booleanValues;
    }

    public Map<String, Double> getDoubleValues() {
        return doubleValues;
    }

    public Map<String, IneList> getListValues() {
        return listValues;
    }

    public Map<String, Long> getLongValues() {
        return longValues;
    }

    public Map<String, Relation> getRelationValues() {
        return relationValues;
    }

    public Map<String, String> getStringValues() {
        return stringValues;
    }

    @Override
    public String getPropsJson(String id) {
        return propJsons.get(id);
    }

    @Override
    public void setPropsJson(String id, String json) {
        propJsons.put(id, json);
    }

    @Override
    public Map<String, String> getAllPropsJson() {
        return propJsons;
    }
}
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf.util;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.base.CaseFormat;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Splitter;
import com.google.common.primitives.Ints;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Internal;
import com.google.protobuf.Message;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Utility helper functions to work with {@link com.google.protobuf.FieldMask}.
*/
public final class FieldMaskUtil {
private static final String FIELD_PATH_SEPARATOR = ",";
private static final String FIELD_PATH_SEPARATOR_REGEX = ",";
private static final String FIELD_SEPARATOR_REGEX = "\\.";
private FieldMaskUtil() {}
/**
* Converts a FieldMask to a string.
*/
/**
 * Serializes a FieldMask into a single comma-separated string; empty paths are skipped.
 */
public static String toString(FieldMask fieldMask) {
  // TODO(xiaofeng): Consider using com.google.common.base.Joiner here instead.
  StringBuilder joined = new StringBuilder();
  String delimiter = "";
  for (String path : fieldMask.getPathsList()) {
    if (!path.isEmpty()) {
      // Ignore empty paths; prefix every element after the first with the separator.
      joined.append(delimiter).append(path);
      delimiter = FIELD_PATH_SEPARATOR;
    }
  }
  return joined.toString();
}
/**
* Parses from a string to a FieldMask.
*/
/**
 * Parses a FieldMask from a comma-separated string of paths. Empty paths are
 * dropped; paths are not validated against any message type.
 */
public static FieldMask fromString(String value) {
  // TODO(xiaofeng): Consider using com.google.common.base.Splitter here instead.
  return fromStringList(Arrays.asList(value.split(FIELD_PATH_SEPARATOR_REGEX)));
}
/**
* Parses from a string to a FieldMask and validates all field paths.
*
* @throws IllegalArgumentException if any of the field path is invalid.
*/
/**
 * Parses a FieldMask from a comma-separated string and validates every path against
 * the descriptor of {@code type}.
 *
 * @throws IllegalArgumentException if any of the field path is invalid.
 */
public static FieldMask fromString(Class<? extends Message> type, String value) {
  // TODO(xiaofeng): Consider using com.google.common.base.Splitter here instead.
  return fromStringList(type, Arrays.asList(value.split(FIELD_PATH_SEPARATOR_REGEX)));
}
/**
* Constructs a FieldMask for a list of field paths in a certain type.
*
* @throws IllegalArgumentException if any of the field path is not valid.
*/
// TODO(xiaofeng): Consider renaming fromStrings()
/**
 * Constructs a FieldMask for a list of field paths, validated against the default
 * instance's descriptor of {@code type}.
 *
 * @throws IllegalArgumentException if any of the field path is not valid.
 */
// TODO(xiaofeng): Consider renaming fromStrings()
public static FieldMask fromStringList(Class<? extends Message> type, Iterable<String> paths) {
  return fromStringList(Internal.getDefaultInstance(type).getDescriptorForType(), paths);
}
/**
* Constructs a FieldMask for a list of field paths in a certain type.
*
* @throws IllegalArgumentException if any of the field path is not valid.
*/
/**
 * Constructs a FieldMask for a list of field paths, validated against {@code descriptor}.
 *
 * @throws IllegalArgumentException if any of the field path is not valid.
 */
public static FieldMask fromStringList(Descriptor descriptor, Iterable<String> paths) {
  return fromStringList(Optional.of(descriptor), paths);
}
/**
* Constructs a FieldMask for a list of field paths in a certain type. Does not validate the given
* paths.
*/
/**
 * Constructs a FieldMask for a list of field paths without validating them
 * (no descriptor is supplied to the worker overload).
 */
public static FieldMask fromStringList(Iterable<String> paths) {
  return fromStringList(Optional.<Descriptor>absent(), paths);
}
/**
 * Worker for the public fromStringList overloads: builds a FieldMask from the given
 * paths, skipping empty entries. When a descriptor is present, each non-empty path is
 * validated against it first.
 *
 * @throws IllegalArgumentException if validation is requested and a path is invalid
 */
private static FieldMask fromStringList(Optional<Descriptor> descriptor, Iterable<String> paths) {
  FieldMask.Builder mask = FieldMask.newBuilder();
  for (String candidate : paths) {
    if (!candidate.isEmpty()) {
      // Only validate when a message descriptor was supplied.
      if (descriptor.isPresent() && !isValid(descriptor.get(), candidate)) {
        throw new IllegalArgumentException(
            candidate + " is not a valid path for " + descriptor.get().getFullName());
      }
      mask.addPaths(candidate);
    }
  }
  return mask.build();
}
/**
* Constructs a FieldMask from the passed field numbers.
*
* @throws IllegalArgumentException if any of the fields are invalid for the message.
*/
/**
 * Constructs a FieldMask from the passed field numbers (varargs convenience overload).
 *
 * @throws IllegalArgumentException if any of the fields are invalid for the message.
 */
public static FieldMask fromFieldNumbers(Class<? extends Message> type, int... fieldNumbers) {
  return fromFieldNumbers(type, Ints.asList(fieldNumbers));
}
/**
* Constructs a FieldMask from the passed field numbers.
*
* @throws IllegalArgumentException if any of the fields are invalid for the message.
*/
/**
 * Constructs a FieldMask whose paths are the names of the given top-level field numbers
 * of {@code type}.
 *
 * @throws IllegalArgumentException if any of the fields are invalid for the message.
 */
public static FieldMask fromFieldNumbers(
    Class<? extends Message> type, Iterable<Integer> fieldNumbers) {
  Descriptor descriptor = Internal.getDefaultInstance(type).getDescriptorForType();
  FieldMask.Builder mask = FieldMask.newBuilder();
  for (Integer number : fieldNumbers) {
    // Resolve each number to its field descriptor; unknown numbers are rejected.
    FieldDescriptor field = descriptor.findFieldByNumber(number);
    checkArgument(
        field != null, String.format("%s is not a valid field number for %s.", number, type));
    mask.addPaths(field.getName());
  }
  return mask.build();
}
/**
* Converts a field mask to a Proto3 JSON string, that is converting from snake case to camel
* case and joining all paths into one string with commas.
*/
public static String toJsonString(FieldMask fieldMask) {
List<String> paths = new ArrayList<String>(fieldMask.getPathsCount());
for (String path : fieldMask.getPathsList()) {
if (path.isEmpty()) {
continue;
}
paths.add(CaseFormat.LOWER_UNDERSCORE.to(CaseFormat.LOWER_CAMEL, path));
}
return Joiner.on(FIELD_PATH_SEPARATOR).join(paths);
}
/**
* Converts a field mask from a Proto3 JSON string, that is splitting the paths along commas and
* converting from camel case to snake case.
*/
public static FieldMask fromJsonString(String value) {
Iterable<String> paths = Splitter.on(FIELD_PATH_SEPARATOR).split(value);
FieldMask.Builder builder = FieldMask.newBuilder();
for (String path : paths) {
if (path.isEmpty()) {
continue;
}
builder.addPaths(CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, path));
}
return builder.build();
}
  /**
   * Checks whether paths in a given fields mask are valid.
   */
  public static boolean isValid(Class<? extends Message> type, FieldMask fieldMask) {
    // Resolve the message descriptor from the class, then delegate to the Descriptor overload.
    Descriptor descriptor = Internal.getDefaultInstance(type).getDescriptorForType();
    return isValid(descriptor, fieldMask);
  }
/**
* Checks whether paths in a given fields mask are valid.
*/
public static boolean isValid(Descriptor descriptor, FieldMask fieldMask) {
for (String path : fieldMask.getPathsList()) {
if (!isValid(descriptor, path)) {
return false;
}
}
return true;
}
  /**
   * Checks whether a given field path is valid.
   */
  public static boolean isValid(Class<? extends Message> type, String path) {
    // Resolve the message descriptor from the class, then delegate to the Descriptor overload.
    Descriptor descriptor = Internal.getDefaultInstance(type).getDescriptorForType();
    return isValid(descriptor, path);
  }
  /**
   * Checks whether a single field path is valid for the given message descriptor. The path is
   * split on the field separator; each segment must name a field of the message type reached so
   * far, and only singular (non-repeated) message fields may be descended into.
   */
  public static boolean isValid(Descriptor descriptor, String path) {
    String[] parts = path.split(FIELD_SEPARATOR_REGEX);
    if (parts.length == 0) {
      // A path made entirely of separators (e.g. ".") splits to an empty array.
      return false;
    }
    for (String name : parts) {
      if (descriptor == null) {
        // The previous segment ended on a repeated or non-message field; nothing to descend into.
        return false;
      }
      FieldDescriptor field = descriptor.findFieldByName(name);
      if (field == null) {
        return false;
      }
      if (!field.isRepeated() && field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
        descriptor = field.getMessageType();
      } else {
        // Repeated and scalar fields must be the last segment of a valid path.
        descriptor = null;
      }
    }
    return true;
  }
  /**
   * Converts a FieldMask to its canonical form. In the canonical form of a
   * FieldMask, all field paths are sorted alphabetically and redundant field
   * paths are removed.
   */
  public static FieldMask normalize(FieldMask mask) {
    // FieldMaskTree deduplicates and orders paths; round-tripping through it canonicalizes.
    return new FieldMaskTree(mask).toFieldMask();
  }
/**
* Creates a union of two or more FieldMasks.
*/
public static FieldMask union(
FieldMask firstMask, FieldMask secondMask, FieldMask... otherMasks) {
FieldMaskTree maskTree = new FieldMaskTree(firstMask).mergeFromFieldMask(secondMask);
for (FieldMask mask : otherMasks) {
maskTree.mergeFromFieldMask(mask);
}
return maskTree.toFieldMask();
}
/** Subtracts {@code secondMask} and {@code otherMasks} from {@code firstMask}. */
public static FieldMask subtract(
FieldMask firstMask, FieldMask secondMask, FieldMask... otherMasks) {
FieldMaskTree maskTree = new FieldMaskTree(firstMask).removeFromFieldMask(secondMask);
for (FieldMask mask : otherMasks) {
maskTree.removeFromFieldMask(mask);
}
return maskTree.toFieldMask();
}
/**
* Calculates the intersection of two FieldMasks.
*/
public static FieldMask intersection(FieldMask mask1, FieldMask mask2) {
FieldMaskTree tree = new FieldMaskTree(mask1);
FieldMaskTree result = new FieldMaskTree();
for (String path : mask2.getPathsList()) {
tree.intersectFieldPath(path, result);
}
return result.toFieldMask();
}
  /**
   * Options to customize merging behavior.
   *
   * <p>Instances are mutable; each setter returns {@code this} so calls can be chained.
   * NOTE(review): no synchronization is performed — presumably instances are not meant to be
   * shared across threads; confirm with callers.
   */
  public static final class MergeOptions {
    private boolean replaceMessageFields = false;
    private boolean replaceRepeatedFields = false;
    // TODO(b/28277137): change the default behavior to always replace primitive fields after
    // fixing all failing TAP tests.
    private boolean replacePrimitiveFields = false;
    /**
     * Whether to replace message fields (i.e., discard existing content in
     * destination message fields).
     */
    public boolean replaceMessageFields() {
      return replaceMessageFields;
    }
    /**
     * Whether to replace repeated fields (i.e., discard existing content in
     * destination repeated fields).
     */
    public boolean replaceRepeatedFields() {
      return replaceRepeatedFields;
    }
    /**
     * Whether to replace primitive (non-repeated and non-message) fields in
     * destination message fields with the source primitive fields (i.e., clear
     * destination field if source field is not set).
     */
    public boolean replacePrimitiveFields() {
      return replacePrimitiveFields;
    }
    /**
     * Specify whether to replace message fields. Defaults to false.
     *
     * <p>If true, discard existing content in destination message fields when merging.
     *
     * <p>If false, merge the source message field into the destination message field.
     */
    @CanIgnoreReturnValue
    public MergeOptions setReplaceMessageFields(boolean value) {
      replaceMessageFields = value;
      return this;
    }
    /**
     * Specify whether to replace repeated fields. Defaults to false.
     *
     * <p>If true, discard existing content in destination repeated fields) when merging.
     *
     * <p>If false, append elements from source repeated field to the destination repeated field.
     */
    @CanIgnoreReturnValue
    public MergeOptions setReplaceRepeatedFields(boolean value) {
      replaceRepeatedFields = value;
      return this;
    }
    /**
     * Specify whether to replace primitive (non-repeated and non-message) fields in destination
     * message fields with the source primitive fields. Defaults to false.
     *
     * <p>If true, set the value of the destination primitive field to the source primitive field if
     * the source field is set, but clear the destination field otherwise.
     *
     * <p>If false, always set the value of the destination primitive field to the source primitive
     * field, and if the source field is unset, the default value of the source field is copied to
     * the destination.
     */
    @CanIgnoreReturnValue
    public MergeOptions setReplacePrimitiveFields(boolean value) {
      replacePrimitiveFields = value;
      return this;
    }
  }
  /**
   * Merges fields specified by a FieldMask from one message to another with the specified merge
   * options. The destination will remain unchanged if an empty FieldMask is provided.
   */
  public static void merge(
      FieldMask mask, Message source, Message.Builder destination, MergeOptions options) {
    // The tree drives the traversal; merge semantics are controlled by options.
    new FieldMaskTree(mask).merge(source, destination, options);
  }
  /**
   * Merges fields specified by a FieldMask from one message to another.
   */
  public static void merge(FieldMask mask, Message source, Message.Builder destination) {
    // Uses default MergeOptions (no replacement; message/repeated fields are merged/appended).
    merge(mask, source, destination, new MergeOptions());
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.InnerHitContextBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
/**
 * Unit tests for {@link HasParentQueryBuilder}: random query generation, Lucene query
 * translation, serialization (including BWC), JSON round-tripping, and handling of
 * unmapped parent types.
 */
public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQueryBuilder> {
    private static final String TYPE = "_doc";
    // Relation names registered on the test join field: PARENT_DOC is the parent of CHILD_DOC.
    private static final String PARENT_DOC = "parent";
    private static final String CHILD_DOC = "child";
    // Set when doCreateTestQueryBuilder wraps the inner query in a WrapperQueryBuilder,
    // which makes the query require a rewrite (see testMustRewrite).
    boolean requiresRewrite = false;
    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        // The join field type is provided by the parent-join plugin.
        return Collections.singletonList(ParentJoinPlugin.class);
    }
    @Override
    protected Settings createTestIndexSettings() {
        return Settings.builder()
            .put(super.createTestIndexSettings())
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .build();
    }
    @Override
    protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
        // Register a join field with a single parent->child relation plus one field of
        // each basic type used by the random query generator.
        XContentBuilder mapping = jsonBuilder().startObject().startObject("_doc").startObject("properties")
            .startObject("join_field")
                .field("type", "join")
                .startObject("relations")
                    .field(PARENT_DOC, CHILD_DOC)
                .endObject()
            .endObject()
            .startObject(STRING_FIELD_NAME)
                .field("type", "text")
            .endObject()
            .startObject(STRING_FIELD_NAME_2)
                .field("type", "keyword")
            .endObject()
            .startObject(INT_FIELD_NAME)
                .field("type", "integer")
            .endObject()
            .startObject(DOUBLE_FIELD_NAME)
                .field("type", "double")
            .endObject()
            .startObject(BOOLEAN_FIELD_NAME)
                .field("type", "boolean")
            .endObject()
            .startObject(DATE_FIELD_NAME)
                .field("type", "date")
            .endObject()
            .startObject(OBJECT_FIELD_NAME)
                .field("type", "object")
            .endObject()
            .endObject().endObject().endObject();
        mapperService.merge(TYPE,
            new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
    }
    /**
     * @return a {@link HasParentQueryBuilder} with random values all over the place
     */
    @Override
    protected HasParentQueryBuilder doCreateTestQueryBuilder() {
        QueryBuilder innerQueryBuilder = new MatchAllQueryBuilder();
        if (randomBoolean()) {
            // Wrapping forces a rewrite; remember it so testMustRewrite can tolerate the failure.
            requiresRewrite = true;
            innerQueryBuilder = new WrapperQueryBuilder(innerQueryBuilder.toString());
        }
        HasParentQueryBuilder hqb = new HasParentQueryBuilder(PARENT_DOC, innerQueryBuilder, randomBoolean());
        hqb.ignoreUnmapped(randomBoolean());
        if (randomBoolean()) {
            hqb.innerHit(new InnerHitBuilder()
                    .setName(randomAlphaOfLengthBetween(1, 10))
                    .setSize(randomIntBetween(0, 100))
                    .addSort(new FieldSortBuilder(STRING_FIELD_NAME_2).order(SortOrder.ASC)));
        }
        return hqb;
    }
    @Override
    protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        // has_parent compiles down to the same late-parsing query as has_child.
        assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
        HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
        assertEquals(queryBuilder.score() ? ScoreMode.Max : ScoreMode.None, lpq.getScoreMode());
        if (queryBuilder.innerHit() != null) {
            // have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
            // doCreateTestQueryBuilder)
            queryBuilder = (HasParentQueryBuilder) queryBuilder.rewrite(context);
            Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
            InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
            assertTrue(innerHitBuilders.containsKey(queryBuilder.innerHit().getName()));
            InnerHitContextBuilder innerHits = innerHitBuilders.get(queryBuilder.innerHit().getName());
            assertEquals(innerHits.innerHitBuilder(), queryBuilder.innerHit());
        }
    }
    /**
     * Test (de)serialization on all previous released versions
     */
    public void testSerializationBWC() throws IOException {
        for (Version version : VersionUtils.allReleasedVersions()) {
            HasParentQueryBuilder testQuery = createTestQueryBuilder();
            assertSerialization(testQuery, version);
        }
    }
    public void testIllegalValues() throws IOException {
        // Constructor argument validation: both parent_type and query are mandatory.
        QueryBuilder query = new MatchAllQueryBuilder();
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> hasParentQuery(null, query, false));
        assertThat(e.getMessage(), equalTo("[has_parent] requires 'parent_type' field"));
        e = expectThrows(IllegalArgumentException.class,
                () -> hasParentQuery("foo", null, false));
        assertThat(e.getMessage(), equalTo("[has_parent] requires 'query' field"));
        // A parent type not present in the join field mapping fails at query-shard time.
        QueryShardContext context = createShardContext();
        HasParentQueryBuilder qb = hasParentQuery("just_a_type", new MatchAllQueryBuilder(), false);
        QueryShardException qse = expectThrows(QueryShardException.class, () -> qb.doToQuery(context));
        assertThat(qse.getMessage(), equalTo("[has_parent] join field [join_field] doesn't hold [just_a_type] as a parent"));
    }
    public void testToQueryInnerQueryType() throws IOException {
        QueryShardContext shardContext = createShardContext();
        HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(PARENT_DOC, new IdsQueryBuilder().addIds("id"),
                false);
        Query query = hasParentQueryBuilder.toQuery(shardContext);
        // Delegates the structural assertions to the shared has_child test helper.
        HasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_DOC, "id");
    }
    @Override
    public void testMustRewrite() throws IOException {
        try {
            super.testMustRewrite();
        } catch (UnsupportedOperationException e) {
            // Only acceptable when the random builder wrapped its query (requiresRewrite == true).
            if (requiresRewrite == false) {
                throw e;
            }
        }
    }
    public void testFromJson() throws IOException {
        String json =
                "{\n" +
                "  \"has_parent\" : {\n" +
                "    \"query\" : {\n" +
                "      \"term\" : {\n" +
                "        \"tag\" : {\n" +
                "          \"value\" : \"something\",\n" +
                "          \"boost\" : 1.0\n" +
                "        }\n" +
                "      }\n" +
                "    },\n" +
                "    \"parent_type\" : \"blog\",\n" +
                "    \"score\" : true,\n" +
                "    \"ignore_unmapped\" : false,\n" +
                "    \"boost\" : 1.0\n" +
                "  }\n" +
                "}";
        HasParentQueryBuilder parsed = (HasParentQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);
        assertEquals(json, "blog", parsed.type());
        assertEquals(json, "something", ((TermQueryBuilder) parsed.query()).value());
    }
    public void testIgnoreUnmapped() throws IOException {
        final HasParentQueryBuilder queryBuilder = new HasParentQueryBuilder("unmapped", new MatchAllQueryBuilder(), false);
        queryBuilder.innerHit(new InnerHitBuilder());
        assertFalse(queryBuilder.innerHit().isIgnoreUnmapped());
        // ignoreUnmapped propagates to the inner hit and downgrades the query to match-none.
        queryBuilder.ignoreUnmapped(true);
        assertTrue(queryBuilder.innerHit().isIgnoreUnmapped());
        Query query = queryBuilder.toQuery(createShardContext());
        assertThat(query, notNullValue());
        assertThat(query, instanceOf(MatchNoDocsQuery.class));
        // Without ignoreUnmapped, an unmapped parent type is a hard failure.
        final HasParentQueryBuilder failingQueryBuilder = new HasParentQueryBuilder("unmapped", new MatchAllQueryBuilder(), false);
        failingQueryBuilder.innerHit(new InnerHitBuilder());
        assertFalse(failingQueryBuilder.innerHit().isIgnoreUnmapped());
        failingQueryBuilder.ignoreUnmapped(false);
        assertFalse(failingQueryBuilder.innerHit().isIgnoreUnmapped());
        QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext()));
        assertThat(e.getMessage(),
                containsString("[has_parent] join field [join_field] doesn't hold [unmapped] as a parent"));
    }
    public void testIgnoreUnmappedWithRewrite() throws IOException {
        // WrapperQueryBuilder makes sure we always rewrite
        final HasParentQueryBuilder queryBuilder =
            new HasParentQueryBuilder("unmapped", new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), false);
        queryBuilder.ignoreUnmapped(true);
        QueryShardContext queryShardContext = createShardContext();
        Query query = queryBuilder.rewrite(queryShardContext).toQuery(queryShardContext);
        assertThat(query, notNullValue());
        assertThat(query, instanceOf(MatchNoDocsQuery.class));
    }
    public void testExtractInnerHitBuildersWithDuplicate() {
        // Registering an inner hit under an already-used name must be rejected.
        final HasParentQueryBuilder queryBuilder
            = new HasParentQueryBuilder(CHILD_DOC, new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), false);
        queryBuilder.innerHit(new InnerHitBuilder("some_name"));
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> InnerHitContextBuilder.extractInnerHits(queryBuilder, Collections.singletonMap("some_name", null)));
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.ideasoft.yuqing.crawl;
import java.io.*;
import java.util.*;
import java.net.*;
// Commons Logging imports
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolBase;
import cn.ideasoft.yuqing.net.URLFilters;
import cn.ideasoft.yuqing.net.URLNormalizers;
import cn.ideasoft.yuqing.parse.*;
import cn.ideasoft.yuqing.util.LockUtil;
import cn.ideasoft.yuqing.util.YuQingConfiguration;
import cn.ideasoft.yuqing.util.YuQingJob;
/** Maintains an inverted link map, listing incoming links for each url. */
/** Maintains an inverted link map, listing incoming links for each url. */
public class LinkDb extends ToolBase implements Mapper, Reducer {
  public static final Log LOG = LogFactory.getLog(LinkDb.class);
  // Subdirectory under the linkdb path that holds the current database version.
  public static final String CURRENT_NAME = "current";
  // Lock-file name guarding against concurrent linkdb updates.
  public static final String LOCK_NAME = ".locked";
  private int maxAnchorLength;          // anchors longer than this are truncated in map()
  private int maxInlinks;               // cap on inlinks kept per URL in reduce()
  private boolean ignoreInternalLinks;  // skip links whose target host equals the source host
  private URLFilters urlFilters;        // non-null only when URL filtering is enabled
  private URLNormalizers urlNormalizers; // non-null only when URL normalization is enabled
  /** Reducer used by the merge job: concatenates Inlinks lists up to db.max.inlinks. */
  public static class Merger extends MapReduceBase implements Reducer {
    private int _maxInlinks;
    public void configure(JobConf job) {
      super.configure(job);
      _maxInlinks = job.getInt("db.max.inlinks", 10000);
    }
    // Accumulates all values into the first Inlinks seen; emits early once the cap is hit.
    // NOTE(review): assumes values is non-empty (inlinks would otherwise be null at the
    // size() check) — the MapReduce framework only calls reduce() with at least one value.
    public void reduce(WritableComparable key, Iterator values, OutputCollector output, Reporter reporter) throws IOException {
      Inlinks inlinks = null;
      while (values.hasNext()) {
        if (inlinks == null) {
          inlinks = (Inlinks)values.next();
          continue;
        }
        Inlinks val = (Inlinks)values.next();
        for (Iterator it = val.iterator(); it.hasNext(); ) {
          if (inlinks.size() >= _maxInlinks) {
            // Cap reached: emit what we have and drop the rest.
            output.collect(key, inlinks);
            return;
          }
          Inlink in = (Inlink)it.next();
          inlinks.add(in);
        }
      }
      if (inlinks.size() == 0) return;
      output.collect(key, inlinks);
    }
  }
  public LinkDb() {
  }
  public LinkDb(Configuration conf) {
    setConf(conf);
  }
  // Reads job parameters; filters/normalizers are instantiated only if enabled for this job.
  public void configure(JobConf job) {
    maxAnchorLength = job.getInt("db.max.anchor.length", 100);
    maxInlinks = job.getInt("db.max.inlinks", 10000);
    ignoreInternalLinks = job.getBoolean("db.ignore.internal.links", true);
    if (job.getBoolean(LinkDbFilter.URL_FILTERING, false)) {
      urlFilters = new URLFilters(job);
    }
    if (job.getBoolean(LinkDbFilter.URL_NORMALIZING, false)) {
      urlNormalizers = new URLNormalizers(job, URLNormalizers.SCOPE_LINKDB);
    }
  }
  public void close() {}
  /**
   * Inverts the outlinks of one page: for each outlink emits (toUrl, Inlinks{fromUrl, anchor}).
   * Source and target URLs are optionally normalized and filtered; a URL that fails either
   * step is dropped (the whole record when it is the source URL).
   */
  public void map(WritableComparable key, Writable value,
                  OutputCollector output, Reporter reporter)
    throws IOException {
    String fromUrl = key.toString();
    String fromHost = getHost(fromUrl);
    if (urlNormalizers != null) {
      try {
        fromUrl = urlNormalizers.normalize(fromUrl, URLNormalizers.SCOPE_LINKDB); // normalize the url
      } catch (Exception e) {
        LOG.warn("Skipping " + fromUrl + ":" + e);
        fromUrl = null;
      }
    }
    if (fromUrl != null && urlFilters != null) {
      try {
        fromUrl = urlFilters.filter(fromUrl); // filter the url
      } catch (Exception e) {
        LOG.warn("Skipping " + fromUrl + ":" + e);
        fromUrl = null;
      }
    }
    if (fromUrl == null) return; // discard all outlinks
    ParseData parseData = (ParseData)value;
    Outlink[] outlinks = parseData.getOutlinks();
    Inlinks inlinks = new Inlinks();
    for (int i = 0; i < outlinks.length; i++) {
      Outlink outlink = outlinks[i];
      String toUrl = outlink.getToUrl();
      if (ignoreInternalLinks) {
        String toHost = getHost(toUrl);
        if (toHost == null || toHost.equals(fromHost)) { // internal link
          continue; // skip it
        }
      }
      if (urlNormalizers != null) {
        try {
          toUrl = urlNormalizers.normalize(toUrl, URLNormalizers.SCOPE_LINKDB); // normalize the url
        } catch (Exception e) {
          LOG.warn("Skipping " + toUrl + ":" + e);
          toUrl = null;
        }
      }
      if (toUrl != null && urlFilters != null) {
        try {
          toUrl = urlFilters.filter(toUrl); // filter the url
        } catch (Exception e) {
          LOG.warn("Skipping " + toUrl + ":" + e);
          toUrl = null;
        }
      }
      if (toUrl == null) continue;
      // The single Inlinks instance is reused per outlink; clear() resets it each iteration.
      inlinks.clear();
      String anchor = outlink.getAnchor(); // truncate long anchors
      if (anchor.length() > maxAnchorLength) {
        anchor = anchor.substring(0, maxAnchorLength);
      }
      inlinks.add(new Inlink(fromUrl, anchor)); // collect inverted link
      output.collect(new Text(toUrl), inlinks);
    }
  }
  // Returns the lower-cased host of a URL, or null when the URL is malformed.
  private String getHost(String url) {
    try {
      return new URL(url).getHost().toLowerCase();
    } catch (MalformedURLException e) {
      return null;
    }
  }
  /**
   * Combines all Inlinks emitted for a URL into a single list, capped at maxInlinks entries.
   */
  public void reduce(WritableComparable key, Iterator values,
                     OutputCollector output, Reporter reporter)
    throws IOException {
    Inlinks result = new Inlinks();
    while (values.hasNext()) {
      Inlinks inlinks = (Inlinks)values.next();
      int end = Math.min(maxInlinks - result.size(), inlinks.size());
      Iterator it = inlinks.iterator();
      int i = 0;
      while(it.hasNext() && i++ < end) {
        result.add((Inlink)it.next());
      }
    }
    if (result.size() == 0) return;
    output.collect(key, result);
  }
  /**
   * Inverts all segments found directly under segmentsDir (only subdirectories are
   * considered segments).
   */
  public void invert(Path linkDb, final Path segmentsDir, boolean normalize, boolean filter, boolean force) throws IOException {
    final FileSystem fs = FileSystem.get(getConf());
    Path[] files = fs.listPaths(segmentsDir, new PathFilter() {
      public boolean accept(Path f) {
        try {
          if (fs.isDirectory(f)) return true;
        } catch (IOException ioe) {};
        return false;
      }
    });
    invert(linkDb, files, normalize, filter, force);
  }
  /**
   * Runs the inversion job over the given segments, then (if a current linkdb already exists)
   * a merge job combining old and new data, and finally installs the result. A lock file
   * protects the linkdb for the duration; on job failure the lock (and any partial output)
   * is cleaned up before rethrowing.
   */
  public void invert(Path linkDb, Path[] segments, boolean normalize, boolean filter, boolean force) throws IOException {
    Path lock = new Path(linkDb, LOCK_NAME);
    FileSystem fs = FileSystem.get(getConf());
    LockUtil.createLockFile(fs, lock, force);
    Path currentLinkDb = new Path(linkDb, CURRENT_NAME);
    if (LOG.isInfoEnabled()) {
      LOG.info("LinkDb: starting");
      LOG.info("LinkDb: linkdb: " + linkDb);
      LOG.info("LinkDb: URL normalize: " + normalize);
      LOG.info("LinkDb: URL filter: " + filter);
    }
    JobConf job = LinkDb.createJob(getConf(), linkDb, normalize, filter);
    for (int i = 0; i < segments.length; i++) {
      if (LOG.isInfoEnabled()) {
        LOG.info("LinkDb: adding segment: " + segments[i]);
      }
      job.addInputPath(new Path(segments[i], ParseData.DIR_NAME));
    }
    try {
      JobClient.runJob(job);
    } catch (IOException e) {
      LockUtil.removeLockFile(fs, lock);
      throw e;
    }
    if (fs.exists(currentLinkDb)) {
      if (LOG.isInfoEnabled()) {
        LOG.info("LinkDb: merging with existing linkdb: " + linkDb);
      }
      // try to merge
      Path newLinkDb = job.getOutputPath();
      job = LinkDb.createMergeJob(getConf(), linkDb, normalize, filter);
      job.addInputPath(currentLinkDb);
      job.addInputPath(newLinkDb);
      try {
        JobClient.runJob(job);
      } catch (IOException e) {
        LockUtil.removeLockFile(fs, lock);
        fs.delete(newLinkDb);
        throw e;
      }
      fs.delete(newLinkDb);
    }
    LinkDb.install(job, linkDb);
    if (LOG.isInfoEnabled()) { LOG.info("LinkDb: done"); }
  }
  // Configures the inversion job. Normalization/filtering are done here only when no linkdb
  // exists yet; otherwise they are deferred to the merge job to avoid doing the work twice.
  private static JobConf createJob(Configuration config, Path linkDb, boolean normalize, boolean filter) {
    Path newLinkDb =
      new Path("linkdb-" +
               Integer.toString(new Random().nextInt(Integer.MAX_VALUE)));
    JobConf job = new YuQingJob(config);
    job.setJobName("linkdb " + linkDb);
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setMapperClass(LinkDb.class);
    // if we don't run the mergeJob, perform normalization/filtering now
    if (normalize || filter) {
      try {
        FileSystem fs = FileSystem.get(config);
        if (!fs.exists(linkDb)) {
          job.setBoolean(LinkDbFilter.URL_FILTERING, filter);
          job.setBoolean(LinkDbFilter.URL_NORMALIZING, normalize);
        }
      } catch (Exception e) {
        LOG.warn("LinkDb createJob: " + e);
      }
    }
    job.setReducerClass(LinkDb.class);
    job.setOutputPath(newLinkDb);
    job.setOutputFormat(MapFileOutputFormat.class);
    job.setBoolean("mapred.output.compress", true);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Inlinks.class);
    return job;
  }
  // Configures the merge job: LinkDbFilter maps (normalizing/filtering as requested),
  // Merger reduces, writing to a fresh temporary output directory.
  public static JobConf createMergeJob(Configuration config, Path linkDb, boolean normalize, boolean filter) {
    Path newLinkDb =
      new Path("linkdb-merge-" +
               Integer.toString(new Random().nextInt(Integer.MAX_VALUE)));
    JobConf job = new YuQingJob(config);
    job.setJobName("linkdb merge " + linkDb);
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setMapperClass(LinkDbFilter.class);
    job.setBoolean(LinkDbFilter.URL_NORMALIZING, normalize);
    job.setBoolean(LinkDbFilter.URL_FILTERING, filter);
    job.setReducerClass(Merger.class);
    job.setOutputPath(newLinkDb);
    job.setOutputFormat(MapFileOutputFormat.class);
    job.setBoolean("mapred.output.compress", true);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Inlinks.class);
    return job;
  }
  /**
   * Atomically-ish promotes the job output to be the "current" linkdb: the previous current
   * directory is rotated to "old" (replacing any prior "old"), the new output is moved into
   * place, and the lock file is removed.
   */
  public static void install(JobConf job, Path linkDb) throws IOException {
    Path newLinkDb = job.getOutputPath();
    FileSystem fs = new JobClient(job).getFs();
    Path old = new Path(linkDb, "old");
    Path current = new Path(linkDb, CURRENT_NAME);
    if (fs.exists(current)) {
      if (fs.exists(old)) fs.delete(old);
      fs.rename(current, old);
    }
    fs.mkdirs(linkDb);
    fs.rename(newLinkDb, current);
    if (fs.exists(old)) fs.delete(old);
    LockUtil.removeLockFile(fs, new Path(linkDb, LOCK_NAME));
  }
  public static void main(String[] args) throws Exception {
    int res = new LinkDb().doMain(YuQingConfiguration.create(), args);
    System.exit(res);
  }
  /**
   * Command-line entry: parses options (-dir, -force, -noNormalize, -noFilter) and segment
   * paths, then runs the inversion. Returns 0 on success, -1 on usage error or failure.
   */
  public int run(String[] args) throws Exception {
    if (args.length < 2) {
      System.err.println("Usage: LinkDb <linkdb> (-dir <segmentsDir> | <seg1> <seg2> ...) [-force] [-noNormalize] [-noFilter]");
      System.err.println("\tlinkdb\toutput LinkDb to create or update");
      System.err.println("\t-dir segmentsDir\tparent directory of several segments, OR");
      System.err.println("\tseg1 seg2 ...\t list of segment directories");
      System.err.println("\t-force\tforce update even if LinkDb appears to be locked (CAUTION advised)");
      System.err.println("\t-noNormalize\tdon't normalize link URLs");
      System.err.println("\t-noFilter\tdon't apply URLFilters to link URLs");
      return -1;
    }
    Path segDir = null;
    final FileSystem fs = FileSystem.get(conf);
    Path db = new Path(args[0]);
    ArrayList segs = new ArrayList();
    boolean filter = true;
    boolean normalize = true;
    boolean force = false;
    for (int i = 1; i < args.length; i++) {
      if (args[i].equals("-dir")) {
        segDir = new Path(args[++i]);
        Path[] files = fs.listPaths(segDir, new PathFilter() {
          public boolean accept(Path f) {
            try {
              if (fs.isDirectory(f)) return true;
            } catch (IOException ioe) {};
            return false;
          }
        });
        if (files != null) segs.addAll(Arrays.asList(files));
        // NOTE(review): -dir stops option parsing; any flags after it are ignored.
        break;
      } else if (args[i].equalsIgnoreCase("-noNormalize")) {
        normalize = false;
      } else if (args[i].equalsIgnoreCase("-noFilter")) {
        filter = false;
      } else if (args[i].equalsIgnoreCase("-force")) {
        force = true;
      } else segs.add(new Path(args[i]));
    }
    try {
      invert(db, (Path[])segs.toArray(new Path[segs.size()]), normalize, filter, force);
      return 0;
    } catch (Exception e) {
      LOG.fatal("LinkDb: " + StringUtils.stringifyException(e));
      return -1;
    }
  }
}
| |
/*
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Norris Boyd
* Igor Bukanov
* Ethan Hugg
* Terry Lucas
* Milen Nankov
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package com.google.javascript.rhino;
/**
* This class allows the creation of nodes, and follows the Factory pattern.
*
* @see Node
*
*
*/
final class IRFactory
{
    // Factory is bound to one parser; the parser supplies scope context (see createExprStatement).
    IRFactory(Parser parser)
    {
        this.parser = parser;
    }
    // Creates an empty top-level SCRIPT node.
    ScriptOrFnNode createScript()
    {
        return new ScriptOrFnNode(Token.SCRIPT);
    }
    /**
     * Script (for associating file/url names with toplevel scripts.)
     */
    void initScript(ScriptOrFnNode scriptNode, Node body)
    {
        // Move (not copy) the body's children onto the script node.
        Node children = body.removeChildren();
        if (children != null) { scriptNode.addChildrenToBack(children); }
    }
    /**
     * Leaf
     */
    Node createLeaf(int nodeType)
    {
        return new Node(nodeType);
    }
    /**
     * Leaf, with source position attached.
     */
    Node createLeaf(int nodeType, int lineno, int charno)
    {
        return new Node(nodeType, lineno, charno);
    }
    /**
     * Statement leaf nodes.
     */
    Node createSwitch(int lineno, int charno)
    {
        // Cases are appended later via addSwitchCase.
        return new Node(Token.SWITCH, lineno, charno);
    }
/**
* If caseExpression argument is null it indicate default label.
*/
void addSwitchCase(Node switchNode, Node caseExpression, Node statements,
int lineno, int charno)
{
if (switchNode.getType() != Token.SWITCH) throw Kit.codeBug();
Node caseNode;
if (caseExpression != null) {
caseNode = new Node(
Token.CASE, caseExpression, lineno, charno);
} else {
caseNode = new Node(Token.DEFAULT, lineno, charno);
}
caseNode.addChildToBack(statements);
switchNode.addChildToBack(caseNode);
}
    // Intentionally a no-op: switch nodes need no finalization in this factory.
    void closeSwitch(Node switchBlock)
    {
    }
    // Creates a VAR/CONST-style declaration list node for the given token.
    Node createVariables(int token, int lineno, int charno)
    {
        return new Node(token, lineno, charno);
    }
Node createExprStatement(Node expr, int lineno, int charno)
{
int type;
if (parser.insideFunction()) {
type = Token.EXPR_VOID;
} else {
type = Token.EXPR_RESULT;
}
return new Node(type, expr, lineno, charno);
}
    // Always wraps as EXPR_VOID, regardless of function/toplevel context.
    Node createExprStatementNoReturn(Node expr, int lineno, int charno)
    {
        return new Node(Token.EXPR_VOID, expr, lineno, charno);
    }
    // Builds the statement form of "default xml namespace = expr" (E4X).
    Node createDefaultNamespace(Node expr, int lineno, int charno)
    {
        // default xml namespace requires activation
        setRequiresActivation();
        Node n = createUnary(Token.DEFAULTNAMESPACE, expr, lineno, charno);
        Node result = createExprStatement(n, lineno, charno);
        return result;
    }
    // Returns a NAME node for the literal identifier "error".
    public Node createErrorName() {
        return Node.newString(Token.NAME, "error");
    }
/**
* Name
*/
Node createName(String name, int lineno, int charno)
{
checkActivationName(name, Token.NAME);
return Node.newString(Token.NAME, name, lineno, charno);
}
/**
 * Name node with an optional JSDoc annotation attached.
 */
public Node createTaggedName(String name, JSDocInfo info,
                             int lineno, int charno) {
    final Node nameNode = createName(name, lineno, charno);
    if (info == null) {
        return nameNode;
    }
    nameNode.setJSDocInfo(info);
    return nameNode;
}
/**
 * String literal node with no position information.
 */
Node createString(String string)
{
    return Node.newString(string);
}
/**
 * String literal node at the given source position.
 */
Node createString(String string, int lineno, int charno)
{
    return Node.newString(string, lineno, charno);
}
/**
 * Number literal node with no position information.
 */
Node createNumber(double number)
{
    return Node.newNumber(number);
}
/**
 * Number literal node at the given source position.
 */
Node createNumber(double number, int lineno, int charno)
{
    return Node.newNumber(number, lineno, charno);
}
/**
 * Catch clause of try/catch/finally
 * @param varName the name of the variable to bind to the exception
 * @param nameLineno the starting line number of the exception clause
 * @param nameCharno the starting char number of the exception clause
 * @param catchCond the condition under which to catch the exception.
 *                  May be null if no condition is given.
 * @param stmts the statements in the catch clause
 * @param catchLineno the starting line number of the catch clause
 * @param catchCharno the starting char number of the catch clause
 */
Node createCatch(String varName, int nameLineno, int nameCharno,
                 Node catchCond, Node stmts, int catchLineno, int catchCharno)
{
    // An absent condition is represented by an EMPTY node.
    final Node condition = (catchCond != null)
        ? catchCond
        : new Node(Token.EMPTY, nameLineno, nameCharno);
    return new Node(Token.CATCH,
                    createName(varName, nameLineno, nameCharno),
                    condition, stmts, catchLineno, catchCharno);
}
/**
 * Throw statement.
 */
Node createThrow(Node expr, int lineno, int charno)
{
    return new Node(Token.THROW, expr, lineno, charno);
}
/**
 * Return statement; expr may be null for a bare "return".
 */
Node createReturn(Node expr, int lineno, int charno)
{
    if (expr == null) {
        return new Node(Token.RETURN, lineno, charno);
    }
    return new Node(Token.RETURN, expr, lineno, charno);
}
/**
 * Labeled statement; the first child is a NAME node holding the label.
 */
Node createLabel(String name, int lineno, int charno)
{
    return new Node(Token.LABEL,
        Node.newString(Token.NAME, name, lineno, charno),
        lineno, charno);
}
/**
 * Break statement; label may be null for an unlabeled break.
 */
Node createBreak(String label, int lineno, int charno)
{
    final Node breakNode = new Node(Token.BREAK, lineno, charno);
    if (label != null) {
        breakNode.addChildToBack(
            Node.newString(Token.NAME, label, lineno, charno));
    }
    return breakNode;
}
/**
 * Continue statement; label may be null for an unlabeled continue.
 */
Node createContinue(String label, int lineno, int charno)
{
    final Node continueNode = new Node(Token.CONTINUE, lineno, charno);
    if (label != null) {
        continueNode.addChildToBack(
            Node.newString(Token.NAME, label, lineno, charno));
    }
    return continueNode;
}
/**
 * "debugger" statement.
 */
Node createDebugger(int lineno, int charno) {
    return new Node(Token.DEBUGGER, lineno, charno);
}
/**
 * Statement block.
 * Creates the empty statement block; callers must make subsequent calls
 * to add statements to the node.
 */
Node createBlock(int lineno, int charno)
{
    return new Node(Token.BLOCK, lineno, charno);
}
/**
 * Creates a function node for the given name, with a NAME node prepended
 * as its first child.
 */
FunctionNode createFunction(String name, int lineno, int charno)
{
    FunctionNode fnNode = new FunctionNode(name, lineno, charno);
    // A hack to preserve the existing JSCompiler code that depends on
    // having the first child node being a NAME node.
    // TODO(user): Remove this when the JSCompiler code has been fixed.
    fnNode.addChildToBack(createName(name, lineno, charno));
    return fnNode;
}
/**
 * Completes a function node: records its type, attaches the argument list
 * and statement body, propagates the source name and JSDoc, and adjusts
 * activation/variable bookkeeping for nested functions.
 */
Node initFunction(FunctionNode fnNode, int functionIndex,
                  Node args, JSDocInfo info,
                  Node statements, int functionType)
{
    fnNode.itsFunctionType = functionType;
    fnNode.addChildToBack(args);
    fnNode.addChildToBack(statements);
    if (parser.getSourceName() != null) {
        fnNode.putProp(Node.SOURCENAME_PROP, parser.getSourceName());
    }
    if (info != null) {
        fnNode.setJSDocInfo(info);
    }
    int functionCount = fnNode.getFunctionCount();
    if (functionCount != 0) {
        // Functions containing other functions require activation objects
        fnNode.itsNeedsActivation = true;
        for (int i = 0; i != functionCount; ++i) {
            FunctionNode fn = fnNode.getFunctionNode(i);
            // nested function expression statements overrides var
            if (fn.getFunctionType()
                == FunctionNode.FUNCTION_EXPRESSION_STATEMENT)
            {
                String name = fn.getFunctionName();
                if (name != null && name.length() != 0) {
                    // The nested function's name shadows any parameter or
                    // var of the same name in this function.
                    fnNode.removeParamOrVar(name);
                }
            }
        }
    }
    fnNode.putIntProp(Node.FUNCTION_PROP, functionIndex);
    return fnNode;
}
/**
 * Add a child to the back of the given node. This function
 * breaks the Factory abstraction, but it removes a requirement
 * from implementors of Node.
 */
void addChildToBack(Node parent, Node child)
{
    parent.addChildToBack(child);
}
/**
 * While loop.
 */
Node createWhile(Node cond, Node body, int lineno, int charno)
{
    return new Node(Token.WHILE, cond, body, lineno, charno);
}
/**
 * Do/while loop; note the body precedes the condition in child order.
 */
Node createDoWhile(Node body, Node cond, int lineno, int charno)
{
    return new Node(Token.DO, body, cond, lineno, charno);
}
/**
 * Classic for loop: for (init; test; incr) body.
 */
Node createFor(Node init, Node test, Node incr, Node body,
               int lineno, int charno)
{
    return new Node(Token.FOR, init, test, incr, body, lineno, charno);
}
/**
 * For..in loop; uses the same FOR token as createFor, distinguished by
 * having three children instead of four.
 */
Node createForIn(Node lhs, Node obj, Node body,
                 int lineno, int charno)
{
    return new Node(Token.FOR, lhs, obj, body, lineno, charno);
}
/**
 * Try/catch/finally; finallyBlock may be null when no finally clause was
 * written.
 */
Node createTryCatchFinally(Node tryBlock, Node catchBlocks,
                           Node finallyBlock, int lineno, int charno)
{
    return (finallyBlock != null)
        ? new Node(Token.TRY, tryBlock, catchBlocks, finallyBlock,
                   lineno, charno)
        : new Node(Token.TRY, tryBlock, catchBlocks, lineno, charno);
}
/**
* Throw, Return, Label, Break and Continue are defined in ASTFactory.
*/
/**
 * With statement.
 */
Node createWith(Node obj, Node body, int lineno, int charno)
{
    return new Node(Token.WITH, obj, body, lineno, charno);
}
/**
 * E4X filtering predicate (DOTQUERY); requires an activation object.
 */
public Node createDotQuery (Node obj, Node body, int lineno, int charno)
{
    setRequiresActivation();
    return new Node(Token.DOTQUERY, obj, body, lineno, charno);
}
/**
 * Array literal. Null entries in elems represent elisions (holes); their
 * indexes are recorded on the node via SKIP_INDEXES_PROP instead of being
 * added as children.
 *
 * @param skipCount the number of null (skipped) entries in elems
 */
Node createArrayLiteral(ObjArray elems, int skipCount,
                        int lineno, int charno)
{
    int length = elems.size();
    int[] skipIndexes = null;
    if (skipCount != 0) {
        skipIndexes = new int[skipCount];
    }
    Node array = new Node(Token.ARRAYLIT, lineno, charno);
    for (int i = 0, j = 0; i != length; ++i) {
        Node elem = (Node)elems.get(i);
        if (elem != null) {
            array.addChildToBack(elem);
        } else {
            // Record the hole's position rather than adding a child.
            skipIndexes[j] = i;
            ++j;
        }
    }
    if (skipCount != 0) {
        array.putProp(Node.SKIP_INDEXES_PROP, skipIndexes);
    }
    return array;
}
/**
 * Object literal; obj holds alternating property-name and value nodes.
 */
Node createObjectLiteral(ObjArray obj, int lineno, int charno)
{
    final Node object = new Node(Token.OBJECTLIT, lineno, charno);
    final int size = obj.size();
    for (int i = 0; i < size; i += 2) {
        // Append the key node followed by its value node.
        object.addChildToBack((Node)obj.get(i));
        object.addChildToBack((Node)obj.get(i + 1));
    }
    return object;
}
/**
 * Regular expression literal; the flags child is omitted when flags is
 * empty.
 */
Node createRegExp(String string, String flags,
                  int lineno, int charno) {
    final Node pattern = Node.newString(string, lineno, charno);
    if (flags.length() == 0) {
        return new Node(Token.REGEXP, pattern, lineno, charno);
    }
    return new Node(Token.REGEXP, pattern,
                    Node.newString(flags, lineno, charno),
                    lineno, charno);
}
/**
 * If statement; ifFalse may be null when there is no else branch.
 */
Node createIf(Node cond, Node ifTrue, Node ifFalse, int lineno, int charno)
{
    return (ifFalse == null)
        ? new Node(Token.IF, cond, ifTrue, lineno, charno)
        : new Node(Token.IF, cond, ifTrue, ifFalse, lineno, charno);
}
/**
 * Conditional (ternary) expression: cond ? ifTrue : ifFalse.
 */
Node createCondExpr(Node cond, Node ifTrue, Node ifFalse,
                    int lineno, int charno)
{
    return new Node(Token.HOOK, cond, ifTrue, ifFalse, lineno, charno);
}
/**
 * Unary expression of the given token type.
 */
Node createUnary(int nodeType, Node child, int lineno, int charno)
{
    return new Node(nodeType, child, lineno, charno);
}
/**
 * Call or new expression. Direct calls to "eval" or "With", and
 * property-calls whose last component is "eval", are tagged via
 * SPECIALCALL_PROP and force an activation object for the enclosing
 * function.
 */
Node createCallOrNew(int nodeType, Node child, int lineno, int charno)
{
    int type = Node.NON_SPECIALCALL;
    if (child.getType() == Token.NAME) {
        String name = child.getString();
        if (name.equals("eval")) {
            type = Node.SPECIALCALL_EVAL;
        } else if (name.equals("With")) {
            type = Node.SPECIALCALL_WITH;
        }
    } else if (child.getType() == Token.GETPROP) {
        String name = child.getLastChild().getString();
        if (name.equals("eval")) {
            type = Node.SPECIALCALL_EVAL;
        }
    }
    Node node = new Node(nodeType, child, lineno, charno);
    if (type != Node.NON_SPECIALCALL) {
        // Calls to these functions require activation objects.
        setRequiresActivation();
        node.putIntProp(Node.SPECIALCALL_PROP, type);
    }
    return node;
}
/**
 * Increment/decrement (INC or DEC) expression. Reports a parse error and
 * returns null when the operand is not a valid reference.
 *
 * @param post true for the postfix form (i++ / i--)
 */
Node createIncDec(int nodeType, boolean post, Node child,
                  int lineno, int charno)
{
    child = makeReference(child);
    if (child == null) {
        String msg;
        if (nodeType == Token.DEC) {
            msg = "msg.bad.decr";
        } else {
            msg = "msg.bad.incr";
        }
        parser.reportError(msg);
        return null;
    }
    int childType = child.getType();
    switch (childType) {
      case Token.NAME:
      case Token.GETPROP:
      case Token.GETELEM:
      case Token.GET_REF:
      case Token.CALL: {
        // INCRDECR_PROP distinguishes postfix (1) from prefix (0).
        Node n = new Node(nodeType, child, lineno, charno);
        n.putIntProp(Node.INCRDECR_PROP, post ? 1 : 0);
        return n;
      }
    }
    // makeReference already filtered to the types above.
    throw Kit.codeBug();
}
/**
 * Property access (target.name). Without a namespace or member-type flags
 * this produces a plain GETPROP, or a GET_REF for special properties;
 * otherwise the access goes through the E4X member-reference path.
 */
Node createPropertyGet(Node target, String namespace, String name,
                       int memberTypeFlags, int dotLineno, int dotCharno,
                       int nameLineno, int nameCharno)
{
    if (namespace == null && memberTypeFlags == 0) {
        if (target == null) {
            // No target: a bare name reference.
            return createName(name, nameLineno, nameCharno);
        }
        checkActivationName(name, Token.GETPROP);
        if (ScriptRuntime.isSpecialProperty(name)) {
            Node ref = new Node(Token.REF_SPECIAL, target);
            ref.putProp(Node.NAME_PROP, name);
            return new Node(Token.GET_REF, ref, dotLineno, dotCharno);
        }
        return new Node(
            Token.GETPROP, target,
            createString(name, nameLineno, nameCharno),
            dotLineno, dotCharno);
    }
    Node elem = createString(name);
    memberTypeFlags |= Node.PROPERTY_FLAG;
    return createMemberRefGet(target, namespace, elem, memberTypeFlags,
                              dotLineno, dotCharno);
}
/**
 * Element access (target[elem]). Without a namespace or member-type flags
 * this produces a plain GETELEM; otherwise it goes through the E4X
 * member-reference path.
 */
Node createElementGet(Node target, String namespace, Node elem,
                      int memberTypeFlags, int lineno, int charno)
{
    // OPT: could optimize to createPropertyGet
    // iff elem is string that can not be number
    if (namespace == null && memberTypeFlags == 0) {
        // stand-alone [aaa] as primary expression is array literal
        // declaration and should not come here!
        if (target == null) throw Kit.codeBug();
        return new Node(Token.GETELEM, target, elem, lineno, charno);
    }
    return createMemberRefGet(target, namespace, elem, memberTypeFlags,
                              lineno, charno);
}
/**
 * Builds an E4X member reference (GET_REF wrapping one of REF_NAME,
 * REF_NS_NAME, REF_MEMBER or REF_NS_MEMBER) depending on whether a target
 * and/or a namespace is present.
 */
private Node createMemberRefGet(Node target, String namespace, Node elem,
                                int memberTypeFlags, int lineno, int charno)
{
    Node nsNode = null;
    if (namespace != null) {
        // See 11.1.2 in ECMA 357
        if (namespace.equals("*")) {
            // The wildcard namespace is represented by a NULL node.
            nsNode = new Node(Token.NULL, lineno, charno);
        } else {
            nsNode = createName(namespace, lineno, charno);
        }
    }
    Node ref;
    if (target == null) {
        if (namespace == null) {
            ref = new Node(Token.REF_NAME, elem, lineno, charno);
        } else {
            ref = new Node(Token.REF_NS_NAME, nsNode, elem, lineno, charno);
        }
    } else {
        if (namespace == null) {
            ref = new Node(Token.REF_MEMBER, target, elem, lineno, charno);
        } else {
            ref = new Node(Token.REF_NS_MEMBER, target, nsNode, elem,
                           lineno, charno);
        }
    }
    if (memberTypeFlags != 0) {
        ref.putIntProp(Node.MEMBER_TYPE_PROP, memberTypeFlags);
    }
    return new Node(Token.GET_REF, ref, lineno, charno);
}
/**
 * Binary expression. DOT is normalized to GETPROP (with the right-hand
 * name node retyped to STRING) and LB to GETELEM; all other operator
 * tokens pass through unchanged.
 */
Node createBinary(int nodeType, Node left, Node right,
                  int lineno, int charno)
{
    switch (nodeType) {
      case Token.DOT:
        nodeType = Token.GETPROP;
        // The property name arrives as a NAME node; GETPROP expects STRING.
        right.setType(Token.STRING);
        break;
      case Token.LB:
        // OPT: could optimize to GETPROP iff string can't be a number
        nodeType = Token.GETELEM;
        break;
    }
    return new Node(nodeType, left, right, lineno, charno);
}
/**
 * Assignment expression. Reports a parse error when the left-hand side is
 * not a NAME, GETPROP or GETELEM.
 *
 * NOTE(review): nodeOp is currently unused here — every assignment is
 * built as a plain Token.ASSIGN; confirm compound-assignment operators
 * are handled elsewhere.
 */
Node createAssignment(int nodeOp, Node left, Node right,
                      int lineno, int charno) throws JavaScriptException
{
    int nodeType = left.getType();
    switch (nodeType) {
      case Token.NAME:
      case Token.GETPROP:
      case Token.GETELEM:
        break;
      default:
        // TODO: This should be a ReferenceError--but that's a runtime
        // exception. Should we compile an exception into the code?
        parser.reportError("msg.bad.assign.left");
    }
    return new Node(Token.ASSIGN, left, right, lineno, charno);
}
/**
 * Returns the node unchanged when it is a valid reference (assignable /
 * inc-dec target); returns null otherwise so the caller can report an
 * error.
 */
private Node makeReference(Node node)
{
    switch (node.getType()) {
      case Token.NAME:
      case Token.GETPROP:
      case Token.GETELEM:
      case Token.GET_REF:
      case Token.CALL:
        return node;
      default:
        // Signal caller to report error
        return null;
    }
}
// Commented-out: no longer used
// private static boolean hasSideEffects(Node exprTree)
// {
// switch (exprTree.getType()) {
// case Token.INC:
// case Token.DEC:
// case Token.SETPROP:
// case Token.SETELEM:
// case Token.SETNAME:
// case Token.CALL:
// case Token.NEW:
// return true;
// default:
// Node child = exprTree.getFirstChild();
// while (child != null) {
// if (hasSideEffects(child))
// return true;
// child = child.getNext();
// }
// break;
// }
// return false;
// }
/**
 * Marks the enclosing function as requiring an activation object when the
 * referenced name demands one: "arguments", any name registered in the
 * compiler environment's activationNames, or "length" accessed as a
 * property under language version 1.2.
 */
private void checkActivationName(String name, int token)
{
    if (parser.insideFunction()) {
        boolean activation = false;
        if ("arguments".equals(name)
            || (parser.compilerEnv.activationNames != null
                && parser.compilerEnv.activationNames.containsKey(name)))
        {
            activation = true;
        } else if ("length".equals(name)) {
            if (token == Token.GETPROP
                && parser.compilerEnv.getLanguageVersion()
                   == Context.VERSION_1_2)
            {
                // Use of "length" in 1.2 requires an activation object.
                activation = true;
            }
        }
        if (activation) {
            setRequiresActivation();
        }
    }
}
/**
 * Flags the function currently being parsed (if any) as needing an
 * activation object at runtime.
 */
private void setRequiresActivation()
{
    if (parser.insideFunction()) {
        ((FunctionNode)parser.currentScriptOrFn).itsNeedsActivation = true;
    }
}
private Parser parser;
}
| |
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.jdbc.proxy;
import static org.hamcrest.CoreMatchers.anyOf;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.sameInstance;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import com.dremio.exec.ExecTest;
import com.dremio.jdbc.SabotNodeRule;
/**
 * Test of TracingProxyDriver other than loading of driver classes.
 */
public class TracingProxyDriverTest extends ExecTest {
  @ClassRule
  public static final SabotNodeRule sabotNode = new SabotNodeRule();

  private static Driver proxyDriver;
  private static Connection proxyConnection;

  /** Registers the proxy driver and opens one shared proxied connection. */
  @BeforeClass
  public static void setUpTestCase() throws SQLException,
                                            ClassNotFoundException {
    Class.forName( "com.dremio.jdbc.proxy.TracingProxyDriver" );
    proxyDriver =
        DriverManager.getDriver(
            "jdbc:proxy:com.dremio.jdbc.Driver:" + sabotNode.getJDBCConnectionString() );
    proxyConnection =
        DriverManager.getConnection( "jdbc:proxy::" + sabotNode.getJDBCConnectionString() );
  }

  /** Checks that a simple query passes through the proxy unchanged. */
  @Test
  public void testBasicProxying() throws SQLException {
    try ( final Statement stmt = proxyConnection.createStatement() ) {
      final ResultSet rs =
          stmt.executeQuery( "SELECT * FROM INFORMATION_SCHEMA.CATALOGS" );
      assertTrue( rs.next() );
      assertThat( rs.getString( 1 ), equalTo( "DREMIO" ) );
      assertThat( rs.getObject( 1 ), equalTo( (Object) "DREMIO" ) );
    }
  }

  /** Temporarily redirects System.err so tests can inspect tracer output. */
  private static class StdErrCapturer {
    private final PrintStream savedStdErr;
    private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    private final PrintStream capturingStream = new PrintStream( buffer );
    private boolean redirected;

    StdErrCapturer() {
      savedStdErr = System.err;
    }

    /** Starts capturing; must not already be redirected. */
    void redirect() {
      assertFalse( redirected );
      redirected = true;
      System.setErr( capturingStream );
    }

    /** Stops capturing and restores the saved System.err. */
    void unredirect() {
      assertTrue( redirected );
      redirected = false;
      System.setErr( savedStdErr );
    }

    /** Returns the captured output; valid only after unredirect(). */
    String getOutput() {
      assertFalse( redirected );
      return new String( buffer.toByteArray(), StandardCharsets.UTF_8 );
    }
  }

  @Test
  public void testBasicReturnTrace() throws SQLException {
    final StdErrCapturer stdErrCapturer = new StdErrCapturer();

    try {
      stdErrCapturer.redirect();
      proxyConnection.isClosed();
    }
    finally {
      stdErrCapturer.unredirect();
    }

    // Check captured System.err:
    final String output = stdErrCapturer.getOutput();
    final String[] lines = output.split( "\n" );
    assertThat( "Not 2 lines: \"\"\"" + output + "\"\"\"",
                lines.length, equalTo( 2 ) );
    final String callLine = lines[ 0 ];
    final String returnLine = lines[ 1 ];

    // Expect something like current:
    // TRACER: CALL: ((Connection) <id=3> ...) . isClosed()
    // TRACER: RETURN: ((Connection) <id=3> ...) . isClosed(), RESULT: (boolean) false
    assertThat( callLine, containsString( " CALL:" ) );
    assertThat( returnLine, containsString( " RETURN:" ) );
    assertThat( callLine, containsString( "(Connection)" ) );
    assertThat( returnLine, containsString( "(Connection)" ) );
    assertThat( callLine, containsString( "isClosed()" ) );
    assertThat( returnLine, containsString( "isClosed()" ) );
    assertThat( callLine, not( containsString( " (boolean) " ) ) );
    assertThat( returnLine, containsString( " (boolean) " ) );
    assertThat( callLine, not( containsString( "false" ) ) );
    assertThat( returnLine, containsString( "false" ) );
  }

  @Test
  public void testBasicThrowTrace() throws SQLException {
    final StdErrCapturer stdErrCapturer = new StdErrCapturer();
    final Statement statement = proxyConnection.createStatement();
    statement.close();

    try {
      stdErrCapturer.redirect();
      statement.execute( "" );
    }
    catch ( final SQLException e ) {
      // "already closed" is expected
    }
    finally {
      stdErrCapturer.unredirect();
    }

    // Check captured System.err:
    final String output = stdErrCapturer.getOutput();
    final String[] lines = output.split( "\n" );
    assertThat( "Not 2 lines: \"\"\"" + output + "\"\"\"",
                lines.length, equalTo( 2 ) );
    final String callLine = lines[ 0 ];
    final String returnLine = lines[ 1 ];

    // Expect something like current:
    // TRACER: CALL: ((Statement) <id=6> ...) . execute( (String) "" )
    // TRACER: THROW: ((Statement) <id=6> ...) . execute( (String) "" ), th\
    //   rew: (com.dremio.jdbc.AlreadyClosedSqlException) com.dremio.jdbc.\
    //   AlreadyClosedSqlException: Statement is already closed.
    assertThat( callLine, containsString( " CALL:" ) );
    assertThat( returnLine, containsString( " THROW:" ) );
    assertThat( callLine, containsString( "(Statement)" ) );
    assertThat( returnLine, containsString( "(Statement)" ) );
    assertThat( callLine, containsString( "execute(" ) );
    assertThat( returnLine, containsString( "execute(" ) );
    assertThat( callLine, not( containsString( "threw:" ) ) );
    assertThat( returnLine, containsString( "threw:" ) );
    assertThat( callLine, not( anyOf( containsString( "exception" ),
                                      containsString( "Exception" ) ) ) );
    assertThat( returnLine, anyOf( containsString( "exception" ),
                                   containsString( "Exception" ) ) );
    assertThat( callLine, not( anyOf( containsString( "closed" ),
                                      containsString( "Closed" ) ) ) );
    assertThat( returnLine, anyOf( containsString( "closed" ),
                                   containsString( "Closed" ) ) );
  }

  // TODO: Clean up these assorted remnants; probably move into separate test
  // methods.
  @Test
  public void testUnsortedMethods() throws SQLException {
    // Exercise these, even though we don't check results.
    proxyDriver.getMajorVersion();
    proxyDriver.getMinorVersion();
    proxyDriver.jdbcCompliant();
    proxyDriver.getParentLogger();
    proxyDriver.getPropertyInfo( "jdbc:proxy::" + sabotNode.getJDBCConnectionString(), new Properties() );

    final DatabaseMetaData dbMetaData = proxyConnection.getMetaData();
    assertThat( dbMetaData, instanceOf( DatabaseMetaData.class ) );
    assertThat( dbMetaData, notNullValue() );
    assertThat( dbMetaData.getConnection(), sameInstance( proxyConnection ) );

    dbMetaData.allTablesAreSelectable();
    try {
      dbMetaData.ownUpdatesAreVisible( ResultSet.TYPE_FORWARD_ONLY );
      fail();
    }
    catch ( SQLException | RuntimeException e ) {
      // expected
    }

    final ResultSet catalogsResultSet = dbMetaData.getCatalogs();
    assertThat( catalogsResultSet, notNullValue() );
    assertThat( catalogsResultSet, instanceOf( ResultSet.class ) );

    catalogsResultSet.next();
    catalogsResultSet.getString( 1 );
    catalogsResultSet.getObject( 1 );

    final ResultSetMetaData rsMetaData = catalogsResultSet.getMetaData();
    assertThat( rsMetaData, notNullValue() );
    assertThat( rsMetaData, instanceOf( ResultSetMetaData.class ) );

    int colCount = rsMetaData.getColumnCount();
    for ( int cx = 1; cx <= colCount; cx++ ) {
      catalogsResultSet.getObject( cx );
      catalogsResultSet.getString( cx );
      try {
        catalogsResultSet.getInt( cx );
        fail( "Expected some kind of string-to-int exception.");
      }
      catch ( SQLException e ) {
        // expected;
      }
    }

    assertThat( proxyConnection.getMetaData(), sameInstance( dbMetaData ) );
    assertThat( catalogsResultSet.getMetaData(), sameInstance( rsMetaData ) );
  }

} // class TracingProxyDriverTest
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;
import com.google.common.collect.ImmutableList;
import com.google.common.hash.HashCode;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.vfs.Path;
import java.io.IOException;
import java.util.List;
import javax.annotation.Nullable;
/**
 * Abstract syntax node for an entire BUILD file.
 */
public class BuildFileAST extends ASTNode {

  // Statements of the file, prelude statements first.
  private final ImmutableList<Statement> stmts;

  private final ImmutableList<Comment> comments;

  // Lazily populated by getImports(); access is synchronized there.
  private ImmutableList<LoadStatement> loads;

  /**
   * Whether any errors were encountered during scanning or parsing.
   */
  private final boolean containsErrors;

  // Hash of the source text, or null when none was computed (only the
  // Skylark-file factory method supplies one).
  private final String contentHashCode;

  private BuildFileAST(List<Statement> preludeStatements, Parser.ParseResult result) {
    this(preludeStatements, result, null);
  }

  private BuildFileAST(List<Statement> preludeStatements,
      Parser.ParseResult result, String contentHashCode) {
    this.stmts = ImmutableList.<Statement>builder()
        .addAll(preludeStatements)
        .addAll(result.statements)
        .build();
    this.comments = ImmutableList.copyOf(result.comments);
    this.containsErrors = result.containsErrors;
    this.contentHashCode = contentHashCode;
    setLocation(result.location);
  }

  /** Collects all load statements */
  private ImmutableList<LoadStatement> fetchLoads(List<Statement> stmts) {
    ImmutableList.Builder<LoadStatement> loads = new ImmutableList.Builder<>();
    for (Statement stmt : stmts) {
      if (stmt instanceof LoadStatement) {
        LoadStatement imp = (LoadStatement) stmt;
        loads.add(imp);
      }
    }
    return loads.build();
  }

  /**
   * Returns true if any errors were encountered during scanning or parsing. If
   * set, clients should not rely on the correctness of the AST for builds or
   * BUILD-file editing.
   */
  public boolean containsErrors() {
    return containsErrors;
  }

  /**
   * Returns an (immutable, ordered) list of statements in this BUILD file.
   */
  public ImmutableList<Statement> getStatements() {
    return stmts;
  }

  /**
   * Returns an (immutable, ordered) list of comments in this BUILD file.
   */
  public ImmutableList<Comment> getComments() {
    return comments;
  }

  /**
   * Returns a list of loads in this BUILD file. The list is computed on
   * first use and cached.
   */
  public synchronized ImmutableList<LoadStatement> getImports() {
    if (loads == null) {
      loads = fetchLoads(stmts);
    }
    return loads;
  }

  /**
   * Executes this build file in a given Environment.
   *
   * <p>If, for any reason, execution of a statement cannot be completed, an
   * {@link EvalException} is thrown by {@link Statement#exec(Environment)}.
   * This exception is caught here and reported through reporter and execution
   * continues on the next statement. In effect, there is a "try/except" block
   * around every top level statement. Such exceptions are not ignored, though:
   * they are visible via the return value. Rules declared in a package
   * containing any error (including loading-phase semantical errors that
   * cannot be checked here) must also be considered "in error".
   *
   * <p>Note that this method will not affect the value of {@link
   * #containsErrors()}; that refers only to lexer/parser errors.
   *
   * @return true if no error occurred during execution.
   */
  public boolean exec(Environment env, EventHandler eventHandler) throws InterruptedException {
    boolean ok = true;
    for (Statement stmt : stmts) {
      try {
        stmt.exec(env);
      } catch (EvalException e) {
        ok = false;
        // Do not report errors caused by a previous parsing error, as it has already been
        // reported.
        if (e.isDueToIncompleteAST()) {
          continue;
        }
        // When the exception is raised from another file, report first the location in the
        // BUILD file (as it is the most probable cause for the error).
        Location exnLoc = e.getLocation();
        Location nodeLoc = stmt.getLocation();
        eventHandler.handle(Event.error(
            (exnLoc == null || !nodeLoc.getPath().equals(exnLoc.getPath())) ? nodeLoc : exnLoc,
            e.getMessage()));
      }
    }
    return ok;
  }

  @Override
  public String toString() {
    return "BuildFileAST" + getStatements();
  }

  @Override
  public void accept(SyntaxTreeVisitor visitor) {
    visitor.visit(this);
  }

  /**
   * Parse the specified build file, returning its AST. All errors during
   * scanning or parsing will be reported to the reporter.
   *
   * @throws IOException if the file cannot not be read.
   */
  public static BuildFileAST parseBuildFile(Path buildFile, EventHandler eventHandler,
                                            boolean parsePython)
      throws IOException {
    return parseBuildFile(buildFile, buildFile.getFileSize(), eventHandler, parsePython);
  }

  public static BuildFileAST parseBuildFile(Path buildFile, long fileSize,
                                            EventHandler eventHandler,
                                            boolean parsePython)
      throws IOException {
    ParserInputSource inputSource = ParserInputSource.create(buildFile, fileSize);
    return parseBuildFile(inputSource, eventHandler, parsePython);
  }

  /**
   * Parse the specified build file, returning its AST. All errors during
   * scanning or parsing will be reported to the reporter.
   */
  public static BuildFileAST parseBuildFile(ParserInputSource input,
                                            List<Statement> preludeStatements,
                                            EventHandler eventHandler,
                                            boolean parsePython) {
    Parser.ParseResult result = Parser.parseFile(input, eventHandler, parsePython);
    return new BuildFileAST(preludeStatements, result);
  }

  public static BuildFileAST parseBuildFile(ParserInputSource input, EventHandler eventHandler,
                                            boolean parsePython) {
    Parser.ParseResult result = Parser.parseFile(input, eventHandler, parsePython);
    return new BuildFileAST(ImmutableList.<Statement>of(), result);
  }

  /**
   * Parse the specified Skylark file, returning its AST. All errors during
   * scanning or parsing will be reported to the reporter.
   *
   * @throws IOException if the file cannot not be read.
   */
  public static BuildFileAST parseSkylarkFile(Path file, EventHandler eventHandler,
      ValidationEnvironment validationEnvironment) throws IOException {
    return parseSkylarkFile(file, file.getFileSize(), eventHandler,
        validationEnvironment);
  }

  public static BuildFileAST parseSkylarkFile(Path file, long fileSize, EventHandler eventHandler,
      ValidationEnvironment validationEnvironment) throws IOException {
    ParserInputSource input = ParserInputSource.create(file, fileSize);
    Parser.ParseResult result =
        Parser.parseFileForSkylark(input, eventHandler, validationEnvironment);
    // Skylark files carry a content hash so changes can be detected.
    return new BuildFileAST(ImmutableList.<Statement>of(), result,
        HashCode.fromBytes(file.getMD5Digest()).toString());
  }

  /**
   * Parse the specified build file, without building the AST.
   *
   * @return true if the input file is syntactically valid
   */
  public static boolean checkSyntax(ParserInputSource input,
                                    EventHandler eventHandler, boolean parsePython) {
    return !parseBuildFile(input, eventHandler, parsePython).containsErrors();
  }

  /**
   * Returns a hash code calculated from the string content of the source file of this AST.
   */
  @Nullable public String getContentHashCode() {
    return contentHashCode;
  }
}
| |
/*
* Copyright (C) 2009-2015 FBReader.ORG Limited <contact@fbreader.org>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
package org.geometerplus.android.fbreader.bookmark;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.*;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.DisplayMetrics;
import android.util.TypedValue;
import android.view.*;
import android.widget.*;
import yuku.ambilwarna.widget.AmbilWarnaPrefWidgetView;
import org.geometerplus.zlibrary.core.options.ZLStringOption;
import org.geometerplus.zlibrary.core.resources.ZLResource;
import org.geometerplus.zlibrary.core.util.ZLColor;
import org.geometerplus.zlibrary.ui.android.R;
import org.geometerplus.zlibrary.ui.android.util.ZLAndroidColorUtil;
import org.geometerplus.fbreader.book.*;
import org.geometerplus.android.fbreader.api.FBReaderIntents;
import org.geometerplus.android.fbreader.libraryService.BookCollectionShadow;
import org.geometerplus.android.util.OrientationUtil;
import org.geometerplus.android.util.ViewUtil;
public class EditBookmarkActivity extends Activity implements IBookCollection.Listener<Book> {
private final ZLResource myResource = ZLResource.resource("editBookmark");
private final BookCollectionShadow myCollection = new BookCollectionShadow();
private Bookmark myBookmark;
private StyleListAdapter myStylesAdapter;
/**
 * Adds one tab to the host, using the localized resource string for the
 * given id as its indicator label.
 */
private void addTab(TabHost host, String id, int content) {
    final TabHost.TabSpec spec = host.newTabSpec(id);
    spec.setIndicator(myResource.getResource(id).getValue());
    spec.setContent(content);
    host.addTab(spec);
}
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);

    requestWindowFeature(Window.FEATURE_NO_TITLE);
    setContentView(R.layout.edit_bookmark);

    // The bookmark to edit is passed in via the launching intent; bail out
    // if it is missing.
    myBookmark = FBReaderIntents.getBookmarkExtra(getIntent());
    if (myBookmark == null) {
        finish();
        return;
    }

    // Size the dialog-style window: at most 500x350 dip, capped at 90% of
    // the screen in each dimension.
    final DisplayMetrics dm = getResources().getDisplayMetrics();
    final int width = Math.min(
        (int)TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 500, dm),
        dm.widthPixels * 9 / 10
    );
    final int height = Math.min(
        (int)TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 350, dm),
        dm.heightPixels * 9 / 10
    );

    final TabHost tabHost = (TabHost)findViewById(R.id.edit_bookmark_tabhost);
    tabHost.setLayoutParams(new FrameLayout.LayoutParams(
        new ViewGroup.LayoutParams(width, height)
    ));
    tabHost.setup();

    addTab(tabHost, "text", R.id.edit_bookmark_content_text);
    addTab(tabHost, "style", R.id.edit_bookmark_content_style);
    addTab(tabHost, "delete", R.id.edit_bookmark_content_delete);

    // Restore the last selected tab; "delete" is never persisted so the
    // destructive tab cannot become the default.
    final ZLStringOption currentTabOption =
        new ZLStringOption("LookNFeel", "EditBookmark", "text");
    tabHost.setCurrentTabByTag(currentTabOption.getValue());
    tabHost.setOnTabChangedListener(new TabHost.OnTabChangeListener() {
        public void onTabChanged(String tag) {
            if (!"delete".equals(tag)) {
                currentTabOption.setValue(tag);
            }
        }
    });

    // "Text" tab: the save button is enabled only while the editor content
    // differs from the stored bookmark text.
    final EditText editor = (EditText)findViewById(R.id.edit_bookmark_text);
    editor.setText(myBookmark.getText());
    final int len = editor.getText().length();
    editor.setSelection(len, len);

    final Button saveTextButton = (Button)findViewById(R.id.edit_bookmark_save_text_button);
    saveTextButton.setEnabled(false);
    saveTextButton.setText(myResource.getResource("saveText").getValue());
    saveTextButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            // Saving goes through the book collection service.
            myCollection.bindToService(EditBookmarkActivity.this, new Runnable() {
                public void run() {
                    myBookmark.setText(editor.getText().toString());
                    myCollection.saveBookmark(myBookmark);
                    saveTextButton.setEnabled(false);
                }
            });
        }
    });
    editor.addTextChangedListener(new TextWatcher() {
        @Override
        public void onTextChanged(CharSequence sequence, int start, int before, int count) {
            final String originalText = myBookmark.getText();
            saveTextButton.setEnabled(!originalText.equals(editor.getText().toString()));
        }

        @Override
        public void beforeTextChanged(CharSequence s, int start, int count, int after) {
        }

        @Override
        public void afterTextChanged(Editable s) {
        }
    });

    // "Delete" tab: remove the bookmark and close the activity.
    final Button deleteButton = (Button)findViewById(R.id.edit_bookmark_delete_button);
    deleteButton.setText(myResource.getResource("deleteBookmark").getValue());
    deleteButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            myCollection.bindToService(EditBookmarkActivity.this, new Runnable() {
                public void run() {
                    myCollection.deleteBookmark(myBookmark);
                    finish();
                }
            });
        }
    });
}
@Override
protected void onStart() {
super.onStart();
myCollection.bindToService(this, new Runnable() {
public void run() {
final List<HighlightingStyle> styles = myCollection.highlightingStyles();
if (styles.isEmpty()) {
finish();
return;
}
myStylesAdapter = new StyleListAdapter(styles);
final ListView stylesList =
(ListView)findViewById(R.id.edit_bookmark_content_style);
stylesList.setAdapter(myStylesAdapter);
stylesList.setOnItemClickListener(myStylesAdapter);
myCollection.addListener(EditBookmarkActivity.this);
}
});
}
@Override
protected void onDestroy() {
myCollection.unbind();
super.onDestroy();
}
// method from IBookCollection.Listener
public void onBookEvent(BookEvent event, Book book) {
if (event == BookEvent.BookmarkStyleChanged) {
myStylesAdapter.setStyleList(myCollection.highlightingStyles());
}
}
// method from IBookCollection.Listener
public void onBuildEvent(IBookCollection.Status status) {
}
private class StyleListAdapter extends BaseAdapter implements AdapterView.OnItemClickListener {
private final List<HighlightingStyle> myStyles;
StyleListAdapter(List<HighlightingStyle> styles) {
myStyles = new ArrayList<HighlightingStyle>(styles);
}
public synchronized void setStyleList(List<HighlightingStyle> styles) {
myStyles.clear();
myStyles.addAll(styles);
notifyDataSetChanged();
}
public final synchronized int getCount() {
return myStyles.size();
}
public final synchronized HighlightingStyle getItem(int position) {
return myStyles.get(position);
}
public final long getItemId(int position) {
return position;
}
public final synchronized View getView(int position, View convertView, final ViewGroup parent) {
final View view = convertView != null
? convertView
: LayoutInflater.from(parent.getContext()).inflate(R.layout.style_item, parent, false);
final HighlightingStyle style = getItem(position);
final CheckBox checkBox = (CheckBox)ViewUtil.findView(view, R.id.style_item_checkbox);
final AmbilWarnaPrefWidgetView colorView =
(AmbilWarnaPrefWidgetView)ViewUtil.findView(view, R.id.style_item_color);
final TextView titleView = ViewUtil.findTextView(view, R.id.style_item_title);
final Button button = (Button)ViewUtil.findView(view, R.id.style_item_edit_button);
checkBox.setChecked(style.Id == myBookmark.getStyleId());
colorView.setVisibility(View.VISIBLE);
BookmarksUtil.setupColorView(colorView, style);
titleView.setText(BookmarkUtil.getStyleName(style));
button.setVisibility(View.VISIBLE);
button.setText(myResource.getResource("editStyle").getValue());
button.setOnClickListener(new Button.OnClickListener() {
@Override
public void onClick(View view) {
startActivity(
new Intent(EditBookmarkActivity.this, EditStyleActivity.class)
.putExtra(EditStyleActivity.STYLE_ID_KEY, style.Id)
);
}
});
return view;
}
public final synchronized void onItemClick(AdapterView<?> parent, View view, int position, long id) {
final HighlightingStyle style = getItem(position);
myCollection.bindToService(EditBookmarkActivity.this, new Runnable() {
public void run() {
myBookmark.setStyleId(style.Id);
myCollection.setDefaultHighlightingStyleId(style.Id);
myCollection.saveBookmark(myBookmark);
}
});
notifyDataSetChanged();
}
}
}
| |
//======================================================================================
// Copyright 5AM Solutions Inc, Yale University
//
// Distributed under the OSI-approved BSD 3-Clause License.
// See http://ncip.github.com/caarray/LICENSE.txt for details.
//======================================================================================
package caarray.client.examples.grid;
import gov.nih.nci.caarray.external.v1_0.CaArrayEntityReference;
import gov.nih.nci.caarray.external.v1_0.data.AbstractDataColumn;
import gov.nih.nci.caarray.external.v1_0.data.BooleanColumn;
import gov.nih.nci.caarray.external.v1_0.data.DataSet;
import gov.nih.nci.caarray.external.v1_0.data.DataType;
import gov.nih.nci.caarray.external.v1_0.data.DesignElement;
import gov.nih.nci.caarray.external.v1_0.data.DoubleColumn;
import gov.nih.nci.caarray.external.v1_0.data.File;
import gov.nih.nci.caarray.external.v1_0.data.FileCategory;
import gov.nih.nci.caarray.external.v1_0.data.FileType;
import gov.nih.nci.caarray.external.v1_0.data.FloatColumn;
import gov.nih.nci.caarray.external.v1_0.data.HybridizationData;
import gov.nih.nci.caarray.external.v1_0.data.IntegerColumn;
import gov.nih.nci.caarray.external.v1_0.data.LongColumn;
import gov.nih.nci.caarray.external.v1_0.data.QuantitationType;
import gov.nih.nci.caarray.external.v1_0.data.ShortColumn;
import gov.nih.nci.caarray.external.v1_0.data.StringColumn;
import gov.nih.nci.caarray.external.v1_0.experiment.Experiment;
import gov.nih.nci.caarray.external.v1_0.query.DataSetRequest;
import gov.nih.nci.caarray.external.v1_0.query.ExampleSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.ExperimentSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.FileSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.HybridizationSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.query.QuantitationTypeSearchCriteria;
import gov.nih.nci.caarray.external.v1_0.sample.Hybridization;
import gov.nih.nci.caarray.services.external.v1_0.InvalidInputException;
import gov.nih.nci.caarray.services.external.v1_0.grid.client.CaArraySvc_v1_0Client;
import gov.nih.nci.caarray.services.external.v1_0.grid.client.GridSearchApiUtils;
import gov.nih.nci.caarray.services.external.v1_0.search.SearchApiUtils;
import java.io.IOException;
import java.rmi.RemoteException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.axis.types.URI.MalformedURIException;
/**
* A client downloading data columns from hybridizations using the caArray Grid service API.
*
* @author Rashmi Srinivasa
*/
public class DownloadDataColumnsFromHybridizations {
    private static CaArraySvc_v1_0Client client = null;
    private static SearchApiUtils searchServiceHelper = null;
    /** Title of the experiment whose hybridization data will be downloaded. */
    private static final String EXPERIMENT_TITLE = BaseProperties.AFFYMETRIX_EXPERIMENT;
    /** Comma-separated names of quantitation types; used by selectQuantitationTypes() and in error messages. */
    private static final String QUANTITATION_TYPES_CSV_STRING = BaseProperties.AFFYMETRIX_CHP_QUANTITATION_TYPES;

    /**
     * Connects to the caArray grid service and runs the download. All errors
     * are reported to stdout — this is an example client, not production code.
     */
    public static void main(String[] args) {
        DownloadDataColumnsFromHybridizations downloader = new DownloadDataColumnsFromHybridizations();
        try {
            client = new CaArraySvc_v1_0Client(BaseProperties.getGridServiceUrl());
            searchServiceHelper = new GridSearchApiUtils(client);
            System.out.println("Downloading data columns from hybridizations in " + EXPERIMENT_TITLE + "...");
            downloader.download();
        } catch (Throwable t) {
            System.out.println("Error while downloading data columns from hybridizations.");
            t.printStackTrace();
        }
    }

    /**
     * Selects the experiment, its hybridizations and the derived-data
     * quantitation types, then retrieves and prints the parsed data set.
     * Prints a diagnostic to stderr and returns early at the first step that
     * yields no usable result.
     */
    private void download() throws RemoteException, MalformedURIException, IOException, Exception {
        DataSetRequest dataSetRequest = new DataSetRequest();

        // Select an experiment of interest.
        CaArrayEntityReference experimentRef = selectExperiment();
        if (experimentRef == null) {
            System.err.println("Could not find experiment with the requested title.");
            return;
        }

        // Select hybridizations in the experiment.
        Set<CaArrayEntityReference> hybridizationRefs = selectHybridizations(experimentRef);
        if (hybridizationRefs == null) {
            System.err.println("Could not find any hybridizations with CHP data in the selected experiment.");
            return;
        }
        dataSetRequest.setHybridizations(hybridizationRefs);

        // Select the quantitation types (columns) of interest: all derived-data
        // types available for one representative hybridization.
        QuantitationTypeSearchCriteria qtCrit = new QuantitationTypeSearchCriteria();
        qtCrit.setHybridization(hybridizationRefs.iterator().next());
        qtCrit.getFileCategories().add(FileCategory.DERIVED_DATA);
        QuantitationType[] qtypes = client.searchForQuantitationTypes(qtCrit);
        Set<CaArrayEntityReference> quantitationTypeRefs = new HashSet<CaArrayEntityReference>();
        for (QuantitationType qt : qtypes) {
            quantitationTypeRefs.add(qt.getReference());
        }
        System.out.println("Retrieved quant types: " + Arrays.asList(qtypes));
        // FIX: the set is constructed just above and can never be null, so the
        // original null check could never trigger; test for emptiness instead.
        if (quantitationTypeRefs.isEmpty()) {
            System.err.println("Could not find one or more of the requested quantitation types: " + QUANTITATION_TYPES_CSV_STRING);
            return;
        }
        dataSetRequest.setQuantitationTypes(quantitationTypeRefs);

        // Retrieve the parsed data set.
        DataSet dataSet = client.getDataSet(dataSetRequest);
        if (dataSet == null) {
            System.err.println("Retrieved null data set.");
            return;
        }
        printDataSet(dataSet);
    }

    /**
     * Search for experiments and select one.
     *
     * @return a reference to the first experiment whose title matches
     *         EXPERIMENT_TITLE, or null if none was found
     */
    private CaArrayEntityReference selectExperiment() throws RemoteException {
        // Search for experiment with the given title.
        ExperimentSearchCriteria experimentSearchCriteria = new ExperimentSearchCriteria();
        experimentSearchCriteria.setTitle(EXPERIMENT_TITLE);

        // ... OR Search for experiment with the given public identifier.
        // ExperimentSearchCriteria experimentSearchCriteria = new ExperimentSearchCriteria();
        // experimentSearchCriteria.setPublicIdentifier(EXPERIMENT_PUBLIC_IDENTIFIER);

        List<Experiment> experiments = (client.searchForExperiments(experimentSearchCriteria, null)).getResults();
        if (experiments == null || experiments.isEmpty()) {
            return null;
        }

        // Assuming that only one experiment was found, pick the first result.
        // This will always be true for a search by public identifier, but may not be true for a search by title.
        Experiment experiment = experiments.get(0);
        return experiment.getReference();
    }

    /**
     * Select all hybridizations in the given experiment that have CHP data.
     *
     * @return references to the experiment's hybridizations, or null if the
     *         experiment has none or they have no CHP files attached
     */
    private Set<CaArrayEntityReference> selectHybridizations(CaArrayEntityReference experimentRef) throws RemoteException, InvalidInputException {
        HybridizationSearchCriteria searchCriteria = new HybridizationSearchCriteria();
        searchCriteria.setExperiment(experimentRef);
        List<Hybridization> hybridizations = (searchServiceHelper.hybridizationsByCriteria(searchCriteria)).list();
        if (hybridizations == null || hybridizations.isEmpty()) {
            return null;
        }

        // Get references to the hybridizations.
        Set<CaArrayEntityReference> hybridizationRefs = new HashSet<CaArrayEntityReference>();
        for (Hybridization hybridization : hybridizations) {
            hybridizationRefs.add(hybridization.getReference());
        }

        // Check if the hybridizations have CHP files associated with them.
        return haveChpFiles(experimentRef, hybridizationRefs) ? hybridizationRefs : null;
    }

    /**
     * @return true if at least one CHP file in the experiment is associated
     *         with the given hybridizations
     */
    private boolean haveChpFiles(CaArrayEntityReference experimentRef, Set<CaArrayEntityReference> hybridizationRefs) throws RemoteException {
        FileSearchCriteria searchCriteria = new FileSearchCriteria();
        searchCriteria.setExperiment(experimentRef);
        CaArrayEntityReference chpFileTypeRef = getChpFileType();
        // CaArrayEntityReference chpFileTypeRef = new CaArrayEntityReference("URN:LSID:caarray.nci.nih.gov:gov.nih.nci.caarray.external.v1_0.data.FileType:AFFYMETRIX_CHP");
        searchCriteria.setExperimentGraphNodes(hybridizationRefs);
        searchCriteria.getTypes().add(chpFileTypeRef);
        List<File> dataFiles = (client.searchForFiles(searchCriteria, null)).getResults();
        // Simplified from an if/else returning boolean literals.
        return dataFiles != null && !dataFiles.isEmpty();
    }

    /**
     * Looks up the AFFYMETRIX_CHP file type via an example search.
     *
     * @return a reference to the CHP file type
     */
    private CaArrayEntityReference getChpFileType() throws RemoteException {
        ExampleSearchCriteria<FileType> criteria = new ExampleSearchCriteria<FileType>();
        FileType exampleFileType = new FileType();
        exampleFileType.setName("AFFYMETRIX_CHP");
        criteria.setExample(exampleFileType);
        List<FileType> fileTypes = (client.searchByExample(criteria, null)).getResults();
        FileType chpFileType = fileTypes.iterator().next();
        return chpFileType.getReference();
    }

    /**
     * Looks up a reference for each quantitation type named in
     * QUANTITATION_TYPES_CSV_STRING via example searches.
     * NOTE(review): not called by the main flow above, which instead discovers
     * types per hybridization; kept as an alternative example.
     *
     * @return the references, or null if any named type cannot be found
     */
    private Set<CaArrayEntityReference> selectQuantitationTypes() throws RemoteException {
        ExampleSearchCriteria<QuantitationType> criteria = new ExampleSearchCriteria<QuantitationType>();
        Set<CaArrayEntityReference> quantitationTypeRefs = new HashSet<CaArrayEntityReference>();
        String[] quantitationTypeNames = QUANTITATION_TYPES_CSV_STRING.split(",");
        for (String quantitationTypeName : quantitationTypeNames) {
            QuantitationType exampleQuantitationType = new QuantitationType();
            exampleQuantitationType.setName(quantitationTypeName);
            criteria.setExample(exampleQuantitationType);
            List<QuantitationType> quantitationTypes = (client.searchByExample(criteria, null)).getResults();
            if (quantitationTypes == null || quantitationTypes.isEmpty()) {
                return null;
            }
            QuantitationType quantitationType = quantitationTypes.iterator().next();
            quantitationTypeRefs.add(quantitationType.getReference());
        }
        return quantitationTypeRefs;
    }

    /**
     * Prints row headers, then each column's name, type and value count for
     * the first hybridization in the data set.
     */
    private void printDataSet(DataSet dataSet) {
        // Ordered list of row headers (probe sets)
        List<DesignElement> probeSets = dataSet.getDesignElements();
        printProbeSets(probeSets);

        // Ordered list of column headers (quantitation types like Signal, Log Ratio etc.)
        List<QuantitationType> quantitationTypes = dataSet.getQuantitationTypes();

        // Data for the first hybridization (the only hybridization, in our case)
        HybridizationData data = dataSet.getDatas().get(0);

        // Ordered list of columns with values (columns are in the same order as column headers/quantitation types)
        List<AbstractDataColumn> dataColumns = data.getDataColumns();
        // FIX: typed iterator instead of a raw Iterator plus an explicit cast.
        Iterator<AbstractDataColumn> columnIterator = dataColumns.iterator();
        for (QuantitationType quantitationType : quantitationTypes) {
            System.out.println("Column = " + quantitationType.getName() + "; Data type = " + quantitationType.getDataType());
            AbstractDataColumn dataColumn = columnIterator.next();
            // Ordered list of values in the column (values are in the same order as row headers/probe sets)
            printColumnValues(quantitationType, dataColumn);
        }
    }

    /** Prints the probe set names on a single line. */
    private void printProbeSets(List<DesignElement> probeSets) {
        System.out.print("Probe Sets: ");
        for (DesignElement probeSet : probeSets) {
            System.out.print(probeSet.getName() + " ");
        }
        System.out.println();
    }

    /**
     * Reports how many values a column holds, casting it to the concrete
     * column class indicated by its quantitation type's data type.
     */
    private void printColumnValues(QuantitationType quantitationType, AbstractDataColumn dataColumn) {
        // Extract individual values in the column according to its type.
        DataType columnDataType = quantitationType.getDataType();
        switch (columnDataType) {
            case BOOLEAN:
                boolean[] booleanValues = ((BooleanColumn) dataColumn).getValues();
                System.out.println("Retrieved " + booleanValues.length + " boolean values.");
                break;
            case INTEGER:
                int[] intValues = ((IntegerColumn) dataColumn).getValues();
                System.out.println("Retrieved " + intValues.length + " int values.");
                break;
            case DOUBLE:
                double[] doubleValues = ((DoubleColumn) dataColumn).getValues();
                System.out.println("Retrieved " + doubleValues.length + " double values.");
                break;
            case FLOAT:
                float[] floatValues = ((FloatColumn) dataColumn).getValues();
                System.out.println("Retrieved " + floatValues.length + " float values.");
                break;
            case SHORT:
                short[] shortValues = ((ShortColumn) dataColumn).getValues();
                System.out.println("Retrieved " + shortValues.length + " short values.");
                break;
            case LONG:
                long[] longValues = ((LongColumn) dataColumn).getValues();
                System.out.println("Retrieved " + longValues.length + " long values.");
                break;
            case STRING:
                String[] stringValues = ((StringColumn) dataColumn).getValues();
                System.out.println("Retrieved " + stringValues.length + " String values.");
                break;
            default:
                // Should never get here: all DataType constants are handled above.
                break;
        }
    }
}
| |
package com.f8full.sample.directionsonmapv2withretrofit;
import android.app.Fragment;
import android.graphics.Color;
import android.location.Location;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import com.f8full.sample.directionsonmapv2withretrofit.api.GoogleMapsDirectionsApi;
import com.f8full.sample.directionsonmapv2withretrofit.api.model.GuidanceAnswerRoot;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationRequest;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapFragment;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.location.LocationListener;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.PolylineOptions;
import com.google.maps.android.PolyUtil;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import retrofit.Call;
import retrofit.Callback;
import retrofit.Response;
/**
 * Map fragment that tracks the user's location and, on a long press, requests
 * bicycling directions to the pressed point and draws the route as a polyline.
 */
public class DirectionsMapFragment extends Fragment implements
        GoogleApiClient.ConnectionCallbacks,
        GoogleApiClient.OnConnectionFailedListener,
        LocationListener,
        GoogleMap.OnMapLongClickListener,
        OnMapReadyCallback {

    private GoogleMap mMapInterface = null;
    private GoogleApiClient mGoogleApiClient;
    // Last known user location; null until the first fix arrives (see onLocationChanged).
    private LatLng mUserLatLng = null;

    // These settings are the same as the settings for the map. They will in fact give you updates
    // at the maximal rates currently possible.
    private static final LocationRequest mREQUEST = LocationRequest.create()
            .setInterval(5000)          // 5 seconds
            .setFastestInterval(16)     // 16ms = 60fps
            .setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY);

    private Button mButtonGo;
    private TextView mTextDistance;
    private TextView mTextDuration;
    private ProgressBar mProgressBar;

    public DirectionsMapFragment() {
    }

    // Clears any drawn route, the distance/duration labels, and disables the
    // "go" button. FIX: this four-line sequence was duplicated verbatim in the
    // "go" button listener and in onMapLongClick(); extracted once here.
    private void clearRoute() {
        mMapInterface.clear();
        mTextDistance.setText("");
        mTextDuration.setText("");
        mButtonGo.setEnabled(false);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View inflatedView = inflater.inflate(R.layout.fragment_map, container, false);

        mButtonGo = (Button) inflatedView.findViewById(R.id.button);
        mButtonGo.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                clearRoute();
            }
        });

        mTextDistance = (TextView) inflatedView.findViewById(R.id.trip_distance);
        mTextDuration = (TextView) inflatedView.findViewById(R.id.trip_time);
        mProgressBar = (ProgressBar) inflatedView.findViewById(R.id.progressBar);
        mTextDistance.setTextColor(Color.LTGRAY);
        mTextDuration.setTextColor(Color.LTGRAY);

        // Request the map asynchronously only once; onMapReady() stores it.
        if (mMapInterface == null) {
            ((MapFragment) getActivity().getFragmentManager().findFragmentById(R.id.map)).getMapAsync(this);
        }

        mGoogleApiClient = new GoogleApiClient.Builder(getActivity().getApplicationContext())
                .addApi(LocationServices.API)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .build();

        return inflatedView;
    }

    @Override
    public void onResume() {
        super.onResume();
        // Connect/disconnect with the fragment lifecycle to stop location
        // updates while the UI is not visible.
        mGoogleApiClient.connect();
    }

    @Override
    public void onPause() {
        super.onPause();
        mGoogleApiClient.disconnect();
    }

    @Override
    public void onMapReady(GoogleMap googleMap) {
        mMapInterface = googleMap;
        // NOTE(review): assumes the location permission has already been
        // granted (pre-M style); confirm against the app's target SDK.
        mMapInterface.setMyLocationEnabled(true);
    }

    @Override
    public void onConnected(Bundle bundle) {
        LocationServices.FusedLocationApi.requestLocationUpdates(
                mGoogleApiClient,
                mREQUEST,
                this);  // LocationListener
    }

    @Override
    public void onConnectionSuspended(int i) {
        //Nothing
    }

    @Override
    public void onLocationChanged(Location location) {
        LatLng newUserLatLng = new LatLng(location.getLatitude(), location.getLongitude());
        // First fix: enable destination picking, center the camera and tell
        // the user how to set a destination.
        if (mUserLatLng == null) {
            mMapInterface.setOnMapLongClickListener(this);
            mMapInterface.animateCamera(CameraUpdateFactory.newLatLngZoom(newUserLatLng, 15));
            Toast.makeText(getActivity().getApplicationContext(), "Long press to set destination", Toast.LENGTH_LONG).show();
        }
        mUserLatLng = newUserLatLng;
    }

    @Override
    public void onConnectionFailed(ConnectionResult connectionResult) {
    }

    /**
     * Requests bicycling directions from the user's location to the pressed
     * point and draws the route. Ignored while a request is already in flight
     * (progress bar visible) or before the first location fix.
     */
    @Override
    public void onMapLongClick(final LatLng latLng) {
        if (mUserLatLng != null && mProgressBar.getVisibility() != View.VISIBLE) {
            clearRoute();

            Map<String, String> urlParams = new HashMap<>();
            urlParams.put("origin", mUserLatLng.latitude + "," + mUserLatLng.longitude);
            urlParams.put("destination", latLng.latitude + "," + latLng.longitude);
            urlParams.put("sensor", "false");
            urlParams.put("mode", "bicycling");

            GoogleMapsDirectionsApi api = ((RootApplication) getActivity().getApplication()).getGoogleMapsDirectionsApi();
            final Call<GuidanceAnswerRoot> call = api.getDirections(urlParams);
            mProgressBar.setVisibility(View.VISIBLE);
            call.enqueue(new Callback<GuidanceAnswerRoot>() {
                @Override
                public void onResponse(Response<GuidanceAnswerRoot> response) {
                    // Get result Repo from response.body()
                    mProgressBar.setVisibility(View.GONE);
                    GuidanceAnswerRoot answer = response.body();
                    if (!answer.routes.isEmpty()) {
                        // Decode the overview polyline and draw it on the map.
                        String encodedPoints = answer.routes.get(0).overview_polyline.points;
                        List<LatLng> latLngs = PolyUtil.decode(encodedPoints);
                        //Log.d("MYTAG", "origin " + mUserLatLng.latitude + "," + mUserLatLng.longitude + " _ " + "destination " + latLng.latitude + "," + latLng.longitude );
                        //Add the polyline to map
                        mMapInterface.addPolyline(new PolylineOptions()
                                .addAll(latLngs)
                                .width(5)
                                .color(Color.rgb(255, 64, 129)));
                        //See BudgetTrackDetailsFragment
                        //display summaring Toast
                        if (answer.routes.get(0).legs.size() == 1) //One leg trip, directly use data as string in UI
                        {
                            mTextDistance.setText(answer.routes.get(0).legs.get(0).distance.text);
                            mTextDuration.setText(answer.routes.get(0).legs.get(0).duration.text);
                            //Toast.makeText(getActivity().getApplicationContext(), answer.routes.get(0).legs.get(0).distance.text + " " + answer.routes.get(0).legs.get(0).duration.text, Toast.LENGTH_LONG).show();
                            mButtonGo.setEnabled(true);
                        }
                    } else {
                        Toast.makeText(getActivity().getApplicationContext(), "No route!", Toast.LENGTH_SHORT).show();
                    }
                }

                @Override
                public void onFailure(Throwable t) {
                    Toast.makeText(getActivity().getApplicationContext(), "Couldn't retrieve directions", Toast.LENGTH_SHORT).show();
                    mProgressBar.setVisibility(View.GONE);
                }
            });
        }
        //https://maps.googleapis.com/maps/api/directions/json?origin=Chicago,IL&destination=Los%20Angeles,CA&sensor=false
    }
}
| |
/*
* Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 1999-2002,2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.dom;
import org.w3c.dom.Entity;
import org.w3c.dom.Node;
import org.w3c.dom.DOMException;
/**
* Entity nodes hold the reference data for an XML Entity -- either
* parsed or unparsed. The nodeName (inherited from Node) will contain
* the name (if any) of the Entity. Its data will be contained in the
* Entity's children, in exactly the structure which an
* EntityReference to this name will present within the document's
* body.
* <P>
* Note that this object models the actual entity, _not_ the entity
* declaration or the entity reference.
* <P>
* An XML processor may choose to completely expand entities before
* the structure model is passed to the DOM; in this case, there will
* be no EntityReferences in the DOM tree.
* <P>
* Quoting the 10/01 DOM Proposal,
* <BLOCKQUOTE>
* "The DOM Level 1 does not support editing Entity nodes; if a user
* wants to make changes to the contents of an Entity, every related
* EntityReference node has to be replaced in the structure model by
* a clone of the Entity's contents, and then the desired changes
* must be made to each of those clones instead. All the
* descendants of an Entity node are readonly."
* </BLOCKQUOTE>
 * I'm interpreting this as: It is the parser's responsibility to call
* the non-DOM operation setReadOnly(true,true) after it constructs
* the Entity. Since the DOM explicitly decided not to deal with this,
* _any_ answer will involve a non-DOM operation, and this is the
* simplest solution.
*
* @xerces.internal
*
* @author Elena Litani, IBM
* @since PR-DOM-Level-1-19980818.
*/
public class EntityImpl
    extends ParentNode
    implements Entity {

    //
    // Constants
    //

    /** Serialization version. */
    static final long serialVersionUID = -3575760943444303423L;

    //
    // Data
    //

    /** Entity name. */
    protected String name;

    /** Public identifier. */
    protected String publicId;

    /** System identifier. */
    protected String systemId;

    /** Encoding (from the text declaration of an external parsed entity). */
    protected String encoding;

    /** Input Encoding (the encoding actually used when the entity was parsed). */
    protected String inputEncoding;

    /** Version (from the text declaration of an external parsed entity). */
    protected String version;

    /** Notation name; non-null only for unparsed entities. */
    protected String notationName;

    /** base uri */
    protected String baseURI;

    //
    // Constructors
    //

    /** Factory constructor. Entities are read-only per the DOM spec. */
    public EntityImpl(CoreDocumentImpl ownerDoc, String name) {
        super(ownerDoc);
        this.name = name;
        isReadOnly(true);
    }

    //
    // Node methods
    //

    /**
     * A short integer indicating what type of node this is. The named
     * constants for this value are defined in the org.w3c.dom.Node interface.
     */
    public short getNodeType() {
        return Node.ENTITY_NODE;
    }

    /**
     * Returns the entity name
     */
    public String getNodeName() {
        if (needsSyncData()) {
            synchronizeData();
        }
        return name;
    }

    /**
     * Sets the node value. Entities are read-only, so this always fails
     * when error checking is enabled.
     * @throws DOMException(NO_MODIFICATION_ALLOWED_ERR)
     */
    public void setNodeValue(String x)
        throws DOMException {
        if (ownerDocument.errorChecking && isReadOnly()) {
            String msg = DOMMessageFormatter.formatMessage(DOMMessageFormatter.DOM_DOMAIN, "NO_MODIFICATION_ALLOWED_ERR", null);
            throw new DOMException(DOMException.NO_MODIFICATION_ALLOWED_ERR, msg);
        }
    }

    /**
     * The namespace prefix of this node
     * @exception DOMException
     *   <br>NO_MODIFICATION_ALLOWED_ERR: Raised if this node is readonly.
     */
    public void setPrefix(String prefix)
        throws DOMException
    {
        if (ownerDocument.errorChecking && isReadOnly()) {
            throw new DOMException(DOMException.NO_MODIFICATION_ALLOWED_ERR,
                DOMMessageFormatter.formatMessage(DOMMessageFormatter.DOM_DOMAIN,
                    "NO_MODIFICATION_ALLOWED_ERR", null));
        }
    }

    /** Clone node. The clone is marked read-only, matching the original. */
    public Node cloneNode(boolean deep) {
        EntityImpl newentity = (EntityImpl)super.cloneNode(deep);
        newentity.setReadOnly(true, deep);
        return newentity;
    }

    //
    // Entity methods
    //

    /**
     * The public identifier associated with the entity. If not specified,
     * this will be null.
     */
    public String getPublicId() {
        if (needsSyncData()) {
            synchronizeData();
        }
        return publicId;
    } // getPublicId():String

    /**
     * The system identifier associated with the entity. If not specified,
     * this will be null.
     */
    public String getSystemId() {
        if (needsSyncData()) {
            synchronizeData();
        }
        return systemId;
    } // getSystemId():String

    /**
     * DOM Level 3 WD - experimental
     * the version number of this entity, when it is an external parsed entity.
     */
    public String getXmlVersion() {
        if (needsSyncData()) {
            synchronizeData();
        }
        return version;
    } // getXmlVersion():String

    /**
     * DOM Level 3 WD - experimental
     * the encoding of this entity, when it is an external parsed entity.
     */
    public String getXmlEncoding() {
        if (needsSyncData()) {
            synchronizeData();
        }
        return encoding;
    } // getXmlEncoding():String

    /**
     * Unparsed entities -- which contain non-XML data -- have a
     * "notation name" which tells applications how to deal with them.
     * Parsed entities, which <em>are</em> in XML format, don't need this and
     * set it to null.
     */
    public String getNotationName() {
        if (needsSyncData()) {
            synchronizeData();
        }
        return notationName;
    } // getNotationName():String

    //
    // Public methods
    //

    /**
     * DOM Level 2: The public identifier associated with the entity. If not specified,
     * this will be null. */
    public void setPublicId(String id) {
        if (needsSyncData()) {
            synchronizeData();
        }
        publicId = id;
    } // setPublicId(String)

    /**
     * NON-DOM
     * encoding - An attribute specifying, as part of the text declaration,
     * the encoding of this entity, when it is an external parsed entity.
     * This is null otherwise
     *
     */
    public void setXmlEncoding(String value) {
        if (needsSyncData()) {
            synchronizeData();
        }
        encoding = value;
    } // setXmlEncoding(String)

    /**
     * An attribute specifying the encoding used for this entity at the time
     * of parsing, when it is an external parsed entity. This is
     * <code>null</code> if it is an entity from the internal subset or if it
     * is not known.
     * @since DOM Level 3
     */
    public String getInputEncoding(){
        if (needsSyncData()) {
            synchronizeData();
        }
        return inputEncoding;
    }

    /**
     * NON-DOM, used to set the input encoding.
     */
    public void setInputEncoding(String inputEncoding){
        if (needsSyncData()) {
            synchronizeData();
        }
        this.inputEncoding = inputEncoding;
    }

    /**
     * NON-DOM
     * version - An attribute specifying, as part of the text declaration,
     * the version number of this entity, when it is an external parsed entity.
     * This is null otherwise
     */
    public void setXmlVersion(String value) {
        if (needsSyncData()) {
            synchronizeData();
        }
        version = value;
    } // setXmlVersion(String)

    /**
     * DOM Level 2: The system identifier associated with the entity. If not
     * specified, this will be null.
     */
    public void setSystemId(String id) {
        if (needsSyncData()) {
            synchronizeData();
        }
        systemId = id;
    } // setSystemId(String)

    /**
     * DOM Level 2: Unparsed entities -- which contain non-XML data -- have a
     * "notation name" which tells applications how to deal with them.
     * Parsed entities, which <em>are</em> in XML format, don't need this and
     * set it to null.
     */
    public void setNotationName(String name) {
        if (needsSyncData()) {
            synchronizeData();
        }
        notationName = name;
    } // setNotationName(String)

    /**
     * Returns the absolute base URI of this node or null if the implementation
     * wasn't able to obtain an absolute URI. Note: If the URI is malformed, a
     * null is returned.
     *
     * @return The absolute base URI of this node or null.
     * @since DOM Level 3
     */
    public String getBaseURI() {
        if (needsSyncData()) {
            synchronizeData();
        }
        // Fall back to the owning document's base URI when none was set here.
        return (baseURI!=null)?baseURI:((CoreDocumentImpl)getOwnerDocument()).getBaseURI();
    }

    /** NON-DOM: set base uri */
    public void setBaseURI(String uri){
        if (needsSyncData()) {
            synchronizeData();
        }
        baseURI = uri;
    }

} // class EntityImpl
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* LaunchPermissionListType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
 * LaunchPermissionListType bean class.
 *
 * Axis2 ADB (Axis2 Data Binding) bean generated for the EC2 schema type
 * {@code LaunchPermissionListType}: a list wrapper holding zero or more
 * {@code <item>} child elements, each a
 * {@link com.amazon.ec2.LaunchPermissionItemType}.
 * NOTE(review): auto-generated code (Axis2 1.5.1 WSDL tooling) — regenerate
 * rather than hand-edit where possible.
 */
public class LaunchPermissionListType
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = LaunchPermissionListType
Namespace URI = http://ec2.amazonaws.com/doc/2009-10-31/
Namespace Prefix = ns1
*/
/**
 * Returns the canonical "ns1" prefix for the EC2 namespace; any other
 * namespace gets a freshly generated unique prefix.
 */
private static java.lang.String generatePrefix(java.lang.String namespace) {
if(namespace.equals("http://ec2.amazonaws.com/doc/2009-10-31/")){
return "ns1";
}
return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
 * field for Item
 * This was an Array! (maps the repeating {@code <item>} element)
 */
protected com.amazon.ec2.LaunchPermissionItemType[] localItem ;
/* This tracker boolean will be used to detect whether the user called the set method
 * for this attribute. It will be used to determine whether to include this field
 * in the serialized XML
 */
protected boolean localItemTracker = false ;
/**
 * Auto generated getter method
 * @return com.amazon.ec2.LaunchPermissionItemType[]
 */
public com.amazon.ec2.LaunchPermissionItemType[] getItem(){
return localItem;
}
/**
 * validate the array for Item
 * (intentionally empty: the schema places no extra constraints on this array;
 * the hook is kept so generated setters have a uniform shape)
 */
protected void validateItem(com.amazon.ec2.LaunchPermissionItemType[] param){
}
/**
 * Auto generated setter method
 * @param param Item; passing null clears the field and suppresses it from
 *              the serialized XML
 */
public void setItem(com.amazon.ec2.LaunchPermissionItemType[] param){
validateItem(param);
if (param != null){
//update the setting tracker
localItemTracker = true;
} else {
localItemTracker = false;
}
this.localItem=param;
}
/**
 * Auto generated add method for the array for convenience
 * Appends one element by copying the existing array through a List.
 * @param param com.amazon.ec2.LaunchPermissionItemType
 */
public void addItem(com.amazon.ec2.LaunchPermissionItemType param){
if (localItem == null){
localItem = new com.amazon.ec2.LaunchPermissionItemType[]{};
}
//update the setting tracker
localItemTracker = true;
java.util.List list =
org.apache.axis2.databinding.utils.ConverterUtil.toList(localItem);
list.add(param);
this.localItem =
(com.amazon.ec2.LaunchPermissionItemType[])list.toArray(
new com.amazon.ec2.LaunchPermissionItemType[list.size()]);
}
/**
 * isReaderMTOMAware
 * @return true if the reader supports MTOM
 */
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
boolean isReaderMTOMAware = false;
try{
isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
}catch(java.lang.IllegalArgumentException e){
// reader does not recognize the property -> treat as not MTOM-aware
isReaderMTOMAware = false;
}
return isReaderMTOMAware;
}
/**
 * Wraps this bean in a lazily-serialized OMElement; actual XML is produced
 * only when the element is consumed.
 * @param parentQName
 * @param factory
 * @return org.apache.axiom.om.OMElement
 */
public org.apache.axiom.om.OMElement getOMElement (
final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
org.apache.axiom.om.OMDataSource dataSource =
new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
LaunchPermissionListType.this.serialize(parentQName,factory,xmlWriter);
}
};
return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
parentQName,factory,dataSource);
}
/** Serializes this bean without an explicit xsi:type attribute. */
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
serialize(parentQName,factory,xmlWriter,false);
}
/**
 * Serializes this bean as XML under {@code parentQName}.
 * @param serializeType when true, an {@code xsi:type} attribute naming this
 *                      schema type is written on the start element
 * @throws org.apache.axis2.databinding.ADBException if the item array is
 *         null while its tracker says it was set
 */
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
boolean serializeType)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
java.lang.String prefix = null;
java.lang.String namespace = null;
prefix = parentQName.getPrefix();
namespace = parentQName.getNamespaceURI();
if ((namespace != null) && (namespace.trim().length() > 0)) {
// Reuse a prefix already bound on the writer; otherwise bind one here.
java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
if (writerPrefix != null) {
xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
} else {
if (prefix == null) {
prefix = generatePrefix(namespace);
}
xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
} else {
// Unqualified parent element.
xmlWriter.writeStartElement(parentQName.getLocalPart());
}
if (serializeType){
// Emit xsi:type="ns:LaunchPermissionListType" (prefix-qualified when possible).
java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2009-10-31/");
if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
namespacePrefix+":LaunchPermissionListType",
xmlWriter);
} else {
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
"LaunchPermissionListType",
xmlWriter);
}
}
if (localItemTracker){
if (localItem!=null){
// One <item> child per array entry; null entries are skipped (minOccurs=0).
for (int i = 0;i < localItem.length;i++){
if (localItem[i] != null){
localItem[i].serialize(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","item"),
factory,xmlWriter);
} else {
// we don't have to do any thing since minOccures is zero
}
}
} else {
throw new org.apache.axis2.databinding.ADBException("item cannot be null!!");
}
}
xmlWriter.writeEndElement();
}
/**
 * Util method to write an attribute with the ns prefix
 */
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (xmlWriter.getPrefix(namespace) == null) {
// Bind the prefix first so the attribute's namespace is declared.
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
 * Util method to write an attribute without the ns prefix
 */
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (namespace.equals(""))
{
xmlWriter.writeAttribute(attName,attValue);
}
else
{
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace,attName,attValue);
}
}
/**
 * Util method to write a QName-valued attribute; the QName is rendered as
 * "prefix:localPart" (or bare localPart for the default namespace).
 */
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String attributeNamespace = qname.getNamespaceURI();
java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
if (attributePrefix == null) {
attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
}
java.lang.String attributeValue;
if (attributePrefix.trim().length() > 0) {
attributeValue = attributePrefix + ":" + qname.getLocalPart();
} else {
attributeValue = qname.getLocalPart();
}
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName, attributeValue);
} else {
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace, attName, attributeValue);
}
}
/**
 * method to handle Qnames written as element text
 */
private void writeQName(javax.xml.namespace.QName qname,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String namespaceURI = qname.getNamespaceURI();
if (namespaceURI != null) {
java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
if (prefix == null) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
} else {
// i.e this is the default namespace
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
} else {
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
}
/** Writes a space-separated list of QNames as element text. */
private void writeQNames(javax.xml.namespace.QName[] qnames,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
if (qnames != null) {
// we have to store this data until last moment since it is not possible to write any
// namespace data after writing the charactor data
java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
java.lang.String namespaceURI = null;
java.lang.String prefix = null;
for (int i = 0; i < qnames.length; i++) {
if (i > 0) {
stringToWrite.append(" ");
}
namespaceURI = qnames[i].getNamespaceURI();
if (namespaceURI != null) {
prefix = xmlWriter.getPrefix(namespaceURI);
if ((prefix == null) || (prefix.length() == 0)) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
}
xmlWriter.writeCharacters(stringToWrite.toString());
}
}
/**
 * Register a namespace prefix on the writer, generating one that does not
 * collide with any prefix already in the namespace context.
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
java.lang.String prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
return prefix;
}
/**
 * databinding method to get an XML representation of this object
 * (element QName / value pairs fed to an ADB pull parser)
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
throws org.apache.axis2.databinding.ADBException{
java.util.ArrayList elementList = new java.util.ArrayList();
java.util.ArrayList attribList = new java.util.ArrayList();
if (localItemTracker){
if (localItem!=null) {
for (int i = 0;i < localItem.length;i++){
if (localItem[i] != null){
elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/",
"item"));
elementList.add(localItem[i]);
} else {
// nothing to do: null entries are legal (minOccurs=0)
}
}
} else {
throw new org.apache.axis2.databinding.ADBException("item cannot be null!!");
}
}
return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}
/**
 * Factory class that keeps the parse method
 */
public static class Factory{
/**
 * static method to create the object
 * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
 * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
 * Postcondition: If this object is an element, the reader is positioned at its end element
 * If this object is a complex type, the reader is positioned at the end element of its outer element
 */
public static LaunchPermissionListType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
LaunchPermissionListType object =
new LaunchPermissionListType();
int event;
java.lang.String nillableValue = null;
java.lang.String prefix ="";
java.lang.String namespaceuri ="";
try {
// Skip any ignorable events until positioned on an element boundary.
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
// If an xsi:type names a *different* schema type, delegate to the
// ExtensionMapper so the correct subtype bean is built instead.
if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
"type");
if (fullTypeName!=null){
java.lang.String nsPrefix = null;
if (fullTypeName.indexOf(":") > -1){
nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
}
nsPrefix = nsPrefix==null?"":nsPrefix;
java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
if (!"LaunchPermissionListType".equals(type)){
//find namespace for the prefix
java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
return (LaunchPermissionListType)com.amazon.ec2.ExtensionMapper.getTypeObject(
nsUri,type,reader);
}
}
}
// Note all attributes that were handled. Used to differ normal attributes
// from anyAttributes.
java.util.Vector handledAttributes = new java.util.Vector();
reader.next();
java.util.ArrayList list1 = new java.util.ArrayList();
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","item").equals(reader.getName())){
// Process the array and step past its final element's end.
list1.add(com.amazon.ec2.LaunchPermissionItemType.Factory.parse(reader));
//loop until we find a start element that is not part of this array
boolean loopDone1 = false;
while(!loopDone1){
// We should be at the end element, but make sure
while (!reader.isEndElement())
reader.next();
// Step out of this element
reader.next();
// Step to next element event.
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
if (reader.isEndElement()){
//two continuous end elements means we are exiting the xml structure
loopDone1 = true;
} else {
if (new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","item").equals(reader.getName())){
list1.add(com.amazon.ec2.LaunchPermissionItemType.Factory.parse(reader));
}else{
loopDone1 = true;
}
}
}
// call the converter utility to convert and set the array
object.setItem((com.amazon.ec2.LaunchPermissionItemType[])
org.apache.axis2.databinding.utils.ConverterUtil.convertToArray(
com.amazon.ec2.LaunchPermissionItemType.class,
list1));
} // End of if for expected property start element
else {
// No <item> children present; the array stays unset (minOccurs=0).
}
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
if (reader.isStartElement())
// A start element we are not expecting indicates a trailing invalid property
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
} catch (javax.xml.stream.XMLStreamException e) {
throw new java.lang.Exception(e);
}
return object;
}
}//end of factory class
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.